INSTRUCTION (string, lengths 1 to 8.43k)
RESPONSE (string, lengths 75 to 104k)
List all the check groups that pylint knows about
def cb_list_groups(self, *args, **kwargs): """List all the check groups that pylint knows about These should be useful to know what check groups someone can disable or enable. """ for check in self.linter.get_checker_names(): print(check) sys.exit(0)
Wrap the text on the given line length.
def normalize_text(text, line_len=80, indent=""): """Wrap the text on the given line length.""" return "\n".join( textwrap.wrap( text, width=line_len, initial_indent=indent, subsequent_indent=indent ) )
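A minimal usage sketch, assuming the normalize_text helper above is in scope (it only wraps the standard library's textwrap.wrap); the sample text and width are illustrative:

```python
# Assuming the normalize_text helper defined above is in scope.
long_help = (
    "Maximum number of characters on a single line, counted after "
    "tabs have been expanded and trailing whitespace removed."
)
print(normalize_text(long_help, line_len=40, indent="# "))
# Every emitted line starts with "# " and fits in 40 columns; _ini_format below
# uses the same helper with line_len=79 to render option help in config files.
```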
return the module name and the frame id in the module
def get_module_and_frameid(node): """return the module name and the frame id in the module""" frame = node.frame() module, obj = "", [] while frame: if isinstance(frame, Module): module = frame.name else: obj.append(getattr(frame, "name", "<lambda>")) try: frame = frame.parent.frame() except AttributeError: frame = None obj.reverse() return module, ".".join(obj)
return decoded line from encoding or decode with default encoding
def safe_decode(line, encoding, *args, **kwargs): """return decoded line from encoding or decode with default encoding""" try: return line.decode(encoding or sys.getdefaultencoding(), *args, **kwargs) except LookupError: return line.decode(sys.getdefaultencoding(), *args, **kwargs)
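A quick sketch of the fallback path, assuming safe_decode above is in scope; the second call uses a deliberately unknown codec name to trigger the LookupError branch:

```python
# Assuming the safe_decode helper defined above is in scope.
print(safe_decode(b"caf\xc3\xa9", "utf-8"))          # decoded with the requested codec
print(safe_decode(b"plain ascii", "no-such-codec"))  # unknown codec: falls back to
                                                     # sys.getdefaultencoding()
```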
Determines if the basename is matched in a regex blacklist
def _basename_in_blacklist_re(base_name, black_list_re): """Determines if the basename is matched in a regex blacklist :param str base_name: The basename of the file :param list black_list_re: A collection of regex patterns to match against. Successful matches are blacklisted. :returns: `True` if the basename is blacklisted, `False` otherwise. :rtype: bool """ for file_pattern in black_list_re: if file_pattern.match(base_name): return True return False
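A small sketch, assuming _basename_in_blacklist_re above is in scope; the patterns must be pre-compiled, which is the shape expand_modules below passes in as black_list_re:

```python
import re

# Assuming the _basename_in_blacklist_re helper defined above is in scope.
ignore_patterns = [re.compile(r"^test_"), re.compile(r"^\.#")]

print(_basename_in_blacklist_re("test_foo.py", ignore_patterns))  # True: matches ^test_
print(_basename_in_blacklist_re("foo.py", ignore_patterns))       # False: no pattern matches
```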
take a list of files/modules/packages and return the list of tuples (file, module name) which have to be actually checked
def expand_modules(files_or_modules, black_list, black_list_re): """take a list of files/modules/packages and return the list of tuple (file, module name) which have to be actually checked """ result = [] errors = [] for something in files_or_modules: if basename(something) in black_list: continue if _basename_in_blacklist_re(basename(something), black_list_re): continue if exists(something): # this is a file or a directory try: modname = ".".join(modutils.modpath_from_file(something)) except ImportError: modname = splitext(basename(something))[0] if isdir(something): filepath = join(something, "__init__.py") else: filepath = something else: # suppose it's a module or package modname = something try: filepath = modutils.file_from_modpath(modname.split(".")) if filepath is None: continue except (ImportError, SyntaxError) as ex: # FIXME p3k : the SyntaxError is a Python bug and should be # removed as soon as possible http://bugs.python.org/issue10588 errors.append({"key": "fatal", "mod": modname, "ex": ex}) continue filepath = normpath(filepath) modparts = (modname or something).split(".") try: spec = modutils.file_info_from_modpath(modparts, path=sys.path) except ImportError: # Might not be acceptable, don't crash. is_namespace = False is_directory = isdir(something) else: is_namespace = modutils.is_namespace(spec) is_directory = modutils.is_directory(spec) if not is_namespace: result.append( { "path": filepath, "name": modname, "isarg": True, "basepath": filepath, "basename": modname, } ) has_init = ( not (modname.endswith(".__init__") or modname == "__init__") and basename(filepath) == "__init__.py" ) if has_init or is_namespace or is_directory: for subfilepath in modutils.get_module_files( dirname(filepath), black_list, list_all=is_namespace ): if filepath == subfilepath: continue if _basename_in_blacklist_re(basename(subfilepath), black_list_re): continue modpath = _modpath_from_file(subfilepath, is_namespace) submodname = ".".join(modpath) result.append( { "path": subfilepath, "name": submodname, "isarg": False, "basepath": filepath, "basename": modname, } ) return result, errors
load all modules and packages in the given directory, looking for a 'register' function in each one, used to register pylint checkers
def register_plugins(linter, directory): """load all module and package in the given directory, looking for a 'register' function in each one, used to register pylint checkers """ imported = {} for filename in listdir(directory): base, extension = splitext(filename) if base in imported or base == "__pycache__": continue if ( extension in PY_EXTS and base != "__init__" or (not extension and isdir(join(directory, base))) ): try: module = modutils.load_module_from_file(join(directory, filename)) except ValueError: # empty module name (usually emacs auto-save files) continue except ImportError as exc: print( "Problem importing module %s: %s" % (filename, exc), file=sys.stderr ) else: if hasattr(module, "register"): module.register(linter) imported[base] = 1
Retrieve an option defined by the given *checker* or by all known option providers.
def get_global_option(checker, option, default=None): """ Retrieve an option defined by the given *checker* or by all known option providers. It will look in the list of all options providers until the given *option* will be found. If the option wasn't found, the *default* value will be returned. """ # First, try in the given checker's config. # After that, look in the options providers. try: return getattr(checker.config, option.replace("-", "_")) except AttributeError: pass for provider in checker.linter.options_providers: for options in provider.options: if options[0] == option: return getattr(provider.config, option.replace("-", "_")) return default
return a list of stripped strings by splitting the string given as argument on `sep` (',' by default). Empty strings are discarded.
def _splitstrip(string, sep=","): """return a list of stripped string by splitting the string given as argument on `sep` (',' by default). Empty string are discarded. >>> _splitstrip('a, b, c , 4,,') ['a', 'b', 'c', '4'] >>> _splitstrip('a') ['a'] >>> _splitstrip('a,\nb,\nc,') ['a', 'b', 'c'] :type string: str or unicode :param string: a csv line :type sep: str or unicode :param sep: field separator, default to the comma (',') :rtype: str or unicode :return: the unquoted string (or the input string if it wasn't quoted) """ return [word.strip() for word in string.split(sep) if word.strip()]
remove optional quotes (simple or double) from the string
def _unquote(string): """remove optional quotes (simple or double) from the string :type string: str or unicode :param string: an optionally quoted string :rtype: str or unicode :return: the unquoted string (or the input string if it wasn't quoted) """ if not string: return string if string[0] in "\"'": string = string[1:] if string[-1] in "\"'": string = string[:-1] return string
return string as a comment
def _comment(string): """return string as a comment""" lines = [line.strip() for line in string.splitlines()] return "# " + ("%s# " % linesep).join(lines)
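The three helpers above (_splitstrip, _unquote, _comment) are plain string utilities; a short demo, assuming they are in scope:

```python
# Assuming _splitstrip, _unquote and _comment defined above are in scope.
print(_splitstrip(" a, b ,c ,,"))           # ['a', 'b', 'c'] - empty fields dropped
print(_unquote("'quoted value'"))           # quoted value   - surrounding quotes removed
print(_unquote("bare"))                     # bare           - returned unchanged
print(_comment("first line\nsecond line"))  # "# first line" and "# second line",
                                            # joined with os.linesep
```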
return the user input's value from a 'compiled' value
def _format_option_value(optdict, value): """return the user input's value from a 'compiled' value""" if isinstance(value, (list, tuple)): value = ",".join(_format_option_value(optdict, item) for item in value) elif isinstance(value, dict): value = ",".join("%s:%s" % (k, v) for k, v in value.items()) elif hasattr(value, "match"): # optdict.get('type') == 'regexp' # compiled regexp value = value.pattern elif optdict.get("type") == "yn": value = "yes" if value else "no" elif isinstance(value, str) and value.isspace(): value = "'%s'" % value return value
format an options section using the INI format
def format_section(stream, section, options, doc=None): """format an options section using the INI format""" if doc: print(_comment(doc), file=stream) print("[%s]" % section, file=stream) _ini_format(stream, options)
format options using the INI format
def _ini_format(stream, options): """format options using the INI format""" for optname, optdict, value in options: value = _format_option_value(optdict, value) help_opt = optdict.get("help") if help_opt: help_opt = normalize_text(help_opt, line_len=79, indent="# ") print(file=stream) print(help_opt, file=stream) else: print(file=stream) if value is None: print("#%s=" % optname, file=stream) else: value = str(value).strip() if re.match(r"^([\w-]+,)+[\w-]+$", str(value)): separator = "\n " + " " * len(optname) value = separator.join(x + "," for x in str(value).split(",")) # remove trailing ',' from last element of the list value = value[:-1] print("%s=%s" % (optname, value), file=stream)
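A sketch of rendering a configuration section with the helpers above, assuming format_section, _ini_format, _format_option_value, _comment and normalize_text are in scope; the option names and optdicts here are illustrative, but each entry follows the (name, optdict, value) shape _ini_format iterates over:

```python
import sys

# Assuming format_section and its helpers defined above are in scope.
options = [
    ("max-line-length",
     {"type": "int", "help": "Maximum number of characters on a single line."}, 100),
    ("ignore-comments",
     {"type": "yn", "help": "Ignore comments when computing similarities."}, True),
]
format_section(sys.stdout, "FORMAT", options, doc="Example section")
# Prints a "# Example section" comment, a [FORMAT] header, then each option as a
# "# "-wrapped help block followed by name=value ("yn" values become yes/no).
```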
insert a child node
def insert(self, index, child): """insert a child node""" self.children.insert(index, child) child.parent = self
return the visit name for the mixed class. When calling 'accept', the method <'visit_' + name returned by this method> will be called on the visitor
def _get_visit_name(self): """ return the visit name for the mixed class. When calling 'accept', the method <'visit_' + name returned by this method> will be called on the visitor """ try: # pylint: disable=no-member return self.TYPE.replace("-", "_") # pylint: disable=broad-except except Exception: return self.__class__.__name__.lower()
overridden to detect problems easily
def append(self, child): """overridden to detect problems easily""" assert child not in self.parents() VNode.append(self, child)
return the ancestor nodes
def parents(self): """return the ancestor nodes""" assert self.parent is not self if self.parent is None: return [] return [self.parent] + self.parent.parents()
format and write the given layout into the stream object
def format(self, layout, stream=None, encoding=None): """format and write the given layout into the stream object unicode policy: unicode strings may be found in the layout; try to call stream.write with it, but give it back encoded using the given encoding if it fails """ if stream is None: stream = sys.stdout if not encoding: encoding = getattr(stream, "encoding", "UTF-8") self.encoding = encoding or "UTF-8" self.out = stream self.begin_format() layout.accept(self) self.end_format()
trick to get table content without actually writing it
def get_table_content(self, table): """trick to get table content without actually writing it return an aligned list of lists containing table cells values as string """ result = [[]] cols = table.cols for cell in self.compute_content(table): if cols == 0: result.append([]) cols = table.cols cols -= 1 result[-1].append(cell) # fill missing cells while len(result[-1]) < cols: result[-1].append("") return result
trick to compute the formatting of children layout before actually writing it
def compute_content(self, layout): """trick to compute the formatting of children layout before actually writing it return an iterator on strings (one for each child element) """ # Patch the underlying output stream with a fresh-generated stream, # which is used to store a temporary representation of a child # node. out = self.out try: for child in layout.children: stream = StringIO() self.out = stream child.accept(self) yield stream.getvalue() finally: self.out = out
Walk the AST to collect block level options line numbers.
def collect_block_lines(self, msgs_store, module_node): """Walk the AST to collect block level options line numbers.""" for msg, lines in self._module_msgs_state.items(): self._raw_module_msgs_state[msg] = lines.copy() orig_state = self._module_msgs_state.copy() self._module_msgs_state = {} self._suppression_mapping = {} self._effective_max_line_number = module_node.tolineno self._collect_block_lines(msgs_store, module_node, orig_state)
Recursively walk (depth first) AST to collect block level options line numbers.
def _collect_block_lines(self, msgs_store, node, msg_state): """Recursively walk (depth first) AST to collect block level options line numbers. """ for child in node.get_children(): self._collect_block_lines(msgs_store, child, msg_state) first = node.fromlineno last = node.tolineno # first child line number used to distinguish between disable # which are the first child of scoped node with those defined later. # For instance in the code below: # # 1. def meth8(self): # 2. """test late disabling""" # 3. # pylint: disable=E1102 # 4. print self.blip # 5. # pylint: disable=E1101 # 6. print self.bla # # E1102 should be disabled from line 1 to 6 while E1101 from line 5 to 6 # # this is necessary to disable locally messages applying to class / # function using their fromlineno if ( isinstance(node, (nodes.Module, nodes.ClassDef, nodes.FunctionDef)) and node.body ): firstchildlineno = node.body[0].fromlineno else: firstchildlineno = last for msgid, lines in msg_state.items(): for lineno, state in list(lines.items()): original_lineno = lineno if first > lineno or last < lineno: continue # Set state for all lines for this block, if the # warning is applied to nodes. message_definitions = msgs_store.get_message_definitions(msgid) for message_definition in message_definitions: if message_definition.scope == WarningScope.NODE: if lineno > firstchildlineno: state = True first_, last_ = node.block_range(lineno) else: first_ = lineno last_ = last for line in range(first_, last_ + 1): # do not override existing entries if line in self._module_msgs_state.get(msgid, ()): continue if line in lines: # state change in the same block state = lines[line] original_lineno = line if not state: self._suppression_mapping[(msgid, line)] = original_lineno try: self._module_msgs_state[msgid][line] = state except KeyError: self._module_msgs_state[msgid] = {line: state} del lines[lineno]
Set status (enabled/disabled) for a given message at a given line
def set_msg_status(self, msg, line, status): """Set status (enabled/disable) for a given message at a given line""" assert line > 0 try: self._module_msgs_state[msg.msgid][line] = status except KeyError: self._module_msgs_state[msg.msgid] = {line: status}
Report an ignored message.
def handle_ignored_message( self, state_scope, msgid, line, node, args, confidence ): # pylint: disable=unused-argument """Report an ignored message. state_scope is either MSG_STATE_SCOPE_MODULE or MSG_STATE_SCOPE_CONFIG, depending on whether the message was disabled locally in the module, or globally. The other arguments are the same as for add_message. """ if state_scope == MSG_STATE_SCOPE_MODULE: try: orig_line = self._suppression_mapping[(msgid, line)] self._ignored_msgs[(msgid, orig_line)].add(line) except KeyError: pass
register a report
def register_report(self, reportid, r_title, r_cb, checker): """register a report reportid is the unique identifier for the report r_title the report's title r_cb the method to call to make the report checker is the checker defining the report """ reportid = reportid.upper() self._reports[checker].append((reportid, r_title, r_cb))
enable the report of the given id
def enable_report(self, reportid): """enable the report of the given id""" reportid = reportid.upper() self._reports_state[reportid] = True
disable the report of the given id
def disable_report(self, reportid): """disable the report of the given id""" reportid = reportid.upper() self._reports_state[reportid] = False
render registered reports
def make_reports(self, stats, old_stats): """render registered reports""" sect = Section("Report", "%s statements analysed." % (self.stats["statement"])) for checker in self.report_order(): for reportid, r_title, r_cb in self._reports[checker]: if not self.report_is_enabled(reportid): continue report_sect = Section(r_title) try: r_cb(report_sect, stats, old_stats) except EmptyReportError: continue report_sect.report_id = reportid sect.append(report_sect) return sect
add some stats entries to the statistic dictionary; raise an AssertionError if there is a key conflict
def add_stats(self, **kwargs): """add some stats entries to the statistic dictionary raise an AssertionError if there is a key conflict """ for key, value in kwargs.items(): if key[-1] == "_": key = key[:-1] assert key not in self.stats self.stats[key] = value return self.stats
Get the name of the property that the given node is a setter for.
def get_setters_property_name(node): """Get the name of the property that the given node is a setter for. :param node: The node to get the property name for. :type node: str :rtype: str or None :returns: The name of the property that the node is a setter for, or None if one could not be found. """ decorators = node.decorators.nodes if node.decorators else [] for decorator in decorators: if ( isinstance(decorator, astroid.Attribute) and decorator.attrname == "setter" and isinstance(decorator.expr, astroid.Name) ): return decorator.expr.name return None
Get the property node for the given setter node.
def get_setters_property(node): """Get the property node for the given setter node. :param node: The node to get the property for. :type node: astroid.FunctionDef :rtype: astroid.FunctionDef or None :returns: The node relating to the property of the given setter node, or None if one could not be found. """ property_ = None property_name = get_setters_property_name(node) class_node = utils.node_frame_class(node) if property_name and class_node: class_attrs = class_node.getattr(node.name) for attr in class_attrs: if utils.decorated_with_property(attr): property_ = attr break return property_
Check if a return node returns a value other than None.
def returns_something(return_node): """Check if a return node returns a value other than None. :param return_node: The return node to check. :type return_node: astroid.Return :rtype: bool :return: True if the return node returns a value other than None, False otherwise. """ returns = return_node.value if returns is None: return False return not (isinstance(returns, astroid.Const) and returns.value is None)
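A small check of the three return shapes, assuming astroid is installed and returns_something above is in scope; astroid.extract_node parses each snippet into a FunctionDef, so body[0] is the Return node:

```python
import astroid

# Assuming the returns_something helper defined above is in scope.
value_return = astroid.extract_node("def f():\n    return 42\n")
none_return = astroid.extract_node("def g():\n    return None\n")
bare_return = astroid.extract_node("def h():\n    return\n")

print(returns_something(value_return.body[0]))  # True: returns an actual value
print(returns_something(none_return.body[0]))   # False: explicit None
print(returns_something(bare_return.body[0]))   # False: bare return, value is None
```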
Gets all of the possible raised exception types for the given raise node.
def possible_exc_types(node): """ Gets all of the possible raised exception types for the given raise node. .. note:: Caught exception types are ignored. :param node: The raise node to find exception types for. :type node: astroid.node_classes.NodeNG :returns: A list of exception types possibly raised by :param:`node`. :rtype: set(str) """ excs = [] if isinstance(node.exc, astroid.Name): inferred = utils.safe_infer(node.exc) if inferred: excs = [inferred.name] elif node.exc is None: handler = node.parent while handler and not isinstance(handler, astroid.ExceptHandler): handler = handler.parent if handler and handler.type: inferred_excs = astroid.unpack_infer(handler.type) excs = (exc.name for exc in inferred_excs if exc is not astroid.Uninferable) else: target = _get_raise_target(node) if isinstance(target, astroid.ClassDef): excs = [target.name] elif isinstance(target, astroid.FunctionDef): for ret in target.nodes_of_class(astroid.Return): if ret.frame() != target: # return from inner function - ignore it continue val = utils.safe_infer(ret.value) if ( val and isinstance(val, (astroid.Instance, astroid.ClassDef)) and utils.inherit_from_std_ex(val) ): excs.append(val.name) try: return {exc for exc in excs if not utils.node_ignores_exception(node, exc)} except astroid.InferenceError: return set()
required method to auto register this checker
def register(linter): """required method to auto register this checker""" linter.register_checker(EncodingChecker(linter)) linter.register_checker(ByIdManagedMessagesChecker(linter))
inspect the source file to find messages activated or deactivated by id.
def process_module(self, module): """inspect the source file to find messages activated or deactivated by id.""" managed_msgs = MessagesHandlerMixIn.get_by_id_managed_msgs() for (mod_name, msg_id, msg_symbol, lineno, is_disabled) in managed_msgs: if mod_name == module.name: if is_disabled: txt = "Id '{ident}' is used to disable '{symbol}' message emission".format( ident=msg_id, symbol=msg_symbol ) else: txt = "Id '{ident}' is used to enable '{symbol}' message emission".format( ident=msg_id, symbol=msg_symbol ) self.add_message("use-symbolic-message-instead", line=lineno, args=txt) MessagesHandlerMixIn.clear_by_id_managed_msgs()
inspect the source file to find encoding problem
def process_module(self, module): """inspect the source file to find encoding problem""" if module.file_encoding: encoding = module.file_encoding else: encoding = "ascii" with module.stream() as stream: for lineno, line in enumerate(stream): self._check_encoding(lineno + 1, line, encoding)
inspect the source to find fixme problems
def process_tokens(self, tokens): """inspect the source to find fixme problems""" if not self.config.notes: return comments = ( token_info for token_info in tokens if token_info.type == tokenize.COMMENT ) for comment in comments: comment_text = comment.string[1:].lstrip() # trim '#' and whitespaces # handle pylint disable clauses disable_option_match = OPTION_RGX.search(comment_text) if disable_option_match: try: _, value = disable_option_match.group(1).split("=", 1) values = [_val.strip().upper() for _val in value.split(",")] if set(values) & set(self.config.notes): continue except ValueError: self.add_message( "bad-inline-option", args=disable_option_match.group(1).strip(), line=comment.string, ) continue # emit warnings if necessary match = self._fixme_pattern.search("#" + comment_text.lower()) if match: note = match.group(1) self.add_message( "fixme", col_offset=comment.string.lower().index(note.lower()), args=comment_text, line=comment.start[0], )
Check if the name is a future import from another module.
def _is_from_future_import(stmt, name): """Check if the name is a future import from another module.""" try: module = stmt.do_import_module(stmt.modname) except astroid.AstroidBuildingException: return None for local_node in module.locals.get(name, []): if isinstance(local_node, astroid.ImportFrom) and local_node.modname == FUTURE: return True return None
Returns True if stmt is inside the else branch for a parent For stmt.
def in_for_else_branch(parent, stmt): """Returns True if stmt is inside the else branch for a parent For stmt.""" return isinstance(parent, astroid.For) and any( else_stmt.parent_of(stmt) or else_stmt == stmt for else_stmt in parent.orelse )
get overridden method if any
def overridden_method(klass, name): """get overridden method if any""" try: parent = next(klass.local_attr_ancestors(name)) except (StopIteration, KeyError): return None try: meth_node = parent[name] except KeyError: # We have found an ancestor defining <name> but it's not in the local # dictionary. This may happen with astroid built from living objects. return None if isinstance(meth_node, astroid.FunctionDef): return meth_node return None
return extra information to add to the message for unpacking-non-sequence and unbalanced-tuple-unpacking errors
def _get_unpacking_extra_info(node, infered): """return extra information to add to the message for unpacking-non-sequence and unbalanced-tuple-unpacking errors """ more = "" infered_module = infered.root().name if node.root().name == infered_module: if node.lineno == infered.lineno: more = " %s" % infered.as_string() elif infered.lineno: more = " defined at line %s" % infered.lineno elif infered.lineno: more = " defined at line %s of %s" % (infered.lineno, infered_module) return more
Detect whether the given frames share a global scope.
def _detect_global_scope(node, frame, defframe): """ Detect that the given frames shares a global scope. Two frames shares a global scope when neither of them are hidden under a function scope, as well as any of parent scope of them, until the root scope. In this case, depending from something defined later on will not work, because it is still undefined. Example: class A: # B has the same global scope as `C`, leading to a NameError. class B(C): ... class C: ... """ def_scope = scope = None if frame and frame.parent: scope = frame.parent.scope() if defframe and defframe.parent: def_scope = defframe.parent.scope() if isinstance(frame, astroid.FunctionDef): # If the parent of the current node is a # function, then it can be under its scope # (defined in, which doesn't concern us) or # the `->` part of annotations. The same goes # for annotations of function arguments, they'll have # their parent the Arguments node. if not isinstance(node.parent, (astroid.FunctionDef, astroid.Arguments)): return False elif any( not isinstance(f, (astroid.ClassDef, astroid.Module)) for f in (frame, defframe) ): # Not interested in other frames, since they are already # not in a global scope. return False break_scopes = [] for s in (scope, def_scope): # Look for parent scopes. If there is anything different # than a module or a class scope, then they frames don't # share a global scope. parent_scope = s while parent_scope: if not isinstance(parent_scope, (astroid.ClassDef, astroid.Module)): break_scopes.append(parent_scope) break if parent_scope.parent: parent_scope = parent_scope.parent.scope() else: break if break_scopes and len(set(break_scopes)) != 1: # Store different scopes than expected. # If the stored scopes are, in fact, the very same, then it means # that the two frames (frame and defframe) shares the same scope, # and we could apply our lineno analysis over them. # For instance, this works when they are inside a function, the node # that uses a definition and the definition itself. return False # At this point, we are certain that frame and defframe shares a scope # and the definition of the first depends on the second. return frame.lineno < defframe.lineno
Try to fix imports with multiple dots, by returning a dictionary with the import names expanded. The function unflattens root imports, like 'xml' (when we have both 'xml.etree' and 'xml.sax'), to 'xml.etree' and 'xml.sax' respectively.
def _fix_dot_imports(not_consumed): """ Try to fix imports with multiple dots, by returning a dictionary with the import names expanded. The function unflattens root imports, like 'xml' (when we have both 'xml.etree' and 'xml.sax'), to 'xml.etree' and 'xml.sax' respectively. """ # TODO: this should be improved in issue astroid #46 names = {} for name, stmts in not_consumed.items(): if any( isinstance(stmt, astroid.AssignName) and isinstance(stmt.assign_type(), astroid.AugAssign) for stmt in stmts ): continue for stmt in stmts: if not isinstance(stmt, (astroid.ImportFrom, astroid.Import)): continue for imports in stmt.names: second_name = None import_module_name = imports[0] if import_module_name == "*": # In case of wildcard imports, # pick the name from inside the imported module. second_name = name else: name_matches_dotted_import = False if ( import_module_name.startswith(name) and import_module_name.find(".") > -1 ): name_matches_dotted_import = True if name_matches_dotted_import or name in imports: # Most likely something like 'xml.etree', # which will appear in the .locals as 'xml'. # Only pick the name if it wasn't consumed. second_name = import_module_name if second_name and second_name not in names: names[second_name] = stmt return sorted(names.items(), key=lambda a: a[1].fromlineno)
Detect imports in the frame, with the required *name*. Such imports can be considered assignments. Returns True if an import for the given name was found.
def _find_frame_imports(name, frame): """ Detect imports in the frame, with the required *name*. Such imports can be considered assignments. Returns True if an import for the given name was found. """ imports = frame.nodes_of_class((astroid.Import, astroid.ImportFrom)) for import_node in imports: for import_name, import_alias in import_node.names: # If the import uses an alias, check only that. # Otherwise, check only the import name. if import_alias: if import_alias == name: return True elif import_name and import_name == name: return True return None
Checks if name_node has corresponding assign statement in same scope
def _assigned_locally(name_node): """ Checks if name_node has corresponding assign statement in same scope """ assign_stmts = name_node.scope().nodes_of_class(astroid.AssignName) return any(a.name == name_node.name for a in assign_stmts)
Mark the name as consumed and delete it from the to_consume dictionary
def mark_as_consumed(self, name, new_node): """ Mark the name as consumed and delete it from the to_consume dictionary """ self.consumed[name] = new_node del self.to_consume[name]
visit module: update consumption analysis variable; check globals don't override builtins
def visit_module(self, node): """visit module : update consumption analysis variable checks globals doesn't overrides builtins """ self._to_consume = [NamesConsumer(node, "module")] self._postponed_evaluation_enabled = is_postponed_evaluation_enabled(node) for name, stmts in node.locals.items(): if utils.is_builtin(name) and not utils.is_inside_except(stmts[0]): if self._should_ignore_redefined_builtin(stmts[0]) or name == "__doc__": continue self.add_message("redefined-builtin", args=name, node=stmts[0])
leave module: check globals
def leave_module(self, node): """leave module: check globals """ assert len(self._to_consume) == 1 not_consumed = self._to_consume.pop().to_consume # attempt to check for __all__ if defined if "__all__" in node.locals: self._check_all(node, not_consumed) # check for unused globals self._check_globals(not_consumed) # don't check unused imports in __init__ files if not self.config.init_import and node.package: return self._check_imports(not_consumed)
visit function: update consumption analysis variable and check locals
def visit_functiondef(self, node): """visit function: update consumption analysis variable and check locals """ self._to_consume.append(NamesConsumer(node, "function")) if not ( self.linter.is_message_enabled("redefined-outer-name") or self.linter.is_message_enabled("redefined-builtin") ): return globs = node.root().globals for name, stmt in node.items(): if utils.is_inside_except(stmt): continue if name in globs and not isinstance(stmt, astroid.Global): definition = globs[name][0] if ( isinstance(definition, astroid.ImportFrom) and definition.modname == FUTURE ): # It is a __future__ directive, not a symbol. continue line = definition.fromlineno if not self._is_name_ignored(stmt, name): self.add_message( "redefined-outer-name", args=(name, line), node=stmt ) elif utils.is_builtin(name) and not self._should_ignore_redefined_builtin( stmt ): # do not print Redefining builtin for additional builtins self.add_message("redefined-builtin", args=name, node=stmt)
leave function: check function's locals are consumed
def leave_functiondef(self, node): """leave function: check function's locals are consumed""" if node.type_comment_returns: self._store_type_annotation_node(node.type_comment_returns) if node.type_comment_args: for argument_annotation in node.type_comment_args: self._store_type_annotation_node(argument_annotation) not_consumed = self._to_consume.pop().to_consume if not ( self.linter.is_message_enabled("unused-variable") or self.linter.is_message_enabled("possibly-unused-variable") or self.linter.is_message_enabled("unused-argument") ): return # Don't check arguments of function which are only raising an exception. if utils.is_error(node): return # Don't check arguments of abstract methods or within an interface. is_method = node.is_method() if is_method and node.is_abstract(): return global_names = _flattened_scope_names(node.nodes_of_class(astroid.Global)) nonlocal_names = _flattened_scope_names(node.nodes_of_class(astroid.Nonlocal)) for name, stmts in not_consumed.items(): self._check_is_unused(name, node, stmts[0], global_names, nonlocal_names)
check names imported exists in the global scope
def visit_global(self, node): """check names imported exists in the global scope""" frame = node.frame() if isinstance(frame, astroid.Module): self.add_message("global-at-module-level", node=node) return module = frame.root() default_message = True locals_ = node.scope().locals for name in node.names: try: assign_nodes = module.getattr(name) except astroid.NotFoundError: # unassigned global, skip assign_nodes = [] not_defined_locally_by_import = not any( isinstance(local, astroid.node_classes.Import) for local in locals_.get(name, ()) ) if not assign_nodes and not_defined_locally_by_import: self.add_message("global-variable-not-assigned", args=name, node=node) default_message = False continue for anode in assign_nodes: if ( isinstance(anode, astroid.AssignName) and anode.name in module.special_attributes ): self.add_message("redefined-builtin", args=name, node=node) break if anode.frame() is module: # module level assignment break else: if not_defined_locally_by_import: # global undefined at the module scope self.add_message("global-variable-undefined", args=name, node=node) default_message = False if default_message: self.add_message("global-statement", node=node)
Return True if the node is in a local class scope as an assignment.
def _ignore_class_scope(self, node): """ Return True if the node is in a local class scope, as an assignment. :param node: Node considered :type node: astroid.Node :return: True if the node is in a local class scope, as an assignment. False otherwise. :rtype: bool """ # Detect if we are in a local class scope, as an assignment. # For example, the following is fair game. # # class A: # b = 1 # c = lambda b=b: b * b # # class B: # tp = 1 # def func(self, arg: tp): # ... # class C: # tp = 2 # def func(self, arg=tp): # ... name = node.name frame = node.statement().scope() in_annotation_or_default = self._defined_in_function_definition(node, frame) if in_annotation_or_default: frame_locals = frame.parent.scope().locals else: frame_locals = frame.locals return not ( (isinstance(frame, astroid.ClassDef) or in_annotation_or_default) and name in frame_locals )
check that a name is defined in the current scope and doesn't redefine a built-in
def visit_name(self, node): """check that a name is defined if the current scope and doesn't redefine a built-in """ stmt = node.statement() if stmt.fromlineno is None: # name node from an astroid built from live code, skip assert not stmt.root().file.endswith(".py") return name = node.name frame = stmt.scope() # if the name node is used as a function default argument's value or as # a decorator, then start from the parent frame of the function instead # of the function frame - and thus open an inner class scope if ( utils.is_default_argument(node) or utils.is_func_decorator(node) or utils.is_ancestor_name(frame, node) ): start_index = len(self._to_consume) - 2 else: start_index = len(self._to_consume) - 1 # iterates through parent scopes, from the inner to the outer base_scope_type = self._to_consume[start_index].scope_type # pylint: disable=too-many-nested-blocks; refactoring this block is a pain. for i in range(start_index, -1, -1): current_consumer = self._to_consume[i] # if the current scope is a class scope but it's not the inner # scope, ignore it. This prevents to access this scope instead of # the globals one in function members when there are some common # names. The only exception is when the starting scope is a # comprehension and its direct outer scope is a class if ( current_consumer.scope_type == "class" and i != start_index and not (base_scope_type == "comprehension" and i == start_index - 1) ): if self._ignore_class_scope(node): continue # the name has already been consumed, only check it's not a loop # variable used outside the loop # avoid the case where there are homonyms inside function scope and #  comprehension current scope (avoid bug #1731) if name in current_consumer.consumed and not ( current_consumer.scope_type == "comprehension" and self._has_homonym_in_upper_function_scope(node, i) ): defnode = utils.assign_parent(current_consumer.consumed[name][0]) self._check_late_binding_closure(node, defnode) self._loopvar_name(node, name) break found_node = current_consumer.get_next_to_consume(node) if found_node is None: continue # checks for use before assignment defnode = utils.assign_parent(current_consumer.to_consume[name][0]) if defnode is not None: self._check_late_binding_closure(node, defnode) defstmt = defnode.statement() defframe = defstmt.frame() # The class reuses itself in the class scope. recursive_klass = ( frame is defframe and defframe.parent_of(node) and isinstance(defframe, astroid.ClassDef) and node.name == defframe.name ) if ( recursive_klass and utils.is_inside_lambda(node) and ( not utils.is_default_argument(node) or node.scope().parent.scope() is not defframe ) ): # Self-referential class references are fine in lambda's -- # As long as they are not part of the default argument directly # under the scope of the parent self-referring class. 
# Example of valid default argument: # class MyName3: # myattr = 1 # mylambda3 = lambda: lambda a=MyName3: a # Example of invalid default argument: # class MyName4: # myattr = 1 # mylambda4 = lambda a=MyName4: lambda: a # If the above conditional is True, # there is no possibility of undefined-variable # Also do not consume class name # (since consuming blocks subsequent checks) # -- quit break maybee0601, annotation_return, use_outer_definition = self._is_variable_violation( node, name, defnode, stmt, defstmt, frame, defframe, base_scope_type, recursive_klass, ) if use_outer_definition: continue if ( maybee0601 and not utils.is_defined_before(node) and not astroid.are_exclusive(stmt, defstmt, ("NameError",)) ): # Used and defined in the same place, e.g `x += 1` and `del x` defined_by_stmt = defstmt is stmt and isinstance( node, (astroid.DelName, astroid.AssignName) ) if ( recursive_klass or defined_by_stmt or annotation_return or isinstance(defstmt, astroid.Delete) ): if not utils.node_ignores_exception(node, NameError): # Handle postponed evaluation of annotations if not ( self._postponed_evaluation_enabled and isinstance( stmt, ( astroid.AnnAssign, astroid.FunctionDef, astroid.Arguments, ), ) and name in node.root().locals ): self.add_message( "undefined-variable", args=name, node=node ) elif base_scope_type != "lambda": # E0601 may *not* occurs in lambda scope. # Handle postponed evaluation of annotations if not ( self._postponed_evaluation_enabled and isinstance( stmt, (astroid.AnnAssign, astroid.FunctionDef) ) ): self.add_message( "used-before-assignment", args=name, node=node ) elif base_scope_type == "lambda": # E0601 can occur in class-level scope in lambdas, as in # the following example: # class A: # x = lambda attr: f + attr # f = 42 if isinstance(frame, astroid.ClassDef) and name in frame.locals: if isinstance(node.parent, astroid.Arguments): if stmt.fromlineno <= defstmt.fromlineno: # Doing the following is fine: # class A: # x = 42 # y = lambda attr=x: attr self.add_message( "used-before-assignment", args=name, node=node ) else: self.add_message( "undefined-variable", args=name, node=node ) elif current_consumer.scope_type == "lambda": self.add_message("undefined-variable", node=node, args=name) current_consumer.mark_as_consumed(name, found_node) # check it's not a loop variable used outside the loop self._loopvar_name(node, name) break else: # we have not found the name, if it isn't a builtin, that's an # undefined name ! if not ( name in astroid.Module.scope_attrs or utils.is_builtin(name) or name in self.config.additional_builtins ): if not utils.node_ignores_exception(node, NameError): self.add_message("undefined-variable", args=name, node=node)
Return True if there is a node with the same name in the to_consume dict of an upper scope and if that scope is a function
def _has_homonym_in_upper_function_scope(self, node, index): """ Return True if there is a node with the same name in the to_consume dict of an upper scope and if that scope is a function :param node: node to check for :type node: astroid.Node :param index: index of the current consumer inside self._to_consume :type index: int :return: True if there is a node with the same name in the to_consume dict of an upper scope and if that scope is a function :rtype: bool """ for _consumer in self._to_consume[index - 1 :: -1]: if _consumer.scope_type == "function" and node.name in _consumer.to_consume: return True return False
check modules attribute accesses
def visit_import(self, node): """check modules attribute accesses""" if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node): # No need to verify this, since ImportError is already # handled by the client code. return for name, _ in node.names: parts = name.split(".") try: module = next(_infer_name_module(node, parts[0])) except astroid.ResolveError: continue self._check_module_attrs(node, module, parts[1:])
check modules attribute accesses
def visit_importfrom(self, node): """check modules attribute accesses""" if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node): # No need to verify this, since ImportError is already # handled by the client code. return name_parts = node.modname.split(".") try: module = node.do_import_module(name_parts[0]) except astroid.AstroidBuildingException: return module = self._check_module_attrs(node, module, name_parts[1:]) if not module: return for name, _ in node.names: if name == "*": continue self._check_module_attrs(node, module, name.split("."))
Check unbalanced tuple unpacking for assignments and unpacking non-sequences, as well as in case self/cls get assigned.
def visit_assign(self, node): """Check unbalanced tuple unpacking for assignments and unpacking non-sequences as well as in case self/cls get assigned. """ self._check_self_cls_assign(node) if not isinstance(node.targets[0], (astroid.Tuple, astroid.List)): return targets = node.targets[0].itered() try: infered = utils.safe_infer(node.value) if infered is not None: self._check_unpacking(infered, node, targets) except astroid.InferenceError: return
Check that self/cls don't get assigned
def _check_self_cls_assign(self, node): """Check that self/cls don't get assigned""" assign_names = { target.name for target in node.targets if isinstance(target, astroid.AssignName) } scope = node.scope() nonlocals_with_same_name = any( child for child in scope.body if isinstance(child, astroid.Nonlocal) and assign_names & set(child.names) ) if nonlocals_with_same_name: scope = node.scope().parent.scope() if not ( isinstance(scope, astroid.scoped_nodes.FunctionDef) and scope.is_method() and "builtins.staticmethod" not in scope.decoratornames() ): return argument_names = scope.argnames() if not argument_names: return self_cls_name = argument_names[0] target_assign_names = ( target.name for target in node.targets if isinstance(target, astroid.node_classes.AssignName) ) if self_cls_name in target_assign_names: self.add_message("self-cls-assignment", node=node, args=(self_cls_name))
Check for unbalanced tuple unpacking and unpacking non sequences.
def _check_unpacking(self, infered, node, targets): """ Check for unbalanced tuple unpacking and unpacking non sequences. """ if utils.is_inside_abstract_class(node): return if utils.is_comprehension(node): return if infered is astroid.Uninferable: return if ( isinstance(infered.parent, astroid.Arguments) and isinstance(node.value, astroid.Name) and node.value.name == infered.parent.vararg ): # Variable-length argument, we can't determine the length. return if isinstance(infered, (astroid.Tuple, astroid.List)): # attempt to check unpacking is properly balanced values = infered.itered() if len(targets) != len(values): # Check if we have starred nodes. if any(isinstance(target, astroid.Starred) for target in targets): return self.add_message( "unbalanced-tuple-unpacking", node=node, args=( _get_unpacking_extra_info(node, infered), len(targets), len(values), ), ) # attempt to check unpacking may be possible (ie RHS is iterable) else: if not utils.is_iterable(infered): self.add_message( "unpacking-non-sequence", node=node, args=(_get_unpacking_extra_info(node, infered),), )
check that module_names (list of string) are accessible through the given module; if the latest access name corresponds to a module, return it
def _check_module_attrs(self, node, module, module_names): """check that module_names (list of string) are accessible through the given module if the latest access name corresponds to a module, return it """ assert isinstance(module, astroid.Module), module while module_names: name = module_names.pop(0) if name == "__dict__": module = None break try: module = next(module.getattr(name)[0].infer()) if module is astroid.Uninferable: return None except astroid.NotFoundError: if module.name in self._ignored_modules: return None self.add_message( "no-name-in-module", args=(name, module.name), node=node ) return None except astroid.InferenceError: return None if module_names: # FIXME: other message if name is not the latest part of # module_names ? modname = module.name if module else "__dict__" self.add_message( "no-name-in-module", node=node, args=(".".join(module_names), modname) ) return None if isinstance(module, astroid.Module): return module return None
Update consumption analysis for metaclasses.
def _check_metaclasses(self, node): """ Update consumption analysis for metaclasses. """ consumed = [] # [(scope_locals, consumed_key)] for child_node in node.get_children(): if isinstance(child_node, astroid.ClassDef): consumed.extend(self._check_classdef_metaclasses(child_node, node)) # Pop the consumed items, in order to avoid having # unused-import and unused-variable false positives for scope_locals, name in consumed: scope_locals.pop(name, None)
get values listed in <columns> from <stats> and <old_stats>, and return a formatted list of values, designed to be given to a ureport.Table object
def table_lines_from_stats(stats, _, columns): """get values listed in <columns> from <stats> and <old_stats>, and return a formated list of values, designed to be given to a ureport.Table object """ lines = [] for m_type in columns: new = stats[m_type] new = "%.3f" % new if isinstance(new, float) else str(new) lines += (m_type.replace("_", " "), new, "NC", "NC") return lines
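A minimal sketch, assuming table_lines_from_stats above is in scope; floats are rendered with three decimals and the 'NC' placeholders stand in for the previous-run columns this variant does not compute:

```python
# Assuming the table_lines_from_stats helper defined above is in scope.
stats = {"nb_duplicated_lines": 12, "percent_duplicated_lines": 1.234}
lines = table_lines_from_stats(
    stats, None, ("nb_duplicated_lines", "percent_duplicated_lines")
)
print(lines)
# ['nb duplicated lines', '12', 'NC', 'NC',
#  'percent duplicated lines', '1.234', 'NC', 'NC']
```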
Creates the proper script names required for each platform (taken from 4Suite)
def ensure_scripts(linux_scripts): """Creates the proper script names required for each platform (taken from 4Suite) """ from distutils import util if util.get_platform()[:3] == "win": return linux_scripts + [script + ".bat" for script in linux_scripts] return linux_scripts
return a list of subpackages for the given directory
def get_packages(directory, prefix): """return a list of subpackages for the given directory""" result = [] for package in os.listdir(directory): absfile = join(directory, package) if isdir(absfile): if exists(join(absfile, "__init__.py")): if prefix: result.append("%s.%s" % (prefix, package)) else: result.append(package) result += get_packages(absfile, result[-1]) return result
setup entry point
def install(**kwargs): """setup entry point""" if USE_SETUPTOOLS: if "--force-manifest" in sys.argv: sys.argv.remove("--force-manifest") packages = [modname] + get_packages(join(base_dir, "pylint"), modname) if USE_SETUPTOOLS: if install_requires: kwargs["install_requires"] = install_requires kwargs["dependency_links"] = dependency_links kwargs["entry_points"] = { "console_scripts": [ "pylint = pylint:run_pylint", "epylint = pylint:run_epylint", "pyreverse = pylint:run_pyreverse", "symilar = pylint:run_symilar", ] } kwargs["packages"] = packages cmdclass = {"install_lib": MyInstallLib, "build_py": build_py} if easy_install_lib: cmdclass["easy_install"] = easy_install return setup( name=distname, version=__pkginfo__["version"], license=__pkginfo__["license"], description=__pkginfo__["description"], long_description=long_description, author=__pkginfo__["author"], author_email=__pkginfo__["author_email"], url=__pkginfo__["web"], scripts=ensure_scripts(scripts), classifiers=__pkginfo__["classifiers"], data_files=data_files, ext_modules=ext_modules, cmdclass=cmdclass, extras_require=extras_require, test_suite="test", python_requires=">=3.4.*", setup_requires=["pytest-runner"], tests_require=["pytest"], **kwargs )
overridden from install_lib class
def run(self): """overridden from install_lib class""" install_lib.install_lib.run(self) # manually install included directories if any if include_dirs: for directory in include_dirs: dest = join(self.install_dir, directory) if sys.version_info >= (3, 0): exclude = {"invalid_encoded_data*", "unknown_encoding*"} else: exclude = set() shutil.rmtree(dest, ignore_errors=True) shutil.copytree( directory, dest, ignore=shutil.ignore_patterns(*exclude) )
return lines with leading/trailing whitespace and any ignored code features removed
def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports): """return lines with leading/trailing whitespace and any ignored code features removed """ if ignore_imports: tree = astroid.parse("".join(lines)) node_is_import_by_lineno = ( (node.lineno, isinstance(node, (astroid.Import, astroid.ImportFrom))) for node in tree.body ) line_begins_import = { lineno: all(is_import for _, is_import in node_is_import_group) for lineno, node_is_import_group in groupby( node_is_import_by_lineno, key=lambda x: x[0] ) } current_line_is_import = False strippedlines = [] docstring = None for lineno, line in enumerate(lines, start=1): line = line.strip() if ignore_docstrings: if not docstring and any( line.startswith(i) for i in ['"""', "'''", 'r"""', "r'''"] ): docstring = line[:3] line = line[3:] if docstring: if line.endswith(docstring): docstring = None line = "" if ignore_imports: current_line_is_import = line_begins_import.get( lineno, current_line_is_import ) if current_line_is_import: line = "" if ignore_comments: # XXX should use regex in checkers/format to avoid cutting # at a "#" in a string line = line.split("#", 1)[0].strip() strippedlines.append(line) return strippedlines
make a layout with some stats about duplication
def report_similarities(sect, stats, old_stats): """make a layout with some stats about duplication""" lines = ["", "now", "previous", "difference"] lines += table_lines_from_stats( stats, old_stats, ("nb_duplicated_lines", "percent_duplicated_lines") ) sect.append(Table(children=lines, cols=4, rheaders=1, cheaders=1))
standalone command line access point
def Run(argv=None): """standalone command line access point""" if argv is None: argv = sys.argv[1:] from getopt import getopt s_opts = "hdi" l_opts = ( "help", "duplicates=", "ignore-comments", "ignore-imports", "ignore-docstrings", ) min_lines = 4 ignore_comments = False ignore_docstrings = False ignore_imports = False opts, args = getopt(argv, s_opts, l_opts) for opt, val in opts: if opt in ("-d", "--duplicates"): min_lines = int(val) elif opt in ("-h", "--help"): usage() elif opt in ("-i", "--ignore-comments"): ignore_comments = True elif opt in ("--ignore-docstrings",): ignore_docstrings = True elif opt in ("--ignore-imports",): ignore_imports = True if not args: usage(1) sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports) for filename in args: with open(filename) as stream: sim.append_stream(filename, stream) sim.run() sys.exit(0)
append a file to search for similarities
def append_stream(self, streamid, stream, encoding=None): """append a file to search for similarities""" if encoding is None: readlines = stream.readlines else: readlines = decoding_stream(stream, encoding).readlines try: self.linesets.append( LineSet( streamid, readlines(), self.ignore_comments, self.ignore_docstrings, self.ignore_imports, ) ) except UnicodeDecodeError: pass
compute similarities in appended files
def _compute_sims(self): """compute similarities in appended files""" no_duplicates = defaultdict(list) for num, lineset1, idx1, lineset2, idx2 in self._iter_sims(): duplicate = no_duplicates[num] for couples in duplicate: if (lineset1, idx1) in couples or (lineset2, idx2) in couples: couples.add((lineset1, idx1)) couples.add((lineset2, idx2)) break else: duplicate.append({(lineset1, idx1), (lineset2, idx2)}) sims = [] for num, ensembles in no_duplicates.items(): for couples in ensembles: sims.append((num, couples)) sims.sort() sims.reverse() return sims
display computed similarities on stdout
def _display_sims(self, sims): """display computed similarities on stdout""" nb_lignes_dupliquees = 0 for num, couples in sims: print() print(num, "similar lines in", len(couples), "files") couples = sorted(couples) for lineset, idx in couples: print("==%s:%s" % (lineset.name, idx)) # pylint: disable=W0631 for line in lineset._real_lines[idx : idx + num]: print(" ", line.rstrip()) nb_lignes_dupliquees += num * (len(couples) - 1) nb_total_lignes = sum([len(lineset) for lineset in self.linesets]) print( "TOTAL lines=%s duplicates=%s percent=%.2f" % ( nb_total_lignes, nb_lignes_dupliquees, nb_lignes_dupliquees * 100.0 / nb_total_lignes, ) )
find similarities in the two given linesets
def _find_common(self, lineset1, lineset2): """find similarities in the two given linesets""" lines1 = lineset1.enumerate_stripped lines2 = lineset2.enumerate_stripped find = lineset2.find index1 = 0 min_lines = self.min_lines while index1 < len(lineset1): skip = 1 num = 0 for index2 in find(lineset1[index1]): non_blank = 0 for num, ((_, line1), (_, line2)) in enumerate( zip(lines1(index1), lines2(index2)) ): if line1 != line2: if non_blank > min_lines: yield num, lineset1, index1, lineset2, index2 skip = max(skip, num) break if line1: non_blank += 1 else: # we may have reach the end num += 1 if non_blank > min_lines: yield num, lineset1, index1, lineset2, index2 skip = max(skip, num) index1 += skip
iterate on similarities among all files by making a cartesian product
def _iter_sims(self): """iterate on similarities among all files, by making a cartesian product """ for idx, lineset in enumerate(self.linesets[:-1]): for lineset2 in self.linesets[idx + 1 :]: for sim in self._find_common(lineset, lineset2): yield sim
return an iterator on stripped lines, starting from a given index if specified, else 0
def enumerate_stripped(self, start_at=0): """return an iterator on stripped lines, starting from a given index if specified, else 0 """ idx = start_at if start_at: lines = self._stripped_lines[start_at:] else: lines = self._stripped_lines for line in lines: # if line: yield idx, line idx += 1
create the index for this set
def _mk_index(self): """create the index for this set""" index = defaultdict(list) for line_no, line in enumerate(self._stripped_lines): if line: index[line].append(line_no) return index
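The index built above maps every non-blank stripped line to the positions where it occurs, which is what lets the similarity search start comparisons only where a line is shared. A standalone sketch of the same shape (the make_line_index name is purely illustrative):

```python
from collections import defaultdict

def make_line_index(stripped_lines):
    """Standalone sketch of _mk_index: map each non-blank line to its occurrence indices."""
    index = defaultdict(list)
    for line_no, line in enumerate(stripped_lines):
        if line:
            index[line].append(line_no)
    return index

idx = make_line_index(["x = 1", "", "x = 1", "return x"])
print(dict(idx))  # {'x = 1': [0, 2], 'return x': [3]}
```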
method called to set an option (registered in the options list)
def set_option(self, optname, value, action=None, optdict=None): """method called to set an option (registered in the options list) overridden to report options setting to Similar """ BaseChecker.set_option(self, optname, value, action, optdict) if optname == "min-similarity-lines": self.min_lines = self.config.min_similarity_lines elif optname == "ignore-comments": self.ignore_comments = self.config.ignore_comments elif optname == "ignore-docstrings": self.ignore_docstrings = self.config.ignore_docstrings elif optname == "ignore-imports": self.ignore_imports = self.config.ignore_imports
init the checkers: reset linesets and statistics information
def open(self): """init the checkers: reset linesets and statistics information""" self.linesets = [] self.stats = self.linter.add_stats( nb_duplicated_lines=0, percent_duplicated_lines=0 )
process a module
def process_module(self, node): """process a module the module's content is accessible via the stream object stream must implement the readlines method """ with node.stream() as stream: self.append_stream(self.linter.current_name, stream, node.file_encoding)
compute and display similarities on closing (i.e. end of parsing)
def close(self): """compute and display similarities on closing (i.e. end of parsing)""" total = sum(len(lineset) for lineset in self.linesets) duplicated = 0 stats = self.stats for num, couples in self._compute_sims(): msg = [] for lineset, idx in couples: msg.append("==%s:%s" % (lineset.name, idx)) msg.sort() # pylint: disable=W0631 for line in lineset._real_lines[idx : idx + num]: msg.append(line.rstrip()) self.add_message("R0801", args=(len(couples), "\n".join(msg))) duplicated += num * (len(couples) - 1) stats["nb_duplicated_lines"] = duplicated stats["percent_duplicated_lines"] = total and duplicated * 100.0 / total
Check if a definition signature is equivalent to a call.
def _definition_equivalent_to_call(definition, call): """Check if a definition signature is equivalent to a call.""" if definition.kwargs: same_kw_variadics = definition.kwargs in call.starred_kws else: same_kw_variadics = not call.starred_kws if definition.varargs: same_args_variadics = definition.varargs in call.starred_args else: same_args_variadics = not call.starred_args same_kwonlyargs = all(kw in call.kws for kw in definition.kwonlyargs) same_args = definition.args == call.args no_additional_kwarg_arguments = True if call.kws: for keyword in call.kws: is_arg = keyword in call.args is_kwonly = keyword in definition.kwonlyargs if not is_arg and not is_kwonly: # Maybe this argument goes into **kwargs, # or it is an extraneous argument. # In any case, the signature is different than # the call site, which stops our search. no_additional_kwarg_arguments = False break return all( ( same_args, same_kwonlyargs, same_args_variadics, same_kw_variadics, no_additional_kwarg_arguments, ) )
Check equality of nodes based on the comparison of their attributes named attr_name.
def _check_arg_equality(node_a, node_b, attr_name): """ Check equality of nodes based on the comparison of their attributes named attr_name. Args: node_a (astroid.node): first node to compare. node_b (astroid.node): second node to compare. attr_name (str): name of the nodes attribute to use for comparison. Returns: bool: True if node_a.attr_name == node_b.attr_name, False otherwise. """ return getattr(node_a, attr_name) == getattr(node_b, attr_name)
Check if original and overridden methods' arguments have different default values
def _has_different_parameters_default_value(original, overridden): """ Check if original and overridden methods arguments have different default values Return True if one of the overridden arguments has a default value different from the default value of the original argument If one of the method doesn't have argument (.args is None) return False """ if original.args is None or overridden.args is None: return False all_args = chain(original.args, original.kwonlyargs) original_param_names = [param.name for param in all_args] default_missing = object() for param_name in original_param_names: try: original_default = original.default_value(param_name) except astroid.exceptions.NoDefault: original_default = default_missing try: overridden_default = overridden.default_value(param_name) except astroid.exceptions.NoDefault: overridden_default = default_missing default_list = [ arg == default_missing for arg in (original_default, overridden_default) ] if any(default_list) and not all(default_list): # Only one arg has no default value return True astroid_type_compared_attr = { astroid.Const: "value", astroid.ClassDef: "name", astroid.Tuple: "elts", astroid.List: "elts", } handled_types = tuple( astroid_type for astroid_type in astroid_type_compared_attr ) original_type = _get_node_type(original_default, handled_types) if original_type: #  We handle only astroid types that are inside the dict astroid_type_compared_attr if not isinstance(overridden_default, original_type): #  Two args with same name but different types return True if not _check_arg_equality( original_default, overridden_default, astroid_type_compared_attr[original_type], ): # Two args with same type but different values return True return False
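The `default_missing = object()` sentinel distinguishes "no default at all" from "default is None". A simplified, dictionary-based sketch of the same comparison (the dict-of-defaults shape is an assumption made for illustration, not the astroid API):

_MISSING = object()   # unique sentinel: never equal to a real default value

def defaults_differ(original_defaults, overridden_defaults, param_name):
    original = original_defaults.get(param_name, _MISSING)
    overridden = overridden_defaults.get(param_name, _MISSING)
    if (original is _MISSING) != (overridden is _MISSING):
        return True                      # only one side defines a default
    if original is _MISSING:
        return False                     # neither side defines a default
    return original != overridden        # both define one: compare the values

print(defaults_differ({"timeout": 5}, {"timeout": 10}, "timeout"))  # True
print(defaults_differ({"timeout": 5}, {}, "timeout"))               # True
print(defaults_differ({}, {}, "timeout"))                           # False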
Determine if the two methods have different parameters
def _different_parameters(original, overridden, dummy_parameter_regex): """Determine if the two methods have different parameters They are considered to have different parameters if: * they have different positional parameters, including different names * one of the methods is having variadics, while the other is not * they have different keyword only parameters. """ original_parameters = _positional_parameters(original) overridden_parameters = _positional_parameters(overridden) different_positional = _has_different_parameters( original_parameters, overridden_parameters, dummy_parameter_regex ) different_kwonly = _has_different_parameters( original.args.kwonlyargs, overridden.args.kwonlyargs, dummy_parameter_regex ) if original.name in PYMETHODS: # Ignore the difference for special methods. If the parameter # numbers are different, then that is going to be caught by # unexpected-special-method-signature. # If the names are different, it doesn't matter, since they can't # be used as keyword arguments anyway. different_positional = different_kwonly = False # Both or none should have extra variadics, otherwise the method # loses or gains capabilities that are not reflected into the parent method, # leading to potential inconsistencies in the code. different_kwarg = ( sum(1 for param in (original.args.kwarg, overridden.args.kwarg) if not param) == 1 ) different_vararg = ( sum(1 for param in (original.args.vararg, overridden.args.vararg) if not param) == 1 ) return any( (different_positional, different_kwarg, different_vararg, different_kwonly) )
Check if the func was called in any of the given methods belonging to the *klass*. Returns True if so, False otherwise.
def _called_in_methods(func, klass, methods): """ Check if the func was called in any of the given methods, belonging to the *klass*. Returns True if so, False otherwise. """ if not isinstance(func, astroid.FunctionDef): return False for method in methods: try: infered = klass.getattr(method) except astroid.NotFoundError: continue for infer_method in infered: for call in infer_method.nodes_of_class(astroid.Call): try: bound = next(call.func.infer()) except (astroid.InferenceError, StopIteration): continue if not isinstance(bound, astroid.BoundMethod): continue func_obj = bound._proxied if isinstance(func_obj, astroid.UnboundMethod): func_obj = func_obj._proxied if func_obj.name == func.name: return True return False
Check if the given attribute *name* is a property in the given *klass*.
def _is_attribute_property(name, klass): """ Check if the given attribute *name* is a property in the given *klass*. It will look for `property` calls or for functions with the given name, decorated by `property` or `property` subclasses. Returns ``True`` if the name is a property in the given klass, ``False`` otherwise. """ try: attributes = klass.getattr(name) except astroid.NotFoundError: return False property_name = "{}.property".format(BUILTINS) for attr in attributes: if attr is astroid.Uninferable: continue try: infered = next(attr.infer()) except astroid.InferenceError: continue if isinstance(infered, astroid.FunctionDef) and decorated_with_property( infered ): return True if infered.pytype() == property_name: return True return False
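For intuition, the runtime equivalent of this check is asking whether a class attribute is a `property` object; a minimal sketch:

class Account:
    def __init__(self):
        self._balance = 0

    @property
    def balance(self):
        return self._balance

# At runtime a property lives in the class namespace as a `property` instance.
print(isinstance(vars(Account)["balance"], property))   # True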
Safely infer the return value of a function.
def _safe_infer_call_result(node, caller, context=None): """ Safely infer the return value of a function. Returns None if inference failed or if there is some ambiguity (more than one node has been inferred). Otherwise returns infered value. """ try: inferit = node.infer_call_result(caller, context=context) value = next(inferit) except astroid.InferenceError: return None # inference failed except StopIteration: return None # no values infered try: next(inferit) return None # there is ambiguity on the inferred node except astroid.InferenceError: return None # there is some kind of ambiguity except StopIteration: return value
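The two consecutive `next()` calls implement an "exactly one result or nothing" rule. A generic, self-contained version of that pattern over any iterable:

def single_value_or_none(iterable):
    iterator = iter(iterable)
    try:
        value = next(iterator)
    except StopIteration:
        return None        # nothing produced at all
    try:
        next(iterator)
    except StopIteration:
        return value       # unambiguous: exactly one value
    return None            # ambiguous: a second value exists

print(single_value_or_none([42]))      # 42
print(single_value_or_none([]))        # None
print(single_value_or_none([1, 2]))    # None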
return a dictionary whose keys are the base classes providing the queried method, i.e. those that should/may be called from the method node
def _ancestors_to_call(klass_node, method="__init__"): """return a dictionary where keys are the list of base classes providing the queried method, and so that should/may be called from the method node """ to_call = {} for base_node in klass_node.ancestors(recurs=False): try: to_call[base_node] = next(base_node.igetattr(method)) except astroid.InferenceError: continue return to_call
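A rough runtime analogue, using `__bases__` and each class's own namespace instead of astroid's `ancestors()`/`igetattr()` (the helper name is illustrative only):

def bases_defining(cls, method="__init__"):
    """Map each direct base class to the method it defines itself, if any."""
    found = {}
    for base in cls.__bases__:
        if method in vars(base):          # defined on the base itself, not inherited
            found[base] = vars(base)[method]
    return found

class A:
    def __init__(self):
        self.a = 1

class B:
    pass

class C(A, B):
    pass

print([base.__name__ for base in bases_defining(C)])   # ['A']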
required method to auto-register this checker
def register(linter): """required method to auto register this checker """ linter.register_checker(ClassChecker(linter)) linter.register_checker(SpecialMethodsChecker(linter))
Set the given node as accessed.
def set_accessed(self, node): """Set the given node as accessed.""" frame = node_frame_class(node) if frame is None: # The node does not live in a class. return self._scopes[frame][node.attrname].append(node)
init visit variable _accessed
def visit_classdef(self, node): """init visit variable _accessed """ self._check_bases_classes(node) # if not an exception or a metaclass if node.type == "class" and has_known_bases(node): try: node.local_attr("__init__") except astroid.NotFoundError: self.add_message("no-init", args=node, node=node) self._check_slots(node) self._check_proper_bases(node) self._check_consistent_mro(node)
Detect that a class has a consistent mro or duplicate bases.
def _check_consistent_mro(self, node): """Detect that a class has a consistent mro or duplicate bases.""" try: node.mro() except InconsistentMroError: self.add_message("inconsistent-mro", args=node.name, node=node) except DuplicateBasesError: self.add_message("duplicate-bases", args=node.name, node=node) except NotImplementedError: # Old style class, there's no mro so don't do anything. pass
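An inconsistent MRO is also rejected by Python itself at class-creation time; the checker reports it statically instead. A small sketch reproducing the runtime failure:

class A:
    pass

class B(A):
    pass

try:
    # The base order demands A before B, but B(A) demands B before A:
    # the C3 linearization has no consistent answer.
    bad = type("Bad", (A, B), {})
except TypeError as exc:
    print("inconsistent MRO:", exc)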
Detect that a class inherits something which is not a class or a type.
def _check_proper_bases(self, node): """ Detect that a class inherits something which is not a class or a type. """ for base in node.bases: ancestor = safe_infer(base) if ancestor in (astroid.Uninferable, None): continue if isinstance(ancestor, astroid.Instance) and ancestor.is_subtype_of( "%s.type" % (BUILTINS,) ): continue if not isinstance(ancestor, astroid.ClassDef) or _is_invalid_base_class( ancestor ): self.add_message("inherit-non-class", args=base.as_string(), node=node) if ancestor.name == object.__name__: self.add_message( "useless-object-inheritance", args=node.name, node=node )
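The last branch covers the `useless-object-inheritance` case: in Python 3 every class is new-style, so naming `object` explicitly adds nothing. Example code that would be flagged:

class Explicit(object):   # flagged: useless-object-inheritance
    pass

class Implicit:           # preferred: object is already the implicit base
    pass

print(Explicit.__mro__ == (Explicit, object))   # True
print(Implicit.__mro__ == (Implicit, object))   # True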
close a class node: check that instance attributes are defined in __init__ and check access to existent members
def leave_classdef(self, cnode): """close a class node: check that instance attributes are defined in __init__ and check access to existent members """ # check access to existent members on non metaclass classes if self._ignore_mixin and cnode.name[-5:].lower() == "mixin": # We are in a mixin class. No need to try to figure out if # something is missing, since it is most likely that it will # miss. return accessed = self._accessed.accessed(cnode) if cnode.type != "metaclass": self._check_accessed_members(cnode, accessed) # checks attributes are defined in an allowed method such as __init__ if not self.linter.is_message_enabled("attribute-defined-outside-init"): return defining_methods = self.config.defining_attr_methods current_module = cnode.root() for attr, nodes in cnode.instance_attrs.items(): # skip nodes which are not in the current module and it may screw up # the output, while it's not worth it nodes = [ n for n in nodes if not isinstance(n.statement(), (astroid.Delete, astroid.AugAssign)) and n.root() is current_module ] if not nodes: continue # error detected by typechecking # check if any method attr is defined in is a defining method if any(node.frame().name in defining_methods for node in nodes): continue # check attribute is defined in a parent's __init__ for parent in cnode.instance_attr_ancestors(attr): attr_defined = False # check if any parent method attr is defined in is a defining method for node in parent.instance_attrs[attr]: if node.frame().name in defining_methods: attr_defined = True if attr_defined: # we're done :) break else: # check attribute is defined as a class attribute try: cnode.local_attr(attr) except astroid.NotFoundError: for node in nodes: if node.frame().name not in defining_methods: # If the attribute was set by a call in any # of the defining methods, then don't emit # the warning. if _called_in_methods( node.frame(), cnode, defining_methods ): continue self.add_message( "attribute-defined-outside-init", args=attr, node=node )
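Example of code the `attribute-defined-outside-init` branch is meant to catch: an instance attribute that first appears in an ordinary method rather than in one of the defining methods (typically `__init__`):

class Widget:
    def __init__(self):
        self.name = "widget"    # fine: created in __init__

    def configure(self):
        self.size = 10          # flagged: attribute-defined-outside-init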
check method arguments, overriding
def visit_functiondef(self, node): """check method arguments, overriding""" # ignore actual functions if not node.is_method(): return self._check_useless_super_delegation(node) klass = node.parent.frame() self._meth_could_be_func = True # check first argument is self if this is actually a method self._check_first_arg_for_type(node, klass.type == "metaclass") if node.name == "__init__": self._check_init(node) return # check signature if the method overloads inherited method for overridden in klass.local_attr_ancestors(node.name): # get astroid for the searched method try: meth_node = overridden[node.name] except KeyError: # we have found the method but it's not in the local # dictionary. # This may happen with astroid build from living objects continue if not isinstance(meth_node, astroid.FunctionDef): continue self._check_signature(node, meth_node, "overridden", klass) break if node.decorators: for decorator in node.decorators.nodes: if isinstance(decorator, astroid.Attribute) and decorator.attrname in ( "getter", "setter", "deleter", ): # attribute affectation will call this method, not hiding it return if isinstance(decorator, astroid.Name): if decorator.name == "property": # attribute affectation will either call a setter or raise # an attribute error, anyway not hiding the function return # Infer the decorator and see if it returns something useful inferred = safe_infer(decorator) if not inferred: return if isinstance(inferred, astroid.FunctionDef): # Okay, it's a decorator, let's see what it can infer. try: inferred = next(inferred.infer_call_result(inferred)) except astroid.InferenceError: return try: if ( isinstance(inferred, (astroid.Instance, astroid.ClassDef)) and inferred.getattr("__get__") and inferred.getattr("__set__") ): return except astroid.AttributeInferenceError: pass # check if the method is hidden by an attribute try: overridden = klass.instance_attr(node.name)[0] # XXX overridden_frame = overridden.frame() if ( isinstance(overridden_frame, astroid.FunctionDef) and overridden_frame.type == "method" ): overridden_frame = overridden_frame.parent.frame() if isinstance(overridden_frame, astroid.ClassDef) and klass.is_subtype_of( overridden_frame.qname() ): args = (overridden.root().name, overridden.fromlineno) self.add_message("method-hidden", args=args, node=node) except astroid.NotFoundError: pass
Check if the given function node is a useless method override
def _check_useless_super_delegation(self, function): """Check if the given function node is an useless method override We consider it *useless* if it uses the super() builtin, but having nothing additional whatsoever than not implementing the method at all. If the method uses super() to delegate an operation to the rest of the MRO, and if the method called is the same as the current one, the arguments passed to super() are the same as the parameters that were passed to this method, then the method could be removed altogether, by letting other implementation to take precedence. """ if ( not function.is_method() # With decorators is a change of use or function.decorators ): return body = function.body if len(body) != 1: # Multiple statements, which means this overridden method # could do multiple things we are not aware of. return statement = body[0] if not isinstance(statement, (astroid.Expr, astroid.Return)): # Doing something else than what we are interested into. return call = statement.value if ( not isinstance(call, astroid.Call) # Not a super() attribute access. or not isinstance(call.func, astroid.Attribute) ): return # Should be a super call. try: super_call = next(call.func.expr.infer()) except astroid.InferenceError: return else: if not isinstance(super_call, objects.Super): return # The name should be the same. if call.func.attrname != function.name: return # Should be a super call with the MRO pointer being the # current class and the type being the current instance. current_scope = function.parent.scope() if ( super_call.mro_pointer != current_scope or not isinstance(super_call.type, astroid.Instance) or super_call.type.name != current_scope.name ): return #  Check values of default args klass = function.parent.frame() meth_node = None for overridden in klass.local_attr_ancestors(function.name): # get astroid for the searched method try: meth_node = overridden[function.name] except KeyError: # we have found the method but it's not in the local # dictionary. # This may happen with astroid build from living objects continue if ( not isinstance(meth_node, astroid.FunctionDef) # If the method have an ancestor which is not a # function then it is legitimate to redefine it or _has_different_parameters_default_value( meth_node.args, function.args ) ): return break # Detect if the parameters are the same as the call's arguments. params = _signature_from_arguments(function.args) args = _signature_from_call(call) if meth_node is not None: def form_annotations(annotations): return [ annotation.as_string() for annotation in filter(None, annotations) ] called_annotations = form_annotations(function.args.annotations) overridden_annotations = form_annotations(meth_node.args.annotations) if called_annotations and overridden_annotations: if called_annotations != overridden_annotations: return if _definition_equivalent_to_call(params, args): self.add_message( "useless-super-delegation", node=function, args=(function.name,) )
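Example of an override the `useless-super-delegation` check targets: the child method only forwards the identical arguments to the identical parent method, so deleting it changes nothing:

class Base:
    def greet(self, name="world"):
        return "hello " + name

class Child(Base):
    def greet(self, name="world"):      # flagged: useless-super-delegation
        return super().greet(name)

class LoggingChild(Base):
    def greet(self, name="world"):      # not flagged: adds behaviour of its own
        print("greeting", name)
        return super().greet(name)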
on method node, check if this method couldn't be a function
def leave_functiondef(self, node): """on method node, check if this method couldn't be a function ignore class, static and abstract methods, initializer, methods overridden from a parent class. """ if node.is_method(): if node.args.args is not None: self._first_attrs.pop() if not self.linter.is_message_enabled("no-self-use"): return class_node = node.parent.frame() if ( self._meth_could_be_func and node.type == "method" and node.name not in PYMETHODS and not ( node.is_abstract() or overrides_a_method(class_node, node.name) or decorated_with_property(node) or _has_bare_super_call(node) ) ): self.add_message("no-self-use", node=node)
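Example of the `no-self-use` situation: a regular method that never touches `self` and could be a plain function or a `@staticmethod`:

class TextTools:
    def shout(self, text):        # flagged: no-self-use, "self" is never used
        return text.upper()

    def describe(self):           # not flagged: uses self
        return "tools: %s" % self.__class__.__name__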
check if the getattr is an access to a class member; if so, register it. Also check for access to protected class member from outside its class (but ignore __special__ methods)
def visit_attribute(self, node): """check if the getattr is an access to a class member if so, register it. Also check for access to protected class member from outside its class (but ignore __special__ methods) """ # Check self if self._uses_mandatory_method_param(node): self._accessed.set_accessed(node) return if not self.linter.is_message_enabled("protected-access"): return self._check_protected_attribute_access(node)
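Example of the access pattern `_check_protected_attribute_access` is meant to flag: touching a `_single_underscore` member from outside the owning class:

class Tank:
    def __init__(self):
        self._level = 0        # protected by convention

    def fill(self, amount):
        self._level += amount  # fine: access through self

tank = Tank()
tank.fill(3)
print(tank._level)             # flagged: protected-access from outside the class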
Check that the given AssignAttr node is defined in the class slots.
def _check_in_slots(self, node): """ Check that the given AssignAttr node is defined in the class slots. """ inferred = safe_infer(node.expr) if not isinstance(inferred, astroid.Instance): return klass = inferred._proxied if not has_known_bases(klass): return if "__slots__" not in klass.locals or not klass.newstyle: return slots = klass.slots() if slots is None: return # If any ancestor doesn't use slots, the slots # defined for this class are superfluous. if any( "__slots__" not in ancestor.locals and ancestor.name != "object" for ancestor in klass.ancestors() ): return if not any(slot.value == node.attrname for slot in slots): # If we have a '__dict__' in slots, then # assigning any name is valid. if not any(slot.value == "__dict__" for slot in slots): if _is_attribute_property(node.attrname, klass): # Properties circumvent the slots mechanism, # so we should not emit a warning for them. return if node.attrname in klass.locals and _has_data_descriptor( klass, node.attrname ): # Descriptors circumvent the slots mechanism as well. return if node.attrname == "__class__" and _has_same_layout_slots( slots, node.parent.value ): return self.add_message("assigning-non-slot", args=(node.attrname,), node=node)
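The runtime behaviour behind `assigning-non-slot`: with `__slots__` and no `__dict__`, assigning an undeclared attribute fails, and the checker reports it without running the code:

class Point:
    __slots__ = ("x", "y")

point = Point()
point.x = 1                    # fine: "x" is declared in __slots__
try:
    point.z = 3                # flagged statically; AttributeError at runtime
except AttributeError as exc:
    print(exc)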