Dataset columns: INSTRUCTION (string, 1 to 8.43k characters) and RESPONSE (string, 75 to 104k characters).
leave the pyreverse.utils.Project node
def leave_project(self, node):  # pylint: disable=unused-argument
    """leave the pyreverse.utils.Project node

    return the generated diagram definition
    """
    if self.pkgdiagram:
        return self.pkgdiagram, self.classdiagram
    return (self.classdiagram,)
visit an astroid.Module node
def visit_module(self, node):
    """visit an astroid.Module node

    add this class to the package diagram definition
    """
    if self.pkgdiagram:
        self.linker.visit(node)
        self.pkgdiagram.add_object(node.name, node)
visit an astroid.Class node
def visit_classdef(self, node):
    """visit an astroid.Class node

    add this class to the class diagram definition
    """
    anc_level, association_level = self._get_levels()
    self.extract_classes(node, anc_level, association_level)
visit astroid.ImportFrom and catch modules for package diagram
def visit_importfrom(self, node):
    """visit astroid.ImportFrom and catch modules for package diagram"""
    if self.pkgdiagram:
        self.pkgdiagram.add_from_depend(node, node.modname)
return a class diagram definition for the given klass and its related klasses
def class_diagram(self, project, klass):
    """return a class diagram definition for the given klass and its
    related klasses
    """
    self.classdiagram = ClassDiagram(klass, self.config.mode)
    if len(project.modules) > 1:
        module, klass = klass.rsplit(".", 1)
        module = project.get_module(module)
    else:
        module = project.modules[0]
        klass = klass.split(".")[-1]
    klass = next(module.ilookup(klass))

    anc_level, association_level = self._get_levels()
    self.extract_classes(klass, anc_level, association_level)
    return self.classdiagram
Get the diagrams configuration data
def get_diadefs(self, project, linker):
    """Get the diagrams configuration data

    :param project: The pyreverse project
    :type project: pyreverse.utils.Project
    :param linker: The linker
    :type linker: pyreverse.inspector.Linker(IdGeneratorMixIn, LocalsVisitor)

    :returns: The list of diagram definitions
    :rtype: list(:class:`pylint.pyreverse.diagrams.ClassDiagram`)
    """
    # read and interpret diagram definitions (Diadefs)
    diagrams = []
    generator = ClassDiadefGenerator(linker, self)
    for klass in self.config.classes:
        diagrams.append(generator.class_diagram(project, klass))
    if not diagrams:
        diagrams = DefaultDiadefGenerator(linker, self).visit(project)
    for diagram in diagrams:
        diagram.extract_relationships()
    return diagrams
Check if the given owner should be ignored
def _is_owner_ignored(owner, name, ignored_classes, ignored_modules):
    """Check if the given owner should be ignored

    This will verify if the owner's module is in *ignored_modules*
    or the owner's module fully qualified name is in *ignored_modules*
    or if the *ignored_modules* contains a pattern which catches
    the fully qualified name of the module.

    Also, similar checks are done for the owner itself, if its name
    matches any name from the *ignored_classes* or if its qualified
    name can be found in *ignored_classes*.
    """
    ignored_modules = set(ignored_modules)
    module_name = owner.root().name
    module_qname = owner.root().qname()
    if any(
        module_name in ignored_modules
        or module_qname in ignored_modules
        or fnmatch.fnmatch(module_qname, ignore)
        for ignore in ignored_modules
    ):
        return True

    ignored_classes = set(ignored_classes)
    if hasattr(owner, "qname"):
        qname = owner.qname()
    else:
        qname = ""
    return any(ignore in (name, qname) for ignore in ignored_classes)
Given an owner and a name, try to find similar names
def _similar_names(owner, attrname, distance_threshold, max_choices):
    """Given an owner and a name, try to find similar names

    The similar names are searched given a distance metric and only
    a given number of choices will be returned.
    """
    possible_names = []
    names = _node_names(owner)

    for name in names:
        if name == attrname:
            continue
        distance = _string_distance(attrname, name)
        if distance <= distance_threshold:
            possible_names.append((name, distance))

    # Now get back the values with a minimum, up to the given
    # limit or choices.
    picked = [
        name
        for (name, _) in heapq.nsmallest(
            max_choices, possible_names, key=operator.itemgetter(1)
        )
    ]
    return sorted(picked)
Try to see if no-member should be emitted for the given owner.
def _emit_no_member(node, owner, owner_name, ignored_mixins=True, ignored_none=True):
    """Try to see if no-member should be emitted for the given owner.

    The following cases are ignored:

        * the owner is a function and it has decorators.
        * the owner is an instance and it has __getattr__, __getattribute__ implemented
        * the module is explicitly ignored from no-member checks
        * the owner is a class and the name can be found in its metaclass.
        * The access node is protected by an except handler, which handles
          AttributeError, Exception or bare except.
    """
    # pylint: disable=too-many-return-statements
    if node_ignores_exception(node, AttributeError):
        return False
    if ignored_none and isinstance(owner, astroid.Const) and owner.value is None:
        return False
    if is_super(owner) or getattr(owner, "type", None) == "metaclass":
        return False
    if ignored_mixins and owner_name[-5:].lower() == "mixin":
        return False
    if isinstance(owner, astroid.FunctionDef) and owner.decorators:
        return False
    if isinstance(owner, (astroid.Instance, astroid.ClassDef)):
        if owner.has_dynamic_getattr():
            # Issue #2565: Don't ignore enums, as they have a `__getattr__` but it's not
            # invoked at this point.
            try:
                metaclass = owner.metaclass()
            except exceptions.MroError:
                return False
            if metaclass:
                return metaclass.qname() == "enum.EnumMeta"
            return False
        if not has_known_bases(owner):
            return False
    if isinstance(owner, objects.Super):
        # Verify if we are dealing with an invalid Super object.
        # If it is invalid, then there's no point in checking that
        # it has the required attribute. Also, don't fail if the
        # MRO is invalid.
        try:
            owner.super_mro()
        except (exceptions.MroError, exceptions.SuperError):
            return False
        if not all(map(has_known_bases, owner.type.mro())):
            return False
    if isinstance(owner, astroid.Module):
        try:
            owner.getattr("__getattr__")
            return False
        except astroid.NotFoundError:
            pass
    if node.attrname.startswith("_" + owner_name):
        # Test if an attribute has been mangled ('private' attribute)
        unmangled_name = node.attrname.split("_" + owner_name)[-1]
        try:
            if owner.getattr(unmangled_name, context=None) is not None:
                return False
        except astroid.NotFoundError:
            return True
    return True
Check if the given node has a parent of the given type.
def _has_parent_of_type(node, node_type, statement):
    """Check if the given node has a parent of the given type."""
    parent = node.parent
    while not isinstance(parent, node_type) and statement.parent_of(parent):
        parent = parent.parent
    return isinstance(parent, node_type)
Check if the given name is used as a variadic argument.
def _is_name_used_as_variadic(name, variadics):
    """Check if the given name is used as a variadic argument."""
    return any(
        variadic.value == name or variadic.value.parent_of(name)
        for variadic in variadics
    )
Verify if the given call node has variadic nodes without context
def _no_context_variadic(node, variadic_name, variadic_type, variadics):
    """Verify if the given call node has variadic nodes without context

    This is a workaround for handling cases of nested call functions
    which don't have the specific call context at hand.
    Variadic arguments (variable positional arguments and variable
    keyword arguments) are inferred, inherently wrong, by astroid
    as a Tuple, respectively a Dict with empty elements.
    This can lead pylint to believe that a function call receives
    too few arguments.
    """
    statement = node.statement()
    for name in statement.nodes_of_class(astroid.Name):
        if name.name != variadic_name:
            continue

        inferred = safe_infer(name)
        if isinstance(inferred, (astroid.List, astroid.Tuple)):
            length = len(inferred.elts)
        elif isinstance(inferred, astroid.Dict):
            length = len(inferred.items)
        else:
            continue

        inferred_statement = inferred.statement()
        if not length and isinstance(inferred_statement, astroid.FunctionDef):
            is_in_starred_context = _has_parent_of_type(node, variadic_type, statement)
            used_as_starred_argument = _is_name_used_as_variadic(name, variadics)
            if is_in_starred_context or used_as_starred_argument:
                return True
    return False
Try to infer what the given *func* constructor is building
def _infer_from_metaclass_constructor(cls, func):
    """Try to infer what the given *func* constructor is building

    :param astroid.FunctionDef func:
        A metaclass constructor. Metaclass definitions can be
        functions, which should accept three arguments, the name of
        the class, the bases of the class and the attributes.
        The function could return anything, but usually it should
        be a proper metaclass.
    :param astroid.ClassDef cls:
        The class for which the *func* parameter should generate
        a metaclass.
    :returns:
        The class generated by the function or None,
        if we couldn't infer it.
    :rtype: astroid.ClassDef
    """
    context = astroid.context.InferenceContext()

    class_bases = astroid.List()
    class_bases.postinit(elts=cls.bases)

    attrs = astroid.Dict()
    local_names = [(name, values[-1]) for name, values in cls.locals.items()]
    attrs.postinit(local_names)

    builder_args = astroid.Tuple()
    builder_args.postinit([cls.name, class_bases, attrs])

    context.callcontext = astroid.context.CallContext(builder_args)
    try:
        inferred = next(func.infer_call_result(func, context), None)
    except astroid.InferenceError:
        return None
    return inferred or None
required method to auto register this checker
def register(linter):
    """required method to auto register this checker"""
    linter.register_checker(TypeChecker(linter))
    linter.register_checker(IterableChecker(linter))
check that the accessed attribute exists
def visit_attribute(self, node): """check that the accessed attribute exists to avoid too much false positives for now, we'll consider the code as correct if a single of the inferred nodes has the accessed attribute. function/method, super call and metaclasses are ignored """ for pattern in self.config.generated_members: # attribute is marked as generated, stop here if re.match(pattern, node.attrname): return if re.match(pattern, node.as_string()): return try: inferred = list(node.expr.infer()) except exceptions.InferenceError: return # list of (node, nodename) which are missing the attribute missingattr = set() non_opaque_inference_results = [ owner for owner in inferred if owner is not astroid.Uninferable and not isinstance(owner, astroid.nodes.Unknown) ] if ( len(non_opaque_inference_results) != len(inferred) and self.config.ignore_on_opaque_inference ): # There is an ambiguity in the inference. Since we can't # make sure that we won't emit a false positive, we just stop # whenever the inference returns an opaque inference object. return for owner in non_opaque_inference_results: name = getattr(owner, "name", None) if _is_owner_ignored( owner, name, self.config.ignored_classes, self.config.ignored_modules ): continue try: if not [ n for n in owner.getattr(node.attrname) if not isinstance(n.statement(), astroid.AugAssign) ]: missingattr.add((owner, name)) continue except AttributeError: # XXX method / function continue except exceptions.NotFoundError: # This can't be moved before the actual .getattr call, # because there can be more values inferred and we are # stopping after the first one which has the attribute in question. # The problem is that if the first one has the attribute, # but we continue to the next values which doesn't have the # attribute, then we'll have a false positive. # So call this only after the call has been made. if not _emit_no_member( node, owner, name, ignored_mixins=self.config.ignore_mixin_members, ignored_none=self.config.ignore_none, ): continue missingattr.add((owner, name)) continue # stop on the first found break else: # we have not found any node with the attributes, display the # message for infered nodes done = set() for owner, name in missingattr: if isinstance(owner, astroid.Instance): actual = owner._proxied else: actual = owner if actual in done: continue done.add(actual) msg, hint = self._get_nomember_msgid_hint(node, owner) self.add_message( msg, node=node, args=(owner.display_type(), name, node.attrname, hint), confidence=INFERENCE, )
check that if assigning to a function call, the function is possibly returning something valuable
def visit_assign(self, node):
    """check that if assigning to a function call, the function is
    possibly returning something valuable
    """
    if not isinstance(node.value, astroid.Call):
        return

    function_node = safe_infer(node.value.func)
    # skip class, generator and incomplete function definition
    funcs = (astroid.FunctionDef, astroid.UnboundMethod, astroid.BoundMethod)
    if not (
        isinstance(function_node, funcs)
        and function_node.root().fully_defined()
        and not function_node.decorators
    ):
        return
    if (
        function_node.is_generator()
        or function_node.is_abstract(pass_is_abstract=False)
        or isinstance(function_node, astroid.AsyncFunctionDef)
    ):
        return

    returns = list(
        function_node.nodes_of_class(astroid.Return, skip_klass=astroid.FunctionDef)
    )
    if not returns:
        self.add_message("assignment-from-no-return", node=node)
    else:
        for rnode in returns:
            if not (
                isinstance(rnode.value, astroid.Const)
                and rnode.value.value is None
                or rnode.value is None
            ):
                break
        else:
            self.add_message("assignment-from-none", node=node)
Check that the given uninferable Call node does not call an actual function.
def _check_uninferable_call(self, node):
    """
    Check that the given uninferable Call node does not
    call an actual function.
    """
    if not isinstance(node.func, astroid.Attribute):
        return

    # Look for properties. First, obtain
    # the lhs of the Attribute node and search the attribute
    # there. If that attribute is a property or a subclass of properties,
    # then most likely it's not callable.

    # TODO: since astroid doesn't understand descriptors very well
    # we will not handle them here, right now.

    expr = node.func.expr
    klass = safe_infer(expr)
    if (
        klass is None
        or klass is astroid.Uninferable
        or not isinstance(klass, astroid.Instance)
    ):
        return

    try:
        attrs = klass._proxied.getattr(node.func.attrname)
    except exceptions.NotFoundError:
        return

    for attr in attrs:
        if attr is astroid.Uninferable:
            continue
        if not isinstance(attr, astroid.FunctionDef):
            continue

        # Decorated, see if it is decorated with a property.
        # Also, check the returns and see if they are callable.
        if decorated_with_property(attr):
            try:
                all_returns_are_callable = all(
                    return_node.callable() or return_node is astroid.Uninferable
                    for return_node in attr.infer_call_result(node)
                )
            except astroid.InferenceError:
                continue

            if not all_returns_are_callable:
                self.add_message(
                    "not-callable", node=node, args=node.func.as_string()
                )
                break
check that called functions/methods are inferred to callable objects, and that the arguments passed to the function match the parameters in the inferred function's definition
def visit_call(self, node): """check that called functions/methods are inferred to callable objects, and that the arguments passed to the function match the parameters in the inferred function's definition """ called = safe_infer(node.func) # only function, generator and object defining __call__ are allowed # Ignore instances of descriptors since astroid cannot properly handle them # yet if called and not called.callable(): if isinstance(called, astroid.Instance) and ( not has_known_bases(called) or ( isinstance(called.scope(), astroid.ClassDef) and "__get__" in called.locals ) ): # Don't emit if we can't make sure this object is callable. pass else: self.add_message("not-callable", node=node, args=node.func.as_string()) self._check_uninferable_call(node) try: called, implicit_args, callable_name = _determine_callable(called) except ValueError: # Any error occurred during determining the function type, most of # those errors are handled by different warnings. return if called.args.args is None: # Built-in functions have no argument information. return if len(called.argnames()) != len(set(called.argnames())): # Duplicate parameter name (see duplicate-argument). We can't really # make sense of the function call in this case, so just return. return # Build the set of keyword arguments, checking for duplicate keywords, # and count the positional arguments. call_site = astroid.arguments.CallSite.from_call(node) # Warn about duplicated keyword arguments, such as `f=24, **{'f': 24}` for keyword in call_site.duplicated_keywords: self.add_message("repeated-keyword", node=node, args=(keyword,)) if call_site.has_invalid_arguments() or call_site.has_invalid_keywords(): # Can't make sense of this. return num_positional_args = len(call_site.positional_arguments) keyword_args = list(call_site.keyword_arguments.keys()) # Determine if we don't have a context for our call and we use variadics. if isinstance(node.scope(), astroid.FunctionDef): has_no_context_positional_variadic = _no_context_variadic_positional(node) has_no_context_keywords_variadic = _no_context_variadic_keywords(node) else: has_no_context_positional_variadic = ( has_no_context_keywords_variadic ) = False # These are coming from the functools.partial implementation in astroid already_filled_positionals = getattr(called, "filled_positionals", 0) already_filled_keywords = getattr(called, "filled_keywords", {}) keyword_args += list(already_filled_keywords) num_positional_args += implicit_args + already_filled_positionals # Analyze the list of formal parameters. num_mandatory_parameters = len(called.args.args) - len(called.args.defaults) parameters = [] parameter_name_to_index = {} for i, arg in enumerate(called.args.args): if isinstance(arg, astroid.Tuple): name = None # Don't store any parameter names within the tuple, since those # are not assignable from keyword arguments. else: assert isinstance(arg, astroid.AssignName) # This occurs with: # def f( (a), (b) ): pass name = arg.name parameter_name_to_index[name] = i if i >= num_mandatory_parameters: defval = called.args.defaults[i - num_mandatory_parameters] else: defval = None parameters.append([(name, defval), False]) kwparams = {} for i, arg in enumerate(called.args.kwonlyargs): if isinstance(arg, astroid.Keyword): name = arg.arg else: assert isinstance(arg, astroid.AssignName) name = arg.name kwparams[name] = [called.args.kw_defaults[i], False] # Match the supplied arguments against the function parameters. # 1. Match the positional arguments. 
for i in range(num_positional_args): if i < len(parameters): parameters[i][1] = True elif called.args.vararg is not None: # The remaining positional arguments get assigned to the *args # parameter. break else: # Too many positional arguments. self.add_message( "too-many-function-args", node=node, args=(callable_name,) ) break # 2. Match the keyword arguments. for keyword in keyword_args: if keyword in parameter_name_to_index: i = parameter_name_to_index[keyword] if parameters[i][1]: # Duplicate definition of function parameter. # Might be too hardcoded, but this can actually # happen when using str.format and `self` is passed # by keyword argument, as in `.format(self=self)`. # It's perfectly valid to so, so we're just skipping # it if that's the case. if not (keyword == "self" and called.qname() in STR_FORMAT): self.add_message( "redundant-keyword-arg", node=node, args=(keyword, callable_name), ) else: parameters[i][1] = True elif keyword in kwparams: if kwparams[keyword][1]: # XXX is that even possible? # Duplicate definition of function parameter. self.add_message( "redundant-keyword-arg", node=node, args=(keyword, callable_name), ) else: kwparams[keyword][1] = True elif called.args.kwarg is not None: # The keyword argument gets assigned to the **kwargs parameter. pass else: # Unexpected keyword argument. self.add_message( "unexpected-keyword-arg", node=node, args=(keyword, callable_name) ) # 3. Match the **kwargs, if any. if node.kwargs: for i, [(name, defval), assigned] in enumerate(parameters): # Assume that *kwargs provides values for all remaining # unassigned named parameters. if name is not None: parameters[i][1] = True else: # **kwargs can't assign to tuples. pass # Check that any parameters without a default have been assigned # values. for [(name, defval), assigned] in parameters: if (defval is None) and not assigned: if name is None: display_name = "<tuple>" else: display_name = repr(name) # TODO(cpopa): this should be removed after PyCQA/astroid/issues/177 if not has_no_context_positional_variadic: self.add_message( "no-value-for-parameter", node=node, args=(display_name, callable_name), ) for name in kwparams: defval, assigned = kwparams[name] if defval is None and not assigned and not has_no_context_keywords_variadic: self.add_message("missing-kwoa", node=node, args=(name, callable_name))
Detect TypeErrors for unary operands.
def visit_unaryop(self, node):
    """Detect TypeErrors for unary operands."""
    for error in node.type_errors():
        # Let the error customize its output.
        self.add_message("invalid-unary-operand-type", args=str(error), node=node)
Called when a :class:`.astroid.node_classes.Call` node is visited.
def visit_call(self, node):
    """Called when a :class:`.astroid.node_classes.Call` node is visited.

    See :mod:`astroid` for the description of available nodes.

    :param node: The node to check.
    :type node: astroid.node_classes.Call
    """
    if not (
        isinstance(node.func, astroid.Attribute)
        and isinstance(node.func.expr, astroid.Name)
        and node.func.expr.name == self.config.store_locals_indicator
        and node.func.attrname == "create"
    ):
        return
    in_class = node.frame()
    for param in node.args:
        in_class.locals[param.name] = node
Return an iterator on interfaces implemented by the given class node.
def interfaces(node, herited=True, handler_func=_iface_hdlr):
    """Return an iterator on interfaces implemented by the given class node."""
    # FIXME: what if __implements__ = (MyIFace, MyParent.__implements__)...
    try:
        implements = bases.Instance(node).getattr("__implements__")[0]
    except exceptions.NotFoundError:
        return
    if not herited and implements.frame() is not node:
        return
    found = set()
    missing = False
    for iface in node_classes.unpack_infer(implements):
        if iface is astroid.Uninferable:
            missing = True
            continue
        if iface not in found and handler_func(iface):
            found.add(iface)
            yield iface
    if missing:
        raise exceptions.InferenceError()
return a Project from a list of files or modules
def project_from_files(
    files, func_wrapper=_astroid_wrapper, project_name="no name", black_list=("CVS",)
):
    """return a Project from a list of files or modules"""
    # build the project representation
    astroid_manager = manager.AstroidManager()
    project = Project(project_name)
    for something in files:
        if not os.path.exists(something):
            fpath = modutils.file_from_modpath(something.split("."))
        elif os.path.isdir(something):
            fpath = os.path.join(something, "__init__.py")
        else:
            fpath = something
        ast = func_wrapper(astroid_manager.ast_from_file, fpath)
        if ast is None:
            continue
        # XXX why is first file defining the project.path ?
        project.path = project.path or ast.file
        project.add_module(ast)
        base_name = ast.name
        # recurse in package except if __init__ was explicitly given
        if ast.package and something.find("__init__") == -1:
            # recurse on others packages / modules if this is a package
            for fpath in modutils.get_module_files(
                os.path.dirname(ast.file), black_list
            ):
                ast = func_wrapper(astroid_manager.ast_from_file, fpath)
                if ast is None or ast.name == base_name:
                    continue
                project.add_module(ast)
    return project
visit a pyreverse.utils.Project node
def visit_project(self, node):
    """visit a pyreverse.utils.Project node

    * optionally tag the node with a unique id
    """
    if self.tag:
        node.uid = self.generate_id()
    for module in node.modules:
        self.visit(module)
visit an astroid.Package node
def visit_package(self, node):
    """visit an astroid.Package node

    * optionally tag the node with a unique id
    """
    if self.tag:
        node.uid = self.generate_id()
    for subelmt in node.values():
        self.visit(subelmt)
visit an astroid.Module node
def visit_module(self, node):
    """visit an astroid.Module node

    * set the locals_type mapping
    * set the depends mapping
    * optionally tag the node with a unique id
    """
    if hasattr(node, "locals_type"):
        return
    node.locals_type = collections.defaultdict(list)
    node.depends = []
    if self.tag:
        node.uid = self.generate_id()
visit an astroid.Class node
def visit_classdef(self, node):
    """visit an astroid.Class node

    * set the locals_type and instance_attrs_type mappings
    * set the implements list and build it
    * optionally tag the node with a unique id
    """
    if hasattr(node, "locals_type"):
        return
    node.locals_type = collections.defaultdict(list)
    if self.tag:
        node.uid = self.generate_id()
    # resolve ancestors
    for baseobj in node.ancestors(recurs=False):
        specializations = getattr(baseobj, "specializations", [])
        specializations.append(node)
        baseobj.specializations = specializations
    # resolve instance attributes
    node.instance_attrs_type = collections.defaultdict(list)
    for assignattrs in node.instance_attrs.values():
        for assignattr in assignattrs:
            self.handle_assignattr_type(assignattr, node)
    # resolve implemented interface
    try:
        node.implements = list(interfaces(node, self.inherited_interfaces))
    except astroid.InferenceError:
        node.implements = ()
visit an astroid.Function node
def visit_functiondef(self, node):
    """visit an astroid.Function node

    * set the locals_type mapping
    * optionally tag the node with a unique id
    """
    if hasattr(node, "locals_type"):
        return
    node.locals_type = collections.defaultdict(list)
    if self.tag:
        node.uid = self.generate_id()
visit an astroid.AssignName node
def visit_assignname(self, node):
    """visit an astroid.AssignName node

    handle locals_type
    """
    # avoid double parsing done by different Linkers.visit
    # running over the same project:
    if hasattr(node, "_handled"):
        return
    node._handled = True
    if node.name in node.frame():
        frame = node.frame()
    else:
        # the name has been defined as 'global' in the frame and belongs
        # there.
        frame = node.root()
    try:
        if not hasattr(frame, "locals_type"):
            # If the frame doesn't have a locals_type yet,
            # it means it wasn't yet visited. Visit it now
            # to add what's missing from it.
            if isinstance(frame, astroid.ClassDef):
                self.visit_classdef(frame)
            elif isinstance(frame, astroid.FunctionDef):
                self.visit_functiondef(frame)
            else:
                self.visit_module(frame)

        current = frame.locals_type[node.name]
        values = set(node.infer())
        frame.locals_type[node.name] = list(set(current) | values)
    except astroid.InferenceError:
        pass
handle an astroid.assignattr node
def handle_assignattr_type(node, parent):
    """handle an astroid.assignattr node

    handle instance_attrs_type
    """
    try:
        values = set(node.infer())
        current = set(parent.instance_attrs_type[node.attrname])
        parent.instance_attrs_type[node.attrname] = list(current | values)
    except astroid.InferenceError:
        pass
visit an astroid.Import node
def visit_import(self, node):
    """visit an astroid.Import node

    resolve module dependencies
    """
    context_file = node.root().file
    for name in node.names:
        relative = modutils.is_relative(name[0], context_file)
        self._imported_module(node, name[0], relative)
visit an astroid.ImportFrom node
def visit_importfrom(self, node):
    """visit an astroid.ImportFrom node

    resolve module dependencies
    """
    basename = node.modname
    context_file = node.root().file
    if context_file is not None:
        relative = modutils.is_relative(basename, context_file)
    else:
        relative = False
    for name in node.names:
        if name[0] == "*":
            continue
        # analyze dependencies
        fullname = "%s.%s" % (basename, name[0])
        if fullname.find(".") > -1:
            try:
                # TODO: don't use get_module_part,
                # missing package precedence
                fullname = modutils.get_module_part(fullname, context_file)
            except ImportError:
                continue
        if fullname != basename:
            self._imported_module(node, fullname, relative)
return true if the module should be added to dependencies
def compute_module(self, context_name, mod_path):
    """return true if the module should be added to dependencies"""
    package_dir = os.path.dirname(self.project.path)
    if context_name == mod_path:
        return 0
    if modutils.is_standard_module(mod_path, (package_dir,)):
        return 1
    return 0
Notify an imported module, used to analyze dependencies
def _imported_module(self, node, mod_path, relative):
    """Notify an imported module, used to analyze dependencies"""
    module = node.root()
    context_name = module.name
    if relative:
        mod_path = "%s.%s" % (".".join(context_name.split(".")[:-1]), mod_path)
    if self.compute_module(context_name, mod_path):
        # handle dependencies
        if not hasattr(module, "depends"):
            module.depends = []
        mod_paths = module.depends
        if mod_path not in mod_paths:
            mod_paths.append(mod_path)
Return true if the given object (maybe an instance or class) implements the interface.
def implements(obj, interface):
    """Return true if the given object (maybe an instance or class) implements
    the interface.
    """
    kimplements = getattr(obj, "__implements__", ())
    if not isinstance(kimplements, (list, tuple)):
        kimplements = (kimplements,)
    for implementedinterface in kimplements:
        if issubclass(implementedinterface, interface):
            return True
    return False
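A quick usage sketch of implements(): the helper only inspects a class-level __implements__ attribute, so plain classes are enough to demonstrate it. The IMachine and Printer names below are invented for this example only, and implements is assumed to be importable from its pyreverse module.

class IMachine:
    """Hypothetical interface class used only for this example."""

class Printer:
    # Declaring the interface the pyreverse way: a class-level tuple.
    __implements__ = (IMachine,)

assert implements(Printer, IMachine) is True   # listed in __implements__
assert implements(object, IMachine) is False   # no __implements__ attribute at all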
return ansi escape code corresponding to color and style
def _get_ansi_code(color=None, style=None):
    """return ansi escape code corresponding to color and style

    :type color: str or None
    :param color:
      the color name (see `ANSI_COLORS` for available values)
      or the color number when 256 colors are available

    :type style: str or None
    :param style:
      style string (see `ANSI_COLORS` for available values). To get
      several style effects at the same time, use a comma as separator.

    :raise KeyError: if an unexistent color or style identifier is given

    :rtype: str
    :return: the built escape code
    """
    ansi_code = []
    if style:
        style_attrs = utils._splitstrip(style)
        for effect in style_attrs:
            ansi_code.append(ANSI_STYLES[effect])
    if color:
        if color.isdigit():
            ansi_code.extend(["38", "5"])
            ansi_code.append(color)
        else:
            ansi_code.append(ANSI_COLORS[color])
    if ansi_code:
        return ANSI_PREFIX + ";".join(ansi_code) + ANSI_END
    return ""
colorize message by wrapping it with ansi escape codes
def colorize_ansi(msg, color=None, style=None):
    """colorize message by wrapping it with ansi escape codes

    :type msg: str or unicode
    :param msg: the message string to colorize

    :type color: str or None
    :param color:
      the color identifier (see `ANSI_COLORS` for available values)

    :type style: str or None
    :param style:
      style string (see `ANSI_COLORS` for available values). To get
      several style effects at the same time, use a comma as separator.

    :raise KeyError: if an unexistent color or style identifier is given

    :rtype: str or unicode
    :return: the ansi escaped string
    """
    # If both color and style are not defined, then leave the text as is
    if color is None and style is None:
        return msg
    escape_code = _get_ansi_code(color, style)
    # If invalid (or unknown) color, don't wrap msg with ansi codes
    if escape_code:
        return "%s%s%s" % (escape_code, msg, ANSI_RESET)
    return msg
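A minimal usage sketch, assuming "red" and "bold" are among the keys defined in the module's ANSI_COLORS and ANSI_STYLES tables:

# Wrap a message in ANSI escape codes; without color/style the string
# is returned unchanged.
warning = colorize_ansi("something went wrong", color="red", style="bold")
plain = colorize_ansi("nothing to report")
print(warning)
print(plain)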
Register the reporter classes with the linter.
def register(linter):
    """Register the reporter classes with the linter."""
    linter.register_reporter(TextReporter)
    linter.register_reporter(ParseableTextReporter)
    linter.register_reporter(VSTextReporter)
    linter.register_reporter(ColorizedTextReporter)
manage message of different type and in the context of path
def handle_message(self, msg):
    """manage message of different type and in the context of path"""
    if msg.module not in self._modules:
        if msg.module:
            self.writeln("************* Module %s" % msg.module)
            self._modules.add(msg.module)
        else:
            self.writeln("************* ")
    self.write_message(msg)
launch layouts display
def _display(self, layout):
    """launch layouts display"""
    print(file=self.out)
    TextWriter().format(layout, self.out)
manage message of different types and colorize output using ansi escape codes
def handle_message(self, msg):
    """manage message of different types, and colorize output
    using ansi escape codes
    """
    if msg.module not in self._modules:
        color, style = self._get_decoration("S")
        if msg.module:
            modsep = colorize_ansi(
                "************* Module %s" % msg.module, color, style
            )
        else:
            modsep = colorize_ansi("************* %s" % msg.module, color, style)
        self.writeln(modsep)
        self._modules.add(msg.module)
    color, style = self._get_decoration(msg.C)

    msg = msg._replace(
        **{
            attr: colorize_ansi(getattr(msg, attr), color, style)
            for attr in ("msg", "symbol", "category", "C")
        }
    )
    self.write_message(msg)
open a vcg graph
def open_graph(self, **args):
    """open a vcg graph"""
    self._stream.write("%sgraph:{\n" % self._indent)
    self._inc_indent()
    self._write_attributes(GRAPH_ATTRS, **args)
draw a node
def node(self, title, **args):
    """draw a node"""
    self._stream.write('%snode: {title:"%s"' % (self._indent, title))
    self._write_attributes(NODE_ATTRS, **args)
    self._stream.write("}\n")
draw an edge from a node to another.
def edge(self, from_node, to_node, edge_type="", **args):
    """draw an edge from a node to another."""
    self._stream.write(
        '%s%sedge: {sourcename:"%s" targetname:"%s"'
        % (self._indent, edge_type, from_node, to_node)
    )
    self._write_attributes(EDGE_ATTRS, **args)
    self._stream.write("}\n")
write graph, node or edge attributes
def _write_attributes(self, attributes_dict, **args):
    """write graph, node or edge attributes"""
    for key, value in args.items():
        try:
            _type = attributes_dict[key]
        except KeyError:
            raise Exception(
                """no such attribute %s
possible attributes are %s"""
                % (key, attributes_dict.keys())
            )

        if not _type:
            self._stream.write('%s%s:"%s"\n' % (self._indent, key, value))
        elif _type == 1:
            self._stream.write("%s%s:%s\n" % (self._indent, key, int(value)))
        elif value in _type:
            self._stream.write("%s%s:%s\n" % (self._indent, key, value))
        else:
            raise Exception(
                """value %s isn\'t correct for attribute %s
correct values are %s"""
                % (value, key, _type)
            )
Given a list of format specifiers, returns the final access path (e.g. a.b.c[0][1]).
def get_access_path(key, parts):
    """ Given a list of format specifiers, returns
    the final access path (e.g. a.b.c[0][1]).
    """
    path = []
    for is_attribute, specifier in parts:
        if is_attribute:
            path.append(".{}".format(specifier))
        else:
            path.append("[{!r}]".format(specifier))
    return str(key) + "".join(path)
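Since get_access_path is a pure helper, its behaviour is easy to illustrate. The parts value below is hand-written to mirror the (is_attribute, specifier) pairs the format-string parser is assumed to produce for a specifier such as "{0.a[1]}":

# Attribute access followed by item access.
parts = [(True, "a"), (False, 1)]
assert get_access_path(0, parts) == "0.a[1]"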
required method to auto register this checker
def register(linter):
    """required method to auto register this checker"""
    linter.register_checker(StringFormatChecker(linter))
    linter.register_checker(StringConstantChecker(linter))
Mostly replicate ast.literal_eval(token) manually to avoid any performance hit. This supports f-strings, contrary to ast.literal_eval. We have to support all string literal notations: https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
def str_eval(token):
    """
    Mostly replicate `ast.literal_eval(token)` manually to avoid any performance hit.
    This supports f-strings, contrary to `ast.literal_eval`.
    We have to support all string literal notations:
    https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
    """
    if token[0:2].lower() in ("fr", "rf"):
        token = token[2:]
    elif token[0].lower() in ("r", "u", "f"):
        token = token[1:]
    if token[0:3] in ('"""', "'''"):
        return token[3:-3]
    return token[1:-1]
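A few illustrative calls, with the token strings written the way the tokenizer would hand them over:

assert str_eval("'hello'") == "hello"        # plain string literal
assert str_eval('r"\\d+"') == "\\d+"         # raw string: prefix stripped, body kept
assert str_eval('f"{x}"') == "{x}"           # f-string body, unlike ast.literal_eval
assert str_eval("'''multi'''") == "multi"    # triple-quoted literal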
Check the new string formatting.
def _check_new_format(self, node, func): """ Check the new string formatting. """ # TODO: skip (for now) format nodes which don't have # an explicit string on the left side of the format operation. # We do this because our inference engine can't properly handle # redefinitions of the original string. # For more details, see issue 287. # # Note that there may not be any left side at all, if the format method # has been assigned to another variable. See issue 351. For example: # # fmt = 'some string {}'.format # fmt('arg') if isinstance(node.func, astroid.Attribute) and not isinstance( node.func.expr, astroid.Const ): return if node.starargs or node.kwargs: return try: strnode = next(func.bound.infer()) except astroid.InferenceError: return if not (isinstance(strnode, astroid.Const) and isinstance(strnode.value, str)): return try: call_site = CallSite.from_call(node) except astroid.InferenceError: return try: fields, num_args, manual_pos = utils.parse_format_method_string( strnode.value ) except utils.IncompleteFormatString: self.add_message("bad-format-string", node=node) return positional_arguments = call_site.positional_arguments named_arguments = call_site.keyword_arguments named_fields = {field[0] for field in fields if isinstance(field[0], str)} if num_args and manual_pos: self.add_message("format-combined-specification", node=node) return check_args = False # Consider "{[0]} {[1]}" as num_args. num_args += sum(1 for field in named_fields if field == "") if named_fields: for field in named_fields: if field and field not in named_arguments: self.add_message( "missing-format-argument-key", node=node, args=(field,) ) for field in named_arguments: if field not in named_fields: self.add_message( "unused-format-string-argument", node=node, args=(field,) ) # num_args can be 0 if manual_pos is not. num_args = num_args or manual_pos if positional_arguments or num_args: empty = any(True for field in named_fields if field == "") if named_arguments or empty: # Verify the required number of positional arguments # only if the .format got at least one keyword argument. # This means that the format strings accepts both # positional and named fields and we should warn # when one of the them is missing or is extra. check_args = True else: check_args = True if check_args: # num_args can be 0 if manual_pos is not. num_args = num_args or manual_pos if len(positional_arguments) > num_args: self.add_message("too-many-format-args", node=node) elif len(positional_arguments) < num_args: self.add_message("too-few-format-args", node=node) self._detect_vacuous_formatting(node, positional_arguments) self._check_new_format_specifiers(node, fields, named_arguments)
Check attribute and index access in the format string ("{0.a}" and "{0[a]}").
def _check_new_format_specifiers(self, node, fields, named): """ Check attribute and index access in the format string ("{0.a}" and "{0[a]}"). """ for key, specifiers in fields: # Obtain the argument. If it can't be obtained # or infered, skip this check. if key == "": # {[0]} will have an unnamed argument, defaulting # to 0. It will not be present in `named`, so use the value # 0 for it. key = 0 if isinstance(key, numbers.Number): try: argname = utils.get_argument_from_call(node, key) except utils.NoSuchArgumentError: continue else: if key not in named: continue argname = named[key] if argname in (astroid.Uninferable, None): continue try: argument = next(argname.infer()) except astroid.InferenceError: continue if not specifiers or argument is astroid.Uninferable: # No need to check this key if it doesn't # use attribute / item access continue if argument.parent and isinstance(argument.parent, astroid.Arguments): # Ignore any object coming from an argument, # because we can't infer its value properly. continue previous = argument parsed = [] for is_attribute, specifier in specifiers: if previous is astroid.Uninferable: break parsed.append((is_attribute, specifier)) if is_attribute: try: previous = previous.getattr(specifier)[0] except astroid.NotFoundError: if ( hasattr(previous, "has_dynamic_getattr") and previous.has_dynamic_getattr() ): # Don't warn if the object has a custom __getattr__ break path = get_access_path(key, parsed) self.add_message( "missing-format-attribute", args=(specifier, path), node=node, ) break else: warn_error = False if hasattr(previous, "getitem"): try: previous = previous.getitem(astroid.Const(specifier)) except ( astroid.AstroidIndexError, astroid.AstroidTypeError, astroid.AttributeInferenceError, ): warn_error = True except astroid.InferenceError: break if previous is astroid.Uninferable: break else: try: # Lookup __getitem__ in the current node, # but skip further checks, because we can't # retrieve the looked object previous.getattr("__getitem__") break except astroid.NotFoundError: warn_error = True if warn_error: path = get_access_path(key, parsed) self.add_message( "invalid-format-index", args=(specifier, path), node=node ) break try: previous = next(previous.infer()) except astroid.InferenceError: # can't check further if we can't infer it break
check for bad escapes in a non-raw string.
def process_non_raw_string_token(self, prefix, string_body, start_row):
    """check for bad escapes in a non-raw string.

    prefix: lowercase string of eg 'ur' string prefix markers.
    string_body: the un-parsed body of the string, not including the quote
    marks.
    start_row: integer line number in the source.
    """
    # Walk through the string; if we see a backslash then escape the next
    # character, and skip over it. If we see a non-escaped character,
    # alert, and continue.
    #
    # Accept a backslash when it escapes a backslash, or a quote, or
    # end-of-line, or one of the letters that introduce a special escape
    # sequence <http://docs.python.org/reference/lexical_analysis.html>
    #
    # TODO(mbp): Maybe give a separate warning about the rarely-used
    # \a \b \v \f?
    #
    # TODO(mbp): We could give the column of the problem character, but
    # add_message doesn't seem to have a way to pass it through at present.
    i = 0
    while True:
        i = string_body.find("\\", i)
        if i == -1:
            break
        # There must be a next character; having a backslash at the end
        # of the string would be a SyntaxError.
        next_char = string_body[i + 1]
        match = string_body[i : i + 2]
        if next_char in self.UNICODE_ESCAPE_CHARACTERS:
            if "u" in prefix:
                pass
            elif (_PY3K or self._unicode_literals) and "b" not in prefix:
                pass  # unicode by default
            else:
                self.add_message(
                    "anomalous-unicode-escape-in-string",
                    line=start_row,
                    args=(match,),
                )
        elif next_char not in self.ESCAPE_CHARACTERS:
            self.add_message(
                "anomalous-backslash-in-string", line=start_row, args=(match,)
            )
        # Whether it was a valid escape or not, backslash followed by
        # another character can always be consumed whole: the second
        # character can never be the start of a new backslash escape.
        i += 2
display a section as text
def visit_section(self, layout):
    """display a section as text"""
    self.section += 1
    self.writeln()
    self.format_children(layout)
    self.section -= 1
    self.writeln()
Display an evaluation section as a text.
def visit_evaluationsection(self, layout):
    """Display an evaluation section as a text."""
    self.section += 1
    self.format_children(layout)
    self.section -= 1
    self.writeln()
display a table as text
def visit_table(self, layout):
    """display a table as text"""
    table_content = self.get_table_content(layout)
    # get columns width
    cols_width = [0] * len(table_content[0])
    for row in table_content:
        for index, col in enumerate(row):
            cols_width[index] = max(cols_width[index], len(col))
    self.default_table(layout, table_content, cols_width)
    self.writeln()
format a table
def default_table(self, layout, table_content, cols_width):
    """format a table"""
    cols_width = [size + 1 for size in cols_width]
    format_strings = " ".join(["%%-%ss"] * len(cols_width))
    format_strings = format_strings % tuple(cols_width)
    format_strings = format_strings.split(" ")
    table_linesep = "\n+" + "+".join(["-" * w for w in cols_width]) + "+\n"
    headsep = "\n+" + "+".join(["=" * w for w in cols_width]) + "+\n"
    # FIXME: layout.cheaders
    self.write(table_linesep)
    for index, line in enumerate(table_content):
        self.write("|")
        for line_index, at_index in enumerate(line):
            self.write(format_strings[line_index] % at_index)
            self.write("|")
        if index == 0 and layout.rheaders:
            self.write(headsep)
        else:
            self.write(table_linesep)
display a verbatim layout as text (so difficult ;)
def visit_verbatimtext(self, layout):
    """display a verbatim layout as text (so difficult ;)"""
    self.writeln("::\n")
    for line in layout.data.splitlines():
        self.writeln("    " + line)
    self.writeln()
Register the old ID and symbol for a warning that was renamed.
def add_renamed_message(self, old_id, old_symbol, new_symbol):
    """Register the old ID and symbol for a warning that was renamed.

    This allows users to keep using the old ID/symbol in suppressions.
    """
    message_definition = self.get_message_definitions(new_symbol)[0]
    message_definition.old_names.append((old_id, old_symbol))
    self._register_alternative_name(message_definition, old_id, old_symbol)
Register all messages from a checker.
def register_messages_from_checker(self, checker):
    """Register all messages from a checker.

    :param BaseChecker checker:
    """
    checker.check_consistency()
    for message in checker.messages:
        self.register_message(message)
Register a MessageDefinition with consistency in mind.
def register_message(self, message):
    """Register a MessageDefinition with consistency in mind.

    :param MessageDefinition message: The message definition being added.
    """
    self._check_id_and_symbol_consistency(message.msgid, message.symbol)
    self._check_symbol(message.msgid, message.symbol)
    self._check_msgid(message.msgid, message.symbol)
    for old_name in message.old_names:
        self._check_symbol(message.msgid, old_name[1])
    self._messages_definitions[message.symbol] = message
    self._register_alternative_name(message, message.msgid, message.symbol)
    for old_id, old_symbol in message.old_names:
        self._register_alternative_name(message, old_id, old_symbol)
    self._msgs_by_category[message.msgid[0]].append(message.msgid)
helper for register_message()
def _register_alternative_name(self, msg, msgid, symbol):
    """helper for register_message()"""
    self._check_id_and_symbol_consistency(msgid, symbol)
    self._alternative_names[msgid] = msg
    self._alternative_names[symbol] = msg
Check that a symbol is not already used.
def _check_symbol(self, msgid, symbol):
    """Check that a symbol is not already used."""
    other_message = self._messages_definitions.get(symbol)
    if other_message:
        self._raise_duplicate_msg_id(symbol, msgid, other_message.msgid)
    else:
        alternative_msgid = None
        alternative_message = self._alternative_names.get(symbol)
        if alternative_message:
            if alternative_message.symbol == symbol:
                alternative_msgid = alternative_message.msgid
            else:
                for old_msgid, old_symbol in alternative_message.old_names:
                    if old_symbol == symbol:
                        alternative_msgid = old_msgid
                        break
            if msgid != alternative_msgid:
                self._raise_duplicate_msg_id(symbol, msgid, alternative_msgid)
Raise an error when a symbol is duplicated.
def _raise_duplicate_symbol(msgid, symbol, other_symbol):
    """Raise an error when a symbol is duplicated.

    :param str msgid: The msgid corresponding to the symbols
    :param str symbol: Offending symbol
    :param str other_symbol: Other offending symbol
    :raises InvalidMessageError: when a symbol is duplicated.
    """
    symbols = [symbol, other_symbol]
    symbols.sort()
    error_message = "Message id '{msgid}' cannot have both ".format(msgid=msgid)
    error_message += "'{other_symbol}' and '{symbol}' as symbolic name.".format(
        other_symbol=symbols[0], symbol=symbols[1]
    )
    raise InvalidMessageError(error_message)
Raise an error when a msgid is duplicated.
def _raise_duplicate_msg_id(symbol, msgid, other_msgid):
    """Raise an error when a msgid is duplicated.

    :param str symbol: The symbol corresponding to the msgids
    :param str msgid: Offending msgid
    :param str other_msgid: Other offending msgid
    :raises InvalidMessageError: when a msgid is duplicated.
    """
    msgids = [msgid, other_msgid]
    msgids.sort()
    error_message = "Message symbol '{symbol}' cannot be used for ".format(
        symbol=symbol
    )
    error_message += "'{other_msgid}' and '{msgid}' at the same time.".format(
        other_msgid=msgids[0], msgid=msgids[1]
    )
    raise InvalidMessageError(error_message)
Returns the Message object for this message.
def get_message_definitions(self, msgid_or_symbol: str) -> list:
    """Returns the Message object for this message.

    :param str msgid_or_symbol: msgid_or_symbol may be either a numeric or symbolic id.
    :raises UnknownMessageError: if the message id is not defined.
    :rtype: List of MessageDefinition
    :return: A message definition corresponding to msgid_or_symbol
    """
    if msgid_or_symbol[1:].isdigit():
        msgid_or_symbol = msgid_or_symbol.upper()
    for source in (self._alternative_names, self._messages_definitions):
        try:
            return [source[msgid_or_symbol]]
        except KeyError:
            pass
    error_msg = "No such message id or symbol '{msgid_or_symbol}'.".format(
        msgid_or_symbol=msgid_or_symbol
    )
    raise UnknownMessageError(error_msg)
Generates a user - consumable representation of a message.
def get_msg_display_string(self, msgid):
    """Generates a user-consumable representation of a message.

    Can be just the message ID or the ID and the symbol.
    """
    message_definitions = self.get_message_definitions(msgid)
    if len(message_definitions) == 1:
        return repr(message_definitions[0].symbol)
    return repr([md.symbol for md in message_definitions])
Display help messages for the given message identifiers
def help_message(self, msgids):
    """Display help messages for the given message identifiers"""
    for msgid in msgids:
        try:
            for message_definition in self.get_message_definitions(msgid):
                print(message_definition.format_help(checkerref=True))
                print("")
        except UnknownMessageError as ex:
            print(ex)
            print("")
            continue
Output full messages list documentation in ReST format.
def list_messages(self):
    """Output full messages list documentation in ReST format."""
    messages = sorted(self._messages_definitions.values(), key=lambda m: m.msgid)
    for message in messages:
        if not message.may_be_emitted():
            continue
        print(message.format_help(checkerref=False))
    print("")
Required method to auto register this checker.
def register(linter):
    """Required method to auto register this checker.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    warnings.warn(
        "This plugin is deprecated, use pylint.extensions.docparams instead.",
        DeprecationWarning,
    )
    linter.register_checker(docparams.DocstringParameterChecker(linter))
Output full documentation in ReST format for all extension modules
def builder_inited(app): """Output full documentation in ReST format for all extension modules""" # PACKAGE/docs/exts/pylint_extensions.py --> PACKAGE/ base_path = os.path.dirname( os.path.dirname(os.path.dirname(os.path.abspath(__file__))) ) # PACKAGE/ --> PACKAGE/pylint/extensions ext_path = os.path.join(base_path, "pylint", "extensions") modules = [] doc_files = {} for filename in os.listdir(ext_path): name, ext = os.path.splitext(filename) if name[0] == "_" or name in DEPRECATED_MODULES: continue if ext == ".py": modules.append("pylint.extensions.%s" % name) elif ext == ".rst": doc_files["pylint.extensions." + name] = os.path.join(ext_path, filename) modules.sort() if not modules: sys.exit("No Pylint extensions found?") linter = PyLinter() linter.load_plugin_modules(modules) extensions_doc = os.path.join( base_path, "doc", "technical_reference", "extensions.rst" ) with open(extensions_doc, "w") as stream: stream.write("Optional Pylint checkers in the extensions module\n") stream.write("=================================================\n\n") stream.write("Pylint provides the following optional plugins:\n\n") for module in modules: stream.write("- :ref:`{}`\n".format(module)) stream.write("\n") stream.write( "You can activate any or all of these extensions " "by adding a ``load-plugins`` line to the ``MASTER`` " "section of your ``.pylintrc``, for example::\n" ) stream.write( "\n load-plugins=pylint.extensions.docparams," "pylint.extensions.docstyle\n\n" ) by_module = get_plugins_info(linter, doc_files) for module, info in sorted(by_module.items()): linter._print_checker_doc(info["name"], info, stream=stream)
run pylint
def run_pylint():
    """run pylint"""
    from pylint.lint import Run

    try:
        Run(sys.argv[1:])
    except KeyboardInterrupt:
        sys.exit(1)
Use sched_affinity if available for virtualized or containerized environments.
def _cpu_count() -> int:
    """Use sched_affinity if available for virtualized or containerized environments."""
    sched_getaffinity = getattr(os, "sched_getaffinity", None)
    # pylint: disable=not-callable,using-constant-test
    if sched_getaffinity:
        return len(sched_getaffinity(0))
    if multiprocessing:
        return multiprocessing.cpu_count()
    return 1
make total errors / warnings report
def report_total_messages_stats(sect, stats, previous_stats):
    """make total errors / warnings report"""
    lines = ["type", "number", "previous", "difference"]
    lines += checkers.table_lines_from_stats(
        stats, previous_stats, ("convention", "refactor", "warning", "error")
    )
    sect.append(report_nodes.Table(children=lines, cols=4, rheaders=1))
make messages type report
def report_messages_stats(sect, stats, _):
    """make messages type report"""
    if not stats["by_msg"]:
        # don't print this report when we didn't detected any errors
        raise exceptions.EmptyReportError()
    in_order = sorted(
        [
            (value, msg_id)
            for msg_id, value in stats["by_msg"].items()
            if not msg_id.startswith("I")
        ]
    )
    in_order.reverse()
    lines = ("message id", "occurrences")
    for value, msg_id in in_order:
        lines += (msg_id, str(value))
    sect.append(report_nodes.Table(children=lines, cols=2, rheaders=1))
make errors / warnings by modules report
def report_messages_by_module_stats(sect, stats, _):
    """make errors / warnings by modules report"""
    if len(stats["by_module"]) == 1:
        # don't print this report when we are analysing a single module
        raise exceptions.EmptyReportError()
    by_mod = collections.defaultdict(dict)
    for m_type in ("fatal", "error", "warning", "refactor", "convention"):
        total = stats[m_type]
        for module in stats["by_module"].keys():
            mod_total = stats["by_module"][module][m_type]
            if total == 0:
                percent = 0
            else:
                percent = float((mod_total) * 100) / total
            by_mod[module][m_type] = percent
    sorted_result = []
    for module, mod_info in by_mod.items():
        sorted_result.append(
            (
                mod_info["error"],
                mod_info["warning"],
                mod_info["refactor"],
                mod_info["convention"],
                module,
            )
        )
    sorted_result.sort()
    sorted_result.reverse()
    lines = ["module", "error", "warning", "refactor", "convention"]
    for line in sorted_result:
        # Don't report clean modules.
        if all(entry == 0 for entry in line[:-1]):
            continue
        lines.append(line[-1])
        for val in line[:-1]:
            lines.append("%.2f" % val)
    if len(lines) == 5:
        raise exceptions.EmptyReportError()
    sect.append(report_nodes.Table(children=lines, cols=5, rheaders=1))
look for some options (keys of <search_for>) which have to be processed before others
def preprocess_options(args, search_for):
    """look for some options (keys of <search_for>) which have to be
    processed before others

    values of <search_for> are callback functions to call when the option is
    found
    """
    i = 0
    while i < len(args):
        arg = args[i]
        if arg.startswith("--"):
            try:
                option, val = arg[2:].split("=", 1)
            except ValueError:
                option, val = arg[2:], None
            try:
                cb, takearg = search_for[option]
            except KeyError:
                i += 1
            else:
                del args[i]
                if takearg and val is None:
                    if i >= len(args) or args[i].startswith("-"):
                        msg = "Option %s expects a value" % option
                        raise ArgumentPreprocessingError(msg)
                    val = args[i]
                    del args[i]
                elif not takearg and val is not None:
                    msg = "Option %s doesn't expects a value" % option
                    raise ArgumentPreprocessingError(msg)
                cb(option, val)
        else:
            i += 1
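A minimal sketch of how the callback table is consumed. The collected dict and do_rcfile callback are hypothetical names used only for this example; preprocess_options is assumed to be importable from its pylint module:

collected = {}

def do_rcfile(option, value):
    # Callback invoked as soon as the matching option is seen.
    collected[option] = value

args = ["--rcfile=pylintrc", "mymodule.py"]
preprocess_options(args, {"rcfile": (do_rcfile, True)})

assert collected == {"rcfile": "pylintrc"}
assert args == ["mymodule.py"]   # the consumed option was removed in place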
Prepare sys.path for running the linter checks.
def fix_import_path(args):
    """Prepare sys.path for running the linter checks.

    Within this context, each of the given arguments is importable.
    Paths are added to sys.path in corresponding order to the arguments.
    We avoid adding duplicate directories to sys.path.
    `sys.path` is reset to its original value upon exiting this context.
    """
    orig = list(sys.path)
    changes = []
    for arg in args:
        path = _get_python_path(arg)
        if path in changes:
            continue
        else:
            changes.append(path)
    sys.path[:] = changes + ["."] + sys.path
    try:
        yield
    finally:
        sys.path[:] = orig
take a list of module names which are pylint plugins and load and register them
def load_plugin_modules(self, modnames):
    """take a list of module names which are pylint plugins and load
    and register them
    """
    for modname in modnames:
        if modname in self._dynamic_plugins:
            continue
        self._dynamic_plugins.add(modname)
        module = modutils.load_module_from_name(modname)
        module.register(self)
Call the configuration hook for plugins
def load_plugin_configuration(self):
    """Call the configuration hook for plugins

    This walks through the list of plugins, grabs the "load_configuration"
    hook, if exposed, and calls it to allow plugins to configure specific
    settings.
    """
    for modname in self._dynamic_plugins:
        module = modutils.load_module_from_name(modname)
        if hasattr(module, "load_configuration"):
            module.load_configuration(self)
overridden from config.OptionsProviderMixin to handle some special options
def set_option(self, optname, value, action=None, optdict=None):
    """overridden from config.OptionsProviderMixin to handle some
    special options
    """
    if optname in self._options_methods or optname in self._bw_options_methods:
        if value:
            try:
                meth = self._options_methods[optname]
            except KeyError:
                meth = self._bw_options_methods[optname]
                warnings.warn(
                    "%s is deprecated, replace it by %s"
                    % (optname, optname.split("-")[0]),
                    DeprecationWarning,
                )
            value = utils._check_csv(value)
            if isinstance(value, (list, tuple)):
                for _id in value:
                    meth(_id, ignore_unknown=True)
            else:
                meth(value)
            return  # no need to call set_option, disable/enable methods do it
    elif optname == "output-format":
        self._reporter_name = value
        # If the reporters are already available, load
        # the reporter class.
        if self._reporters:
            self._load_reporter()

    try:
        checkers.BaseTokenChecker.set_option(self, optname, value, action, optdict)
    except config.UnsupportedAction:
        print("option %s can't be read from config file" % optname, file=sys.stderr)
register a new checker
def register_checker(self, checker):
    """register a new checker checker is an object implementing IRawChecker or / and IAstroidChecker """
    assert checker.priority <= 0, "checker priority can't be > 0"
    self._checkers[checker.name].append(checker)
    for r_id, r_title, r_cb in checker.reports:
        self.register_report(r_id, r_title, r_cb, checker)
    self.register_options_provider(checker)
    if hasattr(checker, "msgs"):
        self.msgs_store.register_messages_from_checker(checker)
    checker.load_defaults()
    # Register the checker, but disable all of its messages.
    # TODO(cpopa): we should have a better API for this.
    if not getattr(checker, "enabled", True):
        self.disable(checker.name)
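For context, the checker object handed in is expected to carry at least name, priority, msgs and reports; a bare-bones sketch built on pylint's BaseChecker (the message id and symbol are made up):

from pylint import checkers, interfaces

class DummyChecker(checkers.BaseChecker):
    """Hypothetical checker, only meant to illustrate the registration contract."""

    __implements__ = interfaces.IAstroidChecker
    name = "dummy"
    priority = -1      # must be <= 0, see the assertion above
    msgs = {"W9901": ("dummy message", "dummy-message", "Emitted purely for illustration.")}
    reports = ()       # would hold (report_id, title, callback) tuples

# a plugin's register() hook would then call: linter.register_checker(DummyChecker(linter))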
disable all reporters
def disable_reporters(self): """disable all reporters""" for _reporters in self._reports.values(): for report_id, _, _ in _reporters: self.disable_report(report_id)
error mode: enable only errors; no reports, no persistent
def error_mode(self): """error mode: enable only errors; no reports, no persistent""" self._error_mode = True self.disable_noerror_messages() self.disable("miscellaneous") if self._python3_porting_mode: self.disable("all") for msg_id in self._checker_messages("python3"): if msg_id.startswith("E"): self.enable(msg_id) config_parser = self.cfgfile_parser if config_parser.has_option("MESSAGES CONTROL", "disable"): value = config_parser.get("MESSAGES CONTROL", "disable") self.global_set_option("disable", value) else: self.disable("python3") self.set_option("reports", False) self.set_option("persistent", False) self.set_option("score", False)
Disable all other checkers and enable Python 3 warnings.
def python3_porting_mode(self): """Disable all other checkers and enable Python 3 warnings.""" self.disable("all") self.enable("python3") if self._error_mode: # The error mode was activated, using the -E flag. # So we'll need to enable only the errors from the # Python 3 porting checker. for msg_id in self._checker_messages("python3"): if msg_id.startswith("E"): self.enable(msg_id) else: self.disable(msg_id) config_parser = self.cfgfile_parser if config_parser.has_option("MESSAGES CONTROL", "disable"): value = config_parser.get("MESSAGES CONTROL", "disable") self.global_set_option("disable", value) self._python3_porting_mode = True
process tokens from the current module to search for module/block level options
def process_tokens(self, tokens): """process tokens from the current module to search for module/block level options """ control_pragmas = {"disable", "enable"} for (tok_type, content, start, _, _) in tokens: if tok_type != tokenize.COMMENT: continue match = OPTION_RGX.search(content) if match is None: continue first_group = match.group(1) if ( first_group.strip() == "disable-all" or first_group.strip() == "skip-file" ): if first_group.strip() == "disable-all": self.add_message( "deprecated-pragma", line=start[0], args=("disable-all", "skip-file"), ) self.add_message("file-ignored", line=start[0]) self._ignore_file = True return try: opt, value = first_group.split("=", 1) except ValueError: self.add_message( "bad-inline-option", args=first_group.strip(), line=start[0] ) continue opt = opt.strip() if opt in self._options_methods or opt in self._bw_options_methods: try: meth = self._options_methods[opt] except KeyError: meth = self._bw_options_methods[opt] # found a "(dis|en)able-msg" pragma deprecated suppression self.add_message( "deprecated-pragma", line=start[0], args=(opt, opt.replace("-msg", "")), ) for msgid in utils._splitstrip(value): # Add the line where a control pragma was encountered. if opt in control_pragmas: self._pragma_lineno[msgid] = start[0] try: if (opt, msgid) == ("disable", "all"): self.add_message( "deprecated-pragma", line=start[0], args=("disable=all", "skip-file"), ) self.add_message("file-ignored", line=start[0]) self._ignore_file = True return meth(msgid, "module", start[0]) except exceptions.UnknownMessageError: self.add_message("bad-option-value", args=msgid, line=start[0]) else: self.add_message("unrecognized-inline-option", args=opt, line=start[0])
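The comments being hunted for are the familiar inline pragmas; a tiny module using the forms recognised above (contents purely illustrative):

# example_module.py -- the comment pragmas below are what process_tokens parses
import os  # pylint: disable=unused-import
# pylint: disable=missing-docstring, invalid-name
X = 1
# pylint: enable=invalid-name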
return all available checkers as a list
def get_checkers(self): """return all available checkers as a list""" return [self] + [ c for _checkers in self._checkers.values() for c in _checkers if c is not self ]
Get all the checker names that this linter knows about.
def get_checker_names(self): """Get all the checker names that this linter knows about.""" current_checkers = self.get_checkers() return sorted( {check.name for check in current_checkers if check.name != "master"} )
return checkers needed for activated messages and reports
def prepare_checkers(self): """return checkers needed for activated messages and reports""" if not self.config.reports: self.disable_reporters() # get needed checkers neededcheckers = [self] for checker in self.get_checkers()[1:]: messages = {msg for msg in checker.msgs if self.is_message_enabled(msg)} if messages or any(self.report_is_enabled(r[0]) for r in checker.reports): neededcheckers.append(checker) # Sort checkers by priority neededcheckers = sorted( neededcheckers, key=operator.attrgetter("priority"), reverse=True ) return neededcheckers
main checking entry: check a list of files or modules from their name.
def check(self, files_or_modules): """main checking entry: check a list of files or modules from their name. """ # initialize msgs_state now that all messages have been registered into # the store for msg in self.msgs_store.messages: if not msg.may_be_emitted(): self._msgs_state[msg.msgid] = False if not isinstance(files_or_modules, (list, tuple)): files_or_modules = (files_or_modules,) if self.config.jobs == 1: self._do_check(files_or_modules) else: self._parallel_check(files_or_modules)
get modules and errors from a list of modules and handle errors
def expand_files(self, modules): """get modules and errors from a list of modules and handle errors """ result, errors = utils.expand_modules( modules, self.config.black_list, self.config.black_list_re ) for error in errors: message = modname = error["mod"] key = error["key"] self.set_current_module(modname) if key == "fatal": message = str(error["ex"]).replace(os.getcwd() + os.sep, "") self.add_message(key, args=message) return result
set the name of the currently analyzed module and init statistics for it
def set_current_module(self, modname, filepath=None): """set the name of the currently analyzed module and init statistics for it """ if not modname and filepath is None: return self.reporter.on_set_current_module(modname, filepath) self.current_name = modname self.current_file = filepath or modname self.stats["by_module"][modname] = {} self.stats["by_module"][modname]["statement"] = 0 for msg_cat in MSG_TYPES.values(): self.stats["by_module"][modname][msg_cat] = 0
return an ast(roid) representation for a module
def get_ast(self, filepath, modname): """return an ast(roid) representation for a module""" try: return MANAGER.ast_from_file(filepath, modname, source=True) except astroid.AstroidSyntaxError as ex: # pylint: disable=no-member self.add_message( "syntax-error", line=getattr(ex.error, "lineno", 0), args=str(ex.error) ) except astroid.AstroidBuildingException as ex: self.add_message("parse-error", args=ex) except Exception as ex: import traceback traceback.print_exc() self.add_message("astroid-error", args=(ex.__class__, ex))
Check a module from its astroid representation.
def check_astroid_module(self, ast_node, walker, rawcheckers, tokencheckers): """Check a module from its astroid representation.""" try: tokens = utils.tokenize_module(ast_node) except tokenize.TokenError as ex: self.add_message("syntax-error", line=ex.args[1][0], args=ex.args[0]) return None if not ast_node.pure_python: self.add_message("raw-checker-failed", args=ast_node.name) else: # assert astroid.file.endswith('.py') # invoke ITokenChecker interface on self to fetch module/block # level options self.process_tokens(tokens) if self._ignore_file: return False # walk ast to collect line numbers self.file_state.collect_block_lines(self.msgs_store, ast_node) # run raw and tokens checkers for checker in rawcheckers: checker.process_module(ast_node) for checker in tokencheckers: checker.process_tokens(tokens) # generate events to astroid checkers walker.walk(ast_node) return True
initialize counters
def open(self): """initialize counters""" self.stats = {"by_module": {}, "by_msg": {}} MANAGER.always_load_extensions = self.config.unsafe_load_any_extension MANAGER.max_inferable_values = self.config.limit_inference_results MANAGER.extension_package_whitelist.update(self.config.extension_pkg_whitelist) for msg_cat in MSG_TYPES.values(): self.stats[msg_cat] = 0
close the whole package/module, it's time to make reports!
def generate_reports(self): """close the whole package /module, it's time to make reports ! if persistent run, pickle results for later comparison """ # Display whatever messages are left on the reporter. self.reporter.display_messages(report_nodes.Section()) if self.file_state.base_name is not None: # load previous results if any previous_stats = config.load_results(self.file_state.base_name) # XXX code below needs refactoring to be more reporter agnostic self.reporter.on_close(self.stats, previous_stats) if self.config.reports: sect = self.make_reports(self.stats, previous_stats) else: sect = report_nodes.Section() if self.config.reports: self.reporter.display_reports(sect) self._report_evaluation() # save results if persistent run if self.config.persistent: config.save_results(self.stats, self.file_state.base_name) else: self.reporter.on_close(self.stats, {})
make the global evaluation report
def _report_evaluation(self): """make the global evaluation report""" # check with at least check 1 statements (usually 0 when there is a # syntax error preventing pylint from further processing) previous_stats = config.load_results(self.file_state.base_name) if self.stats["statement"] == 0: return # get a global note for the code evaluation = self.config.evaluation try: note = eval(evaluation, {}, self.stats) # pylint: disable=eval-used except Exception as ex: msg = "An exception occurred while rating: %s" % ex else: self.stats["global_note"] = note msg = "Your code has been rated at %.2f/10" % note pnote = previous_stats.get("global_note") if pnote is not None: msg += " (previous run: %.2f/10, %+.2f)" % (pnote, note - pnote) if self.config.score: sect = report_nodes.EvaluationSection(msg) self.reporter.display_reports(sect)
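As a rough illustration, evaluating pylint's stock evaluation expression (quoted here from memory; it can be overridden through the evaluation option) against a hypothetical stats dict gives:

# Hypothetical numbers; the expression mirrors the default --evaluation setting.
stats = {"statement": 200, "error": 1, "warning": 4, "refactor": 2, "convention": 3}
evaluation = "10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)"
note = eval(evaluation, {}, stats)  # pylint: disable=eval-used
print("Your code has been rated at %.2f/10" % note)   # 10 - (14 / 200) * 10 = 9.30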
callback for option preprocessing (i.e. before option parsing)
def cb_add_plugins(self, name, value): """callback for option preprocessing (i.e. before option parsing)""" self._plugins.extend(utils._splitstrip(value))
optik callback for sample config file generation
def cb_generate_config(self, *args, **kwargs): """optik callback for sample config file generation""" self.linter.generate_config(skipsections=("COMMANDS",)) sys.exit(0)
optik callback for sample manpage generation
def cb_generate_manpage(self, *args, **kwargs):
    """optik callback for sample manpage generation"""
    from pylint import __pkginfo__

    self.linter.generate_manpage(__pkginfo__)
    sys.exit(0)
optik callback for printing some help about a particular message
def cb_help_message(self, option, optname, value, parser): """optik callback for printing some help about a particular message""" self.linter.msgs_store.help_message(utils._splitstrip(value)) sys.exit(0)
optik callback for printing full documentation
def cb_full_documentation(self, option, optname, value, parser): """optik callback for printing full documentation""" self.linter.print_full_documentation() sys.exit(0)
optik callback for printing available messages
def cb_list_messages(self, option, optname, value, parser): # FIXME """optik callback for printing available messages""" self.linter.msgs_store.list_messages() sys.exit(0)