Initial commit
This commit is contained in:
67
venv/lib/python3.8/site-packages/pylint/checkers/__init__.py
Normal file
67
venv/lib/python3.8/site-packages/pylint/checkers/__init__.py
Normal file
@@ -0,0 +1,67 @@
|
||||
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
|
||||
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018-2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2019 Bruno P. Kinoshita <kinow@users.noreply.github.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""utilities methods and classes for checkers
|
||||
|
||||
Base id of standard checkers (used in msg and report ids):
|
||||
01: base
|
||||
02: classes
|
||||
03: format
|
||||
04: import
|
||||
05: misc
|
||||
06: variables
|
||||
07: exceptions
|
||||
08: similar
|
||||
09: design_analysis
|
||||
10: newstyle
|
||||
11: typecheck
|
||||
12: logging
|
||||
13: string_format
|
||||
14: string_constant
|
||||
15: stdlib
|
||||
16: python3
|
||||
17: refactoring
|
||||
18-50: not yet used: reserved for future internal checkers.
|
||||
51-99: perhaps used: reserved for external checkers
|
||||
|
||||
The raw_metrics checker has no number associated since it doesn't emit any
|
||||
messages nor reports. XXX not true, emit a 07 report !
|
||||
|
||||
"""
|
||||
|
||||
from pylint.checkers.base_checker import BaseChecker, BaseTokenChecker
|
||||
from pylint.utils import register_plugins
|
||||
|
||||
|
||||
def table_lines_from_stats(stats, _, columns):
    """Extract the values named in *columns* from *stats*.

    Returns a flat list of table cells suitable for a ureport.Table
    object: for each metric, the humanised name, its value (floats are
    rendered with three decimals), and two "NC" (not computed) cells for
    the unused old-stats comparison columns.
    """
    cells = []
    for metric in columns:
        value = stats[metric]
        rendered = "%.3f" % value if isinstance(value, float) else str(value)
        cells.extend((metric.replace("_", " "), rendered, "NC", "NC"))
    return cells
|
||||
|
||||
|
||||
def initialize(linter):
    """initialize linter with checkers in this package """
    # Scan this package's directory and register every checker module
    # found there with the given linter.
    register_plugins(linter, __path__[0])
|
||||
|
||||
|
||||
__all__ = ("BaseChecker", "BaseTokenChecker", "initialize")
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
90
venv/lib/python3.8/site-packages/pylint/checkers/async.py
Normal file
90
venv/lib/python3.8/site-packages/pylint/checkers/async.py
Normal file
@@ -0,0 +1,90 @@
|
||||
# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2017 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checker for anything related to the async protocol (PEP 492)."""
|
||||
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
from astroid import bases, exceptions
|
||||
|
||||
from pylint import checkers, interfaces, utils
|
||||
from pylint.checkers import utils as checker_utils
|
||||
from pylint.checkers.utils import decorated_with
|
||||
|
||||
|
||||
class AsyncChecker(checkers.BaseChecker):
    """Checker for anything related to the async protocol (PEP 492)."""

    __implements__ = interfaces.IAstroidChecker
    name = "async"
    msgs = {
        "E1700": (
            "Yield inside async function",
            "yield-inside-async-function",
            "Used when an `yield` or `yield from` statement is "
            "found inside an async function.",
            {"minversion": (3, 5)},
        ),
        "E1701": (
            "Async context manager '%s' doesn't implement __aenter__ and __aexit__.",
            "not-async-context-manager",
            "Used when an async context manager is used with an object "
            "that does not implement the async context management protocol.",
            {"minversion": (3, 5)},
        ),
    }

    def open(self):
        """Cache configuration and known async-generator decorators."""
        self._ignore_mixin_members = utils.get_global_option(
            self, "ignore-mixin-members"
        )
        self._async_generators = ["contextlib.asynccontextmanager"]

    @checker_utils.check_messages("yield-inside-async-function")
    def visit_asyncfunctiondef(self, node):
        """Flag `yield` (3.5 only) and `yield from` inside async functions."""
        for child in node.nodes_of_class(astroid.Yield):
            # Only yields belonging to this function's own scope count;
            # nested (sync) functions may yield freely.
            if child.scope() is node and (
                sys.version_info[:2] == (3, 5) or isinstance(child, astroid.YieldFrom)
            ):
                self.add_message("yield-inside-async-function", node=child)

    @checker_utils.check_messages("not-async-context-manager")
    def visit_asyncwith(self, node):
        """Verify each `async with` target supports __aenter__/__aexit__."""
        for ctx_mgr, _ in node.items:
            inferred = checker_utils.safe_infer(ctx_mgr)
            if inferred is None or inferred is astroid.Uninferable:
                continue

            if isinstance(inferred, bases.AsyncGenerator):
                # Check if we are dealing with a function decorated
                # with contextlib.asynccontextmanager.
                if decorated_with(inferred.parent, self._async_generators):
                    continue
            else:
                try:
                    inferred.getattr("__aenter__")
                    inferred.getattr("__aexit__")
                except exceptions.NotFoundError:
                    if isinstance(inferred, astroid.Instance):
                        # If we do not know the bases of this class,
                        # just skip it.
                        if not checker_utils.has_known_bases(inferred):
                            continue
                        # Just ignore mixin classes.
                        if self._ignore_mixin_members:
                            if inferred.name[-5:].lower() == "mixin":
                                continue
                else:
                    # Both dunders found: valid async context manager.
                    continue

            self.add_message(
                "not-async-context-manager", node=node, args=(inferred.name,)
            )
|
||||
|
||||
|
||||
def register(linter):
    """required method to auto register this checker"""
    linter.register_checker(AsyncChecker(linter))
|
||||
2502
venv/lib/python3.8/site-packages/pylint/checkers/base.py
Normal file
2502
venv/lib/python3.8/site-packages/pylint/checkers/base.py
Normal file
File diff suppressed because it is too large
Load Diff
190
venv/lib/python3.8/site-packages/pylint/checkers/base_checker.py
Normal file
190
venv/lib/python3.8/site-packages/pylint/checkers/base_checker.py
Normal file
@@ -0,0 +1,190 @@
|
||||
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
|
||||
# Copyright (c) 2014-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018-2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2019 Bruno P. Kinoshita <kinow@users.noreply.github.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
from inspect import cleandoc
|
||||
from typing import Any
|
||||
|
||||
from pylint.config import OptionsProviderMixIn
|
||||
from pylint.constants import _MSG_ORDER, WarningScope
|
||||
from pylint.exceptions import InvalidMessageError
|
||||
from pylint.interfaces import UNDEFINED, IRawChecker, ITokenChecker, implements
|
||||
from pylint.message.message_definition import MessageDefinition
|
||||
from pylint.utils import get_rst_section, get_rst_title
|
||||
|
||||
|
||||
class BaseChecker(OptionsProviderMixIn):
    """Base class for all pylint checkers: holds the checker's name,
    options, messages and reports, and provides message emission and
    self-documentation helpers."""

    # checker name (you may reuse an existing one)
    name = None  # type: str
    # options level (0 will be displaying in --help, 1 in --long-help)
    level = 1
    # ordered list of options to control the checker behaviour
    options = ()  # type: Any
    # messages issued by this checker
    msgs = {}  # type: Any
    # reports issued by this checker
    reports = ()  # type: Any
    # mark this checker as enabled or not.
    enabled = True

    def __init__(self, linter=None):
        """checker instances should have the linter as argument

        :param ILinter linter: is an object implementing ILinter."""
        if self.name is not None:
            self.name = self.name.lower()
        OptionsProviderMixIn.__init__(self)
        self.linter = linter

    def __gt__(self, other):
        """Permit to sort a list of Checker by name."""
        return "{}{}".format(self.name, self.msgs).__gt__(
            "{}{}".format(other.name, other.msgs)
        )

    def __repr__(self):
        status = "Checker" if self.enabled else "Disabled checker"
        return "{} '{}' (responsible for '{}')".format(
            status, self.name, "', '".join(self.msgs.keys())
        )

    def __str__(self):
        """This might be incomplete because multiple class inheriting BaseChecker
        can have the same name. Cf MessageHandlerMixIn.get_full_documentation()"""
        return self.get_full_documentation(
            msgs=self.msgs, options=self.options_and_values(), reports=self.reports
        )

    def get_full_documentation(self, msgs, options, reports, doc=None, module=None):
        """Build and return the reStructuredText documentation for this
        checker: title, optional module anchor, options, messages and
        reports sections."""
        result = ""
        checker_title = "%s checker" % (self.name.replace("_", " ").title())
        if module:
            # Provide anchor to link against
            result += ".. _%s:\n\n" % module
        result += "%s\n" % get_rst_title(checker_title, "~")
        if module:
            result += "This checker is provided by ``%s``.\n" % module
        result += "Verbatim name of the checker is ``%s``.\n\n" % self.name
        if doc:
            # Provide anchor to link against
            result += get_rst_title("{} Documentation".format(checker_title), "^")
            result += "%s\n\n" % cleandoc(doc)
        # options might be an empty generator and not be False when casted to boolean
        options = list(options)
        if options:
            result += get_rst_title("{} Options".format(checker_title), "^")
            result += "%s\n" % get_rst_section(None, options)
        if msgs:
            result += get_rst_title("{} Messages".format(checker_title), "^")
            # Sort by message category order first, then by message content.
            for msgid, msg in sorted(
                msgs.items(), key=lambda kv: (_MSG_ORDER.index(kv[0][0]), kv[1])
            ):
                msg = self.create_message_definition_from_tuple(msgid, msg)
                result += "%s\n" % msg.format_help(checkerref=False)
            result += "\n"
        if reports:
            result += get_rst_title("{} Reports".format(checker_title), "^")
            for report in reports:
                result += ":%s: %s\n" % report[:2]
            result += "\n"
        result += "\n"
        return result

    def add_message(
        self, msgid, line=None, node=None, args=None, confidence=None, col_offset=None
    ):
        """Forward a message to the linter, defaulting confidence to UNDEFINED."""
        if not confidence:
            confidence = UNDEFINED
        self.linter.add_message(msgid, line, node, args, confidence, col_offset)

    def check_consistency(self):
        """Check the consistency of msgid.

        msg ids for a checker should be a string of len 4, where the two first
        characters are the checker id and the two last the msg id in this
        checker.

        :raises InvalidMessageError: If the checker id in the messages are not
        always the same. """
        checker_id = None
        existing_ids = []
        for message in self.messages:
            if checker_id is not None and checker_id != message.msgid[1:3]:
                error_msg = "Inconsistent checker part in message id "
                error_msg += "'{}' (expected 'x{checker_id}xx' ".format(
                    message.msgid, checker_id=checker_id
                )
                error_msg += "because we already had {existing_ids}).".format(
                    existing_ids=existing_ids
                )
                raise InvalidMessageError(error_msg)
            checker_id = message.msgid[1:3]
            existing_ids.append(message.msgid)

    def create_message_definition_from_tuple(self, msgid, msg_tuple):
        """Build a MessageDefinition from one ``msgs`` entry.

        :raises InvalidMessageError: if the tuple lacks a message and symbol.
        """
        # Raw/token checkers emit line-scoped messages; AST checkers
        # emit node-scoped ones.
        if implements(self, (IRawChecker, ITokenChecker)):
            default_scope = WarningScope.LINE
        else:
            default_scope = WarningScope.NODE
        options = {}
        if len(msg_tuple) > 3:
            (msg, symbol, descr, options) = msg_tuple
        elif len(msg_tuple) > 2:
            (msg, symbol, descr) = msg_tuple
        else:
            error_msg = """Messages should have a msgid and a symbol. Something like this :

"W1234": (
    "message",
    "message-symbol",
    "Message description with detail.",
    ...
),
"""
            raise InvalidMessageError(error_msg)
        options.setdefault("scope", default_scope)
        return MessageDefinition(self, msgid, msg, descr, symbol, **options)

    @property
    def messages(self) -> list:
        """All of this checker's messages as MessageDefinition objects,
        sorted by msgid."""
        return [
            self.create_message_definition_from_tuple(msgid, msg_tuple)
            for msgid, msg_tuple in sorted(self.msgs.items())
        ]

    # dummy methods implementing the IChecker interface

    def get_message_definition(self, msgid):
        """Return the MessageDefinition for *msgid*.

        :raises InvalidMessageError: if this checker does not define *msgid*.
        """
        for message_definition in self.messages:
            if message_definition.msgid == msgid:
                return message_definition
        error_msg = "MessageDefinition for '{}' does not exists. ".format(msgid)
        error_msg += "Choose from {}.".format([m.msgid for m in self.messages])
        raise InvalidMessageError(error_msg)

    def open(self):
        """called before visiting project (i.e set of modules)"""

    def close(self):
        """called after visiting project (i.e set of modules)"""
|
||||
|
||||
|
||||
class BaseTokenChecker(BaseChecker):
    """Base class for checkers that operate on the module's token stream."""

    def process_tokens(self, tokens):
        """Handle the token stream; concrete subclasses must override this."""
        raise NotImplementedError
|
||||
2093
venv/lib/python3.8/site-packages/pylint/checkers/classes.py
Normal file
2093
venv/lib/python3.8/site-packages/pylint/checkers/classes.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,500 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006, 2009-2010, 2012-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012, 2014 Google, Inc.
|
||||
# Copyright (c) 2014-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 ahirnish <ahirnish@gmail.com>
|
||||
# Copyright (c) 2018 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Mark Miller <725mrm@gmail.com>
|
||||
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2018 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2019 Michael Scott Cuthbert <cuthbert@mit.edu>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""check for signs of poor design"""
|
||||
|
||||
import re
|
||||
from collections import defaultdict
|
||||
|
||||
import astroid
|
||||
from astroid import BoolOp, If, decorators
|
||||
|
||||
from pylint import utils
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.checkers.utils import check_messages
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
|
||||
# Message definitions for the design checker: msgid -> (template, symbol,
# description).  All are "R" (refactor) messages with (actual, limit) args.
MSGS = {
    "R0901": (
        "Too many ancestors (%s/%s)",
        "too-many-ancestors",
        "Used when class has too many parent classes, try to reduce "
        "this to get a simpler (and so easier to use) class.",
    ),
    "R0902": (
        "Too many instance attributes (%s/%s)",
        "too-many-instance-attributes",
        "Used when class has too many instance attributes, try to reduce "
        "this to get a simpler (and so easier to use) class.",
    ),
    "R0903": (
        "Too few public methods (%s/%s)",
        "too-few-public-methods",
        "Used when class has too few public methods, so be sure it's "
        "really worth it.",
    ),
    "R0904": (
        "Too many public methods (%s/%s)",
        "too-many-public-methods",
        "Used when class has too many public methods, try to reduce "
        "this to get a simpler (and so easier to use) class.",
    ),
    "R0911": (
        "Too many return statements (%s/%s)",
        "too-many-return-statements",
        "Used when a function or method has too many return statement, "
        "making it hard to follow.",
    ),
    "R0912": (
        "Too many branches (%s/%s)",
        "too-many-branches",
        "Used when a function or method has too many branches, "
        "making it hard to follow.",
    ),
    "R0913": (
        "Too many arguments (%s/%s)",
        "too-many-arguments",
        "Used when a function or method takes too many arguments.",
    ),
    "R0914": (
        "Too many local variables (%s/%s)",
        "too-many-locals",
        "Used when a function or method has too many local variables.",
    ),
    "R0915": (
        "Too many statements (%s/%s)",
        "too-many-statements",
        "Used when a function or method has too many statements. You "
        "should then split it in smaller functions / methods.",
    ),
    "R0916": (
        "Too many boolean expressions in if statement (%s/%s)",
        "too-many-boolean-expressions",
        "Used when an if statement contains too many boolean expressions.",
    ),
}
# Matches dunder names such as __init__ or __eq__.
SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
# Decorator names that mark a class as a dataclass (stdlib or attrs).
DATACLASSES_DECORATORS = frozenset({"dataclass", "attrs"})
DATACLASS_IMPORT = "dataclasses"
TYPING_NAMEDTUPLE = "typing.NamedTuple"
|
||||
|
||||
|
||||
def _is_exempt_from_public_methods(node: astroid.ClassDef) -> bool:
    """Check if a class is exempt from too-few-public-methods.

    Enum subclasses, typing.NamedTuple subclasses and dataclass-decorated
    classes legitimately expose few (or no) public methods.
    """
    # Exempt if it's a typing.NamedTuple or an Enum subclass.
    for ancestor in node.ancestors():
        if ancestor.name == "Enum" and ancestor.root().name == "enum":
            return True
        if ancestor.qname() == TYPING_NAMEDTUPLE:
            return True

    # Undecorated classes can't be dataclasses.
    if not node.decorators:
        return False

    # The dataclass decorator (or its module) must actually be visible
    # in the module's namespace for the decorator name to count.
    module_names = set(node.root().locals)
    dataclass_in_scope = (
        bool(module_names.intersection(DATACLASSES_DECORATORS))
        or DATACLASS_IMPORT in module_names
    )

    for dec in node.decorators.nodes:
        if isinstance(dec, astroid.Call):
            dec = dec.func
        if isinstance(dec, astroid.Name):
            dec_name = dec.name
        elif isinstance(dec, astroid.Attribute):
            dec_name = dec.attrname
        else:
            continue
        if dec_name in DATACLASSES_DECORATORS and dataclass_in_scope:
            return True
    return False
|
||||
|
||||
|
||||
def _count_boolean_expressions(bool_op):
    """Counts the number of boolean expressions in BoolOp `bool_op` (recursive)

    example: a and (b or c or (d and e)) ==> 5 boolean expressions
    """
    # Nested BoolOps contribute their own leaf counts; every other child
    # is a single boolean expression.
    return sum(
        _count_boolean_expressions(child) if isinstance(child, BoolOp) else 1
        for child in bool_op.get_children()
    )
|
||||
|
||||
|
||||
def _count_methods_in_class(node):
    """Count the methods (own and inherited) that qualify as public."""
    total = sum(1 for method in node.methods() if not method.name.startswith("_"))
    # Special methods count towards the number of public methods,
    # but don't count towards there being too many methods.
    for method in node.mymethods():
        name = method.name
        if name != "__init__" and SPECIAL_OBJ.search(name):
            total += 1
    return total
|
||||
|
||||
|
||||
class MisdesignChecker(BaseChecker):
    """checks for sign of poor/misdesign:
    * number of methods, attributes, local variables...
    * size, complexity of functions, methods
    """

    __implements__ = (IAstroidChecker,)

    # configuration section name
    name = "design"
    # messages
    msgs = MSGS
    priority = -2
    # configuration options
    options = (
        (
            "max-args",
            {
                "default": 5,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of arguments for function / method.",
            },
        ),
        (
            "max-locals",
            {
                "default": 15,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of locals for function / method body.",
            },
        ),
        (
            "max-returns",
            {
                "default": 6,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of return / yield for function / "
                "method body.",
            },
        ),
        (
            "max-branches",
            {
                "default": 12,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of branch for function / method body.",
            },
        ),
        (
            "max-statements",
            {
                "default": 50,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of statements in function / method " "body.",
            },
        ),
        (
            "max-parents",
            {
                "default": 7,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of parents for a class (see R0901).",
            },
        ),
        (
            "max-attributes",
            {
                "default": 7,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of attributes for a class \
(see R0902).",
            },
        ),
        (
            "min-public-methods",
            {
                "default": 2,
                "type": "int",
                "metavar": "<num>",
                "help": "Minimum number of public methods for a class \
(see R0903).",
            },
        ),
        (
            "max-public-methods",
            {
                "default": 20,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of public methods for a class \
(see R0904).",
            },
        ),
        (
            "max-bool-expr",
            {
                "default": 5,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of boolean expressions in an if "
                "statement (see R0916).",
            },
        ),
    )

    def __init__(self, linter=None):
        BaseChecker.__init__(self, linter)
        # Per-run counters, (re)initialized in open():
        self.stats = None
        self._returns = None    # stack of return counts, one per nested function
        self._branches = None   # scope -> branch count
        self._stmts = None      # stack of statement counts, one per nested function
        self._stmts = None

    def open(self):
        """initialize visit variables"""
        self.stats = self.linter.add_stats()
        self._returns = []
        self._branches = defaultdict(int)
        self._stmts = []

    def _inc_all_stmts(self, amount):
        # A statement counts against every function currently on the stack
        # (i.e. the enclosing function and all its enclosing functions).
        for i in range(len(self._stmts)):
            self._stmts[i] += amount

    @decorators.cachedproperty
    def _ignored_argument_names(self):
        # Compiled regex (or None) of argument names to exclude from counts.
        return utils.get_global_option(self, "ignored-argument-names", default=None)

    @check_messages(
        "too-many-ancestors",
        "too-many-instance-attributes",
        "too-few-public-methods",
        "too-many-public-methods",
    )
    def visit_classdef(self, node):
        """check size of inheritance hierarchy and number of instance attributes
        """
        nb_parents = len(list(node.ancestors()))
        if nb_parents > self.config.max_parents:
            self.add_message(
                "too-many-ancestors",
                node=node,
                args=(nb_parents, self.config.max_parents),
            )

        if len(node.instance_attrs) > self.config.max_attributes:
            self.add_message(
                "too-many-instance-attributes",
                node=node,
                args=(len(node.instance_attrs), self.config.max_attributes),
            )

    @check_messages("too-few-public-methods", "too-many-public-methods")
    def leave_classdef(self, node):
        """check number of public methods"""
        my_methods = sum(
            1 for method in node.mymethods() if not method.name.startswith("_")
        )

        # Does the class contain less than n public methods ?
        # This checks only the methods defined in the current class,
        # since the user might not have control over the classes
        # from the ancestors. It avoids some false positives
        # for classes such as unittest.TestCase, which provides
        # a lot of assert methods. It doesn't make sense to warn
        # when the user subclasses TestCase to add his own tests.
        if my_methods > self.config.max_public_methods:
            self.add_message(
                "too-many-public-methods",
                node=node,
                args=(my_methods, self.config.max_public_methods),
            )

        # Stop here for exception, metaclass, interface classes and other
        # classes for which we don't need to count the methods.
        if node.type != "class" or _is_exempt_from_public_methods(node):
            return

        # Does the class contain more than n public methods ?
        # This checks all the methods defined by ancestors and
        # by the current class.
        all_methods = _count_methods_in_class(node)
        if all_methods < self.config.min_public_methods:
            self.add_message(
                "too-few-public-methods",
                node=node,
                args=(all_methods, self.config.min_public_methods),
            )

    @check_messages(
        "too-many-return-statements",
        "too-many-branches",
        "too-many-arguments",
        "too-many-locals",
        "too-many-statements",
        "keyword-arg-before-vararg",
    )
    def visit_functiondef(self, node):
        """check function name, docstring, arguments, redefinition,
        variable names, max locals
        """
        # init branch and returns counters
        self._returns.append(0)
        # check number of arguments
        args = node.args.args
        ignored_argument_names = self._ignored_argument_names
        if args is not None:
            ignored_args_num = 0
            if ignored_argument_names:
                ignored_args_num = sum(
                    1 for arg in args if ignored_argument_names.match(arg.name)
                )

            argnum = len(args) - ignored_args_num
            if argnum > self.config.max_args:
                # NOTE(review): the message reports len(args), not the
                # ignored-adjusted argnum used for the threshold — confirm
                # this asymmetry is intended.
                self.add_message(
                    "too-many-arguments",
                    node=node,
                    args=(len(args), self.config.max_args),
                )
        else:
            ignored_args_num = 0
        # check number of local variables
        locnum = len(node.locals) - ignored_args_num
        if locnum > self.config.max_locals:
            self.add_message(
                "too-many-locals", node=node, args=(locnum, self.config.max_locals)
            )
        # init new statements counter
        self._stmts.append(1)

    visit_asyncfunctiondef = visit_functiondef

    @check_messages(
        "too-many-return-statements",
        "too-many-branches",
        "too-many-arguments",
        "too-many-locals",
        "too-many-statements",
    )
    def leave_functiondef(self, node):
        """most of the work is done here on close:
        checks for max returns, branch, return in __init__
        """
        returns = self._returns.pop()
        if returns > self.config.max_returns:
            self.add_message(
                "too-many-return-statements",
                node=node,
                args=(returns, self.config.max_returns),
            )
        branches = self._branches[node]
        if branches > self.config.max_branches:
            self.add_message(
                "too-many-branches",
                node=node,
                args=(branches, self.config.max_branches),
            )
        # check number of statements
        stmts = self._stmts.pop()
        if stmts > self.config.max_statements:
            self.add_message(
                "too-many-statements",
                node=node,
                args=(stmts, self.config.max_statements),
            )

    leave_asyncfunctiondef = leave_functiondef

    def visit_return(self, _):
        """count number of returns"""
        if not self._returns:
            return  # return outside function, reported by the base checker
        self._returns[-1] += 1

    def visit_default(self, node):
        """default visit method -> increments the statements counter if
        necessary
        """
        if node.is_statement:
            self._inc_all_stmts(1)

    def visit_tryexcept(self, node):
        """increments the branches counter"""
        branches = len(node.handlers)
        if node.orelse:
            branches += 1
        self._inc_branch(node, branches)
        self._inc_all_stmts(branches)

    def visit_tryfinally(self, node):
        """increments the branches counter"""
        self._inc_branch(node, 2)
        self._inc_all_stmts(2)

    @check_messages("too-many-boolean-expressions")
    def visit_if(self, node):
        """increments the branches counter and checks boolean expressions"""
        self._check_boolean_expressions(node)
        branches = 1
        # don't double count If nodes coming from some 'elif'
        if node.orelse and (len(node.orelse) > 1 or not isinstance(node.orelse[0], If)):
            branches += 1
        self._inc_branch(node, branches)
        self._inc_all_stmts(branches)

    def _check_boolean_expressions(self, node):
        """Go through "if" node `node` and counts its boolean expressions

        if the "if" node test is a BoolOp node
        """
        condition = node.test
        if not isinstance(condition, BoolOp):
            return
        nb_bool_expr = _count_boolean_expressions(condition)
        if nb_bool_expr > self.config.max_bool_expr:
            self.add_message(
                "too-many-boolean-expressions",
                node=condition,
                args=(nb_bool_expr, self.config.max_bool_expr),
            )

    def visit_while(self, node):
        """increments the branches counter"""
        branches = 1
        if node.orelse:
            branches += 1
        self._inc_branch(node, branches)

    visit_for = visit_while

    def _inc_branch(self, node, branchesnum=1):
        """increments the branches counter"""
        self._branches[node.scope()] += branchesnum
|
||||
|
||||
|
||||
def register(linter):
    """required method to auto register this checker """
    linter.register_checker(MisdesignChecker(linter))
|
||||
554
venv/lib/python3.8/site-packages/pylint/checkers/exceptions.py
Normal file
554
venv/lib/python3.8/site-packages/pylint/checkers/exceptions.py
Normal file
@@ -0,0 +1,554 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2011-2014 Google, Inc.
|
||||
# Copyright (c) 2012 Tim Hatch <tim@timhatch.com>
|
||||
# Copyright (c) 2013-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2015 Steven Myint <hg@stevenmyint.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Erik <erik.eriksson@yahoo.com>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Martin von Gagern <gagern@google.com>
|
||||
# Copyright (c) 2018 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2018 Natalie Serebryakova <natalie.serebryakova@Natalies-MacBook-Pro.local>
|
||||
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Carey Metcalfe <carey@cmetcalfe.ca>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Alexander Todorov <atodorov@otb.bg>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2019 Djailla <bastien.vallet@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checks for various exception related errors."""
|
||||
import builtins
|
||||
import inspect
|
||||
import typing
|
||||
|
||||
import astroid
|
||||
from astroid.node_classes import NodeNG
|
||||
|
||||
from pylint import checkers, interfaces
|
||||
from pylint.checkers import utils
|
||||
|
||||
|
||||
def _builtin_exceptions():
|
||||
def predicate(obj):
|
||||
return isinstance(obj, type) and issubclass(obj, BaseException)
|
||||
|
||||
members = inspect.getmembers(builtins, predicate)
|
||||
return {exc.__name__ for (_, exc) in members}
|
||||
|
||||
|
||||
def _annotated_unpack_infer(stmt, context=None):
    """Yield ``(original node, inferred node)`` pairs for *stmt*.

    List and tuple literals are unpacked element by element; every
    other statement is inferred directly.  Uninferable results are
    silently skipped.
    """
    if isinstance(stmt, (astroid.List, astroid.Tuple)):
        for element in stmt.elts:
            inferred = utils.safe_infer(element)
            if inferred and inferred is not astroid.Uninferable:
                yield element, inferred
    else:
        for inferred in stmt.infer(context):
            if inferred is not astroid.Uninferable:
                yield stmt, inferred
||||
def _is_raising(body: typing.List) -> bool:
|
||||
"""Return true if the given statement node raise an exception"""
|
||||
for node in body:
|
||||
if isinstance(node, astroid.Raise):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
# Exception names considered too broad to catch; also the default value of
# the --overgeneral-exceptions option declared on ExceptionsChecker.
OVERGENERAL_EXCEPTIONS = ("BaseException", "Exception")
# Name of the builtins module ("builtins" on Python 3).
BUILTINS_NAME = builtins.__name__
||||
# Message table for the exceptions checker:
# message-id -> (template, symbolic-name, description).
MSGS = {
    "E0701": (
        "Bad except clauses order (%s)",
        "bad-except-order",
        "Used when except clauses are not in the correct order (from the "
        "more specific to the more generic). If you don't fix the order, "
        "some exceptions may not be caught by the most specific handler.",
    ),
    "E0702": (
        "Raising %s while only classes or instances are allowed",
        "raising-bad-type",
        "Used when something which is neither a class, an instance or a "
        "string is raised (i.e. a `TypeError` will be raised).",
    ),
    "E0703": (
        "Exception context set to something which is not an exception, nor None",
        "bad-exception-context",
        'Used when using the syntax "raise ... from ...", '
        "where the exception context is not an exception, "
        "nor None.",
    ),
    "E0704": (
        "The raise statement is not inside an except clause",
        "misplaced-bare-raise",
        "Used when a bare raise is not used inside an except clause. "
        "This generates an error, since there are no active exceptions "
        "to be reraised. An exception to this rule is represented by "
        "a bare raise inside a finally clause, which might work, as long "
        "as an exception is raised inside the try block, but it is "
        "nevertheless a code smell that must not be relied upon.",
    ),
    "E0710": (
        "Raising a new style class which doesn't inherit from BaseException",
        "raising-non-exception",
        "Used when a new style class which doesn't inherit from "
        "BaseException is raised.",
    ),
    "E0711": (
        "NotImplemented raised - should raise NotImplementedError",
        "notimplemented-raised",
        "Used when NotImplemented is raised instead of NotImplementedError",
    ),
    "E0712": (
        "Catching an exception which doesn't inherit from Exception: %s",
        "catching-non-exception",
        "Used when a class which doesn't inherit from "
        "Exception is used as an exception in an except clause.",
    ),
    "W0702": (
        "No exception type(s) specified",
        "bare-except",
        "Used when an except clause doesn't specify exceptions type to catch.",
    ),
    "W0703": (
        "Catching too general exception %s",
        "broad-except",
        "Used when an except catches a too general exception, "
        "possibly burying unrelated errors.",
    ),
    "W0705": (
        "Catching previously caught exception type %s",
        "duplicate-except",
        "Used when an except catches a type that was already caught by "
        "a previous handler.",
    ),
    "W0706": (
        "The except handler raises immediately",
        "try-except-raise",
        "Used when an except handler uses raise as its first or only "
        "operator. This is useless because it raises back the exception "
        "immediately. Remove the raise operator or the entire "
        "try-except-raise block!",
    ),
    "W0711": (
        'Exception to catch is the result of a binary "%s" operation',
        "binary-op-exception",
        "Used when the exception to catch is of the form "
        '"except A or B:". If intending to catch multiple, '
        'rewrite as "except (A, B):"',
    ),
    "W0715": (
        "Exception arguments suggest string formatting might be intended",
        "raising-format-tuple",
        "Used when passing multiple arguments to an exception "
        "constructor, the first of them a string literal containing what "
        "appears to be placeholders intended for formatting",
    ),
    "W0716": (
        "Invalid exception operation. %s",
        "wrong-exception-operation",
        "Used when an operation is done against an exception, but the operation "
        "is not valid for the exception in question. Usually emitted when having "
        "binary operations between exceptions in except handlers.",
    ),
}
|
||||
|
||||
class BaseVisitor:
    """Base class for visitors defined in this module.

    ``visit(node)`` dispatches to a ``visit_<lowercased class name>``
    method when the subclass defines one, and to :meth:`visit_default`
    otherwise.
    """

    def __init__(self, checker, node):
        self._checker = checker
        self._node = node

    def visit(self, node):
        """Dispatch on the lowercased class name of *node*."""
        method_name = "visit_" + node.__class__.__name__.lower()
        handler = getattr(self, method_name, self.visit_default)
        handler(node)

    def visit_default(self, node):  # pylint: disable=unused-argument
        """Default implementation for all the nodes."""
||||
class ExceptionRaiseRefVisitor(BaseVisitor):
    """Visit references (anything that is not an AST leaf)."""

    def visit_name(self, name):
        # ``raise NotImplemented`` is almost always a typo for
        # NotImplementedError.
        if name.name == "NotImplemented":
            self._checker.add_message("notimplemented-raised", node=self._node)

    def visit_call(self, call):
        if isinstance(call.func, astroid.Name):
            self.visit_name(call.func)
        if len(call.args) <= 1:
            return
        first_arg = call.args[0]
        if not isinstance(first_arg, astroid.Const):
            return
        if not isinstance(first_arg.value, str):
            return
        text = first_arg.value
        # A leading string argument that contains %-style or {}-style
        # placeholders, plus extra positional arguments, suggests the
        # author meant to format the message rather than pass a tuple.
        if "%" in text or ("{" in text and "}" in text):
            self._checker.add_message("raising-format-tuple", node=self._node)
||||
class ExceptionRaiseLeafVisitor(BaseVisitor):
    """Visitor for handling leaf kinds of a raise value."""

    def visit_const(self, const):
        # Raising a non-string literal is a guaranteed TypeError at
        # runtime.  (Raised strings are handled by the python3 porting
        # checker.)
        if isinstance(const.value, str):
            return
        self._checker.add_message(
            "raising-bad-type",
            node=self._node,
            args=const.value.__class__.__name__,
        )

    def visit_instance(self, instance):
        # pylint: disable=protected-access
        self.visit_classdef(instance._proxied)

    # Exception instances have a particular class type
    visit_exceptioninstance = visit_instance

    def visit_classdef(self, cls):
        # Only flag classes whose full ancestry is known; otherwise we
        # cannot tell whether they derive from BaseException.
        if (
            not utils.inherit_from_std_ex(cls)
            and utils.has_known_bases(cls)
            and cls.newstyle
        ):
            self._checker.add_message("raising-non-exception", node=self._node)

    def visit_tuple(self, _):
        self._checker.add_message("raising-bad-type", node=self._node, args="tuple")

    def visit_default(self, node):
        name = getattr(node, "name", node.__class__.__name__)
        self._checker.add_message("raising-bad-type", node=self._node, args=name)
||||
class ExceptionsChecker(checkers.BaseChecker):
    """Exception related checks."""

    __implements__ = interfaces.IAstroidChecker

    name = "exceptions"
    msgs = MSGS
    priority = -4
    options = (
        (
            "overgeneral-exceptions",
            {
                "default": OVERGENERAL_EXCEPTIONS,
                "type": "csv",
                "metavar": "<comma-separated class names>",
                "help": "Exceptions that will emit a warning "
                'when being caught. Defaults to "%s".'
                % (", ".join(OVERGENERAL_EXCEPTIONS),),
            },
        ),
    )

    def open(self):
        """Cache the builtin exception names before a run starts."""
        self._builtin_exceptions = _builtin_exceptions()
        super().open()

    @utils.check_messages(
        "misplaced-bare-raise",
        "raising-bad-type",
        "raising-non-exception",
        "notimplemented-raised",
        "bad-exception-context",
        "raising-format-tuple",
    )
    def visit_raise(self, node):
        """Check a raise statement: bare raise placement, exception
        context, and the kind of value being raised."""
        if node.exc is None:
            # Bare `raise`: only the placement check applies.
            self._check_misplaced_bare_raise(node)
            return

        if node.cause:
            # `raise ... from <cause>`
            self._check_bad_exception_context(node)

        expr = node.exc
        # First inspect the raw expression (names, calls) ...
        ExceptionRaiseRefVisitor(self, node).visit(expr)

        # ... then inspect what it is inferred to be (const, instance,
        # class, tuple, ...).
        try:
            inferred_value = expr.inferred()[-1]
        except astroid.InferenceError:
            pass
        else:
            if inferred_value:
                ExceptionRaiseLeafVisitor(self, node).visit(inferred_value)

    def _check_misplaced_bare_raise(self, node):
        """Emit misplaced-bare-raise unless the bare raise is inside an
        except handler (or inside ``__exit__``)."""
        # Filter out if it's present in __exit__.
        scope = node.scope()
        if (
            isinstance(scope, astroid.FunctionDef)
            and scope.is_method()
            and scope.name == "__exit__"
        ):
            return

        current = node
        # Stop when a new scope is generated or when the raise
        # statement is found inside a TryFinally.
        ignores = (astroid.ExceptHandler, astroid.FunctionDef)
        while current and not isinstance(current.parent, ignores):
            current = current.parent

        expected = (astroid.ExceptHandler,)
        if not current or not isinstance(current.parent, expected):
            self.add_message("misplaced-bare-raise", node=node)

    def _check_bad_exception_context(self, node):
        """Verify that the exception context is properly set.

        An exception context can be only `None` or an exception.
        """
        cause = utils.safe_infer(node.cause)
        if cause in (astroid.Uninferable, None):
            return

        if isinstance(cause, astroid.Const):
            # Only the literal None is acceptable as a constant cause.
            if cause.value is not None:
                self.add_message("bad-exception-context", node=node)
        elif not isinstance(cause, astroid.ClassDef) and not utils.inherit_from_std_ex(
            cause
        ):
            self.add_message("bad-exception-context", node=node)

    def _check_catching_non_exception(self, handler, exc, part):
        """Emit catching-non-exception when the handler catches something
        that is not (or cannot be shown to be) an exception class."""
        if isinstance(exc, astroid.Tuple):
            # Check if it is a tuple of exceptions.
            inferred = [utils.safe_infer(elt) for elt in exc.elts]
            if any(node is astroid.Uninferable for node in inferred):
                # Don't emit if we don't know every component.
                return
            if all(
                node
                and (utils.inherit_from_std_ex(node) or not utils.has_known_bases(node))
                for node in inferred
            ):
                return

        if not isinstance(exc, astroid.ClassDef):
            # Don't emit the warning if the inferred stmt
            # is None, but the exception handler is something else,
            # maybe it was redefined.
            if isinstance(exc, astroid.Const) and exc.value is None:
                if (
                    isinstance(handler.type, astroid.Const)
                    and handler.type.value is None
                ) or handler.type.parent_of(exc):
                    # If the exception handler catches None or
                    # the exception component, which is None, is
                    # defined by the entire exception handler, then
                    # emit a warning.
                    self.add_message(
                        "catching-non-exception",
                        node=handler.type,
                        args=(part.as_string(),),
                    )
            else:
                self.add_message(
                    "catching-non-exception",
                    node=handler.type,
                    args=(part.as_string(),),
                )
            return

        if (
            not utils.inherit_from_std_ex(exc)
            and exc.name not in self._builtin_exceptions
        ):
            if utils.has_known_bases(exc):
                self.add_message(
                    "catching-non-exception", node=handler.type, args=(exc.name,)
                )

    def _check_try_except_raise(self, node):
        """Emit try-except-raise for handlers that immediately re-raise,
        unless a later, broader handler would actually change behavior."""

        def gather_exceptions_from_handler(
            handler,
        ) -> typing.Optional[typing.List[NodeNG]]:
            # Returns the list of exception nodes the handler catches,
            # [] for a bare except, or None when inference failed.
            exceptions = []  # type: typing.List[NodeNG]
            if handler.type:
                exceptions_in_handler = utils.safe_infer(handler.type)
                if isinstance(exceptions_in_handler, astroid.Tuple):
                    exceptions = list(
                        {
                            exception
                            for exception in exceptions_in_handler.elts
                            if isinstance(exception, astroid.Name)
                        }
                    )
                elif exceptions_in_handler:
                    exceptions = [exceptions_in_handler]
                else:
                    # Break when we cannot infer anything reliably.
                    return None
            return exceptions

        bare_raise = False
        handler_having_bare_raise = None
        excs_in_bare_handler = []
        for handler in node.handlers:
            if bare_raise:
                # check that subsequent handler is not parent of handler which had bare raise.
                # since utils.safe_infer can fail for bare except, check it before.
                # also break early if bare except is followed by bare except.

                excs_in_current_handler = gather_exceptions_from_handler(handler)

                if not excs_in_current_handler:
                    bare_raise = False
                    break
                if excs_in_bare_handler is None:
                    # It can be `None` when the inference failed
                    break

                for exc_in_current_handler in excs_in_current_handler:
                    inferred_current = utils.safe_infer(exc_in_current_handler)
                    if any(
                        utils.is_subclass_of(
                            utils.safe_infer(exc_in_bare_handler), inferred_current
                        )
                        for exc_in_bare_handler in excs_in_bare_handler
                    ):
                        bare_raise = False
                        break

            # `raise` as the first operator inside the except handler
            if _is_raising([handler.body[0]]):
                # flags when there is a bare raise
                if handler.body[0].exc is None:
                    bare_raise = True
                    handler_having_bare_raise = handler
                    excs_in_bare_handler = gather_exceptions_from_handler(handler)
        else:
            # for/else: only report when no later handler cancelled the flag
            # via break above.
            if bare_raise:
                self.add_message("try-except-raise", node=handler_having_bare_raise)

    @utils.check_messages("wrong-exception-operation")
    def visit_binop(self, node):
        """Flag binary operations used as an except clause type."""
        if isinstance(node.parent, astroid.ExceptHandler):
            # except (V | A)
            suggestion = "Did you mean '(%s, %s)' instead?" % (
                node.left.as_string(),
                node.right.as_string(),
            )
            self.add_message("wrong-exception-operation", node=node, args=(suggestion,))

    @utils.check_messages("wrong-exception-operation")
    def visit_compare(self, node):
        """Flag comparison operations used as an except clause type."""
        if isinstance(node.parent, astroid.ExceptHandler):
            # except (V < A)
            suggestion = "Did you mean '(%s, %s)' instead?" % (
                node.left.as_string(),
                ", ".join(operand.as_string() for _, operand in node.ops),
            )
            self.add_message("wrong-exception-operation", node=node, args=(suggestion,))

    @utils.check_messages(
        "bare-except",
        "broad-except",
        "try-except-raise",
        "binary-op-exception",
        "bad-except-order",
        "catching-non-exception",
        "duplicate-except",
    )
    def visit_tryexcept(self, node):
        """check for empty except"""
        self._check_try_except_raise(node)
        exceptions_classes = []
        nb_handlers = len(node.handlers)
        for index, handler in enumerate(node.handlers):
            if handler.type is None:
                if not _is_raising(handler.body):
                    self.add_message("bare-except", node=handler)

                # check if an "except:" is followed by some other
                # except
                if index < (nb_handlers - 1):
                    msg = "empty except clause should always appear last"
                    self.add_message("bad-except-order", node=node, args=msg)

            elif isinstance(handler.type, astroid.BoolOp):
                self.add_message(
                    "binary-op-exception", node=handler, args=handler.type.op
                )
            else:
                try:
                    excs = list(_annotated_unpack_infer(handler.type))
                except astroid.InferenceError:
                    continue

                for part, exc in excs:
                    if exc is astroid.Uninferable:
                        continue
                    if isinstance(exc, astroid.Instance) and utils.inherit_from_std_ex(
                        exc
                    ):
                        # pylint: disable=protected-access
                        exc = exc._proxied

                    self._check_catching_non_exception(handler, exc, part)

                    if not isinstance(exc, astroid.ClassDef):
                        continue

                    exc_ancestors = [
                        anc
                        for anc in exc.ancestors()
                        if isinstance(anc, astroid.ClassDef)
                    ]

                    # bad-except-order: a broader exception was already
                    # caught by a previous handler.
                    for previous_exc in exceptions_classes:
                        if previous_exc in exc_ancestors:
                            msg = "%s is an ancestor class of %s" % (
                                previous_exc.name,
                                exc.name,
                            )
                            self.add_message(
                                "bad-except-order", node=handler.type, args=msg
                            )
                    if (
                        exc.name in self.config.overgeneral_exceptions
                        and exc.root().name == utils.EXCEPTIONS_MODULE
                        and not _is_raising(handler.body)
                    ):
                        self.add_message(
                            "broad-except", args=exc.name, node=handler.type
                        )

                    if exc in exceptions_classes:
                        self.add_message(
                            "duplicate-except", args=exc.name, node=handler.type
                        )

                exceptions_classes += [exc for _, exc in excs]
|
||||
def register(linter):
    """Required entry point: auto-register the ExceptionsChecker."""
    checker = ExceptionsChecker(linter)
    linter.register_checker(checker)
|
||||
1392
venv/lib/python3.8/site-packages/pylint/checkers/format.py
Normal file
1392
venv/lib/python3.8/site-packages/pylint/checkers/format.py
Normal file
File diff suppressed because it is too large
Load Diff
991
venv/lib/python3.8/site-packages/pylint/checkers/imports.py
Normal file
991
venv/lib/python3.8/site-packages/pylint/checkers/imports.py
Normal file
@@ -0,0 +1,991 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012-2014 Google, Inc.
|
||||
# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015-2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
|
||||
# Copyright (c) 2015 Cezar <celnazli@bitdefender.com>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2015 Noam Yorav-Raphael <noamraph@gmail.com>
|
||||
# Copyright (c) 2015 James Morgensen <james.morgensen@gmail.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Jared Garst <cultofjared@gmail.com>
|
||||
# Copyright (c) 2016 Maik Röder <maikroeder@gmail.com>
|
||||
# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
|
||||
# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2017 Michka Popoff <michkapopoff@gmail.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Erik Wright <erik.wright@shopify.com>
|
||||
# Copyright (c) 2018 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 Hornwitser <github@hornwitser.no>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2018 Natalie Serebryakova <natalie.serebryakova@Natalies-MacBook-Pro.local>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Marianna Polatoglou <mpolatoglou@bloomberg.net>
|
||||
# Copyright (c) 2019 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2019 Nick Smith <clickthisnick@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Paul Renvoisé <renvoisepaul@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""imports checkers for Python code"""
|
||||
|
||||
import collections
|
||||
import copy
|
||||
import os
|
||||
import sys
|
||||
from distutils import sysconfig
|
||||
|
||||
import astroid
|
||||
import isort
|
||||
from astroid import modutils
|
||||
from astroid.decorators import cached
|
||||
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.checkers.utils import (
|
||||
check_messages,
|
||||
is_from_fallback_block,
|
||||
node_ignores_exception,
|
||||
)
|
||||
from pylint.exceptions import EmptyReportError
|
||||
from pylint.graph import DotBackend, get_cycles
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
from pylint.reporters.ureports.nodes import Paragraph, VerbatimText
|
||||
from pylint.utils import get_global_option
|
||||
|
||||
|
||||
def _qualified_names(modname):
|
||||
"""Split the names of the given module into subparts
|
||||
|
||||
For example,
|
||||
_qualified_names('pylint.checkers.ImportsChecker')
|
||||
returns
|
||||
['pylint', 'pylint.checkers', 'pylint.checkers.ImportsChecker']
|
||||
"""
|
||||
names = modname.split(".")
|
||||
return [".".join(names[0 : i + 1]) for i in range(len(names))]
|
||||
|
||||
|
||||
def _get_import_name(importnode, modname):
    """Get a prepared module name from the given import node.

    In the case of relative imports, this will return the absolute
    qualified module name, which might be useful for debugging.
    Otherwise, the initial module name is returned unchanged.
    """
    if not isinstance(importnode, astroid.ImportFrom) or not importnode.level:
        return modname
    root = importnode.root()
    if not isinstance(root, astroid.Module):
        return modname
    return root.relative_to_absolute_name(modname, level=importnode.level)
|
||||
|
||||
def _get_first_import(node, context, name, base, level, alias):
    """return the node where [base.]<name> is imported or None if not found
    """
    fullname = "%s.%s" % (base, name) if base else name

    first = None
    found = False
    for first in context.body:
        if first is node:
            continue
        # Only statements from the same scope that appear *before* the
        # checked node can count as the first import.
        if first.scope() is node.scope() and first.fromlineno > node.fromlineno:
            continue
        if isinstance(first, astroid.Import):
            if any(fullname == iname[0] for iname in first.names):
                found = True
                break
        elif isinstance(first, astroid.ImportFrom):
            # Relative imports only match at the same level.
            if level == first.level:
                for imported_name, imported_alias in first.names:
                    if fullname == "%s.%s" % (first.modname, imported_name):
                        found = True
                        break
                    # Same plain name imported twice (no aliases on either
                    # side, wildcard excluded) also counts as a reimport.
                    if (
                        name != "*"
                        and name == imported_name
                        and not (alias or imported_alias)
                    ):
                        found = True
                        break
                if found:
                    break
    # Mutually exclusive branches (e.g. if/else) are allowed to import
    # the same name without being reported.
    if found and not astroid.are_exclusive(first, node):
        return first
    return None
|
||||
|
||||
def _ignore_import_failure(node, modname, ignored_modules):
    """Return True when a failed import of *modname* should go unreported.

    That is the case when any dotted prefix of the module name is in
    *ignored_modules*, or when the import statement is guarded by a
    handler for ImportError.
    """
    if any(prefix in ignored_modules for prefix in _qualified_names(modname)):
        return True
    return node_ignores_exception(node, ImportError)
|
||||
|
||||
# utilities to represents import dependencies as tree and dot graph ###########
|
||||
|
||||
|
||||
def _make_tree_defs(mod_files_list):
|
||||
"""get a list of 2-uple (module, list_of_files_which_import_this_module),
|
||||
it will return a dictionary to represent this as a tree
|
||||
"""
|
||||
tree_defs = {}
|
||||
for mod, files in mod_files_list:
|
||||
node = (tree_defs, ())
|
||||
for prefix in mod.split("."):
|
||||
node = node[0].setdefault(prefix, [{}, []])
|
||||
node[1] += files
|
||||
return tree_defs
|
||||
|
||||
|
||||
def _repr_tree_defs(data, indent_str=None):
|
||||
"""return a string which represents imports as a tree"""
|
||||
lines = []
|
||||
nodes = data.items()
|
||||
for i, (mod, (sub, files)) in enumerate(sorted(nodes, key=lambda x: x[0])):
|
||||
if not files:
|
||||
files = ""
|
||||
else:
|
||||
files = "(%s)" % ",".join(sorted(files))
|
||||
if indent_str is None:
|
||||
lines.append("%s %s" % (mod, files))
|
||||
sub_indent_str = " "
|
||||
else:
|
||||
lines.append(r"%s\-%s %s" % (indent_str, mod, files))
|
||||
if i == len(nodes) - 1:
|
||||
sub_indent_str = "%s " % indent_str
|
||||
else:
|
||||
sub_indent_str = "%s| " % indent_str
|
||||
if sub:
|
||||
lines.append(_repr_tree_defs(sub, sub_indent_str))
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def _dependencies_graph(filename, dep_info):
    """Write the dependency information as a dot (graphviz) file."""
    emitted = set()
    printer = DotBackend(filename[:-4], rankdir="LR")
    printer.emit('URL="." node[shape="box"]')
    # First pass: declare every node exactly once.
    for modname, dependencies in sorted(dep_info.items()):
        emitted.add(modname)
        printer.emit_node(modname)
        for depmodname in dependencies:
            if depmodname not in emitted:
                emitted.add(depmodname)
                printer.emit_node(depmodname)
    # Second pass: draw the edges.
    for depmodname, dependencies in sorted(dep_info.items()):
        for modname in dependencies:
            printer.emit_edge(modname, depmodname)
    printer.generate(filename)
|
||||
|
||||
def _make_graph(filename, dep_info, sect, gtype):
    """Generate a dependencies graph and note its location in the
    report's section.
    """
    _dependencies_graph(filename, dep_info)
    message = "%simports graph has been written to %s" % (gtype, filename)
    sect.append(Paragraph(message))
|
||||
|
||||
# the import checker itself ###################################################
|
||||
|
||||
# Message table for the imports checker:
# message-id -> (template, symbolic-name, description[, extra options]).
MSGS = {
    "E0401": (
        "Unable to import %s",
        "import-error",
        "Used when pylint has been unable to import a module.",
        {"old_names": [("F0401", "old-import-error")]},
    ),
    "E0402": (
        "Attempted relative import beyond top-level package",
        "relative-beyond-top-level",
        "Used when a relative import tries to access too many levels "
        "in the current package.",
    ),
    "R0401": (
        "Cyclic import (%s)",
        "cyclic-import",
        "Used when a cyclic import between two or more modules is detected.",
    ),
    "W0401": (
        "Wildcard import %s",
        "wildcard-import",
        "Used when `from module import *` is detected.",
    ),
    "W0402": (
        "Uses of a deprecated module %r",
        "deprecated-module",
        "Used a module marked as deprecated is imported.",
    ),
    "W0404": (
        "Reimport %r (imported line %s)",
        "reimported",
        "Used when a module is reimported multiple times.",
    ),
    "W0406": (
        "Module import itself",
        "import-self",
        "Used when a module is importing itself.",
    ),
    "W0407": (
        "Prefer importing %r instead of %r",
        "preferred-module",
        "Used when a module imported has a preferred replacement module.",
    ),
    "W0410": (
        "__future__ import is not the first non docstring statement",
        "misplaced-future",
        "Python 2.5 and greater require __future__ import to be the "
        "first non docstring statement in the module.",
    ),
    "C0410": (
        "Multiple imports on one line (%s)",
        "multiple-imports",
        "Used when import statement importing multiple modules is detected.",
    ),
    "C0411": (
        "%s should be placed before %s",
        "wrong-import-order",
        "Used when PEP8 import order is not respected (standard imports "
        "first, then third-party libraries, then local imports)",
    ),
    "C0412": (
        "Imports from package %s are not grouped",
        "ungrouped-imports",
        "Used when imports are not grouped by packages",
    ),
    "C0413": (
        'Import "%s" should be placed at the top of the module',
        "wrong-import-position",
        "Used when code and imports are mixed",
    ),
    "C0414": (
        "Import alias does not rename original package",
        "useless-import-alias",
        "Used when an import alias is same as original package."
        "e.g using import numpy as numpy instead of import numpy as np",
    ),
    "C0415": (
        "Import outside toplevel (%s)",
        "import-outside-toplevel",
        "Used when an import statement is used anywhere other than the module "
        "toplevel. Move this import to the top of the file.",
    ),
}
|
||||
|
||||
# Default values for the known-standard-library / known-third-party /
# preferred-modules options of the ImportsChecker.
DEFAULT_STANDARD_LIBRARY = ()
# NOTE(review): "enchant" is the single module special-cased as third-party
# by default — presumably because isort cannot classify it reliably; confirm.
DEFAULT_KNOWN_THIRD_PARTY = ("enchant",)
DEFAULT_PREFERRED_MODULES = ()
|
||||
|
||||
|
||||
class ImportsChecker(BaseChecker):
    """checks for
    * external modules dependencies
    * relative / wildcard imports
    * cyclic imports
    * uses of deprecated modules
    * uses of modules instead of preferred modules
    """

    __implements__ = IAstroidChecker

    name = "imports"
    msgs = MSGS
    priority = -2
    # Also serves as the default value of the --deprecated-modules option.
    deprecated_modules = ("optparse", "tkinter.tix")

    options = (
        (
            "deprecated-modules",
            {
                "default": deprecated_modules,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Deprecated modules which should not be used,"
                " separated by a comma.",
            },
        ),
        (
            "preferred-modules",
            {
                "default": DEFAULT_PREFERRED_MODULES,
                "type": "csv",
                "metavar": "<module:preferred-module>",
                "help": "Couples of modules and preferred modules,"
                " separated by a comma.",
            },
        ),
        (
            "import-graph",
            {
                "default": "",
                "type": "string",
                "metavar": "<file.dot>",
                "help": "Create a graph of every (i.e. internal and"
                " external) dependencies in the given file"
                " (report RP0402 must not be disabled).",
            },
        ),
        (
            "ext-import-graph",
            {
                "default": "",
                "type": "string",
                "metavar": "<file.dot>",
                "help": "Create a graph of external dependencies in the"
                " given file (report RP0402 must not be disabled).",
            },
        ),
        (
            "int-import-graph",
            {
                "default": "",
                "type": "string",
                "metavar": "<file.dot>",
                "help": "Create a graph of internal dependencies in the"
                " given file (report RP0402 must not be disabled).",
            },
        ),
        (
            "known-standard-library",
            {
                "default": DEFAULT_STANDARD_LIBRARY,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Force import order to recognize a module as part of "
                "the standard compatibility libraries.",
            },
        ),
        (
            "known-third-party",
            {
                "default": DEFAULT_KNOWN_THIRD_PARTY,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Force import order to recognize a module as part of "
                "a third party library.",
            },
        ),
        (
            "allow-any-import-level",
            {
                "default": (),
                "type": "csv",
                "metavar": "<modules>",
                "help": (
                    "List of modules that can be imported at any level, not just "
                    "the top level one."
                ),
            },
        ),
        (
            "analyse-fallback-blocks",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Analyse import fallback blocks. This can be used to "
                "support both Python 2 and 3 compatible code, which "
                "means that the block might have code that exists "
                "only in one or another interpreter, leading to false "
                "positives when analysed.",
            },
        ),
        (
            "allow-wildcard-with-all",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Allow wildcard imports from modules that define __all__.",
            },
        ),
    )
|
||||
|
||||
    def __init__(self, linter=None):
        """Initialize per-checker state; real per-run state is (re)set in open()."""
        BaseChecker.__init__(self, linter)
        self.stats = None
        self.import_graph = None
        self._imports_stack = []
        # First statement of the module that is not an import; used by the
        # wrong-import-position check.
        self._first_non_import_node = None
        self._module_pkg = {}  # mapping of modules to the pkg they belong in
        self._allow_any_import_level = set()
        # Report callbacks registered with the linter's reporting machinery.
        self.reports = (
            ("RP0401", "External dependencies", self._report_external_dependencies),
            ("RP0402", "Modules dependencies graph", self._report_dependencies_graph),
        )

        self._site_packages = self._compute_site_packages()
|
||||
|
||||
    @staticmethod
    def _compute_site_packages():
        """Return the set of normalized site/dist-packages paths for this interpreter.

        NOTE(review): `sysconfig` here is presumably `distutils.sysconfig`
        (it uses `get_python_lib`, which the stdlib `sysconfig` module does
        not provide) — confirm against the file's import block.
        """

        def _normalized_path(path):
            # Absolute + case-normalized so paths compare reliably on Windows.
            return os.path.normcase(os.path.abspath(path))

        paths = set()
        # sys.real_prefix is set by (old-style) virtualenv; include both the
        # virtualenv prefix and the underlying interpreter prefix when present.
        real_prefix = getattr(sys, "real_prefix", None)
        for prefix in filter(None, (real_prefix, sys.prefix)):
            path = sysconfig.get_python_lib(prefix=prefix)
            path = _normalized_path(path)
            paths.add(path)

        # Handle Debian's derivatives /usr/local.
        if os.path.isfile("/etc/debian_version"):
            for prefix in filter(None, (real_prefix, sys.prefix)):
                libpython = os.path.join(
                    prefix,
                    "local",
                    "lib",
                    "python" + sysconfig.get_python_version(),
                    "dist-packages",
                )
                paths.add(libpython)
        return paths
|
||||
|
||||
def open(self):
|
||||
"""called before visiting project (i.e set of modules)"""
|
||||
self.linter.add_stats(dependencies={})
|
||||
self.linter.add_stats(cycles=[])
|
||||
self.stats = self.linter.stats
|
||||
self.import_graph = collections.defaultdict(set)
|
||||
self._module_pkg = {} # mapping of modules to the pkg they belong in
|
||||
self._excluded_edges = collections.defaultdict(set)
|
||||
self._ignored_modules = get_global_option(self, "ignored-modules", default=[])
|
||||
# Build a mapping {'module': 'preferred-module'}
|
||||
self.preferred_modules = dict(
|
||||
module.split(":")
|
||||
for module in self.config.preferred_modules
|
||||
if ":" in module
|
||||
)
|
||||
self._allow_any_import_level = set(self.config.allow_any_import_level)
|
||||
|
||||
def _import_graph_without_ignored_edges(self):
|
||||
filtered_graph = copy.deepcopy(self.import_graph)
|
||||
for node in filtered_graph:
|
||||
filtered_graph[node].difference_update(self._excluded_edges[node])
|
||||
return filtered_graph
|
||||
|
||||
def close(self):
|
||||
"""called before visiting project (i.e set of modules)"""
|
||||
if self.linter.is_message_enabled("cyclic-import"):
|
||||
graph = self._import_graph_without_ignored_edges()
|
||||
vertices = list(graph)
|
||||
for cycle in get_cycles(graph, vertices=vertices):
|
||||
self.add_message("cyclic-import", args=" -> ".join(cycle))
|
||||
|
||||
    @check_messages(*MSGS)
    def visit_import(self, node):
        """triggered when an import statement is seen"""
        self._check_reimport(node)
        self._check_import_as_rename(node)
        self._check_toplevel(node)

        names = [name for name, _ in node.names]
        # "import a, b" style: more than one module on a single statement.
        if len(names) >= 2:
            self.add_message("multiple-imports", args=", ".join(names), node=node)

        for name in names:
            self._check_deprecated_module(node, name)
            self._check_preferred_module(node, name)
            imported_module = self._get_imported_module(node, name)
            if isinstance(node.parent, astroid.Module):
                # Allow imports nested
                self._check_position(node)
            if isinstance(node.scope(), astroid.Module):
                self._record_import(node, imported_module)

            # _get_imported_module returns None when the module could not be
            # resolved (the appropriate message was already emitted there).
            if imported_module is None:
                continue

            self._add_imported_module(node, imported_module.name)
|
||||
|
||||
    @check_messages(*MSGS)
    def visit_importfrom(self, node):
        """triggered when a from statement is seen"""
        basename = node.modname
        imported_module = self._get_imported_module(node, basename)

        self._check_import_as_rename(node)
        self._check_misplaced_future(node)
        self._check_deprecated_module(node, basename)
        self._check_preferred_module(node, basename)
        self._check_wildcard_imports(node, imported_module)
        self._check_same_line_imports(node)
        self._check_reimport(node, basename=basename, level=node.level)
        self._check_toplevel(node)

        if isinstance(node.parent, astroid.Module):
            # Allow imports nested
            self._check_position(node)
        if isinstance(node.scope(), astroid.Module):
            self._record_import(node, imported_module)
        # None means the module failed to resolve; the message was emitted
        # inside _get_imported_module already.
        if imported_module is None:
            return
        for name, _ in node.names:
            if name != "*":
                # Record each imported attribute as "module.name".
                self._add_imported_module(node, "%s.%s" % (imported_module.name, name))
            else:
                self._add_imported_module(node, imported_module.name)
|
||||
|
||||
    @check_messages(*MSGS)
    def leave_module(self, node):
        """Run the module-wide ordering/grouping checks on the collected imports."""
        # Check imports are grouped by category (standard, 3rd party, local)
        std_imports, ext_imports, loc_imports = self._check_imports_order(node)

        # Check that imports are grouped by package within a given category
        met_import = set()  # set for 'import x' style
        met_from = set()  # set for 'from x import y' style
        current_package = None
        for import_node, import_name in std_imports + ext_imports + loc_imports:
            if not self.linter.is_message_enabled(
                "ungrouped-imports", import_node.fromlineno
            ):
                continue
            if isinstance(import_node, astroid.node_classes.ImportFrom):
                met = met_from
            else:
                met = met_import
            # Only the top-level package matters for grouping.
            package, _, _ = import_name.partition(".")
            # A package seen before, but not in the immediately preceding
            # import, means the package's imports are split up.
            if current_package and current_package != package and package in met:
                self.add_message("ungrouped-imports", node=import_node, args=package)
            current_package = package
            met.add(package)

        # Reset per-module state for the next module.
        self._imports_stack = []
        self._first_non_import_node = None
|
||||
|
||||
    def compute_first_non_import_node(self, node):
        """Record `node` as the module's first non-import statement, if applicable.

        Shared visit callback for several statement types (see the alias
        assignments defined after this method).
        """
        if not self.linter.is_message_enabled("wrong-import-position", node.fromlineno):
            return
        # if the node does not contain an import instruction, and if it is the
        # first node of the module, keep a track of it (all the import positions
        # of the module will be compared to the position of this first
        # instruction)
        if self._first_non_import_node:
            return
        if not isinstance(node.parent, astroid.Module):
            return
        # try/except and try/finally blocks that themselves contain imports
        # are treated as part of the import section, not as "real" code.
        nested_allowed = [astroid.TryExcept, astroid.TryFinally]
        is_nested_allowed = [
            allowed for allowed in nested_allowed if isinstance(node, allowed)
        ]
        if is_nested_allowed and any(
            node.nodes_of_class((astroid.Import, astroid.ImportFrom))
        ):
            return
        if isinstance(node, astroid.Assign):
            # Add compatibility for module level dunder names
            # https://www.python.org/dev/peps/pep-0008/#module-level-dunder-names
            valid_targets = [
                isinstance(target, astroid.AssignName)
                and target.name.startswith("__")
                and target.name.endswith("__")
                for target in node.targets
            ]
            if all(valid_targets):
                return
        self._first_non_import_node = node
|
||||
|
||||
    # All of these statement types mark the end of the module's import
    # section, so visiting any of them records the first non-import node.
    visit_tryfinally = (
        visit_tryexcept
    ) = (
        visit_assignattr
    ) = (
        visit_assign
    ) = (
        visit_ifexp
    ) = visit_comprehension = visit_expr = visit_if = compute_first_non_import_node
|
||||
|
||||
    def visit_functiondef(self, node):
        """Record a definition/loop statement as the first non-import node.

        Shared by classdef/for/while via the alias assignment below.
        """
        if not self.linter.is_message_enabled("wrong-import-position", node.fromlineno):
            return
        # If it is the first non import instruction of the module, record it.
        if self._first_non_import_node:
            return

        # Check if the node belongs to an `If` or a `Try` block. If they
        # contain imports, skip recording this node.
        if not isinstance(node.parent.scope(), astroid.Module):
            return

        # Walk up to the statement directly under the module.
        root = node
        while not isinstance(root.parent, astroid.Module):
            root = root.parent

        if isinstance(root, (astroid.If, astroid.TryFinally, astroid.TryExcept)):
            if any(root.nodes_of_class((astroid.Import, astroid.ImportFrom))):
                return

        self._first_non_import_node = node

    visit_classdef = visit_for = visit_while = visit_functiondef
|
||||
|
||||
def _check_misplaced_future(self, node):
|
||||
basename = node.modname
|
||||
if basename == "__future__":
|
||||
# check if this is the first non-docstring statement in the module
|
||||
prev = node.previous_sibling()
|
||||
if prev:
|
||||
# consecutive future statements are possible
|
||||
if not (
|
||||
isinstance(prev, astroid.ImportFrom)
|
||||
and prev.modname == "__future__"
|
||||
):
|
||||
self.add_message("misplaced-future", node=node)
|
||||
return
|
||||
|
||||
def _check_same_line_imports(self, node):
|
||||
# Detect duplicate imports on the same line.
|
||||
names = (name for name, _ in node.names)
|
||||
counter = collections.Counter(names)
|
||||
for name, count in counter.items():
|
||||
if count > 1:
|
||||
self.add_message("reimported", node=node, args=(name, node.fromlineno))
|
||||
|
||||
def _check_position(self, node):
|
||||
"""Check `node` import or importfrom node position is correct
|
||||
|
||||
Send a message if `node` comes before another instruction
|
||||
"""
|
||||
# if a first non-import instruction has already been encountered,
|
||||
# it means the import comes after it and therefore is not well placed
|
||||
if self._first_non_import_node:
|
||||
self.add_message("wrong-import-position", node=node, args=node.as_string())
|
||||
|
||||
    def _record_import(self, node, importedmodnode):
        """Record the package `node` imports from"""
        if isinstance(node, astroid.ImportFrom):
            importedname = node.modname
        else:
            importedname = importedmodnode.name if importedmodnode else None
        if not importedname:
            # Fall back to the first dotted name written in the statement.
            importedname = node.names[0][0].split(".")[0]

        if isinstance(node, astroid.ImportFrom) and (node.level or 0) >= 1:
            # We need the importedname with first point to detect local package
            # Example of node:
            # 'from .my_package1 import MyClass1'
            # the output should be '.my_package1' instead of 'my_package1'
            # Example of node:
            # 'from . import my_package2'
            # the output should be '.my_package2' instead of '{pyfile}'
            importedname = "." + importedname

        # Consumed later by _check_imports_order.
        self._imports_stack.append((node, importedname))
|
||||
|
||||
@staticmethod
|
||||
def _is_fallback_import(node, imports):
|
||||
imports = [import_node for (import_node, _) in imports]
|
||||
return any(astroid.are_exclusive(import_node, node) for import_node in imports)
|
||||
|
||||
    def _check_imports_order(self, _module_node):
        """Checks imports of module `node` are grouped by category

        Imports must follow this order: standard, 3rd party, local

        Returns a (std, external, local) triple of (node, package) lists.
        """
        std_imports = []
        third_party_imports = []
        first_party_imports = []
        # need of a list that holds third or first party ordered import
        external_imports = []
        local_imports = []
        # "*_not_ignored" lists only contain top-level imports for which
        # wrong-import-order is enabled; they witness an earlier import of a
        # "later" category when a standard/third-party import shows up.
        third_party_not_ignored = []
        first_party_not_ignored = []
        local_not_ignored = []
        # isort does the actual categorization of a package name.
        isort_obj = isort.SortImports(
            file_contents="",
            known_third_party=self.config.known_third_party,
            known_standard_library=self.config.known_standard_library,
        )
        for node, modname in self._imports_stack:
            if modname.startswith("."):
                # Relative import: keep the leading dot plus first segment.
                package = "." + modname.split(".")[1]
            else:
                package = modname.split(".")[0]
            nested = not isinstance(node.parent, astroid.Module)
            ignore_for_import_order = not self.linter.is_message_enabled(
                "wrong-import-order", node.fromlineno
            )
            import_category = isort_obj.place_module(package)
            node_and_package_import = (node, package)
            if import_category in ("FUTURE", "STDLIB"):
                std_imports.append(node_and_package_import)
                wrong_import = (
                    third_party_not_ignored
                    or first_party_not_ignored
                    or local_not_ignored
                )
                # A std import inside a try/except fallback is legitimate.
                if self._is_fallback_import(node, wrong_import):
                    continue
                if wrong_import and not nested:
                    self.add_message(
                        "wrong-import-order",
                        node=node,
                        args=(
                            'standard import "%s"' % node.as_string(),
                            '"%s"' % wrong_import[0][0].as_string(),
                        ),
                    )
            elif import_category == "THIRDPARTY":
                third_party_imports.append(node_and_package_import)
                external_imports.append(node_and_package_import)
                if not nested and not ignore_for_import_order:
                    third_party_not_ignored.append(node_and_package_import)
                wrong_import = first_party_not_ignored or local_not_ignored
                if wrong_import and not nested:
                    self.add_message(
                        "wrong-import-order",
                        node=node,
                        args=(
                            'third party import "%s"' % node.as_string(),
                            '"%s"' % wrong_import[0][0].as_string(),
                        ),
                    )
            elif import_category == "FIRSTPARTY":
                first_party_imports.append(node_and_package_import)
                external_imports.append(node_and_package_import)
                if not nested and not ignore_for_import_order:
                    first_party_not_ignored.append(node_and_package_import)
                wrong_import = local_not_ignored
                if wrong_import and not nested:
                    self.add_message(
                        "wrong-import-order",
                        node=node,
                        args=(
                            'first party import "%s"' % node.as_string(),
                            '"%s"' % wrong_import[0][0].as_string(),
                        ),
                    )
            elif import_category == "LOCALFOLDER":
                local_imports.append((node, package))
                if not nested and not ignore_for_import_order:
                    local_not_ignored.append((node, package))
        return std_imports, external_imports, local_imports
|
||||
|
||||
    def _get_imported_module(self, importnode, modname):
        """Resolve `modname` to an astroid module, emitting messages on failure.

        Returns the astroid Module, or None when resolution failed (the
        relevant message has then already been added).
        """
        try:
            return importnode.do_import_module(modname)
        except astroid.TooManyLevelsError:
            # More leading dots than there are package levels.
            if _ignore_import_failure(importnode, modname, self._ignored_modules):
                return None
            self.add_message("relative-beyond-top-level", node=importnode)
        except astroid.AstroidSyntaxError as exc:
            message = "Cannot import {!r} due to syntax error {!r}".format(
                modname, str(exc.error)  # pylint: disable=no-member; false positive
            )
            self.add_message("syntax-error", line=importnode.lineno, args=message)

        except astroid.AstroidBuildingException:
            if not self.linter.is_message_enabled("import-error"):
                return None
            if _ignore_import_failure(importnode, modname, self._ignored_modules):
                return None
            # Imports inside a py2/py3 fallback block are expected to fail in
            # one interpreter; skip them unless analysis was requested.
            if not self.config.analyse_fallback_blocks and is_from_fallback_block(
                importnode
            ):
                return None

            dotted_modname = _get_import_name(importnode, modname)
            self.add_message("import-error", args=repr(dotted_modname), node=importnode)
|
||||
|
||||
    def _add_imported_module(self, node, importedmodname):
        """notify an imported module, used to analyze dependencies"""
        module_file = node.root().file
        context_name = node.root().name
        base = os.path.splitext(os.path.basename(module_file))[0]

        try:
            # Reduce "pkg.mod.attr" to the importable module part.
            importedmodname = modutils.get_module_part(importedmodname, module_file)
        except ImportError:
            # Best effort: keep the raw name when resolution fails.
            pass

        if context_name == importedmodname:
            self.add_message("import-self", node=node)

        elif not modutils.is_standard_module(importedmodname):
            # if this is not a package __init__ module
            if base != "__init__" and context_name not in self._module_pkg:
                # record the module's parent, or the module itself if this is
                # a top level module, as the package it belongs to
                self._module_pkg[context_name] = context_name.rsplit(".", 1)[0]

            # handle dependencies
            importedmodnames = self.stats["dependencies"].setdefault(
                importedmodname, set()
            )
            if context_name not in importedmodnames:
                importedmodnames.add(context_name)

            # update import graph
            self.import_graph[context_name].add(importedmodname)
            # Edges whose cyclic-import message is disabled at this line are
            # remembered so close() can filter them out of the cycle search.
            if not self.linter.is_message_enabled("cyclic-import", line=node.lineno):
                self._excluded_edges[context_name].add(importedmodname)
|
||||
|
||||
def _check_deprecated_module(self, node, mod_path):
|
||||
"""check if the module is deprecated"""
|
||||
for mod_name in self.config.deprecated_modules:
|
||||
if mod_path == mod_name or mod_path.startswith(mod_name + "."):
|
||||
self.add_message("deprecated-module", node=node, args=mod_path)
|
||||
|
||||
def _check_preferred_module(self, node, mod_path):
|
||||
"""check if the module has a preferred replacement"""
|
||||
if mod_path in self.preferred_modules:
|
||||
self.add_message(
|
||||
"preferred-module",
|
||||
node=node,
|
||||
args=(self.preferred_modules[mod_path], mod_path),
|
||||
)
|
||||
|
||||
def _check_import_as_rename(self, node):
|
||||
names = node.names
|
||||
for name in names:
|
||||
if not all(name):
|
||||
return
|
||||
|
||||
real_name = name[0]
|
||||
splitted_packages = real_name.rsplit(".")
|
||||
real_name = splitted_packages[-1]
|
||||
imported_name = name[1]
|
||||
# consider only following cases
|
||||
# import x as x
|
||||
# and ignore following
|
||||
# import x.y.z as z
|
||||
if real_name == imported_name and len(splitted_packages) == 1:
|
||||
self.add_message("useless-import-alias", node=node)
|
||||
|
||||
    def _check_reimport(self, node, basename=None, level=None):
        """check if the import is necessary (i.e. not already done)"""
        if not self.linter.is_message_enabled("reimported"):
            return

        # Look for an earlier equivalent import both in the enclosing frame
        # and, when different, at module level.
        frame = node.frame()
        root = node.root()
        contexts = [(frame, level)]
        if root is not frame:
            contexts.append((root, None))

        for known_context, known_level in contexts:
            for name, alias in node.names:
                first = _get_first_import(
                    node, known_context, name, basename, known_level, alias
                )
                if first is not None:
                    self.add_message(
                        "reimported", node=node, args=(name, first.fromlineno)
                    )
|
||||
|
||||
    def _report_external_dependencies(self, sect, _, _dummy):
        """return a verbatim layout for displaying dependencies"""
        dep_info = _make_tree_defs(self._external_dependencies_info().items())
        # An empty report is suppressed rather than rendered blank.
        if not dep_info:
            raise EmptyReportError()
        tree_str = _repr_tree_defs(dep_info)
        sect.append(VerbatimText(tree_str))
|
||||
|
||||
    def _report_dependencies_graph(self, sect, _, _dummy):
        """write dependencies as a dot (graphviz) file"""
        dep_info = self.stats["dependencies"]
        # Nothing to do when there are no dependencies or no output file
        # was requested through any of the three graph options.
        if not dep_info or not (
            self.config.import_graph
            or self.config.ext_import_graph
            or self.config.int_import_graph
        ):
            raise EmptyReportError()
        filename = self.config.import_graph
        if filename:
            _make_graph(filename, dep_info, sect, "")
        filename = self.config.ext_import_graph
        if filename:
            _make_graph(filename, self._external_dependencies_info(), sect, "external ")
        filename = self.config.int_import_graph
        if filename:
            _make_graph(filename, self._internal_dependencies_info(), sect, "internal ")
|
||||
|
||||
def _filter_dependencies_graph(self, internal):
|
||||
"""build the internal or the external dependency graph"""
|
||||
graph = collections.defaultdict(set)
|
||||
for importee, importers in self.stats["dependencies"].items():
|
||||
for importer in importers:
|
||||
package = self._module_pkg.get(importer, importer)
|
||||
is_inside = importee.startswith(package)
|
||||
if is_inside and internal or not is_inside and not internal:
|
||||
graph[importee].add(importer)
|
||||
return graph
|
||||
|
||||
    @cached
    def _external_dependencies_info(self):
        """Return cached external dependencies information, building and
        caching it on first use.
        """
        return self._filter_dependencies_graph(internal=False)
|
||||
|
||||
    @cached
    def _internal_dependencies_info(self):
        """Return cached internal dependencies information, building and
        caching it on first use.
        """
        return self._filter_dependencies_graph(internal=True)
|
||||
|
||||
def _check_wildcard_imports(self, node, imported_module):
|
||||
if node.root().package:
|
||||
# Skip the check if in __init__.py issue #2026
|
||||
return
|
||||
|
||||
wildcard_import_is_allowed = self._wildcard_import_is_allowed(imported_module)
|
||||
for name, _ in node.names:
|
||||
if name == "*" and not wildcard_import_is_allowed:
|
||||
self.add_message("wildcard-import", args=node.modname, node=node)
|
||||
|
||||
def _wildcard_import_is_allowed(self, imported_module):
|
||||
return (
|
||||
self.config.allow_wildcard_with_all
|
||||
and imported_module is not None
|
||||
and "__all__" in imported_module.locals
|
||||
)
|
||||
|
||||
    def _check_toplevel(self, node):
        """Check whether the import is made outside the module toplevel."""
        # If the scope of the import is a module, then obviously it is
        # not outside the module toplevel.
        if isinstance(node.scope(), astroid.Module):
            return

        # For "from" imports, qualify each name with its module.
        module_names = [
            "{}.{}".format(node.modname, name[0])
            if isinstance(node, astroid.ImportFrom)
            else name[0]
            for name in node.names
        ]

        # Get the full names of all the imports that are not whitelisted.
        scoped_imports = [
            name for name in module_names if name not in self._allow_any_import_level
        ]

        if scoped_imports:
            self.add_message(
                "import-outside-toplevel", args=", ".join(scoped_imports), node=node
            )
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto-register this checker with the linter."""
    checker = ImportsChecker(linter)
    linter.register_checker(checker)
|
||||
415
venv/lib/python3.8/site-packages/pylint/checkers/logging.py
Normal file
415
venv/lib/python3.8/site-packages/pylint/checkers/logging.py
Normal file
@@ -0,0 +1,415 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2009, 2012, 2014 Google, Inc.
|
||||
# Copyright (c) 2012 Mike Bryant <leachim@leachim.info>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016, 2019-2020 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2016 Chris Murray <chris@chrismurray.scot>
|
||||
# Copyright (c) 2017 guillaume2 <guillaume.peillex@gmail.col>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2018 Alan Chan <achan961117@gmail.com>
|
||||
# Copyright (c) 2018 Yury Gribov <tetra2005@gmail.com>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Mariatta Wijaya <mariatta@python.org>
|
||||
# Copyright (c) 2019 Djailla <bastien.vallet@gmail.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2019 Svet <svet@hyperscience.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""checker for use of Python logging
|
||||
"""
|
||||
import string
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint import checkers, interfaces
|
||||
from pylint.checkers import utils
|
||||
from pylint.checkers.utils import check_messages
|
||||
|
||||
# Message definitions for the logging checker: id -> (template, symbol, help).
MSGS = {
    "W1201": (
        "Use %s formatting in logging functions",
        "logging-not-lazy",
        "Used when a logging statement has a call form of "
        '"logging.<logging method>(format_string % (format_args...))". '
        "Use another type of string formatting instead. "
        "You can use % formatting but leave interpolation to "
        "the logging function by passing the parameters as arguments. "
        "If logging-fstring-interpolation is disabled then "
        "you can use fstring formatting. "
        "If logging-format-interpolation is disabled then "
        "you can use str.format.",
    ),
    "W1202": (
        "Use %s formatting in logging functions",
        "logging-format-interpolation",
        "Used when a logging statement has a call form of "
        '"logging.<logging method>(format_string.format(format_args...))". '
        "Use another type of string formatting instead. "
        "You can use % formatting but leave interpolation to "
        "the logging function by passing the parameters as arguments. "
        "If logging-fstring-interpolation is disabled then "
        "you can use fstring formatting. "
        "If logging-not-lazy is disabled then "
        "you can use % formatting as normal.",
    ),
    "W1203": (
        "Use %s formatting in logging functions",
        "logging-fstring-interpolation",
        "Used when a logging statement has a call form of "
        '"logging.<logging method>(f"...")".'
        "Use another type of string formatting instead. "
        "You can use % formatting but leave interpolation to "
        "the logging function by passing the parameters as arguments. "
        "If logging-format-interpolation is disabled then "
        "you can use str.format. "
        "If logging-not-lazy is disabled then "
        "you can use % formatting as normal.",
    ),
    "E1200": (
        "Unsupported logging format character %r (%#02x) at index %d",
        "logging-unsupported-format",
        "Used when an unsupported format character is used in a logging "
        "statement format string.",
    ),
    "E1201": (
        "Logging format string ends in middle of conversion specifier",
        "logging-format-truncated",
        "Used when a logging statement format string terminates before "
        "the end of a conversion specifier.",
    ),
    "E1205": (
        "Too many arguments for logging format string",
        "logging-too-many-args",
        "Used when a logging format string is given too many arguments.",
    ),
    "E1206": (
        "Not enough arguments for logging format string",
        "logging-too-few-args",
        "Used when a logging format string is given too few arguments.",
    ),
}
|
||||
|
||||
|
||||
# Names of the logging convenience methods whose format-string usage
# this checker inspects.
CHECKED_CONVENIENCE_FUNCTIONS = {
    "critical",
    "debug",
    "error",
    "exception",
    "fatal",
    "info",
    "warn",
    "warning",
}
|
||||
|
||||
|
||||
def is_method_call(func, types=(), methods=()):
    """Determines if a BoundMethod node represents a method call.

    Args:
        func (astroid.BoundMethod): The BoundMethod AST node to check.
        types (Optional[String]): Optional sequence of caller type names to restrict check.
        methods (Optional[String]): Optional sequence of method names to restrict check.

    Returns:
        bool: true if the node represents a method call for the given type and
        method names, False otherwise.
    """
    if not isinstance(func, astroid.BoundMethod):
        return False
    if not isinstance(func.bound, astroid.Instance):
        return False
    # Empty `types`/`methods` means "no restriction".
    if types and func.bound.name not in types:
        return False
    if methods and func.name not in methods:
        return False
    return True
|
||||
|
||||
|
||||
class LoggingChecker(checkers.BaseChecker):
    """Checks use of the logging module."""

    __implements__ = interfaces.IAstroidChecker
    name = "logging"
    msgs = MSGS

    options = (
        (
            "logging-modules",
            {
                "default": ("logging",),
                "type": "csv",
                "metavar": "<comma separated list>",
                "help": "Logging modules to check that the string format "
                "arguments are in logging function parameter format.",
            },
        ),
        (
            "logging-format-style",
            {
                "default": "old",
                "type": "choice",
                "metavar": "<old (%) or new ({)>",
                "choices": ["old", "new"],
                "help": "The type of string formatting that logging methods do. "
                "`old` means using % formatting, `new` is for `{}` formatting.",
            },
        ),
    )
|
||||
|
||||
def visit_module(self, node): # pylint: disable=unused-argument
|
||||
"""Clears any state left in this checker from last module checked."""
|
||||
# The code being checked can just as easily "import logging as foo",
|
||||
# so it is necessary to process the imports and store in this field
|
||||
# what name the logging module is actually given.
|
||||
self._logging_names = set()
|
||||
logging_mods = self.config.logging_modules
|
||||
|
||||
self._format_style = self.config.logging_format_style
|
||||
|
||||
self._logging_modules = set(logging_mods)
|
||||
self._from_imports = {}
|
||||
for logging_mod in logging_mods:
|
||||
parts = logging_mod.rsplit(".", 1)
|
||||
if len(parts) > 1:
|
||||
self._from_imports[parts[0]] = parts[1]
|
||||
|
||||
def visit_importfrom(self, node):
|
||||
"""Checks to see if a module uses a non-Python logging module."""
|
||||
try:
|
||||
logging_name = self._from_imports[node.modname]
|
||||
for module, as_name in node.names:
|
||||
if module == logging_name:
|
||||
self._logging_names.add(as_name or module)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def visit_import(self, node):
|
||||
"""Checks to see if this module uses Python's built-in logging."""
|
||||
for module, as_name in node.names:
|
||||
if module in self._logging_modules:
|
||||
self._logging_names.add(as_name or module)
|
||||
|
||||
@check_messages(*MSGS)
|
||||
def visit_call(self, node):
|
||||
"""Checks calls to logging methods."""
|
||||
|
||||
def is_logging_name():
|
||||
return (
|
||||
isinstance(node.func, astroid.Attribute)
|
||||
and isinstance(node.func.expr, astroid.Name)
|
||||
and node.func.expr.name in self._logging_names
|
||||
)
|
||||
|
||||
def is_logger_class():
|
||||
try:
|
||||
for inferred in node.func.infer():
|
||||
if isinstance(inferred, astroid.BoundMethod):
|
||||
parent = inferred._proxied.parent
|
||||
if isinstance(parent, astroid.ClassDef) and (
|
||||
parent.qname() == "logging.Logger"
|
||||
or any(
|
||||
ancestor.qname() == "logging.Logger"
|
||||
for ancestor in parent.ancestors()
|
||||
)
|
||||
):
|
||||
return True, inferred._proxied.name
|
||||
except astroid.exceptions.InferenceError:
|
||||
pass
|
||||
return False, None
|
||||
|
||||
if is_logging_name():
|
||||
name = node.func.attrname
|
||||
else:
|
||||
result, name = is_logger_class()
|
||||
if not result:
|
||||
return
|
||||
self._check_log_method(node, name)
|
||||
|
||||
def _check_log_method(self, node, name):
|
||||
"""Checks calls to logging.log(level, format, *format_args)."""
|
||||
if name == "log":
|
||||
if node.starargs or node.kwargs or len(node.args) < 2:
|
||||
# Either a malformed call, star args, or double-star args. Beyond
|
||||
# the scope of this checker.
|
||||
return
|
||||
format_pos = 1
|
||||
elif name in CHECKED_CONVENIENCE_FUNCTIONS:
|
||||
if node.starargs or node.kwargs or not node.args:
|
||||
# Either no args, star args, or double-star args. Beyond the
|
||||
# scope of this checker.
|
||||
return
|
||||
format_pos = 0
|
||||
else:
|
||||
return
|
||||
|
||||
if isinstance(node.args[format_pos], astroid.BinOp):
|
||||
binop = node.args[format_pos]
|
||||
emit = binop.op == "%"
|
||||
if binop.op == "+":
|
||||
total_number_of_strings = sum(
|
||||
1
|
||||
for operand in (binop.left, binop.right)
|
||||
if self._is_operand_literal_str(utils.safe_infer(operand))
|
||||
)
|
||||
emit = total_number_of_strings > 0
|
||||
if emit:
|
||||
self.add_message(
|
||||
"logging-not-lazy", node=node, args=(self._helper_string(node),),
|
||||
)
|
||||
elif isinstance(node.args[format_pos], astroid.Call):
|
||||
self._check_call_func(node.args[format_pos])
|
||||
elif isinstance(node.args[format_pos], astroid.Const):
|
||||
self._check_format_string(node, format_pos)
|
||||
elif isinstance(node.args[format_pos], astroid.JoinedStr):
|
||||
self.add_message(
|
||||
"logging-fstring-interpolation",
|
||||
node=node,
|
||||
args=(self._helper_string(node),),
|
||||
)
|
||||
|
||||
def _helper_string(self, node):
|
||||
"""Create a string that lists the valid types of formatting for this node."""
|
||||
valid_types = ["lazy %"]
|
||||
|
||||
if not self.linter.is_message_enabled(
|
||||
"logging-fstring-formatting", node.fromlineno
|
||||
):
|
||||
valid_types.append("fstring")
|
||||
if not self.linter.is_message_enabled(
|
||||
"logging-format-interpolation", node.fromlineno
|
||||
):
|
||||
valid_types.append(".format()")
|
||||
if not self.linter.is_message_enabled("logging-not-lazy", node.fromlineno):
|
||||
valid_types.append("%")
|
||||
|
||||
return " or ".join(valid_types)
|
||||
|
||||
@staticmethod
|
||||
def _is_operand_literal_str(operand):
|
||||
"""
|
||||
Return True if the operand in argument is a literal string
|
||||
"""
|
||||
return isinstance(operand, astroid.Const) and operand.name == "str"
|
||||
|
||||
def _check_call_func(self, node):
|
||||
"""Checks that function call is not format_string.format().
|
||||
|
||||
Args:
|
||||
node (astroid.node_classes.Call):
|
||||
Call AST node to be checked.
|
||||
"""
|
||||
func = utils.safe_infer(node.func)
|
||||
types = ("str", "unicode")
|
||||
methods = ("format",)
|
||||
if is_method_call(func, types, methods) and not is_complex_format_str(
|
||||
func.bound
|
||||
):
|
||||
self.add_message(
|
||||
"logging-format-interpolation",
|
||||
node=node,
|
||||
args=(self._helper_string(node),),
|
||||
)
|
||||
|
||||
def _check_format_string(self, node, format_arg):
|
||||
"""Checks that format string tokens match the supplied arguments.
|
||||
|
||||
Args:
|
||||
node (astroid.node_classes.NodeNG): AST node to be checked.
|
||||
format_arg (int): Index of the format string in the node arguments.
|
||||
"""
|
||||
num_args = _count_supplied_tokens(node.args[format_arg + 1 :])
|
||||
if not num_args:
|
||||
# If no args were supplied the string is not interpolated and can contain
|
||||
# formatting characters - it's used verbatim. Don't check any further.
|
||||
return
|
||||
|
||||
format_string = node.args[format_arg].value
|
||||
required_num_args = 0
|
||||
if isinstance(format_string, bytes):
|
||||
format_string = format_string.decode()
|
||||
if isinstance(format_string, str):
|
||||
try:
|
||||
if self._format_style == "old":
|
||||
keyword_args, required_num_args, _, _ = utils.parse_format_string(
|
||||
format_string
|
||||
)
|
||||
if keyword_args:
|
||||
# Keyword checking on logging strings is complicated by
|
||||
# special keywords - out of scope.
|
||||
return
|
||||
elif self._format_style == "new":
|
||||
(
|
||||
keyword_arguments,
|
||||
implicit_pos_args,
|
||||
explicit_pos_args,
|
||||
) = utils.parse_format_method_string(format_string)
|
||||
|
||||
keyword_args_cnt = len(
|
||||
{k for k, l in keyword_arguments if not isinstance(k, int)}
|
||||
)
|
||||
required_num_args = (
|
||||
keyword_args_cnt + implicit_pos_args + explicit_pos_args
|
||||
)
|
||||
except utils.UnsupportedFormatCharacter as ex:
|
||||
char = format_string[ex.index]
|
||||
self.add_message(
|
||||
"logging-unsupported-format",
|
||||
node=node,
|
||||
args=(char, ord(char), ex.index),
|
||||
)
|
||||
return
|
||||
except utils.IncompleteFormatString:
|
||||
self.add_message("logging-format-truncated", node=node)
|
||||
return
|
||||
if num_args > required_num_args:
|
||||
self.add_message("logging-too-many-args", node=node)
|
||||
elif num_args < required_num_args:
|
||||
self.add_message("logging-too-few-args", node=node)
|
||||
|
||||
|
||||
def is_complex_format_str(node):
    """Checks if node represents a string with complex formatting specs.

    Args:
        node (astroid.node_classes.NodeNG): AST node to check
    Returns:
        bool: True if inferred string uses complex formatting, False otherwise
    """
    inferred = utils.safe_infer(node)
    # Anything that does not infer to a literal str is conservatively
    # treated as "complex" (this also covers a failed inference: None).
    if not (isinstance(inferred, astroid.Const) and isinstance(inferred.value, str)):
        return True
    try:
        # A non-empty format spec in any replacement field makes the
        # string "complex" (e.g. "{0:.3f}").
        return any(
            format_spec
            for _, _, format_spec, _ in string.Formatter().parse(inferred.value)
        )
    except ValueError:
        # This format string is invalid
        return False
|
||||
|
||||
|
||||
def _count_supplied_tokens(args):
    """Counts the number of tokens in an args list.

    The Python log functions allow for special keyword arguments: func,
    exc_info and extra. To handle these cases correctly, we only count
    arguments that aren't keywords.

    Args:
        args (list): AST nodes that are arguments for a log format string.

    Returns:
        int: Number of AST nodes that aren't keywords.
    """
    positional = [arg for arg in args if not isinstance(arg, astroid.Keyword)]
    return len(positional)
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto-register this checker."""
    checker = LoggingChecker(linter)
    linter.register_checker(checker)
|
||||
199
venv/lib/python3.8/site-packages/pylint/checkers/misc.py
Normal file
199
venv/lib/python3.8/site-packages/pylint/checkers/misc.py
Normal file
@@ -0,0 +1,199 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006, 2009-2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012-2014 Google, Inc.
|
||||
# Copyright (c) 2014-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Alexandru Coman <fcoman@bitdefender.com>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2016 glegoux <gilles.legoux@gmail.com>
|
||||
# Copyright (c) 2017-2019 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
|
||||
# Copyright (c) 2018 Rogalski, Lukasz <lukasz.rogalski@intel.com>
|
||||
# Copyright (c) 2018 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2019-2020 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2020 Benny <benny.mueller91@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
|
||||
"""Check source code is ascii only or has an encoding declaration (PEP 263)"""
|
||||
|
||||
import re
|
||||
import tokenize
|
||||
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.interfaces import IRawChecker, ITokenChecker
|
||||
from pylint.message import MessagesHandlerMixIn
|
||||
from pylint.utils.pragma_parser import OPTION_PO, PragmaParserError, parse_pragma
|
||||
|
||||
|
||||
class ByIdManagedMessagesChecker(BaseChecker):

    """checks for messages that are enabled or disabled by id instead of symbol."""

    __implements__ = IRawChecker

    # configuration section name
    name = "miscellaneous"
    msgs = {
        "I0023": (
            "%s",
            "use-symbolic-message-instead",
            "Used when a message is enabled or disabled by id.",
        )
    }

    options = ()

    def process_module(self, module):
        """inspect the source file to find messages activated or deactivated by id."""
        managed = MessagesHandlerMixIn.get_by_id_managed_msgs()
        for mod_name, msg_id, msg_symbol, lineno, is_disabled in managed:
            # Only report pragmas belonging to the module being processed.
            if mod_name != module.name:
                continue
            template = (
                "Id '{ident}' is used to disable '{symbol}' message emission"
                if is_disabled
                else "Id '{ident}' is used to enable '{symbol}' message emission"
            )
            txt = template.format(ident=msg_id, symbol=msg_symbol)
            self.add_message("use-symbolic-message-instead", line=lineno, args=txt)
        MessagesHandlerMixIn.clear_by_id_managed_msgs()
|
||||
|
||||
|
||||
class EncodingChecker(BaseChecker):

    """checks for:
    * warning notes in the code like FIXME, XXX
    * encoding issues.
    """

    __implements__ = (IRawChecker, ITokenChecker)

    # configuration section name
    name = "miscellaneous"
    msgs = {
        "W0511": (
            "%s",
            "fixme",
            "Used when a warning note as FIXME or XXX is detected.",
        )
    }

    options = (
        (
            "notes",
            {
                "type": "csv",
                "metavar": "<comma separated values>",
                "default": ("FIXME", "XXX", "TODO"),
                "help": (
                    "List of note tags to take in consideration, "
                    "separated by a comma."
                ),
            },
        ),
        (
            "notes-rgx",
            {
                "type": "string",
                "metavar": "<regexp>",
                "help": "Regular expression of note tags to take in consideration.",
            },
        ),
    )

    def open(self):
        """Compile the note-tag pattern once from the configured tags/regex."""
        super().open()

        # Escape every configured tag so characters such as '?' or '+'
        # are matched literally, then alternate them into one pattern.
        notes = "|".join(map(re.escape, self.config.notes))
        if self.config.notes_rgx:
            regex_string = r"#\s*(%s|%s)\b" % (notes, self.config.notes_rgx)
        else:
            regex_string = r"#\s*(%s)\b" % (notes)

        self._fixme_pattern = re.compile(regex_string, re.I)

    def _check_encoding(self, lineno, line, file_encoding):
        """Try to decode *line* with *file_encoding*; return the decoded str.

        Returns None when the line cannot be decoded; emits syntax-error
        when the declared encoding itself is unknown.
        """
        try:
            return line.decode(file_encoding)
        except UnicodeDecodeError:
            pass
        except LookupError:
            # NOTE(review): *line* comes from the raw module stream and is
            # presumably bytes, while the operands below are str — this
            # comparison looks like it would raise TypeError on Python 3.
            # Confirm and compare against bytes (b"#", b"coding") if so.
            if line.startswith("#") and "coding" in line and file_encoding in line:
                self.add_message(
                    "syntax-error",
                    line=lineno,
                    args='Cannot decode using encoding "{}",'
                    " bad encoding".format(file_encoding),
                )

    def process_module(self, module):
        """inspect the source file to find encoding problem"""
        # Fall back to ASCII when the module declares no encoding (PEP 263).
        if module.file_encoding:
            encoding = module.file_encoding
        else:
            encoding = "ascii"

        with module.stream() as stream:
            for lineno, line in enumerate(stream):
                self._check_encoding(lineno + 1, line, encoding)

    def process_tokens(self, tokens):
        """inspect the source to find fixme problems"""
        if not self.config.notes:
            return
        comments = (
            token_info for token_info in tokens if token_info.type == tokenize.COMMENT
        )
        for comment in comments:
            comment_text = comment.string[1:].lstrip()  # trim '#' and whitespaces

            # handle pylint disable clauses
            disable_option_match = OPTION_PO.search(comment_text)
            if disable_option_match:
                try:
                    values = []
                    try:
                        for pragma_repr in (
                            p_rep
                            for p_rep in parse_pragma(disable_option_match.group(2))
                            if p_rep.action == "disable"
                        ):
                            values.extend(pragma_repr.messages)
                    except PragmaParserError:
                        # Printing useful information dealing with this error is done in the lint package
                        pass
                    # A "# pylint: disable=fixme" pragma on the same comment
                    # suppresses the fixme warning for that line.
                    values = [_val.upper() for _val in values]
                    if set(values) & set(self.config.notes):
                        continue
                except ValueError:
                    self.add_message(
                        "bad-inline-option",
                        args=disable_option_match.group(1).strip(),
                        line=comment.start[0],
                    )
                    continue

            # emit warnings if necessary
            match = self._fixme_pattern.search("#" + comment_text.lower())
            if match:
                note = match.group(1)
                self.add_message(
                    "fixme",
                    col_offset=comment.string.lower().index(note.lower()),
                    args=comment_text,
                    line=comment.start[0],
                )
|
||||
|
||||
|
||||
def register(linter):
    """required method to auto register this checker"""
    # Registration order matches the original module: encoding first.
    for checker_class in (EncodingChecker, ByIdManagedMessagesChecker):
        linter.register_checker(checker_class(linter))
|
||||
133
venv/lib/python3.8/site-packages/pylint/checkers/newstyle.py
Normal file
133
venv/lib/python3.8/site-packages/pylint/checkers/newstyle.py
Normal file
@@ -0,0 +1,133 @@
|
||||
# Copyright (c) 2006, 2008-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012-2014 Google, Inc.
|
||||
# Copyright (c) 2013-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2018 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 Natalie Serebryakova <natalie.serebryakova@Natalies-MacBook-Pro.local>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Robert Schweizer <robert_schweizer@gmx.de>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""check for new / old style related problems
|
||||
"""
|
||||
import astroid
|
||||
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.checkers.utils import check_messages, has_known_bases, node_frame_class
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
|
||||
# Message definitions for the newstyle checker (base id 10: newstyle).
MSGS = {
    "E1003": (
        "Bad first argument %r given to super()",
        "bad-super-call",
        "Used when another argument than the current class is given as "
        "first argument of the super builtin.",
    )
}
|
||||
|
||||
|
||||
class NewStyleConflictChecker(BaseChecker):
    """checks for usage of new style capabilities on old style classes and
    other new/old styles conflicts problems
    * use of property, __slots__, super
    * "super" usage
    """

    __implements__ = (IAstroidChecker,)

    # configuration section name
    name = "newstyle"
    # messages
    msgs = MSGS
    priority = -2
    # configuration options
    options = ()

    @check_messages("bad-super-call")
    def visit_functiondef(self, node):
        """check use of super"""
        # ignore actual functions or method within a new style class
        if not node.is_method():
            return
        klass = node.parent.frame()
        # Inspect every call made directly inside this method's frame.
        for stmt in node.nodes_of_class(astroid.Call):
            if node_frame_class(stmt) != node_frame_class(node):
                # Don't look down in other scopes.
                continue

            expr = stmt.func
            if not isinstance(expr, astroid.Attribute):
                continue

            call = expr.expr
            # skip the test if using super
            if not (
                isinstance(call, astroid.Call)
                and isinstance(call.func, astroid.Name)
                and call.func.name == "super"
            ):
                continue

            # super should not be used on an old style class
            if klass.newstyle or not has_known_bases(klass):
                # super first arg should not be the class
                if not call.args:
                    continue

                # calling super(type(self), self) can lead to recursion loop
                # in derived classes
                arg0 = call.args[0]
                if (
                    isinstance(arg0, astroid.Call)
                    and isinstance(arg0.func, astroid.Name)
                    and arg0.func.name == "type"
                ):
                    self.add_message("bad-super-call", node=call, args=("type",))
                    continue

                # calling super(self.__class__, self) can lead to recursion loop
                # in derived classes
                if (
                    len(call.args) >= 2
                    and isinstance(call.args[1], astroid.Name)
                    and call.args[1].name == "self"
                    and isinstance(arg0, astroid.Attribute)
                    and arg0.attrname == "__class__"
                ):
                    self.add_message(
                        "bad-super-call", node=call, args=("self.__class__",)
                    )
                    continue

                # Infer the class passed as super()'s first argument.
                try:
                    supcls = call.args and next(call.args[0].infer(), None)
                except astroid.InferenceError:
                    continue

                if klass is not supcls:
                    name = None
                    # if supcls is not Uninferable, then supcls was inferred
                    # and use its name. Otherwise, try to look
                    # for call.args[0].name
                    if supcls:
                        name = supcls.name
                    elif call.args and hasattr(call.args[0], "name"):
                        name = call.args[0].name
                    if name:
                        self.add_message("bad-super-call", node=call, args=(name,))

    # async methods get the same super() checks as regular ones
    visit_asyncfunctiondef = visit_functiondef
|
||||
|
||||
|
||||
def register(linter):
    """required method to auto register this checker """
    checker = NewStyleConflictChecker(linter)
    linter.register_checker(checker)
|
||||
1425
venv/lib/python3.8/site-packages/pylint/checkers/python3.py
Normal file
1425
venv/lib/python3.8/site-packages/pylint/checkers/python3.py
Normal file
File diff suppressed because it is too large
Load Diff
121
venv/lib/python3.8/site-packages/pylint/checkers/raw_metrics.py
Normal file
121
venv/lib/python3.8/site-packages/pylint/checkers/raw_metrics.py
Normal file
@@ -0,0 +1,121 @@
|
||||
# Copyright (c) 2007, 2010, 2013, 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013 Google, Inc.
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015 Mike Frysinger <vapier@gentoo.org>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
""" Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
|
||||
http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
|
||||
Raw metrics checker
|
||||
"""
|
||||
|
||||
import tokenize
|
||||
from typing import Any
|
||||
|
||||
from pylint.checkers import BaseTokenChecker
|
||||
from pylint.exceptions import EmptyReportError
|
||||
from pylint.interfaces import ITokenChecker
|
||||
from pylint.reporters.ureports.nodes import Table
|
||||
|
||||
|
||||
def report_raw_stats(sect, stats, _):
    """calculate percentage of code / doc / comment / empty
    """
    total_lines = stats["total_lines"]
    if not total_lines:
        raise EmptyReportError()
    sect.description = "%s lines have been analyzed" % total_lines
    # Flat cell list: header row first, then one 5-cell row per line type.
    cells = ["type", "number", "%", "previous", "difference"]
    for node_type in ("code", "docstring", "comment", "empty"):
        total = stats[node_type + "_lines"]
        percent = float(total * 100) / total_lines
        cells.extend([node_type, str(total), "%.2f" % percent, "NC", "NC"])
    sect.append(Table(children=tuple(cells), cols=5, rheaders=1))
|
||||
|
||||
|
||||
class RawMetricsChecker(BaseTokenChecker):
    """does not check anything but gives some raw metrics :
    * total number of lines
    * total number of code lines
    * total number of docstring lines
    * total number of comments lines
    * total number of empty lines
    """

    __implements__ = (ITokenChecker,)

    # configuration section name
    name = "metrics"
    # configuration options
    options = ()
    # messages (none: this checker only produces a report)
    msgs = {}  # type: Any
    # reports
    reports = (("RP0701", "Raw metrics", report_raw_stats),)

    def __init__(self, linter):
        BaseTokenChecker.__init__(self, linter)
        # Shared stats dict, populated in open() via the linter.
        self.stats = None

    def open(self):
        """init statistics"""
        self.stats = self.linter.add_stats(
            total_lines=0,
            code_lines=0,
            empty_lines=0,
            docstring_lines=0,
            comment_lines=0,
        )

    def process_tokens(self, tokens):
        """update stats"""
        # Walk the token stream one physical line at a time; get_type
        # returns the next index, the line count and the classification.
        i = 0
        tokens = list(tokens)
        while i < len(tokens):
            i, lines_number, line_type = get_type(tokens, i)
            self.stats["total_lines"] += lines_number
            self.stats[line_type] += lines_number
|
||||
|
||||
|
||||
# Token kinds that never determine a line's classification.
JUNK = (tokenize.NL, tokenize.INDENT, tokenize.NEWLINE, tokenize.ENDMARKER)


def get_type(tokens, start_index):
    """return the line type : docstring, comment, code, empty"""
    index = start_index
    start = tokens[index][2]
    pos = start
    line_type = None
    # Consume every token starting on the same physical line; the first
    # non-junk token decides the classification of the whole line.
    while index < len(tokens) and tokens[index][2][0] == start[0]:
        kind = tokens[index][0]
        pos = tokens[index][3]
        if line_type is None and kind not in JUNK:
            if kind == tokenize.STRING:
                line_type = "docstring_lines"
            elif kind == tokenize.COMMENT:
                line_type = "comment_lines"
            else:
                line_type = "code_lines"
        index += 1
    if line_type is None:
        line_type = "empty_lines"
    elif index < len(tokens) and tokens[index][0] == tokenize.NEWLINE:
        # Swallow the trailing NEWLINE of a multi-line statement.
        index += 1
    return index, pos[0] - start[0] + 1, line_type
|
||||
|
||||
|
||||
def register(linter):
    """ required method to auto register this checker """
    checker = RawMetricsChecker(linter)
    linter.register_checker(checker)
|
||||
1547
venv/lib/python3.8/site-packages/pylint/checkers/refactoring.py
Normal file
1547
venv/lib/python3.8/site-packages/pylint/checkers/refactoring.py
Normal file
File diff suppressed because it is too large
Load Diff
455
venv/lib/python3.8/site-packages/pylint/checkers/similar.py
Normal file
455
venv/lib/python3.8/site-packages/pylint/checkers/similar.py
Normal file
@@ -0,0 +1,455 @@
|
||||
# Copyright (c) 2006, 2008-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012 Ry4an Brase <ry4an-hg@ry4an.org>
|
||||
# Copyright (c) 2012 Google, Inc.
|
||||
# Copyright (c) 2012 Anthony VEREZ <anthony.verez.external@cassidian.com>
|
||||
# Copyright (c) 2014-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2017, 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
|
||||
# Copyright (c) 2018 Scott Worley <scottworley@scottworley.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Taewon D. Kim <kimt33@mcmaster.ca>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
# pylint: disable=redefined-builtin
|
||||
"""a similarities / code duplication command line tool and pylint checker
|
||||
"""
|
||||
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from getopt import getopt
|
||||
from itertools import groupby
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint.checkers import BaseChecker, table_lines_from_stats
|
||||
from pylint.interfaces import IRawChecker
|
||||
from pylint.reporters.ureports.nodes import Table
|
||||
from pylint.utils import decoding_stream
|
||||
|
||||
|
||||
class Similar:
    """finds copy-pasted lines of code in a project"""

    def __init__(
        self,
        min_lines=4,
        ignore_comments=False,
        ignore_docstrings=False,
        ignore_imports=False,
    ):
        # Minimum number of successive similar lines before reporting.
        self.min_lines = min_lines
        self.ignore_comments = ignore_comments
        self.ignore_docstrings = ignore_docstrings
        self.ignore_imports = ignore_imports
        # One LineSet per appended stream/file.
        self.linesets = []

    def append_stream(self, streamid, stream, encoding=None):
        """append a file to search for similarities"""
        if encoding is None:
            readlines = stream.readlines
        else:
            readlines = decoding_stream(stream, encoding).readlines
        try:
            self.linesets.append(
                LineSet(
                    streamid,
                    readlines(),
                    self.ignore_comments,
                    self.ignore_docstrings,
                    self.ignore_imports,
                )
            )
        except UnicodeDecodeError:
            # Undecodable streams are skipped silently (best effort).
            pass

    def run(self):
        """start looking for similarities and display results on stdout"""
        self._display_sims(self._compute_sims())

    def _compute_sims(self):
        """compute similarities in appended files"""
        # Group raw hits by similarity length, merging hits that share a
        # (lineset, index) endpoint into a single set of couples.
        no_duplicates = defaultdict(list)
        for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
            duplicate = no_duplicates[num]
            for couples in duplicate:
                if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
                    couples.add((lineset1, idx1))
                    couples.add((lineset2, idx2))
                    break
            else:
                duplicate.append({(lineset1, idx1), (lineset2, idx2)})
        sims = []
        for num, ensembles in no_duplicates.items():
            for couples in ensembles:
                sims.append((num, couples))
        # Longest similarities first.
        sims.sort()
        sims.reverse()
        return sims

    def _display_sims(self, sims):
        """display computed similarities on stdout"""
        nb_lignes_dupliquees = 0
        for num, couples in sims:
            print()
            print(num, "similar lines in", len(couples), "files")
            couples = sorted(couples)
            lineset = idx = None
            for lineset, idx in couples:
                print("==%s:%s" % (lineset.name, idx))
            if lineset:
                # Show the duplicated source once, from the last lineset.
                for line in lineset._real_lines[idx : idx + num]:
                    print("  ", line.rstrip())
            nb_lignes_dupliquees += num * (len(couples) - 1)
        nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
        print(
            "TOTAL lines=%s duplicates=%s percent=%.2f"
            % (
                nb_total_lignes,
                nb_lignes_dupliquees,
                nb_lignes_dupliquees * 100.0 / nb_total_lignes,
            )
        )

    def _find_common(self, lineset1, lineset2):
        """find similarities in the two given linesets"""
        lines1 = lineset1.enumerate_stripped
        lines2 = lineset2.enumerate_stripped
        find = lineset2.find
        index1 = 0
        min_lines = self.min_lines
        # For each position in lineset1, try to extend a run of equal
        # stripped lines at every matching position in lineset2.
        while index1 < len(lineset1):
            skip = 1
            num = 0
            for index2 in find(lineset1[index1]):
                non_blank = 0
                for num, ((_, line1), (_, line2)) in enumerate(
                    zip(lines1(index1), lines2(index2))
                ):
                    if line1 != line2:
                        if non_blank > min_lines:
                            yield num, lineset1, index1, lineset2, index2
                        skip = max(skip, num)
                        break
                    if line1:
                        non_blank += 1
                else:
                    # we may have reach the end
                    num += 1
                    if non_blank > min_lines:
                        yield num, lineset1, index1, lineset2, index2
                    skip = max(skip, num)
            index1 += skip

    def _iter_sims(self):
        """iterate on similarities among all files, by making a cartesian
        product
        """
        for idx, lineset in enumerate(self.linesets[:-1]):
            for lineset2 in self.linesets[idx + 1 :]:
                yield from self._find_common(lineset, lineset2)
|
||||
|
||||
|
||||
def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
    """Return *lines* with leading/trailing whitespace stripped and any
    ignored code features blanked out.

    :param lines: iterable of source lines (with or without newlines)
    :param ignore_comments: blank out ``#`` comments
    :param ignore_docstrings: blank out docstring lines
    :param ignore_imports: blank out import statements
    :returns: list of stripped lines, same length as *lines*
    """
    if ignore_imports:
        tree = astroid.parse("".join(lines))
        node_is_import_by_lineno = (
            (node.lineno, isinstance(node, (astroid.Import, astroid.ImportFrom)))
            for node in tree.body
        )
        # Map a line number to "every statement starting on it is an import";
        # lines with no entry inherit the previous line's status below.
        line_begins_import = {
            lineno: all(is_import for _, is_import in node_is_import_group)
            for lineno, node_is_import_group in groupby(
                node_is_import_by_lineno, key=lambda x: x[0]
            )
        }
        current_line_is_import = False

    strippedlines = []
    docstring = None  # closing-quote marker while inside a docstring
    for lineno, line in enumerate(lines, start=1):
        line = line.strip()
        if ignore_docstrings:
            if not docstring:
                if line.startswith(('"""', "'''")):
                    docstring = line[:3]
                    line = line[3:]
                elif line.startswith(('r"""', "r'''")):
                    # BUG FIX: skip the raw-string prefix so the closing
                    # marker is the quote triple itself.  The previous code
                    # used line[:3] (i.e. 'r""' / "r''"), which never matches
                    # the closing quotes, so raw docstrings were never
                    # blanked out.
                    docstring = line[1:4]
                    line = line[4:]
            if docstring:
                if line.endswith(docstring):
                    docstring = None
                line = ""
        if ignore_imports:
            current_line_is_import = line_begins_import.get(
                lineno, current_line_is_import
            )
            if current_line_is_import:
                line = ""
        if ignore_comments:
            line = line.split("#", 1)[0].strip()
        strippedlines.append(line)
    return strippedlines
|
||||
|
||||
|
||||
class LineSet:
    """Holds and indexes all the lines of a single source file."""

    def __init__(
        self,
        name,
        lines,
        ignore_comments=False,
        ignore_docstrings=False,
        ignore_imports=False,
    ):
        self.name = name
        self._real_lines = lines
        self._stripped_lines = stripped_lines(
            lines, ignore_comments, ignore_docstrings, ignore_imports
        )
        self._index = self._mk_index()

    def __str__(self):
        return "<Lineset for %s>" % self.name

    def __len__(self):
        return len(self._real_lines)

    def __getitem__(self, index):
        return self._stripped_lines[index]

    def __lt__(self, other):
        return self.name < other.name

    def __hash__(self):
        return id(self)

    def enumerate_stripped(self, start_at=0):
        """Yield ``(index, stripped_line)`` pairs, beginning at *start_at*."""
        tail = self._stripped_lines[start_at:] if start_at else self._stripped_lines
        yield from enumerate(tail, start=start_at)

    def find(self, stripped_line):
        """Return every position of *stripped_line* in this set, () if absent."""
        return self._index.get(stripped_line, ())

    def _mk_index(self):
        """Map each non-blank stripped line to the list of its positions."""
        index = defaultdict(list)
        for position, stripped in enumerate(self._stripped_lines):
            if stripped:
                index[stripped].append(position)
        return index
|
||||
|
||||
|
||||
# Message definitions for the duplicate-code check; registered on
# SimilarChecker as its ``msgs`` attribute.
MSGS = {
    "R0801": (
        "Similar lines in %s files\n%s",
        "duplicate-code",
        "Indicates that a set of similar lines has been detected "
        "among multiple file. This usually means that the code should "
        "be refactored to avoid this duplication.",
    )
}
|
||||
|
||||
|
||||
def report_similarities(sect, stats, old_stats):
    """Append a 4-column table of duplication statistics to *sect*."""
    rows = ["", "now", "previous", "difference"]
    rows.extend(
        table_lines_from_stats(
            stats, old_stats, ("nb_duplicated_lines", "percent_duplicated_lines")
        )
    )
    sect.append(Table(children=rows, cols=4, rheaders=1, cheaders=1))
|
||||
|
||||
|
||||
# wrapper to get a pylint checker from the similar class
class SimilarChecker(BaseChecker, Similar):
    """checks for similarities and duplicated code. This computation may be
    memory / CPU intensive, so you should disable it if you experiment some
    problems.
    """

    __implements__ = (IRawChecker,)
    # configuration section name
    name = "similarities"
    # messages
    msgs = MSGS
    # configuration options
    # for available dict keys/values see the optik parser 'add_option' method
    options = (
        (
            "min-similarity-lines",  # type: ignore
            {
                "default": 4,
                "type": "int",
                "metavar": "<int>",
                "help": "Minimum lines number of a similarity.",
            },
        ),
        (
            "ignore-comments",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Ignore comments when computing similarities.",
            },
        ),
        (
            "ignore-docstrings",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Ignore docstrings when computing similarities.",
            },
        ),
        (
            "ignore-imports",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Ignore imports when computing similarities.",
            },
        ),
    )
    # reports
    reports = (("RP0801", "Duplication", report_similarities),)  # type: ignore

    def __init__(self, linter=None):
        BaseChecker.__init__(self, linter)
        # Defaults here mirror the option defaults above; set_option() keeps
        # the Similar state in sync once configuration is actually read.
        Similar.__init__(
            self, min_lines=4, ignore_comments=True, ignore_docstrings=True
        )
        self.stats = None

    def set_option(self, optname, value, action=None, optdict=None):
        """method called to set an option (registered in the options list)

        overridden to report options setting to Similar
        """
        BaseChecker.set_option(self, optname, value, action, optdict)
        # Mirror each similarity-related option onto the corresponding
        # Similar attribute so the duplication engine sees the new value.
        if optname == "min-similarity-lines":
            self.min_lines = self.config.min_similarity_lines
        elif optname == "ignore-comments":
            self.ignore_comments = self.config.ignore_comments
        elif optname == "ignore-docstrings":
            self.ignore_docstrings = self.config.ignore_docstrings
        elif optname == "ignore-imports":
            self.ignore_imports = self.config.ignore_imports

    def open(self):
        """init the checkers: reset linesets and statistics information"""
        self.linesets = []
        self.stats = self.linter.add_stats(
            nb_duplicated_lines=0, percent_duplicated_lines=0
        )

    def process_module(self, node):
        """process a module

        the module's content is accessible via the stream object

        stream must implement the readlines method
        """
        with node.stream() as stream:
            self.append_stream(self.linter.current_name, stream, node.file_encoding)

    def close(self):
        """compute and display similarities on closing (i.e. end of parsing)"""
        total = sum(len(lineset) for lineset in self.linesets)
        duplicated = 0
        stats = self.stats
        for num, couples in self._compute_sims():
            msg = []
            lineset = idx = None
            # One "==file:line" header per occurrence of the duplication.
            for lineset, idx in couples:
                msg.append("==%s:%s" % (lineset.name, idx))
            msg.sort()

            # Append the duplicated source itself, taken from the last
            # couple iterated (all couples hold the same stripped lines).
            if lineset:
                for line in lineset._real_lines[idx : idx + num]:
                    msg.append(line.rstrip())

            self.add_message("R0801", args=(len(couples), "\n".join(msg)))
            # num lines appearing in k files means k-1 redundant copies.
            duplicated += num * (len(couples) - 1)
        stats["nb_duplicated_lines"] = duplicated
        # ``total and ...`` avoids ZeroDivisionError when no lines were seen
        # (the stat is then 0).
        stats["percent_duplicated_lines"] = total and duplicated * 100.0 / total
|
||||
|
||||
|
||||
def register(linter):
    """Auto-registration entry point: attach a SimilarChecker to *linter*."""
    checker = SimilarChecker(linter)
    linter.register_checker(checker)
|
||||
|
||||
|
||||
def usage(status=0):
    """Print command line usage information, then exit with *status*."""
    for text in (
        "finds copy pasted blocks in a set of files",
        "",
        "Usage: symilar [-d|--duplicates min_duplicated_lines] \
[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1...",
    ):
        print(text)
    sys.exit(status)
|
||||
|
||||
|
||||
def Run(argv=None):
    """Standalone command line entry point for the ``symilar`` tool."""
    if argv is None:
        argv = sys.argv[1:]

    short_options = "hdi"
    long_options = (
        "help",
        "duplicates=",
        "ignore-comments",
        "ignore-imports",
        "ignore-docstrings",
    )
    min_lines = 4
    ignore_comments = False
    ignore_docstrings = False
    ignore_imports = False
    parsed, filenames = getopt(argv, short_options, long_options)
    for name, value in parsed:
        if name in ("-d", "--duplicates"):
            min_lines = int(value)
        elif name in ("-h", "--help"):
            usage()
        elif name in ("-i", "--ignore-comments"):
            ignore_comments = True
        elif name == "--ignore-docstrings":
            ignore_docstrings = True
        elif name == "--ignore-imports":
            ignore_imports = True
    if not filenames:
        # No input files given: show usage and exit with an error status.
        usage(1)
    sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports)
    for filename in filenames:
        with open(filename) as stream:
            sim.append_stream(filename, stream)
    sim.run()
    sys.exit(0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this module directly as the ``symilar`` command.
    Run()
|
||||
415
venv/lib/python3.8/site-packages/pylint/checkers/spelling.py
Normal file
415
venv/lib/python3.8/site-packages/pylint/checkers/spelling.py
Normal file
@@ -0,0 +1,415 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2014-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
|
||||
# Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2015 Pavel Roskin <proski@gnu.org>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016-2017 Pedro Algarvio <pedro@algarvio.me>
|
||||
# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
|
||||
# Copyright (c) 2018, 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Peter Kolbus <peter.kolbus@gmail.com>
|
||||
# Copyright (c) 2019 agutole <toldo_carp@hotmail.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checker for spelling errors in comments and docstrings.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import tokenize
|
||||
|
||||
from pylint.checkers import BaseTokenChecker
|
||||
from pylint.checkers.utils import check_messages
|
||||
from pylint.interfaces import IAstroidChecker, ITokenChecker
|
||||
|
||||
# Optional dependency: the spelling checker is a no-op when the ``enchant``
# package is not installed (see SpellingChecker.open below).
try:
    import enchant
    from enchant.tokenize import (  # type: ignore
        get_tokenizer,
        Chunker,
        Filter,
        EmailFilter,
        URLFilter,
        WikiWordFilter,
    )
except ImportError:
    enchant = None
    # Minimal stand-ins so the Filter/Chunker subclasses below can still be
    # defined when enchant is missing.
    # pylint: disable=no-init
    class Filter:  # type: ignore
        def _skip(self, word):
            raise NotImplementedError

    class Chunker:  # type: ignore
        pass
|
||||
|
||||
|
||||
# Discover the available enchant dictionaries; the names feed the
# --spelling-dict option choices and its help text below.
if enchant is not None:
    br = enchant.Broker()
    dicts = br.list_dicts()
    dict_choices = [""] + [d[0] for d in dicts]
    dicts = ["%s (%s)" % (d[0], d[1].name) for d in dicts]
    dicts = ", ".join(dicts)
    instr = ""
else:
    dicts = "none"
    dict_choices = [""]
    instr = " To make it work, install the python-enchant package."
|
||||
|
||||
|
||||
class WordsWithDigigtsFilter(Filter):
    """Skips words with digits."""

    def _skip(self, word):
        # A single digit anywhere disqualifies the word from spell checking.
        return any(char.isdigit() for char in word)
|
||||
|
||||
|
||||
class WordsWithUnderscores(Filter):
    """Skips words containing underscores.

    They are probably function parameter names.
    """

    def _skip(self, word):
        return word.find("_") != -1
|
||||
|
||||
|
||||
class CamelCasedWord(Filter):
    r"""Filter that skips camelCasedWords.

    Any word matching the following regular expression is skipped:

        ^([a-z]\w+[A-Z]+\w+)
    """
    _pattern = re.compile(r"^([a-z]+([\d]|[A-Z])(?:\w+)?)")

    def _skip(self, word):
        return self._pattern.match(word) is not None
|
||||
|
||||
|
||||
class SphinxDirectives(Filter):
    r"""Filter that skips Sphinx directives such as ``:class:`BaseQuery```.

    Any word matching the following regular expression is skipped:

        ^:([a-z]+):`([^`]+)(`)?
    """
    # The final ` in the pattern is optional because enchant strips it out
    _pattern = re.compile(r"^:([a-z]+):`([^`]+)(`)?")

    def _skip(self, word):
        return self._pattern.match(word) is not None
|
||||
|
||||
|
||||
class ForwardSlashChunkder(Chunker):
    """
    This chunker allows splitting words like 'before/after' into 'before' and 'after'
    """

    def next(self):
        # Iterator protocol used by enchant's tokenizer: consume self._text
        # chunk by chunk, raising StopIteration when exhausted.
        while True:
            if not self._text:
                raise StopIteration()
            if "/" not in self._text:
                # No slash left: emit the remainder as one final chunk.
                text = self._text
                self._offset = 0
                self._text = ""
                return (text, 0)
            pre_text, post_text = self._text.split("/", 1)
            self._text = post_text
            self._offset = 0
            if (
                not pre_text
                or not post_text
                or not pre_text[-1].isalpha()
                or not post_text[0].isalpha()
            ):
                # Not a word/word pattern (e.g. path or number): keep the
                # slash and emit the whole thing as a single chunk.
                self._text = ""
                self._offset = 0
                return (pre_text + "/" + post_text, 0)
            return (pre_text, 0)

    def _next(self):
        # NOTE(review): appears unused/dead — the tokenizer drives ``next``
        # above; confirm before removing.
        while True:
            if "/" not in self._text:
                return (self._text, 0)
            pre_text, post_text = self._text.split("/", 1)
            if not pre_text or not post_text:
                break
            if not pre_text[-1].isalpha() or not post_text[0].isalpha():
                raise StopIteration()
            self._text = pre_text + " " + post_text
        raise StopIteration()
|
||||
|
||||
|
||||
class SpellingChecker(BaseTokenChecker):
    """Check spelling in comments and docstrings"""

    __implements__ = (ITokenChecker, IAstroidChecker)
    name = "spelling"
    msgs = {
        "C0401": (
            "Wrong spelling of a word '%s' in a comment:\n%s\n"
            "%s\nDid you mean: '%s'?",
            "wrong-spelling-in-comment",
            "Used when a word in comment is not spelled correctly.",
        ),
        "C0402": (
            "Wrong spelling of a word '%s' in a docstring:\n%s\n"
            "%s\nDid you mean: '%s'?",
            "wrong-spelling-in-docstring",
            "Used when a word in docstring is not spelled correctly.",
        ),
        "C0403": (
            "Invalid characters %r in a docstring",
            "invalid-characters-in-docstring",
            "Used when a word in docstring cannot be checked by enchant.",
        ),
    }
    options = (
        (
            "spelling-dict",
            {
                "default": "",
                "type": "choice",
                "metavar": "<dict name>",
                "choices": dict_choices,
                "help": "Spelling dictionary name. "
                "Available dictionaries: %s.%s" % (dicts, instr),
            },
        ),
        (
            "spelling-ignore-words",
            {
                "default": "",
                "type": "string",
                "metavar": "<comma separated words>",
                "help": "List of comma separated words that " "should not be checked.",
            },
        ),
        (
            "spelling-private-dict-file",
            {
                "default": "",
                "type": "string",
                "metavar": "<path to file>",
                "help": "A path to a file that contains the private "
                "dictionary; one word per line.",
            },
        ),
        (
            "spelling-store-unknown-words",
            {
                "default": "n",
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Tells whether to store unknown words to the "
                "private dictionary (see the "
                "--spelling-private-dict-file option) instead of "
                "raising a message.",
            },
        ),
        (
            "max-spelling-suggestions",
            {
                "default": 4,
                "type": "int",
                "metavar": "N",
                "help": "Limits count of emitted suggestions for " "spelling mistakes.",
            },
        ),
    )

    def open(self):
        """Initialise the enchant dictionary and tokenizer.

        Leaves ``self.initialized`` False (all checks become no-ops) when
        enchant is not installed or no dictionary was configured.
        """
        self.initialized = False
        self.private_dict_file = None

        if enchant is None:
            return
        dict_name = self.config.spelling_dict
        if not dict_name:
            return

        self.ignore_list = [
            w.strip() for w in self.config.spelling_ignore_words.split(",")
        ]
        # "param" appears in docstring in param description and
        # "pylint" appears in comments in pylint pragmas.
        self.ignore_list.extend(["param", "pylint"])

        # Expand tilde to allow e.g. spelling-private-dict-file = ~/.pylintdict
        if self.config.spelling_private_dict_file:
            self.config.spelling_private_dict_file = os.path.expanduser(
                self.config.spelling_private_dict_file
            )

        if self.config.spelling_private_dict_file:
            # Personal word list augments the system dictionary; the file is
            # also opened for appending unknown words (see _check_spelling).
            self.spelling_dict = enchant.DictWithPWL(
                dict_name, self.config.spelling_private_dict_file
            )
            self.private_dict_file = open(self.config.spelling_private_dict_file, "a")
        else:
            self.spelling_dict = enchant.Dict(dict_name)

        if self.config.spelling_store_unknown_words:
            self.unknown_words = set()

        self.tokenizer = get_tokenizer(
            dict_name,
            chunkers=[ForwardSlashChunkder],
            filters=[
                EmailFilter,
                URLFilter,
                WikiWordFilter,
                WordsWithDigigtsFilter,
                WordsWithUnderscores,
                CamelCasedWord,
                SphinxDirectives,
            ],
        )
        self.initialized = True

    def close(self):
        """Close the private dictionary file if one was opened."""
        if self.private_dict_file:
            self.private_dict_file.close()

    def _check_spelling(self, msgid, line, line_num):
        """Spell-check one *line*, emitting *msgid* for each unknown word."""
        original_line = line
        # Width of the leading whitespace, used to compute word columns.
        try:
            initial_space = re.search(r"^[^\S]\s*", line).regs[0][1]
        except (IndexError, AttributeError):
            initial_space = 0
        if line.strip().startswith("#"):
            line = line.strip()[1:]
            starts_with_comment = True
        else:
            starts_with_comment = False
        for word, word_start_at in self.tokenizer(line.strip()):
            word_start_at += initial_space
            lower_cased_word = word.casefold()

            # Skip words from ignore list.
            if word in self.ignore_list or lower_cased_word in self.ignore_list:
                continue

            # Strip starting u' from unicode literals and r' from raw strings.
            if word.startswith(("u'", 'u"', "r'", 'r"')) and len(word) > 2:
                word = word[2:]
                lower_cased_word = lower_cased_word[2:]

            # If it is a known word, then continue.
            try:
                if self.spelling_dict.check(lower_cased_word):
                    # The lower cased version of word passed spell checking
                    continue

                # If we reached this far, it means there was a spelling mistake.
                # Let's retry with the original work because 'unicode' is a
                # spelling mistake but 'Unicode' is not
                if self.spelling_dict.check(word):
                    continue
            except enchant.errors.Error:
                # enchant could not even tokenize the word (e.g. exotic
                # characters): report it as invalid rather than misspelled.
                self.add_message(
                    "invalid-characters-in-docstring", line=line_num, args=(word,)
                )
                continue

            # Store word to private dict or raise a message.
            if self.config.spelling_store_unknown_words:
                if lower_cased_word not in self.unknown_words:
                    self.private_dict_file.write("%s\n" % lower_cased_word)
                    self.unknown_words.add(lower_cased_word)
            else:
                # Present up to N suggestions.
                suggestions = self.spelling_dict.suggest(word)
                del suggestions[self.config.max_spelling_suggestions :]

                # Locate the word in the original line to draw the ^^^
                # indicator under it.
                line_segment = line[word_start_at:]
                match = re.search(r"(\W|^)(%s)(\W|$)" % word, line_segment)
                if match:
                    # Start position of second group in regex.
                    col = match.regs[2][0]
                else:
                    col = line_segment.index(word)

                col += word_start_at

                if starts_with_comment:
                    # Account for the '#' stripped off above.
                    col += 1
                indicator = (" " * col) + ("^" * len(word))

                self.add_message(
                    msgid,
                    line=line_num,
                    args=(
                        word,
                        original_line,
                        indicator,
                        "'{}'".format("' or '".join(suggestions)),
                    ),
                )

    def process_tokens(self, tokens):
        """Spell-check every comment token of the module."""
        if not self.initialized:
            return

        # Process tokens and look for comments.
        for (tok_type, token, (start_row, _), _, _) in tokens:
            if tok_type == tokenize.COMMENT:
                if start_row == 1 and token.startswith("#!/"):
                    # Skip shebang lines
                    continue
                if token.startswith("# pylint:"):
                    # Skip pylint enable/disable comments
                    continue
                self._check_spelling("wrong-spelling-in-comment", token, start_row)

    @check_messages("wrong-spelling-in-docstring")
    def visit_module(self, node):
        if not self.initialized:
            return
        self._check_docstring(node)

    @check_messages("wrong-spelling-in-docstring")
    def visit_classdef(self, node):
        if not self.initialized:
            return
        self._check_docstring(node)

    @check_messages("wrong-spelling-in-docstring")
    def visit_functiondef(self, node):
        if not self.initialized:
            return
        self._check_docstring(node)

    # Async functions carry docstrings exactly like sync ones.
    visit_asyncfunctiondef = visit_functiondef

    def _check_docstring(self, node):
        """check the node has any spelling errors"""
        docstring = node.doc
        if not docstring:
            return

        # The docstring is assumed to start on the line after the def/class
        # statement — TODO confirm for multi-line signatures.
        start_line = node.lineno + 1

        # Go through lines of docstring
        for idx, line in enumerate(docstring.splitlines()):
            self._check_spelling("wrong-spelling-in-docstring", line, start_line + idx)
|
||||
|
||||
|
||||
def register(linter):
    """Auto-registration entry point: attach a SpellingChecker to *linter*."""
    checker = SpellingChecker(linter)
    linter.register_checker(checker)
|
||||
458
venv/lib/python3.8/site-packages/pylint/checkers/stdlib.py
Normal file
458
venv/lib/python3.8/site-packages/pylint/checkers/stdlib.py
Normal file
@@ -0,0 +1,458 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2014-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Cosmin Poieana <cmin@ropython.org>
|
||||
# Copyright (c) 2014 Vlad Temian <vladtemian@gmail.com>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Cezar <celnazli@bitdefender.com>
|
||||
# Copyright (c) 2015 Chris Rebert <code@rebertia.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Jared Garst <cultofjared@gmail.com>
|
||||
# Copyright (c) 2017 Renat Galimov <renat2017@gmail.com>
|
||||
# Copyright (c) 2017 Martin <MartinBasti@users.noreply.github.com>
|
||||
# Copyright (c) 2017 Christopher Zurcher <zurcher@users.noreply.github.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2018 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 Banjamin Freeman <befreeman@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
|
||||
# Copyright (c) 2019 Julien Palard <julien@palard.fr>
|
||||
# Copyright (c) 2019 laike9m <laike9m@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Robert Schweizer <robert_schweizer@gmx.de>
|
||||
# Copyright (c) 2019 fadedDexofan <fadedDexofan@gmail.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checkers for various standard library functions."""
|
||||
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
from astroid.bases import Instance
|
||||
from astroid.node_classes import Const
|
||||
|
||||
from pylint.checkers import BaseChecker, utils
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
|
||||
# Names and qualified names recognised by the checks below.
OPEN_FILES = {"open", "file"}  # builtins whose mode string is validated
UNITTEST_CASE = "unittest.case"
THREADING_THREAD = "threading.Thread"
COPY_COPY = "copy.copy"
OS_ENVIRON = "os._Environ"
ENV_GETTERS = {"os.getenv"}  # env accessors whose arguments are type-checked
SUBPROCESS_POPEN = "subprocess.Popen"
SUBPROCESS_RUN = "subprocess.run"
OPEN_MODULE = "_io"  # module that actually defines the builtin open()
|
||||
|
||||
|
||||
def _check_mode_str(mode):
    """Return True if *mode* is a valid mode string for builtin ``open()``."""
    # Must be a string at all.
    if not isinstance(mode, str):
        return False
    # Syntax: only known flag characters, each at most once.
    modes = set(mode)
    if modes - set("rwatb+Ux") or len(mode) > len(modes):
        return False
    # Logic: decode the individual flags.
    reading = "r" in modes
    writing = "w" in modes
    appending = "a" in modes
    creating = "x" in modes
    text = "t" in modes
    binary = "b" in modes
    if "U" in modes:
        # Universal newlines implies reading and excludes any write mode.
        if writing or appending or creating:
            return False
        reading = True
    if text and binary:
        return False
    # Exactly one of r/w/a/x must be selected.
    if reading + writing + appending + creating != 1:
        return False
    return True
|
||||
|
||||
|
||||
class StdlibChecker(BaseChecker):
|
||||
__implements__ = (IAstroidChecker,)
|
||||
name = "stdlib"
|
||||
|
||||
msgs = {
|
||||
"W1501": (
|
||||
'"%s" is not a valid mode for open.',
|
||||
"bad-open-mode",
|
||||
"Python supports: r, w, a[, x] modes with b, +, "
|
||||
"and U (only with r) options. "
|
||||
"See http://docs.python.org/2/library/functions.html#open",
|
||||
),
|
||||
"W1502": (
|
||||
"Using datetime.time in a boolean context.",
|
||||
"boolean-datetime",
|
||||
"Using datetime.time in a boolean context can hide "
|
||||
"subtle bugs when the time they represent matches "
|
||||
"midnight UTC. This behaviour was fixed in Python 3.5. "
|
||||
"See http://bugs.python.org/issue13936 for reference.",
|
||||
{"maxversion": (3, 5)},
|
||||
),
|
||||
"W1503": (
|
||||
"Redundant use of %s with constant value %r",
|
||||
"redundant-unittest-assert",
|
||||
"The first argument of assertTrue and assertFalse is "
|
||||
"a condition. If a constant is passed as parameter, that "
|
||||
"condition will be always true. In this case a warning "
|
||||
"should be emitted.",
|
||||
),
|
||||
"W1505": (
|
||||
"Using deprecated method %s()",
|
||||
"deprecated-method",
|
||||
"The method is marked as deprecated and will be removed in "
|
||||
"a future version of Python. Consider looking for an "
|
||||
"alternative in the documentation.",
|
||||
),
|
||||
"W1506": (
|
||||
"threading.Thread needs the target function",
|
||||
"bad-thread-instantiation",
|
||||
"The warning is emitted when a threading.Thread class "
|
||||
"is instantiated without the target function being passed. "
|
||||
"By default, the first parameter is the group param, not the target param. ",
|
||||
),
|
||||
"W1507": (
|
||||
"Using copy.copy(os.environ). Use os.environ.copy() instead. ",
|
||||
"shallow-copy-environ",
|
||||
"os.environ is not a dict object but proxy object, so "
|
||||
"shallow copy has still effects on original object. "
|
||||
"See https://bugs.python.org/issue15373 for reference. ",
|
||||
),
|
||||
"E1507": (
|
||||
"%s does not support %s type argument",
|
||||
"invalid-envvar-value",
|
||||
"Env manipulation functions support only string type arguments. "
|
||||
"See https://docs.python.org/3/library/os.html#os.getenv. ",
|
||||
),
|
||||
"W1508": (
|
||||
"%s default type is %s. Expected str or None.",
|
||||
"invalid-envvar-default",
|
||||
"Env manipulation functions return None or str values. "
|
||||
"Supplying anything different as a default may cause bugs. "
|
||||
"See https://docs.python.org/3/library/os.html#os.getenv. ",
|
||||
),
|
||||
"W1509": (
|
||||
"Using preexec_fn keyword which may be unsafe in the presence "
|
||||
"of threads",
|
||||
"subprocess-popen-preexec-fn",
|
||||
"The preexec_fn parameter is not safe to use in the presence "
|
||||
"of threads in your application. The child process could "
|
||||
"deadlock before exec is called. If you must use it, keep it "
|
||||
"trivial! Minimize the number of libraries you call into."
|
||||
"https://docs.python.org/3/library/subprocess.html#popen-constructor",
|
||||
),
|
||||
"W1510": (
|
||||
"Using subprocess.run without explicitly set `check` is not recommended.",
|
||||
"subprocess-run-check",
|
||||
"The check parameter should always be used with explicitly set "
|
||||
"`check` keyword to make clear what the error-handling behavior is."
|
||||
"https://docs.python.org/3/library/subprocess.html#subprocess.run",
|
||||
),
|
||||
}
|
||||
|
||||
deprecated = {
|
||||
0: {
|
||||
"cgi.parse_qs",
|
||||
"cgi.parse_qsl",
|
||||
"ctypes.c_buffer",
|
||||
"distutils.command.register.register.check_metadata",
|
||||
"distutils.command.sdist.sdist.check_metadata",
|
||||
"tkinter.Misc.tk_menuBar",
|
||||
"tkinter.Menu.tk_bindForTraversal",
|
||||
},
|
||||
2: {
|
||||
(2, 6, 0): {
|
||||
"commands.getstatus",
|
||||
"os.popen2",
|
||||
"os.popen3",
|
||||
"os.popen4",
|
||||
"macostools.touched",
|
||||
},
|
||||
(2, 7, 0): {
|
||||
"unittest.case.TestCase.assertEquals",
|
||||
"unittest.case.TestCase.assertNotEquals",
|
||||
"unittest.case.TestCase.assertAlmostEquals",
|
||||
"unittest.case.TestCase.assertNotAlmostEquals",
|
||||
"unittest.case.TestCase.assert_",
|
||||
"xml.etree.ElementTree.Element.getchildren",
|
||||
"xml.etree.ElementTree.Element.getiterator",
|
||||
"xml.etree.ElementTree.XMLParser.getiterator",
|
||||
"xml.etree.ElementTree.XMLParser.doctype",
|
||||
},
|
||||
},
|
||||
3: {
|
||||
(3, 0, 0): {
|
||||
"inspect.getargspec",
|
||||
"failUnlessEqual",
|
||||
"assertEquals",
|
||||
"failIfEqual",
|
||||
"assertNotEquals",
|
||||
"failUnlessAlmostEqual",
|
||||
"assertAlmostEquals",
|
||||
"failIfAlmostEqual",
|
||||
"assertNotAlmostEquals",
|
||||
"failUnless",
|
||||
"assert_",
|
||||
"failUnlessRaises",
|
||||
"failIf",
|
||||
"assertRaisesRegexp",
|
||||
"assertRegexpMatches",
|
||||
"assertNotRegexpMatches",
|
||||
},
|
||||
(3, 1, 0): {
|
||||
"base64.encodestring",
|
||||
"base64.decodestring",
|
||||
"ntpath.splitunc",
|
||||
},
|
||||
(3, 2, 0): {
|
||||
"cgi.escape",
|
||||
"configparser.RawConfigParser.readfp",
|
||||
"xml.etree.ElementTree.Element.getchildren",
|
||||
"xml.etree.ElementTree.Element.getiterator",
|
||||
"xml.etree.ElementTree.XMLParser.getiterator",
|
||||
"xml.etree.ElementTree.XMLParser.doctype",
|
||||
},
|
||||
(3, 3, 0): {
|
||||
"inspect.getmoduleinfo",
|
||||
"logging.warn",
|
||||
"logging.Logger.warn",
|
||||
"logging.LoggerAdapter.warn",
|
||||
"nntplib._NNTPBase.xpath",
|
||||
"platform.popen",
|
||||
},
|
||||
(3, 4, 0): {
|
||||
"importlib.find_loader",
|
||||
"plistlib.readPlist",
|
||||
"plistlib.writePlist",
|
||||
"plistlib.readPlistFromBytes",
|
||||
"plistlib.writePlistToBytes",
|
||||
},
|
||||
(3, 4, 4): {"asyncio.tasks.async"},
|
||||
(3, 5, 0): {
|
||||
"fractions.gcd",
|
||||
"inspect.formatargspec",
|
||||
"inspect.getcallargs",
|
||||
"platform.linux_distribution",
|
||||
"platform.dist",
|
||||
},
|
||||
(3, 6, 0): {"importlib._bootstrap_external.FileLoader.load_module"},
|
||||
},
|
||||
}
|
||||
|
||||
def _check_bad_thread_instantiation(self, node):
|
||||
if not node.kwargs and not node.keywords and len(node.args) <= 1:
|
||||
self.add_message("bad-thread-instantiation", node=node)
|
||||
|
||||
def _check_for_preexec_fn_in_popen(self, node):
|
||||
if node.keywords:
|
||||
for keyword in node.keywords:
|
||||
if keyword.arg == "preexec_fn":
|
||||
self.add_message("subprocess-popen-preexec-fn", node=node)
|
||||
|
||||
def _check_for_check_kw_in_run(self, node):
|
||||
kwargs = {keyword.arg for keyword in (node.keywords or ())}
|
||||
if "check" not in kwargs:
|
||||
self.add_message("subprocess-run-check", node=node)
|
||||
|
||||
def _check_shallow_copy_environ(self, node):
    """Warn on ``copy.copy(os.environ)``.

    A shallow copy of ``os.environ`` shares state with the real
    environment wrapper, so ``os.environ.copy()`` (or a deep copy)
    should be used instead.
    """
    # The value being copied: first positional argument of copy.copy(...).
    arg = utils.get_argument_from_call(node, position=0)
    for inferred in arg.inferred():
        if inferred.qname() == OS_ENVIRON:
            self.add_message("shallow-copy-environ", node=node)
            # One message per call is enough, even with multiple inferences.
            break
|
||||
|
||||
@utils.check_messages(
    "bad-open-mode",
    "redundant-unittest-assert",
    "deprecated-method",
    "bad-thread-instantiation",
    "shallow-copy-environ",
    "invalid-envvar-value",
    "invalid-envvar-default",
    "subprocess-popen-preexec-fn",
    "subprocess-run-check",
)
def visit_call(self, node):
    """Visit a Call node.

    Dispatches to the stdlib-specific checks depending on what the
    called object infers to: ``open``-family builtins, ``unittest``
    assertions, ``threading.Thread``/``subprocess.Popen`` classes, or
    ``copy.copy``/``os`` environment getters/``subprocess.run``.
    """
    try:
        for inferred in node.func.infer():
            if inferred is astroid.Uninferable:
                continue
            if inferred.root().name == OPEN_MODULE:
                if getattr(node.func, "name", None) in OPEN_FILES:
                    self._check_open_mode(node)
            elif inferred.root().name == UNITTEST_CASE:
                self._check_redundant_assert(node, inferred)
            elif isinstance(inferred, astroid.ClassDef):
                if inferred.qname() == THREADING_THREAD:
                    self._check_bad_thread_instantiation(node)
                elif inferred.qname() == SUBPROCESS_POPEN:
                    self._check_for_preexec_fn_in_popen(node)
            elif isinstance(inferred, astroid.FunctionDef):
                name = inferred.qname()
                if name == COPY_COPY:
                    self._check_shallow_copy_environ(node)
                elif name in ENV_GETTERS:
                    self._check_env_function(node, inferred)
                elif name == SUBPROCESS_RUN:
                    self._check_for_check_kw_in_run(node)
            # The deprecation check applies to every inferred target,
            # regardless of which branch above matched.
            self._check_deprecated_method(node, inferred)
    except astroid.InferenceError:
        return
|
||||
|
||||
@utils.check_messages("boolean-datetime")
def visit_unaryop(self, node):
    """Check the operand of a ``not`` for a datetime used as a boolean."""
    if node.op != "not":
        return
    self._check_datetime(node.operand)
|
||||
|
||||
@utils.check_messages("boolean-datetime")
def visit_if(self, node):
    """Check an ``if`` statement's test for a datetime used as a boolean."""
    self._check_datetime(node.test)
|
||||
|
||||
@utils.check_messages("boolean-datetime")
def visit_ifexp(self, node):
    """Check a conditional expression's test for a datetime used as a boolean."""
    self._check_datetime(node.test)
|
||||
|
||||
@utils.check_messages("boolean-datetime")
def visit_boolop(self, node):
    """Check every operand of an ``and``/``or`` chain for boolean-datetime."""
    for operand in node.values:
        self._check_datetime(operand)
|
||||
|
||||
def _check_deprecated_method(self, node, inferred):
    """Emit deprecated-method when the call target is a known deprecated API.

    Both the fully qualified name and the bare function/attribute name
    are matched against the ``deprecated`` table: key ``0`` holds
    version-independent entries, while the per-major-version table maps
    a ``(major, minor, patch)`` "deprecated since" version to the names
    deprecated at that point.
    """
    # Major version of the running interpreter, used as the table key.
    py_vers = sys.version_info[0]

    if isinstance(node.func, astroid.Attribute):
        func_name = node.func.attrname
    elif isinstance(node.func, astroid.Name):
        func_name = node.func.name
    else:
        # Not interested in other nodes.
        return

    # Reject nodes which aren't of interest to us.
    acceptable_nodes = (
        astroid.BoundMethod,
        astroid.UnboundMethod,
        astroid.FunctionDef,
    )
    if not isinstance(inferred, acceptable_nodes):
        return

    qname = inferred.qname()
    if any(name in self.deprecated[0] for name in (qname, func_name)):
        self.add_message("deprecated-method", node=node, args=(func_name,))
    else:
        # Only report deprecations that apply to the running interpreter:
        # the "deprecated since" version must not be in the future.
        for since_vers, func_list in self.deprecated[py_vers].items():
            if since_vers <= sys.version_info and any(
                name in func_list for name in (qname, func_name)
            ):
                self.add_message("deprecated-method", node=node, args=(func_name,))
                break
|
||||
|
||||
def _check_redundant_assert(self, node, infer):
    """Warn on ``assertTrue``/``assertFalse`` called with a constant argument.

    Asserting a literal constant always has the same outcome, so the
    assertion is redundant (or a bug).
    """
    if not isinstance(infer, astroid.BoundMethod):
        return
    if infer.name not in ("assertTrue", "assertFalse"):
        return
    if not node.args or not isinstance(node.args[0], astroid.Const):
        return
    self.add_message(
        "redundant-unittest-assert",
        args=(infer.name, node.args[0].value),
        node=node,
    )
|
||||
|
||||
def _check_datetime(self, node):
    """ Check that a datetime was inferred.
    If so, emit boolean-datetime warning.
    """
    try:
        inferred = next(node.infer())
    except astroid.InferenceError:
        return
    # Only datetime.time instances are flagged: their truthiness in a
    # boolean context is a well-known trap.
    if isinstance(inferred, Instance) and inferred.qname() == "datetime.time":
        self.add_message("boolean-datetime", node=node)
|
||||
|
||||
def _check_open_mode(self, node):
    """Check that the mode argument of an open or file call is valid."""
    try:
        mode_arg = utils.get_argument_from_call(node, position=1, keyword="mode")
    except utils.NoSuchArgumentError:
        return
    if not mode_arg:
        return
    inferred_mode = utils.safe_infer(mode_arg)
    if not isinstance(inferred_mode, astroid.Const):
        return
    if not _check_mode_str(inferred_mode.value):
        self.add_message("bad-open-mode", node=node, args=inferred_mode.value)
|
||||
|
||||
def _check_env_function(self, node, infer):
    """Validate the arguments of an ``os`` environment getter call.

    The variable name (first positional or ``key=``) must be a string;
    the fallback value (second positional or ``default=``) must be a
    string or ``None``.
    """
    keyword_values = {}
    if node.keywords:
        keyword_values = {kw.arg: kw.value for kw in node.keywords}

    # Environment variable name: os.getenv("NAME") or os.getenv(key="NAME").
    if node.args:
        name_arg = node.args[0]
    else:
        name_arg = keyword_values.get("key")
    if name_arg:
        self._check_invalid_envvar_value(
            node=node,
            message="invalid-envvar-value",
            call_arg=utils.safe_infer(name_arg),
            infer=infer,
            allow_none=False,
        )

    # Fallback value: os.getenv("NAME", dflt) or os.getenv("NAME", default=dflt).
    if len(node.args) == 2:
        default_arg = node.args[1]
    else:
        default_arg = keyword_values.get("default")
    if default_arg:
        self._check_invalid_envvar_value(
            node=node,
            infer=infer,
            message="invalid-envvar-default",
            call_arg=utils.safe_infer(default_arg),
            allow_none=True,
        )
|
||||
|
||||
def _check_invalid_envvar_value(self, node, infer, message, call_arg, allow_none):
    """Emit *message* unless *call_arg* is an acceptable constant.

    Acceptable values are string constants and, when *allow_none* is
    true, the ``None`` constant.  Any other inferred value — including
    non-constant nodes — is reported.
    """
    if call_arg in (astroid.Uninferable, None):
        return

    name = infer.qname()
    if not isinstance(call_arg, Const):
        # Anything that is not a constant (list, call result, ...)
        # cannot be a valid environment-variable argument.
        self.add_message(message, node=node, args=(name, call_arg.pytype()))
        return
    value = call_arg.value
    acceptable = isinstance(value, str) or (value is None and allow_none)
    if not acceptable:
        self.add_message(message, node=node, args=(name, call_arg.pytype()))
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto-register this checker with the linter."""
    linter.register_checker(StdlibChecker(linter))
|
||||
952
venv/lib/python3.8/site-packages/pylint/checkers/strings.py
Normal file
952
venv/lib/python3.8/site-packages/pylint/checkers/strings.py
Normal file
@@ -0,0 +1,952 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2009 Charles Hebert <charles.hebert@logilab.fr>
|
||||
# Copyright (c) 2010-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
|
||||
# Copyright (c) 2012-2014 Google, Inc.
|
||||
# Copyright (c) 2013-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016, 2018 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2016 Peter Dawyndt <Peter.Dawyndt@UGent.be>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2018, 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2018-2019 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 Alan Chan <achan961117@gmail.com>
|
||||
# Copyright (c) 2018 Yury Gribov <tetra2005@gmail.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2019 Wes Turner <westurner@google.com>
|
||||
# Copyright (c) 2019 Djailla <bastien.vallet@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony <tanant@users.noreply.github.com>
|
||||
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checker for string formatting operations.
|
||||
"""
|
||||
|
||||
import builtins
|
||||
import collections
|
||||
import numbers
|
||||
import re
|
||||
import tokenize
|
||||
import typing
|
||||
from typing import Iterable
|
||||
|
||||
import astroid
|
||||
from astroid.arguments import CallSite
|
||||
from astroid.node_classes import Const
|
||||
|
||||
from pylint.checkers import BaseChecker, BaseTokenChecker, utils
|
||||
from pylint.checkers.utils import check_messages
|
||||
from pylint.interfaces import IAstroidChecker, IRawChecker, ITokenChecker
|
||||
|
||||
_AST_NODE_STR_TYPES = ("__builtin__.unicode", "__builtin__.str", "builtins.str")
|
||||
# Prefixes for both strings and bytes literals per
|
||||
# https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
|
||||
_PREFIXES = {
|
||||
"r",
|
||||
"u",
|
||||
"R",
|
||||
"U",
|
||||
"f",
|
||||
"F",
|
||||
"fr",
|
||||
"Fr",
|
||||
"fR",
|
||||
"FR",
|
||||
"rf",
|
||||
"rF",
|
||||
"Rf",
|
||||
"RF",
|
||||
"b",
|
||||
"B",
|
||||
"br",
|
||||
"Br",
|
||||
"bR",
|
||||
"BR",
|
||||
"rb",
|
||||
"rB",
|
||||
"Rb",
|
||||
"RB",
|
||||
}
|
||||
SINGLE_QUOTED_REGEX = re.compile("(%s)?'''" % "|".join(_PREFIXES))
|
||||
DOUBLE_QUOTED_REGEX = re.compile('(%s)?"""' % "|".join(_PREFIXES))
|
||||
QUOTE_DELIMITER_REGEX = re.compile("(%s)?(\"|')" % "|".join(_PREFIXES), re.DOTALL)
|
||||
|
||||
MSGS = {
|
||||
"E1300": (
|
||||
"Unsupported format character %r (%#02x) at index %d",
|
||||
"bad-format-character",
|
||||
"Used when an unsupported format character is used in a format string.",
|
||||
),
|
||||
"E1301": (
|
||||
"Format string ends in middle of conversion specifier",
|
||||
"truncated-format-string",
|
||||
"Used when a format string terminates before the end of a "
|
||||
"conversion specifier.",
|
||||
),
|
||||
"E1302": (
|
||||
"Mixing named and unnamed conversion specifiers in format string",
|
||||
"mixed-format-string",
|
||||
"Used when a format string contains both named (e.g. '%(foo)d') "
|
||||
"and unnamed (e.g. '%d') conversion specifiers. This is also "
|
||||
"used when a named conversion specifier contains * for the "
|
||||
"minimum field width and/or precision.",
|
||||
),
|
||||
"E1303": (
|
||||
"Expected mapping for format string, not %s",
|
||||
"format-needs-mapping",
|
||||
"Used when a format string that uses named conversion specifiers "
|
||||
"is used with an argument that is not a mapping.",
|
||||
),
|
||||
"W1300": (
|
||||
"Format string dictionary key should be a string, not %s",
|
||||
"bad-format-string-key",
|
||||
"Used when a format string that uses named conversion specifiers "
|
||||
"is used with a dictionary whose keys are not all strings.",
|
||||
),
|
||||
"W1301": (
|
||||
"Unused key %r in format string dictionary",
|
||||
"unused-format-string-key",
|
||||
"Used when a format string that uses named conversion specifiers "
|
||||
"is used with a dictionary that contains keys not required by the "
|
||||
"format string.",
|
||||
),
|
||||
"E1304": (
|
||||
"Missing key %r in format string dictionary",
|
||||
"missing-format-string-key",
|
||||
"Used when a format string that uses named conversion specifiers "
|
||||
"is used with a dictionary that doesn't contain all the keys "
|
||||
"required by the format string.",
|
||||
),
|
||||
"E1305": (
|
||||
"Too many arguments for format string",
|
||||
"too-many-format-args",
|
||||
"Used when a format string that uses unnamed conversion "
|
||||
"specifiers is given too many arguments.",
|
||||
),
|
||||
"E1306": (
|
||||
"Not enough arguments for format string",
|
||||
"too-few-format-args",
|
||||
"Used when a format string that uses unnamed conversion "
|
||||
"specifiers is given too few arguments",
|
||||
),
|
||||
"E1307": (
|
||||
"Argument %r does not match format type %r",
|
||||
"bad-string-format-type",
|
||||
"Used when a type required by format string "
|
||||
"is not suitable for actual argument type",
|
||||
),
|
||||
"E1310": (
|
||||
"Suspicious argument in %s.%s call",
|
||||
"bad-str-strip-call",
|
||||
"The argument to a str.{l,r,}strip call contains a duplicate character, ",
|
||||
),
|
||||
"W1302": (
|
||||
"Invalid format string",
|
||||
"bad-format-string",
|
||||
"Used when a PEP 3101 format string is invalid.",
|
||||
),
|
||||
"W1303": (
|
||||
"Missing keyword argument %r for format string",
|
||||
"missing-format-argument-key",
|
||||
"Used when a PEP 3101 format string that uses named fields "
|
||||
"doesn't receive one or more required keywords.",
|
||||
),
|
||||
"W1304": (
|
||||
"Unused format argument %r",
|
||||
"unused-format-string-argument",
|
||||
"Used when a PEP 3101 format string that uses named "
|
||||
"fields is used with an argument that "
|
||||
"is not required by the format string.",
|
||||
),
|
||||
"W1305": (
|
||||
"Format string contains both automatic field numbering "
|
||||
"and manual field specification",
|
||||
"format-combined-specification",
|
||||
"Used when a PEP 3101 format string contains both automatic "
|
||||
"field numbering (e.g. '{}') and manual field "
|
||||
"specification (e.g. '{0}').",
|
||||
),
|
||||
"W1306": (
|
||||
"Missing format attribute %r in format specifier %r",
|
||||
"missing-format-attribute",
|
||||
"Used when a PEP 3101 format string uses an "
|
||||
"attribute specifier ({0.length}), but the argument "
|
||||
"passed for formatting doesn't have that attribute.",
|
||||
),
|
||||
"W1307": (
|
||||
"Using invalid lookup key %r in format specifier %r",
|
||||
"invalid-format-index",
|
||||
"Used when a PEP 3101 format string uses a lookup specifier "
|
||||
"({a[1]}), but the argument passed for formatting "
|
||||
"doesn't contain or doesn't have that key as an attribute.",
|
||||
),
|
||||
"W1308": (
|
||||
"Duplicate string formatting argument %r, consider passing as named argument",
|
||||
"duplicate-string-formatting-argument",
|
||||
"Used when we detect that a string formatting is "
|
||||
"repeating an argument instead of using named string arguments",
|
||||
),
|
||||
"W1309": (
|
||||
"Using an f-string that does not have any interpolated variables",
|
||||
"f-string-without-interpolation",
|
||||
"Used when we detect an f-string that does not use any interpolation variables, "
|
||||
"in which case it can be either a normal string or a bug in the code.",
|
||||
),
|
||||
}
|
||||
|
||||
OTHER_NODES = (
|
||||
astroid.Const,
|
||||
astroid.List,
|
||||
astroid.Lambda,
|
||||
astroid.FunctionDef,
|
||||
astroid.ListComp,
|
||||
astroid.SetComp,
|
||||
astroid.GeneratorExp,
|
||||
)
|
||||
|
||||
BUILTINS_STR = builtins.__name__ + ".str"
|
||||
BUILTINS_FLOAT = builtins.__name__ + ".float"
|
||||
BUILTINS_INT = builtins.__name__ + ".int"
|
||||
|
||||
|
||||
def get_access_path(key, parts):
    """Build the textual access path for *key* from format specifiers.

    Each ``(is_attribute, specifier)`` pair renders as ``.spec`` for
    attribute access or ``[spec_repr]`` for item access, producing a
    final path such as ``a.b.c[0][1]``.
    """
    segments = (
        ".{}".format(spec) if is_attr else "[{!r}]".format(spec)
        for is_attr, spec in parts
    )
    return str(key) + "".join(segments)
|
||||
|
||||
|
||||
def arg_matches_format_type(arg_type, format_type):
    """Return True if *arg_type* may legally fill a ``%<format_type>`` slot."""
    if format_type in "sr":
        # All types can be printed with %s and %r
        return True
    if not isinstance(arg_type, astroid.Instance):
        # Non-instance nodes cannot be validated; assume a match.
        return True
    pytype = arg_type.pytype()
    if pytype == BUILTINS_STR:
        # A string only fits the single-character conversion.
        return format_type == "c"
    if pytype == BUILTINS_FLOAT:
        return format_type in "deEfFgGn%"
    # Integers allow all types; everything else does not match.
    return pytype == BUILTINS_INT
|
||||
|
||||
|
||||
class StringFormatChecker(BaseChecker):
|
||||
"""Checks string formatting operations to ensure that the format string
|
||||
is valid and the arguments match the format string.
|
||||
"""
|
||||
|
||||
__implements__ = (IAstroidChecker,)
|
||||
name = "string"
|
||||
msgs = MSGS
|
||||
|
||||
# pylint: disable=too-many-branches
@check_messages(
    "bad-format-character",
    "truncated-format-string",
    "mixed-format-string",
    "bad-format-string-key",
    "missing-format-string-key",
    "unused-format-string-key",
    "bad-string-format-type",
    "format-needs-mapping",
    "too-many-format-args",
    "too-few-format-args",
)
def visit_binop(self, node):
    """Check a ``left % right`` expression where *left* is a string constant.

    Parses the %-format string and validates the right-hand side:
    key presence and types for mapping-style strings, argument count
    and conversion-type compatibility for positional ones.
    """
    # Fix: the decorator above previously listed "bad-string-format-type"
    # twice; the duplicate has been removed.
    if node.op != "%":
        return
    left = node.left
    args = node.right

    if not (isinstance(left, astroid.Const) and isinstance(left.value, str)):
        return
    format_string = left.value
    try:
        (
            required_keys,
            required_num_args,
            required_key_types,
            required_arg_types,
        ) = utils.parse_format_string(format_string)
    except utils.UnsupportedFormatCharacter as exc:
        formatted = format_string[exc.index]
        self.add_message(
            "bad-format-character",
            node=node,
            args=(formatted, ord(formatted), exc.index),
        )
        return
    except utils.IncompleteFormatString:
        self.add_message("truncated-format-string", node=node)
        return
    if required_keys and required_num_args:
        # The format string uses both named and unnamed format
        # specifiers.
        self.add_message("mixed-format-string", node=node)
    elif required_keys:
        # The format string uses only named format specifiers.
        # Check that the RHS of the % operator is a mapping object
        # that contains precisely the set of keys required by the
        # format string.
        if isinstance(args, astroid.Dict):
            keys = set()
            unknown_keys = False
            for k, _ in args.items:
                if isinstance(k, astroid.Const):
                    key = k.value
                    if isinstance(key, str):
                        keys.add(key)
                    else:
                        self.add_message(
                            "bad-format-string-key", node=node, args=key
                        )
                else:
                    # One of the keys was something other than a
                    # constant. Since we can't tell what it is,
                    # suppress checks for missing keys in the
                    # dictionary.
                    unknown_keys = True
            if not unknown_keys:
                for key in required_keys:
                    if key not in keys:
                        self.add_message(
                            "missing-format-string-key", node=node, args=key
                        )
            for key in keys:
                if key not in required_keys:
                    self.add_message(
                        "unused-format-string-key", node=node, args=key
                    )
            for key, arg in args.items:
                if not isinstance(key, astroid.Const):
                    continue
                format_type = required_key_types.get(key.value, None)
                arg_type = utils.safe_infer(arg)
                if (
                    format_type is not None
                    and arg_type not in (None, astroid.Uninferable)
                    and not arg_matches_format_type(arg_type, format_type)
                ):
                    self.add_message(
                        "bad-string-format-type",
                        node=node,
                        args=(arg_type.pytype(), format_type),
                    )
        elif isinstance(args, (OTHER_NODES, astroid.Tuple)):
            type_name = type(args).__name__
            self.add_message("format-needs-mapping", node=node, args=type_name)
        # else:
        # The RHS of the format specifier is a name or
        # expression. It may be a mapping object, so
        # there's nothing we can check.
    else:
        # The format string uses only unnamed format specifiers.
        # Check that the number of arguments passed to the RHS of
        # the % operator matches the number required by the format
        # string.
        args_elts = ()
        if isinstance(args, astroid.Tuple):
            rhs_tuple = utils.safe_infer(args)
            num_args = None
            if hasattr(rhs_tuple, "elts"):
                args_elts = rhs_tuple.elts
                num_args = len(args_elts)
        elif isinstance(args, (OTHER_NODES, (astroid.Dict, astroid.DictComp))):
            args_elts = [args]
            num_args = 1
        else:
            # The RHS of the format specifier is a name or
            # expression. It could be a tuple of unknown size, so
            # there's nothing we can check.
            num_args = None
        if num_args is not None:
            if num_args > required_num_args:
                self.add_message("too-many-format-args", node=node)
            elif num_args < required_num_args:
                self.add_message("too-few-format-args", node=node)
        for arg, format_type in zip(args_elts, required_arg_types):
            if not arg:
                continue
            arg_type = utils.safe_infer(arg)
            if arg_type not in (
                None,
                astroid.Uninferable,
            ) and not arg_matches_format_type(arg_type, format_type):
                self.add_message(
                    "bad-string-format-type",
                    node=node,
                    args=(arg_type.pytype(), format_type),
                )
|
||||
|
||||
@check_messages("f-string-without-interpolation")
def visit_joinedstr(self, node):
    """Warn about f-strings that interpolate nothing at all."""
    # A JoinedStr nested inside a FormattedValue belongs to an outer
    # f-string; only the outermost one is checked.
    if isinstance(node.parent, astroid.FormattedValue):
        return
    has_interpolation = any(
        isinstance(part, astroid.FormattedValue) for part in node.values
    )
    if not has_interpolation:
        self.add_message("f-string-without-interpolation", node=node)
|
||||
|
||||
@check_messages(*MSGS)
def visit_call(self, node):
    """Check str/bytes method calls: strip-family duplicates and ``.format``."""
    func = utils.safe_infer(node.func)
    if not isinstance(func, astroid.BoundMethod):
        return
    if not isinstance(func.bound, astroid.Instance):
        return
    if func.bound.name not in ("str", "unicode", "bytes"):
        return
    if func.name in ("strip", "lstrip", "rstrip") and node.args:
        stripped = utils.safe_infer(node.args[0])
        if not isinstance(stripped, astroid.Const) or not isinstance(
            stripped.value, str
        ):
            return
        # A duplicated character in the strip set suggests the caller
        # thinks strip() removes a substring rather than a character set.
        if len(stripped.value) != len(set(stripped.value)):
            self.add_message(
                "bad-str-strip-call",
                node=node,
                args=(func.bound.name, func.name),
            )
    elif func.name == "format":
        self._check_new_format(node, func)
|
||||
|
||||
def _detect_vacuous_formatting(self, node, positional_arguments):
    """Warn when the same name is passed positionally more than once."""
    occurrences = {}
    for arg in positional_arguments:
        if isinstance(arg, astroid.Name):
            occurrences[arg.name] = occurrences.get(arg.name, 0) + 1
    for name, count in occurrences.items():
        if count == 1:
            continue
        self.add_message(
            "duplicate-string-formatting-argument", node=node, args=(name,)
        )
|
||||
|
||||
def _check_new_format(self, node, func):
    """Check a PEP 3101 ``str.format`` call against its format string."""
    # Skip format nodes which don't have an explicit string on the
    # left side of the format operation.
    # We do this because our inference engine can't properly handle
    # redefinitions of the original string.
    # Note that there may not be any left side at all, if the format method
    # has been assigned to another variable. See issue 351. For example:
    #
    #    fmt = 'some string {}'.format
    #    fmt('arg')
    if isinstance(node.func, astroid.Attribute) and not isinstance(
        node.func.expr, astroid.Const
    ):
        return
    # Star-args / double-star-args make the argument count unknowable.
    if node.starargs or node.kwargs:
        return
    try:
        strnode = next(func.bound.infer())
    except astroid.InferenceError:
        return
    if not (isinstance(strnode, astroid.Const) and isinstance(strnode.value, str)):
        return
    try:
        call_site = CallSite.from_call(node)
    except astroid.InferenceError:
        return

    try:
        fields, num_args, manual_pos = utils.parse_format_method_string(
            strnode.value
        )
    except utils.IncompleteFormatString:
        self.add_message("bad-format-string", node=node)
        return

    positional_arguments = call_site.positional_arguments
    named_arguments = call_site.keyword_arguments
    named_fields = {field[0] for field in fields if isinstance(field[0], str)}
    if num_args and manual_pos:
        self.add_message("format-combined-specification", node=node)
        return

    check_args = False
    # Consider "{[0]} {[1]}" as num_args.
    num_args += sum(1 for field in named_fields if field == "")
    if named_fields:
        for field in named_fields:
            if field and field not in named_arguments:
                self.add_message(
                    "missing-format-argument-key", node=node, args=(field,)
                )
        for field in named_arguments:
            if field not in named_fields:
                self.add_message(
                    "unused-format-string-argument", node=node, args=(field,)
                )
        # num_args can be 0 if manual_pos is not.
        num_args = num_args or manual_pos
        if positional_arguments or num_args:
            empty = any(True for field in named_fields if field == "")
            if named_arguments or empty:
                # Verify the required number of positional arguments
                # only if the .format got at least one keyword argument.
                # This means that the format strings accepts both
                # positional and named fields and we should warn
                # when one of the them is missing or is extra.
                check_args = True
    else:
        check_args = True
    if check_args:
        # num_args can be 0 if manual_pos is not.
        num_args = num_args or manual_pos
        if len(positional_arguments) > num_args:
            self.add_message("too-many-format-args", node=node)
        elif len(positional_arguments) < num_args:
            self.add_message("too-few-format-args", node=node)

    self._detect_vacuous_formatting(node, positional_arguments)
    self._check_new_format_specifiers(node, fields, named_arguments)
|
||||
|
||||
def _check_new_format_specifiers(self, node, fields, named):
    """
    Check attribute and index access in the format
    string ("{0.a}" and "{0[a]}").

    For each field, the corresponding call argument is inferred and the
    chain of attribute/item specifiers is walked on the inferred object,
    emitting missing-format-attribute / invalid-format-index on failure.
    """
    for key, specifiers in fields:
        # Obtain the argument. If it can't be obtained
        # or inferred, skip this check.
        if key == "":
            # {[0]} will have an unnamed argument, defaulting
            # to 0. It will not be present in `named`, so use the value
            # 0 for it.
            key = 0
        if isinstance(key, numbers.Number):
            try:
                argname = utils.get_argument_from_call(node, key)
            except utils.NoSuchArgumentError:
                continue
        else:
            if key not in named:
                continue
            argname = named[key]
        if argname in (astroid.Uninferable, None):
            continue
        try:
            argument = utils.safe_infer(argname)
        except astroid.InferenceError:
            continue
        if not specifiers or not argument:
            # No need to check this key if it doesn't
            # use attribute / item access
            continue
        if argument.parent and isinstance(argument.parent, astroid.Arguments):
            # Ignore any object coming from an argument,
            # because we can't infer its value properly.
            continue
        previous = argument
        parsed = []
        for is_attribute, specifier in specifiers:
            if previous is astroid.Uninferable:
                break
            parsed.append((is_attribute, specifier))
            if is_attribute:
                try:
                    previous = previous.getattr(specifier)[0]
                except astroid.NotFoundError:
                    if (
                        hasattr(previous, "has_dynamic_getattr")
                        and previous.has_dynamic_getattr()
                    ):
                        # Don't warn if the object has a custom __getattr__
                        break
                    path = get_access_path(key, parsed)
                    self.add_message(
                        "missing-format-attribute",
                        args=(specifier, path),
                        node=node,
                    )
                    break
            else:
                warn_error = False
                if hasattr(previous, "getitem"):
                    try:
                        previous = previous.getitem(astroid.Const(specifier))
                    except (
                        astroid.AstroidIndexError,
                        astroid.AstroidTypeError,
                        astroid.AttributeInferenceError,
                    ):
                        warn_error = True
                    except astroid.InferenceError:
                        break
                    if previous is astroid.Uninferable:
                        break
                else:
                    try:
                        # Lookup __getitem__ in the current node,
                        # but skip further checks, because we can't
                        # retrieve the looked object
                        previous.getattr("__getitem__")
                        break
                    except astroid.NotFoundError:
                        warn_error = True
                if warn_error:
                    path = get_access_path(key, parsed)
                    self.add_message(
                        "invalid-format-index", args=(specifier, path), node=node
                    )
                    break

            try:
                previous = next(previous.infer())
            except astroid.InferenceError:
                # can't check further if we can't infer it
                break
|
||||
|
||||
|
||||
class StringConstantChecker(BaseTokenChecker):
|
||||
"""Check string literals"""
|
||||
|
||||
__implements__ = (IAstroidChecker, ITokenChecker, IRawChecker)
|
||||
name = "string"
|
||||
msgs = {
|
||||
"W1401": (
|
||||
"Anomalous backslash in string: '%s'. "
|
||||
"String constant might be missing an r prefix.",
|
||||
"anomalous-backslash-in-string",
|
||||
"Used when a backslash is in a literal string but not as an escape.",
|
||||
),
|
||||
"W1402": (
|
||||
"Anomalous Unicode escape in byte string: '%s'. "
|
||||
"String constant might be missing an r or u prefix.",
|
||||
"anomalous-unicode-escape-in-string",
|
||||
"Used when an escape like \\u is encountered in a byte "
|
||||
"string where it has no effect.",
|
||||
),
|
||||
"W1404": (
|
||||
"Implicit string concatenation found in %s",
|
||||
"implicit-str-concat",
|
||||
"String literals are implicitly concatenated in a "
|
||||
"literal iterable definition : "
|
||||
"maybe a comma is missing ?",
|
||||
{"old_names": [("W1403", "implicit-str-concat-in-sequence")]},
|
||||
),
|
||||
"W1405": (
|
||||
"Quote delimiter %s is inconsistent with the rest of the file",
|
||||
"inconsistent-quotes",
|
||||
"Quote delimiters are not used consistently throughout a module "
|
||||
"(with allowances made for avoiding unnecessary escaping).",
|
||||
),
|
||||
}
|
||||
options = (
|
||||
(
|
||||
"check-str-concat-over-line-jumps",
|
||||
{
|
||||
"default": False,
|
||||
"type": "yn",
|
||||
"metavar": "<y_or_n>",
|
||||
"help": "This flag controls whether the "
|
||||
"implicit-str-concat should generate a warning "
|
||||
"on implicit string concatenation in sequences defined over "
|
||||
"several lines.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"check-quote-consistency",
|
||||
{
|
||||
"default": False,
|
||||
"type": "yn",
|
||||
"metavar": "<y_or_n>",
|
||||
"help": "This flag controls whether inconsistent-quotes generates a "
|
||||
"warning when the character used as a quote delimiter is used "
|
||||
"inconsistently within a module.",
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
# Characters that have a special meaning after a backslash in either
|
||||
# Unicode or byte strings.
|
||||
ESCAPE_CHARACTERS = "abfnrtvx\n\r\t\\'\"01234567"
|
||||
|
||||
# Characters that have a special meaning after a backslash but only in
|
||||
# Unicode strings.
|
||||
UNICODE_ESCAPE_CHARACTERS = "uUN"
|
||||
|
||||
def __init__(self, *args, **kwargs):
    """Initialize the checker and its per-module string-token cache."""
    super().__init__(*args, **kwargs)
    # Maps a string token's (row, col) start position to a pair of
    # (evaluated token value, next non-trivia token); filled by
    # process_tokens and consulted when visiting AST nodes.
    self.string_tokens = {}
||||
def process_module(self, module):
    """Record whether the module enables ``unicode_literals`` via __future__."""
    self._unicode_literals = "unicode_literals" in module.future_imports
||||
def process_tokens(self, tokens):
    """Inspect the token stream for string-related problems.

    Checks every STRING token for bad escapes, records each string token's
    start position (with its evaluated value and the following significant
    token) in ``self.string_tokens`` for later implicit-concatenation
    detection, and optionally checks quote-delimiter consistency.
    """
    encoding = "ascii"
    for i, (tok_type, token, start, _, line) in enumerate(tokens):
        if tok_type == tokenize.ENCODING:
            # this is always the first token processed
            encoding = token
        elif tok_type == tokenize.STRING:
            # 'token' is the whole un-parsed token; we can look at the start
            # of it to see whether it's a raw or unicode string etc.
            self.process_string_token(token, start[0])
            # We figure the next token, ignoring comments & newlines:
            j = i + 1
            while j < len(tokens) and tokens[j].type in (
                tokenize.NEWLINE,
                tokenize.NL,
                tokenize.COMMENT,
            ):
                j += 1
            next_token = tokens[j] if j < len(tokens) else None
            if encoding != "ascii":
                # We convert `tokenize` character count into a byte count,
                # to match with astroid `.col_offset`
                start = (start[0], len(line[: start[1]].encode(encoding)))
            self.string_tokens[start] = (str_eval(token), next_token)

    if self.config.check_quote_consistency:
        self.check_for_consistent_string_delimiters(tokens)
||||
@check_messages("implicit-str-concat")
def visit_list(self, node):
    """Check a list literal for implicitly concatenated string elements."""
    self.check_for_concatenated_strings(node.elts, "list")
||||
@check_messages("implicit-str-concat")
def visit_set(self, node):
    """Check a set literal for implicitly concatenated string elements."""
    self.check_for_concatenated_strings(node.elts, "set")
||||
@check_messages("implicit-str-concat")
def visit_tuple(self, node):
    """Check a tuple literal for implicitly concatenated string elements."""
    self.check_for_concatenated_strings(node.elts, "tuple")
||||
def visit_assign(self, node):
    """Check the right-hand side of an assignment for implicit concatenation."""
    value = node.value
    # Only a single string constant on the RHS is of interest here.
    if isinstance(value, astroid.Const) and isinstance(value.value, str):
        self.check_for_concatenated_strings([value], "assignment")
||||
def check_for_consistent_string_delimiters(
    self, tokens: Iterable[tokenize.TokenInfo]
) -> None:
    """Add a message for each string using an inconsistent quote delimiter.

    Quote delimiters are inconsistent if " and ' are mixed in a module's
    shortstrings without having done so to avoid escaping an internal
    quote character.

    Args:
        tokens: The tokens to be checked for consistent quote usage.
    """
    delimiter_counts = collections.Counter()  # type: typing.Counter[str]

    # Pass 1: tally the delimiter of every string whose quote character
    # was a free choice (i.e. not forced by an interior quote and not a
    # long string).
    for tok_type, token, _, _, _ in tokens:
        if tok_type == tokenize.STRING and _is_quote_delimiter_chosen_freely(token):
            delimiter_counts[_get_quote_delimiter(token)] += 1

    if len(delimiter_counts) <= 1:
        # Only one delimiter style in use: nothing to report.
        return

    # Ties are broken arbitrarily by most_common().
    dominant_delimiter = delimiter_counts.most_common(1)[0][0]

    # Pass 2: flag every freely-chosen delimiter that disagrees with the
    # dominant one.
    for tok_type, token, start, _, _ in tokens:
        if tok_type != tokenize.STRING:
            continue
        delimiter = _get_quote_delimiter(token)
        if delimiter == dominant_delimiter:
            continue
        if _is_quote_delimiter_chosen_freely(token):
            self.add_message(
                "inconsistent-quotes", line=start[0], args=(delimiter,)
            )
||||
def check_for_concatenated_strings(self, elements, iterable_type):
    """Emit implicit-str-concat for constants built from adjacent string tokens.

    elements: AST nodes to inspect (iterable elements, or a one-element
        list holding an assignment's value).
    iterable_type: label interpolated into the message ("list", "set",
        "tuple" or "assignment").
    """
    for elt in elements:
        # Only plain str/bytes constants are candidates.
        if not (isinstance(elt, Const) and elt.pytype() in _AST_NODE_STR_TYPES):
            continue
        if elt.col_offset < 0:
            # This can happen in case of escaped newlines
            continue
        if (elt.lineno, elt.col_offset) not in self.string_tokens:
            # This may happen with Latin1 encoding
            # cf. https://github.com/PyCQA/pylint/issues/2610
            continue
        matching_token, next_token = self.string_tokens[
            (elt.lineno, elt.col_offset)
        ]
        # We detect string concatenation: the AST Const is the
        # combination of 2 string tokens
        if matching_token != elt.value and next_token is not None:
            # Same-line concatenation is always reported; concatenation
            # across lines only when the option is enabled.
            if next_token.type == tokenize.STRING and (
                next_token.start[0] == elt.lineno
                or self.config.check_str_concat_over_line_jumps
            ):
                self.add_message(
                    "implicit-str-concat", line=elt.lineno, args=(iterable_type,)
                )
||||
def process_string_token(self, token, start_row):
    """Strip prefix and quotes from a raw string token and check its body.

    token: the complete un-parsed string token, including any prefix
        (r, b, u, f, ...) and quote characters.
    start_row: line number at which the token starts.
    """
    # Locate the opening quote; everything before it is the prefix.
    quote_pos = next(
        (i for i, char in enumerate(token) if char in "'\""), None
    )
    if quote_pos is None:
        # Not a quoted token at all; nothing to check.
        return

    quote_char = token[quote_pos]
    prefix = token[:quote_pos].lower()  # markers like u, b, r.
    after_prefix = token[quote_pos:]
    triple = quote_char * 3
    if after_prefix.startswith(triple) and after_prefix.endswith(triple):
        string_body = after_prefix[3:-3]
    else:
        string_body = after_prefix[1:-1]  # Chop off quotes
    # No special checks on raw strings at the moment.
    if "r" not in prefix:
        self.process_non_raw_string_token(prefix, string_body, start_row)
||||
def process_non_raw_string_token(self, prefix, string_body, start_row):
    """Check for bad escapes in a non-raw string.

    prefix: lowercase string of e.g. 'ur' string prefix markers.
    string_body: the un-parsed body of the string, not including the quote
        marks.
    start_row: integer line number in the source.
    """
    # Walk through the string; if we see a backslash then escape the next
    # character, and skip over it.  If we see a non-escaped character,
    # alert, and continue.
    #
    # Accept a backslash when it escapes a backslash, or a quote, or
    # end-of-line, or one of the letters that introduce a special escape
    # sequence <http://docs.python.org/reference/lexical_analysis.html>
    #
    index = 0
    while True:
        index = string_body.find("\\", index)
        if index == -1:
            break
        # There must be a next character; having a backslash at the end
        # of the string would be a SyntaxError.
        next_char = string_body[index + 1]
        match = string_body[index : index + 2]
        # \u, \U, \N escapes: fine in unicode strings (explicit 'u' prefix
        # or no 'b' prefix), anomalous in byte strings.
        if next_char in self.UNICODE_ESCAPE_CHARACTERS:
            if "u" in prefix:
                pass
            elif "b" not in prefix:
                pass  # unicode by default
            else:
                # NOTE(review): col_offset here is the index within the
                # string body, not the source-line column — confirm this is
                # what message consumers expect.
                self.add_message(
                    "anomalous-unicode-escape-in-string",
                    line=start_row,
                    args=(match,),
                    col_offset=index,
                )
        elif next_char not in self.ESCAPE_CHARACTERS:
            self.add_message(
                "anomalous-backslash-in-string",
                line=start_row,
                args=(match,),
                col_offset=index,
            )
        # Whether it was a valid escape or not, backslash followed by
        # another character can always be consumed whole: the second
        # character can never be the start of a new backslash escape.
        index += 2
||||
def register(linter):
    """Required method to auto register this checker."""
    for checker_class in (StringFormatChecker, StringConstantChecker):
        linter.register_checker(checker_class(linter))
||||
def str_eval(token):
    """
    Mostly replicate `ast.literal_eval(token)` manually to avoid any performance hit.

    This supports f-strings and byte strings, contrary to `ast.literal_eval`.
    We have to support all string literal notations:
    https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals

    Escape sequences are NOT interpreted: the raw text between the quotes
    is returned.
    """
    # Skip any legal prefix (r, u, f, b and their combinations, in any
    # case) by scanning to the first quote character.  The previous
    # explicit prefix whitelist missed byte-string prefixes (b, rb, br,
    # Rb, ...), leaving the opening quote in the returned value.
    first_quote = 0
    while first_quote < len(token) and token[first_quote] not in "\"'":
        first_quote += 1
    token = token[first_quote:]
    if token[0:3] in ('"""', "'''"):
        return token[3:-3]
    return token[1:-1]
||||
def _is_long_string(string_token: str) -> bool:
    """Is this string token a "longstring" (is it triple-quoted)?

    Long strings are triple-quoted as defined in
    https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals

    This function only checks characters up through the open quotes.  Because
    it's meant to be applied only to tokens that represent string literals, it
    doesn't bother to check for close-quotes (demonstrating that the literal
    is a well-formed string).

    Args:
        string_token: The string token to be parsed.

    Returns:
        A boolean representing whether or not this token matches a longstring
        regex.
    """
    return any(
        regex.match(string_token)
        for regex in (SINGLE_QUOTED_REGEX, DOUBLE_QUOTED_REGEX)
    )
||||
def _get_quote_delimiter(string_token: str) -> str:
    """Returns the quote character used to delimit this token string.

    This function does little checking for whether the token is a well-formed
    string.

    Args:
        string_token: The token to be parsed.

    Returns:
        A string containing solely the first quote delimiter character in the
        passed string.

    Raises:
        ValueError: No quote delimiter characters are present.
    """
    match = QUOTE_DELIMITER_REGEX.match(string_token)
    if match:
        return match.group(2)
    raise ValueError("string token %s is not a well-formed string" % string_token)
||||
def _is_quote_delimiter_chosen_freely(string_token: str) -> bool:
    """Was there a non-awkward option for the quote delimiter?

    Args:
        string_token: The quoted string whose delimiters are to be checked.

    Returns:
        Whether there was a choice in this token's quote character that would
        not have involved backslash-escaping an interior quote character.
        Long strings are excepted from this analysis under the assumption
        that their quote characters are set by policy.
    """
    used_delimiter = _get_quote_delimiter(string_token)
    if not used_delimiter or _is_long_string(string_token):
        return False
    # The delimiter was a free choice only if the other quote character
    # does not appear inside the string body.
    other_delimiter = '"' if used_delimiter == "'" else "'"
    return other_delimiter not in str_eval(string_token)
||||
1862
venv/lib/python3.8/site-packages/pylint/checkers/typecheck.py
Normal file
1862
venv/lib/python3.8/site-packages/pylint/checkers/typecheck.py
Normal file
File diff suppressed because it is too large
Load Diff
1295
venv/lib/python3.8/site-packages/pylint/checkers/utils.py
Normal file
1295
venv/lib/python3.8/site-packages/pylint/checkers/utils.py
Normal file
File diff suppressed because it is too large
Load Diff
2076
venv/lib/python3.8/site-packages/pylint/checkers/variables.py
Normal file
2076
venv/lib/python3.8/site-packages/pylint/checkers/variables.py
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user