Initial commit
This commit is contained in:
44
venv/lib/python3.8/site-packages/pylint/__init__.py
Normal file
44
venv/lib/python3.8/site-packages/pylint/__init__.py
Normal file
@@ -0,0 +1,44 @@
|
||||
# Copyright (c) 2008, 2012 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014, 2016-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2020 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
import sys
|
||||
|
||||
from pylint.__pkginfo__ import version as __version__
|
||||
|
||||
# pylint: disable=import-outside-toplevel
|
||||
|
||||
|
||||
def run_pylint():
    """Console-script entry point: lint the modules named on the command line.

    Exits with status 1 when interrupted from the keyboard.
    """
    # Imported lazily so that merely importing ``pylint`` stays cheap.
    from pylint.lint import Run as PylintRun

    arguments = sys.argv[1:]
    try:
        PylintRun(arguments)
    except KeyboardInterrupt:
        sys.exit(1)
|
||||
|
||||
|
||||
def run_epylint():
    """Console-script entry point for ``epylint``."""
    # Deferred import keeps ``import pylint`` lightweight.
    from pylint.epylint import Run as EpylintRun

    EpylintRun()
|
||||
|
||||
|
||||
def run_pyreverse():
    """Console-script entry point for ``pyreverse`` (UML diagram generator)."""
    # Deferred import: the pyreverse machinery is only needed when invoked.
    from pylint.pyreverse.main import Run as PyreverseRun

    cli_args = sys.argv[1:]
    PyreverseRun(cli_args)
|
||||
|
||||
|
||||
def run_symilar():
    """Console-script entry point for ``symilar`` (similarities checker)."""
    # Deferred import: only load the similar checker when actually run.
    from pylint.checkers.similar import Run as SimilarRun

    cli_args = sys.argv[1:]
    SimilarRun(cli_args)
|
||||
18
venv/lib/python3.8/site-packages/pylint/__main__.py
Normal file
18
venv/lib/python3.8/site-packages/pylint/__main__.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING

# NOTE(review): this shebang is not the first line of the file, so it is
# inert; it would have to be on line 1 to have any effect.
#!/usr/bin/env python
import os
import sys

import pylint

# Strip out the current working directory from sys.path.
# Having the working directory in `sys.path` means that `pylint` might
# inadvertently import user code from modules having the same name as
# stdlib or pylint's own modules.
# CPython issue: https://bugs.python.org/issue33053
if sys.path[0] == "" or sys.path[0] == os.getcwd():
    sys.path.pop(0)

# Run the linter with the remaining command-line arguments
# (this is the `python -m pylint` entry point).
pylint.run_pylint()
|
||||
97
venv/lib/python3.8/site-packages/pylint/__pkginfo__.py
Normal file
97
venv/lib/python3.8/site-packages/pylint/__pkginfo__.py
Normal file
@@ -0,0 +1,97 @@
|
||||
# -*- coding: utf-8 -*-
# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2010 Julien Jehannet <julien.jehannet@logilab.fr>
# Copyright (c) 2013-2014 Google, Inc.
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Brett Cannon <brett@python.org>
# Copyright (c) 2014 Ricardo Gemignani <ricardo.gemignani@gmail.com>
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
# Copyright (c) 2016 Florian Bruhin <git@the-compiler.org>
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
# Copyright (c) 2017-2018 Hugo <hugovk@users.noreply.github.com>
# Copyright (c) 2018-2019 Ashley Whetter <ashley@awhetter.co.uk>
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
# Copyright (c) 2019 Ville Skyttä <ville.skytta@iki.fi>
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
# Copyright (c) 2019 Dan Hemberger <846186+hemberger@users.noreply.github.com>
# Copyright (c) 2019 jab <jab@users.noreply.github.com>

# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING

# `license` below deliberately shadows the builtin, hence the disable.
# pylint: disable=redefined-builtin,invalid-name
"""pylint packaging information"""


from os.path import join

# For an official release, use dev_version = None
numversion = (2, 5, 0)
dev_version = None

# Human-readable version string, e.g. "2.5.0" or "2.5.0-dev1".
version = ".".join(str(num) for num in numversion)
if dev_version is not None:
    version += "-dev" + str(dev_version)

# Runtime dependencies, as consumed by setup.py.
install_requires = [
    "astroid>=2.4.0,<=2.5",
    "isort>=4.2.5,<5",
    "mccabe>=0.6,<0.7",
    "toml>=0.7.1",
]

dependency_links = []  # type: ignore

# Platform-conditional extras: colorama provides colored output on Windows.
extras_require = {}
extras_require[':sys_platform=="win32"'] = ["colorama"]

# Distribution metadata.
license = "GPL"
description = "python code static checker"
web = "https://github.com/PyCQA/pylint"
mailinglist = "mailto:code-quality@python.org"
author = "Python Code Quality Authority"
author_email = "code-quality@python.org"

# PyPI trove classifiers.
classifiers = [
    "Development Status :: 6 - Mature",
    "Environment :: Console",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: GNU General Public License (GPL)",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.5",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
    "Topic :: Software Development :: Debuggers",
    "Topic :: Software Development :: Quality Assurance",
    "Topic :: Software Development :: Testing",
]


# Long description shown on PyPI.
long_desc = """\
Pylint is a Python source code analyzer which looks for programming
errors, helps enforcing a coding standard and sniffs for some code
smells (as defined in Martin Fowler's Refactoring book)
.
Pylint can be seen as another PyChecker since nearly all tests you
can do with PyChecker can also be done with Pylint. However, Pylint
offers some more features, like checking length of lines of code,
checking if variable names are well-formed according to your coding
standard, or checking if declared interfaces are truly implemented,
and much more.
.
Additionally, it is possible to write plugins to add your own checks.
.
Pylint is shipped with "pyreverse" (UML diagram generator)
and "symilar" (an independent similarities checker)."""

# Console scripts installed under bin/.
scripts = [
    join("bin", filename) for filename in ("pylint", "symilar", "epylint", "pyreverse")
]
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
67
venv/lib/python3.8/site-packages/pylint/checkers/__init__.py
Normal file
67
venv/lib/python3.8/site-packages/pylint/checkers/__init__.py
Normal file
@@ -0,0 +1,67 @@
|
||||
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
|
||||
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018-2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2019 Bruno P. Kinoshita <kinow@users.noreply.github.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""utilities methods and classes for checkers
|
||||
|
||||
Base id of standard checkers (used in msg and report ids):
|
||||
01: base
|
||||
02: classes
|
||||
03: format
|
||||
04: import
|
||||
05: misc
|
||||
06: variables
|
||||
07: exceptions
|
||||
08: similar
|
||||
09: design_analysis
|
||||
10: newstyle
|
||||
11: typecheck
|
||||
12: logging
|
||||
13: string_format
|
||||
14: string_constant
|
||||
15: stdlib
|
||||
16: python3
|
||||
17: refactoring
|
||||
18-50: not yet used: reserved for future internal checkers.
|
||||
51-99: perhaps used: reserved for external checkers
|
||||
|
||||
The raw_metrics checker has no number associated since it doesn't emit any
|
||||
messages nor reports. XXX not true, emit a 07 report !
|
||||
|
||||
"""
|
||||
|
||||
from pylint.checkers.base_checker import BaseChecker, BaseTokenChecker
|
||||
from pylint.utils import register_plugins
|
||||
|
||||
|
||||
def table_lines_from_stats(stats, _, columns):
    """Format the values of *columns* found in *stats* for a ureport.Table.

    For every requested column four cells are produced: the column name with
    underscores replaced by spaces, the formatted value (floats rendered with
    three decimals), and two "NC" placeholders.  The second positional
    argument is accepted only for interface compatibility and is ignored.
    """

    def _render(value):
        # Floats get a fixed three-decimal rendering; anything else is str().
        if isinstance(value, float):
            return "%.3f" % value
        return str(value)

    cells = []
    for column in columns:
        cells.extend((column.replace("_", " "), _render(stats[column]), "NC", "NC"))
    return cells
|
||||
|
||||
|
||||
def initialize(linter):
    """Register every checker shipped in this package on *linter*."""
    package_dir = __path__[0]
    register_plugins(linter, package_dir)
|
||||
|
||||
|
||||
__all__ = ("BaseChecker", "BaseTokenChecker", "initialize")
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
90
venv/lib/python3.8/site-packages/pylint/checkers/async.py
Normal file
90
venv/lib/python3.8/site-packages/pylint/checkers/async.py
Normal file
@@ -0,0 +1,90 @@
|
||||
# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2017 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checker for anything related to the async protocol (PEP 492)."""
|
||||
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
from astroid import bases, exceptions
|
||||
|
||||
from pylint import checkers, interfaces, utils
|
||||
from pylint.checkers import utils as checker_utils
|
||||
from pylint.checkers.utils import decorated_with
|
||||
|
||||
|
||||
class AsyncChecker(checkers.BaseChecker):
    """Checker for ``async``/``await`` constructs (PEP 492).

    Emits E1700 (yield inside an async function) and E1701 (``async with``
    on an object that does not implement __aenter__/__aexit__).
    """

    __implements__ = interfaces.IAstroidChecker
    name = "async"
    msgs = {
        "E1700": (
            "Yield inside async function",
            "yield-inside-async-function",
            "Used when an `yield` or `yield from` statement is "
            "found inside an async function.",
            {"minversion": (3, 5)},
        ),
        "E1701": (
            "Async context manager '%s' doesn't implement __aenter__ and __aexit__.",
            "not-async-context-manager",
            "Used when an async context manager is used with an object "
            "that does not implement the async context management protocol.",
            {"minversion": (3, 5)},
        ),
    }

    def open(self):
        """Cache configuration before modules are visited."""
        # Honour the global ignore-mixin-members option when checking E1701.
        self._ignore_mixin_members = utils.get_global_option(
            self, "ignore-mixin-members"
        )
        # Decorators that turn an async generator function into a valid
        # async context manager.
        self._async_generators = ["contextlib.asynccontextmanager"]

    @checker_utils.check_messages("yield-inside-async-function")
    def visit_asyncfunctiondef(self, node):
        """Flag yields that belong directly to an async function's scope."""
        # On 3.5 any yield inside the async function is flagged; on later
        # versions only `yield from` is (plain yields define async generators).
        for child in node.nodes_of_class(astroid.Yield):
            if child.scope() is node and (
                sys.version_info[:2] == (3, 5) or isinstance(child, astroid.YieldFrom)
            ):
                self.add_message("yield-inside-async-function", node=child)

    @checker_utils.check_messages("not-async-context-manager")
    def visit_asyncwith(self, node):
        """Verify every context manager in an ``async with`` supports the protocol."""
        for ctx_mgr, _ in node.items:
            inferred = checker_utils.safe_infer(ctx_mgr)
            # Skip anything astroid could not confidently infer.
            if inferred is None or inferred is astroid.Uninferable:
                continue

            if isinstance(inferred, bases.AsyncGenerator):
                # Check if we are dealing with a function decorated
                # with contextlib.asynccontextmanager.
                if decorated_with(inferred.parent, self._async_generators):
                    continue
            else:
                try:
                    # Both halves of the async context manager protocol
                    # must be present.
                    inferred.getattr("__aenter__")
                    inferred.getattr("__aexit__")
                except exceptions.NotFoundError:
                    if isinstance(inferred, astroid.Instance):
                        # If we do not know the bases of this class,
                        # just skip it.
                        if not checker_utils.has_known_bases(inferred):
                            continue
                        # Just ignore mixin classes.
                        if self._ignore_mixin_members:
                            if inferred.name[-5:].lower() == "mixin":
                                continue
                    else:
                        continue

                    self.add_message(
                        "not-async-context-manager", node=node, args=(inferred.name,)
                    )
|
||||
|
||||
|
||||
def register(linter):
    """Auto-registration hook: attach this module's checker to *linter*."""
    checker = AsyncChecker(linter)
    linter.register_checker(checker)
|
||||
2502
venv/lib/python3.8/site-packages/pylint/checkers/base.py
Normal file
2502
venv/lib/python3.8/site-packages/pylint/checkers/base.py
Normal file
File diff suppressed because it is too large
Load Diff
190
venv/lib/python3.8/site-packages/pylint/checkers/base_checker.py
Normal file
190
venv/lib/python3.8/site-packages/pylint/checkers/base_checker.py
Normal file
@@ -0,0 +1,190 @@
|
||||
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
|
||||
# Copyright (c) 2014-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018-2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2019 Bruno P. Kinoshita <kinow@users.noreply.github.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
from inspect import cleandoc
|
||||
from typing import Any
|
||||
|
||||
from pylint.config import OptionsProviderMixIn
|
||||
from pylint.constants import _MSG_ORDER, WarningScope
|
||||
from pylint.exceptions import InvalidMessageError
|
||||
from pylint.interfaces import UNDEFINED, IRawChecker, ITokenChecker, implements
|
||||
from pylint.message.message_definition import MessageDefinition
|
||||
from pylint.utils import get_rst_section, get_rst_title
|
||||
|
||||
|
||||
class BaseChecker(OptionsProviderMixIn):
    """Base class for all pylint checkers.

    Subclasses declare the ``name``, ``options``, ``msgs`` and ``reports``
    class attributes and are registered on a linter instance.
    """

    # checker name (you may reuse an existing one)
    name = None  # type: str
    # options level (0 will be displaying in --help, 1 in --long-help)
    level = 1
    # ordered list of options to control the checker behaviour
    options = ()  # type: Any
    # messages issued by this checker
    msgs = {}  # type: Any
    # reports issued by this checker
    reports = ()  # type: Any
    # mark this checker as enabled or not.
    enabled = True

    def __init__(self, linter=None):
        """checker instances should have the linter as argument

        :param ILinter linter: is an object implementing ILinter."""
        # Checker names are normalised to lowercase.
        if self.name is not None:
            self.name = self.name.lower()
        OptionsProviderMixIn.__init__(self)
        self.linter = linter

    def __gt__(self, other):
        """Permit to sort a list of Checker by name."""
        # Compare on the concatenation of name and msgs so checkers sharing
        # a name still order deterministically.
        return "{}{}".format(self.name, self.msgs).__gt__(
            "{}{}".format(other.name, other.msgs)
        )

    def __repr__(self):
        """Terse description: enabled state, name and handled message ids."""
        status = "Checker" if self.enabled else "Disabled checker"
        return "{} '{}' (responsible for '{}')".format(
            status, self.name, "', '".join(self.msgs.keys())
        )

    def __str__(self):
        """This might be incomplete because multiple class inheriting BaseChecker
        can have the same name. Cf MessageHandlerMixIn.get_full_documentation()"""
        return self.get_full_documentation(
            msgs=self.msgs, options=self.options_and_values(), reports=self.reports
        )

    def get_full_documentation(self, msgs, options, reports, doc=None, module=None):
        """Render this checker's documentation as a reStructuredText string.

        :param msgs: message-definition dict (same shape as ``self.msgs``)
        :param options: iterable of (name, optdict, value) option triples
        :param reports: iterable of report tuples
        :param doc: optional free-form documentation text
        :param module: optional providing-module name, used as link anchor
        """
        result = ""
        checker_title = "%s checker" % (self.name.replace("_", " ").title())
        if module:
            # Provide anchor to link against
            result += ".. _%s:\n\n" % module
        result += "%s\n" % get_rst_title(checker_title, "~")
        if module:
            result += "This checker is provided by ``%s``.\n" % module
        result += "Verbatim name of the checker is ``%s``.\n\n" % self.name
        if doc:
            # Provide anchor to link against
            result += get_rst_title("{} Documentation".format(checker_title), "^")
            result += "%s\n\n" % cleandoc(doc)
        # options might be an empty generator and not be False when casted to boolean
        options = list(options)
        if options:
            result += get_rst_title("{} Options".format(checker_title), "^")
            result += "%s\n" % get_rst_section(None, options)
        if msgs:
            result += get_rst_title("{} Messages".format(checker_title), "^")
            # Sort by the severity letter's rank in _MSG_ORDER, then content.
            for msgid, msg in sorted(
                msgs.items(), key=lambda kv: (_MSG_ORDER.index(kv[0][0]), kv[1])
            ):
                msg = self.create_message_definition_from_tuple(msgid, msg)
                result += "%s\n" % msg.format_help(checkerref=False)
            result += "\n"
        if reports:
            result += get_rst_title("{} Reports".format(checker_title), "^")
            for report in reports:
                result += ":%s: %s\n" % report[:2]
            result += "\n"
        result += "\n"
        return result

    def add_message(
        self, msgid, line=None, node=None, args=None, confidence=None, col_offset=None
    ):
        """Relay a message to the linter, defaulting confidence to UNDEFINED."""
        if not confidence:
            confidence = UNDEFINED
        self.linter.add_message(msgid, line, node, args, confidence, col_offset)

    def check_consistency(self):
        """Check the consistency of msgid.

        msg ids for a checker should be a string of len 4, where the two first
        characters are the checker id and the two last the msg id in this
        checker.

        :raises InvalidMessageError: If the checker id in the messages are not
        always the same. """
        checker_id = None
        existing_ids = []
        # Characters 1-2 of a msgid (after the severity letter) identify
        # the checker; every message must agree on them.
        for message in self.messages:
            if checker_id is not None and checker_id != message.msgid[1:3]:
                error_msg = "Inconsistent checker part in message id "
                error_msg += "'{}' (expected 'x{checker_id}xx' ".format(
                    message.msgid, checker_id=checker_id
                )
                error_msg += "because we already had {existing_ids}).".format(
                    existing_ids=existing_ids
                )
                raise InvalidMessageError(error_msg)
            checker_id = message.msgid[1:3]
            existing_ids.append(message.msgid)

    def create_message_definition_from_tuple(self, msgid, msg_tuple):
        """Build a MessageDefinition from one entry of the ``msgs`` dict.

        :raises InvalidMessageError: if the tuple has fewer than three items.
        """
        # Raw/token checkers report per line; AST checkers per node.
        if implements(self, (IRawChecker, ITokenChecker)):
            default_scope = WarningScope.LINE
        else:
            default_scope = WarningScope.NODE
        options = {}
        if len(msg_tuple) > 3:
            (msg, symbol, descr, options) = msg_tuple
        elif len(msg_tuple) > 2:
            (msg, symbol, descr) = msg_tuple
        else:
            error_msg = """Messages should have a msgid and a symbol. Something like this :

"W1234": (
    "message",
    "message-symbol",
    "Message description with detail.",
    ...
),
"""
            raise InvalidMessageError(error_msg)
        options.setdefault("scope", default_scope)
        return MessageDefinition(self, msgid, msg, descr, symbol, **options)

    @property
    def messages(self) -> list:
        """All MessageDefinitions of this checker, sorted by msgid."""
        return [
            self.create_message_definition_from_tuple(msgid, msg_tuple)
            for msgid, msg_tuple in sorted(self.msgs.items())
        ]

    # dummy methods implementing the IChecker interface

    def get_message_definition(self, msgid):
        """Return the MessageDefinition for *msgid*.

        :raises InvalidMessageError: if this checker has no such message.
        """
        for message_definition in self.messages:
            if message_definition.msgid == msgid:
                return message_definition
        error_msg = "MessageDefinition for '{}' does not exists. ".format(msgid)
        error_msg += "Choose from {}.".format([m.msgid for m in self.messages])
        raise InvalidMessageError(error_msg)

    def open(self):
        """called before visiting project (i.e set of modules)"""

    def close(self):
        """called after visiting project (i.e set of modules)"""
|
||||
|
||||
|
||||
class BaseTokenChecker(BaseChecker):
    """Base class for checkers that inspect the module's raw token stream.

    Concrete subclasses must implement :meth:`process_tokens`.
    """

    def process_tokens(self, tokens):
        """Process the token stream of a module; subclasses must override."""
        raise NotImplementedError()
|
||||
2093
venv/lib/python3.8/site-packages/pylint/checkers/classes.py
Normal file
2093
venv/lib/python3.8/site-packages/pylint/checkers/classes.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,500 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006, 2009-2010, 2012-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012, 2014 Google, Inc.
|
||||
# Copyright (c) 2014-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 ahirnish <ahirnish@gmail.com>
|
||||
# Copyright (c) 2018 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Mark Miller <725mrm@gmail.com>
|
||||
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2018 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2019 Michael Scott Cuthbert <cuthbert@mit.edu>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""check for signs of poor design"""
|
||||
|
||||
import re
|
||||
from collections import defaultdict
|
||||
|
||||
import astroid
|
||||
from astroid import BoolOp, If, decorators
|
||||
|
||||
from pylint import utils
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.checkers.utils import check_messages
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
|
||||
# Message definitions for the design checker: the R09xx family of
# "too many / too few" complexity warnings.  Keys are message ids,
# values are (template, symbol, description) tuples.
MSGS = {
    "R0901": (
        "Too many ancestors (%s/%s)",
        "too-many-ancestors",
        "Used when class has too many parent classes, try to reduce "
        "this to get a simpler (and so easier to use) class.",
    ),
    "R0902": (
        "Too many instance attributes (%s/%s)",
        "too-many-instance-attributes",
        "Used when class has too many instance attributes, try to reduce "
        "this to get a simpler (and so easier to use) class.",
    ),
    "R0903": (
        "Too few public methods (%s/%s)",
        "too-few-public-methods",
        "Used when class has too few public methods, so be sure it's "
        "really worth it.",
    ),
    "R0904": (
        "Too many public methods (%s/%s)",
        "too-many-public-methods",
        "Used when class has too many public methods, try to reduce "
        "this to get a simpler (and so easier to use) class.",
    ),
    "R0911": (
        "Too many return statements (%s/%s)",
        "too-many-return-statements",
        "Used when a function or method has too many return statement, "
        "making it hard to follow.",
    ),
    "R0912": (
        "Too many branches (%s/%s)",
        "too-many-branches",
        "Used when a function or method has too many branches, "
        "making it hard to follow.",
    ),
    "R0913": (
        "Too many arguments (%s/%s)",
        "too-many-arguments",
        "Used when a function or method takes too many arguments.",
    ),
    "R0914": (
        "Too many local variables (%s/%s)",
        "too-many-locals",
        "Used when a function or method has too many local variables.",
    ),
    "R0915": (
        "Too many statements (%s/%s)",
        "too-many-statements",
        "Used when a function or method has too many statements. You "
        "should then split it in smaller functions / methods.",
    ),
    "R0916": (
        "Too many boolean expressions in if statement (%s/%s)",
        "too-many-boolean-expressions",
        "Used when an if statement contains too many boolean expressions.",
    ),
}
# Matches lowercase dunder names such as "__init__".
SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
# Decorator names that mark a class as a dataclass-like container.
DATACLASSES_DECORATORS = frozenset({"dataclass", "attrs"})
# Module whose presence in the file's top-level names signals dataclass use.
DATACLASS_IMPORT = "dataclasses"
# Qualified name of typing.NamedTuple (exempt from too-few-public-methods).
TYPING_NAMEDTUPLE = "typing.NamedTuple"
||||
|
||||
def _is_exempt_from_public_methods(node: astroid.ClassDef) -> bool:
    """Return True when *node* should not trigger too-few-public-methods.

    Enum subclasses, typing.NamedTuple subclasses and dataclass-decorated
    classes legitimately expose few (or no) public methods.
    """
    # Enum / typing.NamedTuple ancestry exempts the class outright.
    for ancestor in node.ancestors():
        is_enum = ancestor.name == "Enum" and ancestor.root().name == "enum"
        if is_enum or ancestor.qname() == TYPING_NAMEDTUPLE:
            return True

    # Without decorators the class cannot be a dataclass.
    if not node.decorators:
        return False

    # The dataclass machinery must actually be visible at module level.
    module_names = set(node.root().locals)
    dataclass_in_scope = (
        bool(module_names.intersection(DATACLASSES_DECORATORS))
        or DATACLASS_IMPORT in module_names
    )
    for decorator in node.decorators.nodes:
        # For ``@dataclass(...)`` calls, look at the callee itself.
        target = decorator.func if isinstance(decorator, astroid.Call) else decorator
        if isinstance(target, astroid.Name):
            decorator_name = target.name
        elif isinstance(target, astroid.Attribute):
            decorator_name = target.attrname
        else:
            continue
        if decorator_name in DATACLASSES_DECORATORS and dataclass_in_scope:
            return True
    return False
|
||||
|
||||
|
||||
def _count_boolean_expressions(bool_op):
    """Recursively count the leaf boolean expressions inside a BoolOp.

    Example: ``a and (b or c or (d and e))`` contains 5 boolean expressions.
    """
    return sum(
        _count_boolean_expressions(child) if isinstance(child, BoolOp) else 1
        for child in bool_op.get_children()
    )
|
||||
|
||||
|
||||
def _count_methods_in_class(node):
    """Count *node*'s public methods, special methods (except __init__) included."""
    public = [method for method in node.methods() if not method.name.startswith("_")]
    total = len(public)
    # Special methods count towards the number of public methods,
    # but don't count towards there being too many methods.
    for method in node.mymethods():
        name = method.name
        if name != "__init__" and SPECIAL_OBJ.search(name):
            total += 1
    return total
|
||||
|
||||
|
||||
class MisdesignChecker(BaseChecker):
|
||||
"""checks for sign of poor/misdesign:
|
||||
* number of methods, attributes, local variables...
|
||||
* size, complexity of functions, methods
|
||||
"""
|
||||
|
||||
__implements__ = (IAstroidChecker,)
|
||||
|
||||
# configuration section name
|
||||
name = "design"
|
||||
# messages
|
||||
msgs = MSGS
|
||||
priority = -2
|
||||
# configuration options
|
||||
options = (
|
||||
(
|
||||
"max-args",
|
||||
{
|
||||
"default": 5,
|
||||
"type": "int",
|
||||
"metavar": "<int>",
|
||||
"help": "Maximum number of arguments for function / method.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"max-locals",
|
||||
{
|
||||
"default": 15,
|
||||
"type": "int",
|
||||
"metavar": "<int>",
|
||||
"help": "Maximum number of locals for function / method body.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"max-returns",
|
||||
{
|
||||
"default": 6,
|
||||
"type": "int",
|
||||
"metavar": "<int>",
|
||||
"help": "Maximum number of return / yield for function / "
|
||||
"method body.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"max-branches",
|
||||
{
|
||||
"default": 12,
|
||||
"type": "int",
|
||||
"metavar": "<int>",
|
||||
"help": "Maximum number of branch for function / method body.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"max-statements",
|
||||
{
|
||||
"default": 50,
|
||||
"type": "int",
|
||||
"metavar": "<int>",
|
||||
"help": "Maximum number of statements in function / method " "body.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"max-parents",
|
||||
{
|
||||
"default": 7,
|
||||
"type": "int",
|
||||
"metavar": "<num>",
|
||||
"help": "Maximum number of parents for a class (see R0901).",
|
||||
},
|
||||
),
|
||||
(
|
||||
"max-attributes",
|
||||
{
|
||||
"default": 7,
|
||||
"type": "int",
|
||||
"metavar": "<num>",
|
||||
"help": "Maximum number of attributes for a class \
|
||||
(see R0902).",
|
||||
},
|
||||
),
|
||||
(
|
||||
"min-public-methods",
|
||||
{
|
||||
"default": 2,
|
||||
"type": "int",
|
||||
"metavar": "<num>",
|
||||
"help": "Minimum number of public methods for a class \
|
||||
(see R0903).",
|
||||
},
|
||||
),
|
||||
(
|
||||
"max-public-methods",
|
||||
{
|
||||
"default": 20,
|
||||
"type": "int",
|
||||
"metavar": "<num>",
|
||||
"help": "Maximum number of public methods for a class \
|
||||
(see R0904).",
|
||||
},
|
||||
),
|
||||
(
|
||||
"max-bool-expr",
|
||||
{
|
||||
"default": 5,
|
||||
"type": "int",
|
||||
"metavar": "<num>",
|
||||
"help": "Maximum number of boolean expressions in an if "
|
||||
"statement (see R0916).",
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
def __init__(self, linter=None):
|
||||
BaseChecker.__init__(self, linter)
|
||||
self.stats = None
|
||||
self._returns = None
|
||||
self._branches = None
|
||||
self._stmts = None
|
||||
|
||||
def open(self):
|
||||
"""initialize visit variables"""
|
||||
self.stats = self.linter.add_stats()
|
||||
self._returns = []
|
||||
self._branches = defaultdict(int)
|
||||
self._stmts = []
|
||||
|
||||
def _inc_all_stmts(self, amount):
|
||||
for i in range(len(self._stmts)):
|
||||
self._stmts[i] += amount
|
||||
|
||||
@decorators.cachedproperty
|
||||
def _ignored_argument_names(self):
|
||||
return utils.get_global_option(self, "ignored-argument-names", default=None)
|
||||
|
||||
@check_messages(
    "too-many-ancestors",
    "too-many-instance-attributes",
    "too-few-public-methods",
    "too-many-public-methods",
)
def visit_classdef(self, node):
    """Check the inheritance depth and the instance-attribute count."""
    ancestor_count = len(list(node.ancestors()))
    max_parents = self.config.max_parents
    if ancestor_count > max_parents:
        self.add_message(
            "too-many-ancestors", node=node, args=(ancestor_count, max_parents)
        )

    attr_count = len(node.instance_attrs)
    max_attributes = self.config.max_attributes
    if attr_count > max_attributes:
        self.add_message(
            "too-many-instance-attributes",
            node=node,
            args=(attr_count, max_attributes),
        )
|
||||
|
||||
@check_messages("too-few-public-methods", "too-many-public-methods")
def leave_classdef(self, node):
    """check number of public methods"""
    # Public methods defined directly on this class; a leading "_" marks
    # a method as non-public by convention.
    my_methods = sum(
        1 for method in node.mymethods() if not method.name.startswith("_")
    )

    # Does the class contain too many public methods?
    # This checks only the methods defined in the current class,
    # since the user might not have control over the classes
    # from the ancestors. It avoids some false positives
    # for classes such as unittest.TestCase, which provides
    # a lot of assert methods. It doesn't make sense to warn
    # when the user subclasses TestCase to add his own tests.
    if my_methods > self.config.max_public_methods:
        self.add_message(
            "too-many-public-methods",
            node=node,
            args=(my_methods, self.config.max_public_methods),
        )

    # Stop here for exception, metaclass, interface classes and other
    # classes for which we don't need to count the methods.
    if node.type != "class" or _is_exempt_from_public_methods(node):
        return

    # Does the class contain too few public methods?
    # Unlike the check above, this one counts the methods defined by
    # ancestors as well as by the current class.
    all_methods = _count_methods_in_class(node)
    if all_methods < self.config.min_public_methods:
        self.add_message(
            "too-few-public-methods",
            node=node,
            args=(all_methods, self.config.min_public_methods),
        )
|
||||
|
||||
@check_messages(
    "too-many-return-statements",
    "too-many-branches",
    "too-many-arguments",
    "too-many-locals",
    "too-many-statements",
    "keyword-arg-before-vararg",
)
def visit_functiondef(self, node):
    """Open the per-function counters and check argument/local counts.

    Pushes a fresh return counter and statement counter for this
    function, then emits too-many-arguments / too-many-locals when the
    configured limits are exceeded.  Arguments whose names match
    ignored-argument-names are excluded from both counts.
    """
    # init branch and returns counters
    self._returns.append(0)
    # check number of arguments
    args = node.args.args
    ignored_argument_names = self._ignored_argument_names
    if args is not None:
        ignored_args_num = 0
        if ignored_argument_names:
            ignored_args_num = sum(
                1 for arg in args if ignored_argument_names.match(arg.name)
            )

        argnum = len(args) - ignored_args_num
        if argnum > self.config.max_args:
            # Report the same count that was compared against the limit
            # (ignored arguments excluded); previously the raw len(args)
            # was reported, which disagreed with the threshold check.
            self.add_message(
                "too-many-arguments",
                node=node,
                args=(argnum, self.config.max_args),
            )
    else:
        ignored_args_num = 0
    # check number of local variables
    locnum = len(node.locals) - ignored_args_num
    if locnum > self.config.max_locals:
        self.add_message(
            "too-many-locals", node=node, args=(locnum, self.config.max_locals)
        )
    # init new statements counter
    self._stmts.append(1)

visit_asyncfunctiondef = visit_functiondef
|
||||
|
||||
@check_messages(
    "too-many-return-statements",
    "too-many-branches",
    "too-many-arguments",
    "too-many-locals",
    "too-many-statements",
)
def leave_functiondef(self, node):
    """most of the work is done here on close:
    checks for max returns, branch, return in __init__
    """
    # Pop the return counter pushed by the matching visit_functiondef
    # call; the stack discipline must mirror visit exactly.
    returns = self._returns.pop()
    if returns > self.config.max_returns:
        self.add_message(
            "too-many-return-statements",
            node=node,
            args=(returns, self.config.max_returns),
        )
    # Branch counts are accumulated per scope by _inc_branch.
    branches = self._branches[node]
    if branches > self.config.max_branches:
        self.add_message(
            "too-many-branches",
            node=node,
            args=(branches, self.config.max_branches),
        )
    # check number of statements
    stmts = self._stmts.pop()
    if stmts > self.config.max_statements:
        self.add_message(
            "too-many-statements",
            node=node,
            args=(stmts, self.config.max_statements),
        )

leave_asyncfunctiondef = leave_functiondef
|
||||
|
||||
def visit_return(self, _):
    """Count one more return statement for the innermost function."""
    if self._returns:
        self._returns[-1] += 1
    # else: return outside any function; the base checker reports it.
|
||||
|
||||
def visit_default(self, node):
    """Fallback visit method: bump the statement counters when *node*
    is a statement."""
    if not node.is_statement:
        return
    self._inc_all_stmts(1)
|
||||
|
||||
def visit_tryexcept(self, node):
    """Count one branch per except handler, plus one for an else clause."""
    branch_count = len(node.handlers) + (1 if node.orelse else 0)
    self._inc_branch(node, branch_count)
    self._inc_all_stmts(branch_count)
|
||||
|
||||
def visit_tryfinally(self, node):
    """A try/finally construct contributes a fixed two branches."""
    branch_count = 2
    self._inc_branch(node, branch_count)
    self._inc_all_stmts(branch_count)
|
||||
|
||||
@check_messages("too-many-boolean-expressions")
def visit_if(self, node):
    """Count branches for an if statement and check its test expression."""
    self._check_boolean_expressions(node)
    orelse = node.orelse
    # An 'elif' appears as a lone If node in orelse and is counted when
    # that node itself is visited - do not double count it here.
    has_real_else = bool(orelse) and (
        len(orelse) > 1 or not isinstance(orelse[0], If)
    )
    branch_count = 2 if has_real_else else 1
    self._inc_branch(node, branch_count)
    self._inc_all_stmts(branch_count)
|
||||
|
||||
def _check_boolean_expressions(self, node):
    """Emit too-many-boolean-expressions when the test of *node* (an If)
    is a BoolOp combining more conditions than max-bool-expr allows."""
    test = node.test
    if not isinstance(test, BoolOp):
        return
    bool_expr_count = _count_boolean_expressions(test)
    limit = self.config.max_bool_expr
    if bool_expr_count > limit:
        self.add_message(
            "too-many-boolean-expressions",
            node=test,
            args=(bool_expr_count, limit),
        )
|
||||
|
||||
def visit_while(self, node):
    """Count the loop branch, plus one for a loop 'else' clause."""
    self._inc_branch(node, 2 if node.orelse else 1)

visit_for = visit_while
|
||||
|
||||
def _inc_branch(self, node, branchesnum=1):
|
||||
"""increments the branches counter"""
|
||||
self._branches[node.scope()] += branchesnum
|
||||
|
||||
|
||||
def register(linter):
    """Plugin entry point: register MisdesignChecker with *linter*."""
    checker = MisdesignChecker(linter)
    linter.register_checker(checker)
|
||||
554
venv/lib/python3.8/site-packages/pylint/checkers/exceptions.py
Normal file
554
venv/lib/python3.8/site-packages/pylint/checkers/exceptions.py
Normal file
@@ -0,0 +1,554 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2011-2014 Google, Inc.
|
||||
# Copyright (c) 2012 Tim Hatch <tim@timhatch.com>
|
||||
# Copyright (c) 2013-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2015 Steven Myint <hg@stevenmyint.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Erik <erik.eriksson@yahoo.com>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Martin von Gagern <gagern@google.com>
|
||||
# Copyright (c) 2018 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2018 Natalie Serebryakova <natalie.serebryakova@Natalies-MacBook-Pro.local>
|
||||
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Carey Metcalfe <carey@cmetcalfe.ca>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Alexander Todorov <atodorov@otb.bg>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2019 Djailla <bastien.vallet@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checks for various exception related errors."""
|
||||
import builtins
|
||||
import inspect
|
||||
import typing
|
||||
|
||||
import astroid
|
||||
from astroid.node_classes import NodeNG
|
||||
|
||||
from pylint import checkers, interfaces
|
||||
from pylint.checkers import utils
|
||||
|
||||
|
||||
def _builtin_exceptions():
|
||||
def predicate(obj):
|
||||
return isinstance(obj, type) and issubclass(obj, BaseException)
|
||||
|
||||
members = inspect.getmembers(builtins, predicate)
|
||||
return {exc.__name__ for (_, exc) in members}
|
||||
|
||||
|
||||
def _annotated_unpack_infer(stmt, context=None):
    """Yield ``(original node, inferred node)`` pairs for *stmt*.

    A list or tuple is unpacked element by element; anything else is
    inferred directly.  Uninferable results are silently dropped.
    """
    if isinstance(stmt, (astroid.List, astroid.Tuple)):
        for element in stmt.elts:
            value = utils.safe_infer(element)
            if value and value is not astroid.Uninferable:
                yield element, value
    else:
        for value in stmt.infer(context):
            if value is not astroid.Uninferable:
                yield stmt, value
|
||||
|
||||
|
||||
def _is_raising(body: typing.List) -> bool:
    """Return True if any statement in *body* is a Raise node.

    Idiomatic rewrite of the manual search loop using any().
    """
    return any(isinstance(node, astroid.Raise) for node in body)
|
||||
|
||||
|
||||
# Exception names considered too broad to catch (see broad-except / W0703).
OVERGENERAL_EXCEPTIONS = ("BaseException", "Exception")
# Qualified-name prefix of the builtins module.
BUILTINS_NAME = builtins.__name__

# msgid -> (message template, symbolic name, description) consumed by
# ExceptionsChecker.msgs below.
MSGS = {
    "E0701": (
        "Bad except clauses order (%s)",
        "bad-except-order",
        "Used when except clauses are not in the correct order (from the "
        "more specific to the more generic). If you don't fix the order, "
        "some exceptions may not be caught by the most specific handler.",
    ),
    "E0702": (
        "Raising %s while only classes or instances are allowed",
        "raising-bad-type",
        "Used when something which is neither a class, an instance or a "
        "string is raised (i.e. a `TypeError` will be raised).",
    ),
    "E0703": (
        "Exception context set to something which is not an exception, nor None",
        "bad-exception-context",
        'Used when using the syntax "raise ... from ...", '
        "where the exception context is not an exception, "
        "nor None.",
    ),
    "E0704": (
        "The raise statement is not inside an except clause",
        "misplaced-bare-raise",
        "Used when a bare raise is not used inside an except clause. "
        "This generates an error, since there are no active exceptions "
        "to be reraised. An exception to this rule is represented by "
        "a bare raise inside a finally clause, which might work, as long "
        "as an exception is raised inside the try block, but it is "
        "nevertheless a code smell that must not be relied upon.",
    ),
    "E0710": (
        "Raising a new style class which doesn't inherit from BaseException",
        "raising-non-exception",
        "Used when a new style class which doesn't inherit from "
        "BaseException is raised.",
    ),
    "E0711": (
        "NotImplemented raised - should raise NotImplementedError",
        "notimplemented-raised",
        "Used when NotImplemented is raised instead of NotImplementedError",
    ),
    "E0712": (
        "Catching an exception which doesn't inherit from Exception: %s",
        "catching-non-exception",
        "Used when a class which doesn't inherit from "
        "Exception is used as an exception in an except clause.",
    ),
    "W0702": (
        "No exception type(s) specified",
        "bare-except",
        "Used when an except clause doesn't specify exceptions type to catch.",
    ),
    "W0703": (
        "Catching too general exception %s",
        "broad-except",
        "Used when an except catches a too general exception, "
        "possibly burying unrelated errors.",
    ),
    "W0705": (
        "Catching previously caught exception type %s",
        "duplicate-except",
        "Used when an except catches a type that was already caught by "
        "a previous handler.",
    ),
    "W0706": (
        "The except handler raises immediately",
        "try-except-raise",
        "Used when an except handler uses raise as its first or only "
        "operator. This is useless because it raises back the exception "
        "immediately. Remove the raise operator or the entire "
        "try-except-raise block!",
    ),
    "W0711": (
        'Exception to catch is the result of a binary "%s" operation',
        "binary-op-exception",
        "Used when the exception to catch is of the form "
        '"except A or B:". If intending to catch multiple, '
        'rewrite as "except (A, B):"',
    ),
    "W0715": (
        "Exception arguments suggest string formatting might be intended",
        "raising-format-tuple",
        "Used when passing multiple arguments to an exception "
        "constructor, the first of them a string literal containing what "
        "appears to be placeholders intended for formatting",
    ),
    "W0716": (
        "Invalid exception operation. %s",
        "wrong-exception-operation",
        "Used when an operation is done against an exception, but the operation "
        "is not valid for the exception in question. Usually emitted when having "
        "binary operations between exceptions in except handlers.",
    ),
}
|
||||
|
||||
|
||||
class BaseVisitor:
    """Base class for visitors defined in this module.

    Dispatches each node to ``visit_<lowercased class name>`` when such
    a method exists, falling back to :meth:`visit_default`.
    """

    def __init__(self, checker, node):
        self._checker = checker
        self._node = node

    def visit(self, node):
        """Dispatch *node* to the matching ``visit_*`` method."""
        handler = getattr(
            self, "visit_" + node.__class__.__name__.lower(), None
        )
        if handler is None:
            self.visit_default(node)
        else:
            handler(node)

    def visit_default(self, node):  # pylint: disable=unused-argument
        """Default implementation for all the nodes."""
|
||||
|
||||
|
||||
class ExceptionRaiseRefVisitor(BaseVisitor):
    """Visit references (anything that is not an AST leaf)."""

    def visit_name(self, name):
        # 'raise NotImplemented' is almost always a typo for
        # NotImplementedError.
        if name.name == "NotImplemented":
            self._checker.add_message("notimplemented-raised", node=self._node)

    def visit_call(self, call):
        if isinstance(call.func, astroid.Name):
            self.visit_name(call.func)
        if len(call.args) <= 1:
            return
        first_arg = call.args[0]
        if not isinstance(first_arg, astroid.Const):
            return
        if not isinstance(first_arg.value, str):
            return
        # A string first argument plus extra positional arguments looks
        # like forgotten %-style or {}-style formatting.
        text = first_arg.value
        if "%" in text or ("{" in text and "}" in text):
            self._checker.add_message("raising-format-tuple", node=self._node)
|
||||
|
||||
|
||||
class ExceptionRaiseLeafVisitor(BaseVisitor):
    """Visitor for handling leaf kinds of a raise value."""

    def _bad_type(self, type_name):
        """Report that a non-exception value of *type_name* is raised."""
        self._checker.add_message(
            "raising-bad-type", node=self._node, args=type_name
        )

    def visit_const(self, const):
        if not isinstance(const.value, str):
            # raising-string will be emitted from python3 porting checker.
            self._bad_type(const.value.__class__.__name__)

    def visit_instance(self, instance):
        # pylint: disable=protected-access
        self.visit_classdef(instance._proxied)

    # Exception instances have a particular class type
    visit_exceptioninstance = visit_instance

    def visit_classdef(self, cls):
        if not utils.inherit_from_std_ex(cls) and utils.has_known_bases(cls):
            if cls.newstyle:
                self._checker.add_message(
                    "raising-non-exception", node=self._node
                )

    def visit_tuple(self, _):
        self._bad_type("tuple")

    def visit_default(self, node):
        self._bad_type(getattr(node, "name", node.__class__.__name__))
|
||||
|
||||
|
||||
class ExceptionsChecker(checkers.BaseChecker):
    """Exception related checks."""

    # Declares conformance to pylint's AST-based checker interface.
    __implements__ = interfaces.IAstroidChecker

    # Checker identifier used in configuration and reports.
    name = "exceptions"
    # Message catalogue defined at module level above.
    msgs = MSGS
    # Run after most other checkers.
    priority = -4
    # Command-line / rcfile options contributed by this checker.
    options = (
        (
            "overgeneral-exceptions",
            {
                "default": OVERGENERAL_EXCEPTIONS,
                "type": "csv",
                "metavar": "<comma-separated class names>",
                "help": "Exceptions that will emit a warning "
                'when being caught. Defaults to "%s".'
                % (", ".join(OVERGENERAL_EXCEPTIONS),),
            },
        ),
    )
|
||||
|
||||
def open(self):
    """Cache the set of builtin exception names before the run starts."""
    self._builtin_exceptions = _builtin_exceptions()
    super().open()
|
||||
|
||||
@utils.check_messages(
    "misplaced-bare-raise",
    "raising-bad-type",
    "raising-non-exception",
    "notimplemented-raised",
    "bad-exception-context",
    "raising-format-tuple",
)
def visit_raise(self, node):
    """Run the raise-related checks for *node*."""
    if node.exc is None:
        # Bare 'raise': only legal inside an except handler.
        self._check_misplaced_bare_raise(node)
        return

    if node.cause:
        self._check_bad_exception_context(node)

    expr = node.exc
    ExceptionRaiseRefVisitor(self, node).visit(expr)

    try:
        inferred_value = expr.inferred()[-1]
    except astroid.InferenceError:
        return
    if inferred_value:
        ExceptionRaiseLeafVisitor(self, node).visit(inferred_value)
|
||||
|
||||
def _check_misplaced_bare_raise(self, node):
    """Warn when a bare ``raise`` is used outside an except handler."""
    # Filter out if it's present in __exit__.
    # A bare raise in __exit__ re-raises the active exception by design.
    scope = node.scope()
    if (
        isinstance(scope, astroid.FunctionDef)
        and scope.is_method()
        and scope.name == "__exit__"
    ):
        return

    current = node
    # Stop when a new scope is generated or when the raise
    # statement is found inside a TryFinally.
    ignores = (astroid.ExceptHandler, astroid.FunctionDef)
    while current and not isinstance(current.parent, ignores):
        current = current.parent

    # Only an enclosing except handler makes the bare raise valid.
    expected = (astroid.ExceptHandler,)
    if not current or not isinstance(current.parent, expected):
        self.add_message("misplaced-bare-raise", node=node)
|
||||
|
||||
def _check_bad_exception_context(self, node):
    """Verify that the cause of ``raise ... from ...`` is either None
    or an exception."""
    cause = utils.safe_infer(node.cause)
    if cause in (astroid.Uninferable, None):
        # Nothing reliable could be inferred; stay silent.
        return

    if isinstance(cause, astroid.Const):
        # Only the literal None is a valid constant cause.
        bad_context = cause.value is not None
    else:
        bad_context = not isinstance(
            cause, astroid.ClassDef
        ) and not utils.inherit_from_std_ex(cause)
    if bad_context:
        self.add_message("bad-exception-context", node=node)
|
||||
|
||||
def _check_catching_non_exception(self, handler, exc, part):
    """Emit catching-non-exception when *exc* (an inferred value from
    the handler's type, with *part* the originating node) is not an
    exception class."""
    if isinstance(exc, astroid.Tuple):
        # Check if it is a tuple of exceptions.
        inferred = [utils.safe_infer(elt) for elt in exc.elts]
        if any(node is astroid.Uninferable for node in inferred):
            # Don't emit if we don't know every component.
            return
        if all(
            node
            and (utils.inherit_from_std_ex(node) or not utils.has_known_bases(node))
            for node in inferred
        ):
            # Every component is (or may be) a real exception.
            return

    if not isinstance(exc, astroid.ClassDef):
        # Don't emit the warning if the inferred stmt
        # is None, but the exception handler is something else,
        # maybe it was redefined.
        if isinstance(exc, astroid.Const) and exc.value is None:
            if (
                isinstance(handler.type, astroid.Const)
                and handler.type.value is None
            ) or handler.type.parent_of(exc):
                # If the exception handler catches None or
                # the exception component, which is None, is
                # defined by the entire exception handler, then
                # emit a warning.
                self.add_message(
                    "catching-non-exception",
                    node=handler.type,
                    args=(part.as_string(),),
                )
        else:
            # Any other non-class value caught here is not an exception.
            self.add_message(
                "catching-non-exception",
                node=handler.type,
                args=(part.as_string(),),
            )
        return

    # exc is a class: warn only when it clearly does not derive from an
    # exception and its bases are fully known.
    if (
        not utils.inherit_from_std_ex(exc)
        and exc.name not in self._builtin_exceptions
    ):
        if utils.has_known_bases(exc):
            self.add_message(
                "catching-non-exception", node=handler.type, args=(exc.name,)
            )
|
||||
|
||||
def _check_try_except_raise(self, node):
    """Emit try-except-raise for a handler whose first statement is a
    bare ``raise`` that merely re-raises the caught exception."""

    def gather_exceptions_from_handler(
        handler,
    ) -> typing.Optional[typing.List[NodeNG]]:
        # Returns the exception nodes caught by *handler*, [] for a bare
        # except, or None when inference failed.
        exceptions = []  # type: typing.List[NodeNG]
        if handler.type:
            exceptions_in_handler = utils.safe_infer(handler.type)
            if isinstance(exceptions_in_handler, astroid.Tuple):
                exceptions = list(
                    {
                        exception
                        for exception in exceptions_in_handler.elts
                        if isinstance(exception, astroid.Name)
                    }
                )
            elif exceptions_in_handler:
                exceptions = [exceptions_in_handler]
            else:
                # Break when we cannot infer anything reliably.
                return None
        return exceptions

    bare_raise = False
    handler_having_bare_raise = None
    excs_in_bare_handler = []
    for handler in node.handlers:
        if bare_raise:
            # check that subsequent handler is not parent of handler which had bare raise.
            # since utils.safe_infer can fail for bare except, check it before.
            # also break early if bare except is followed by bare except.

            excs_in_current_handler = gather_exceptions_from_handler(handler)

            if not excs_in_current_handler:
                bare_raise = False
                break
            if excs_in_bare_handler is None:
                # It can be `None` when the inference failed
                break

            for exc_in_current_handler in excs_in_current_handler:
                inferred_current = utils.safe_infer(exc_in_current_handler)
                if any(
                    utils.is_subclass_of(
                        utils.safe_infer(exc_in_bare_handler), inferred_current
                    )
                    for exc_in_bare_handler in excs_in_bare_handler
                ):
                    # A later handler catches a superclass: the earlier
                    # bare raise is a deliberate filter, not a smell.
                    bare_raise = False
                    break

        # `raise` as the first operator inside the except handler
        if _is_raising([handler.body[0]]):
            # flags when there is a bare raise
            if handler.body[0].exc is None:
                bare_raise = True
                handler_having_bare_raise = handler
                excs_in_bare_handler = gather_exceptions_from_handler(handler)
    else:
        # for/else: only reached when the loop was NOT broken out of,
        # i.e. no later handler justified the bare raise.
        if bare_raise:
            self.add_message("try-except-raise", node=handler_having_bare_raise)
|
||||
|
||||
@utils.check_messages("wrong-exception-operation")
def visit_binop(self, node):
    """Warn about binary operations used as an except clause,
    e.g. ``except (A | B):``."""
    if not isinstance(node.parent, astroid.ExceptHandler):
        return
    # except (V | A)
    left = node.left.as_string()
    right = node.right.as_string()
    suggestion = "Did you mean '(%s, %s)' instead?" % (left, right)
    self.add_message("wrong-exception-operation", node=node, args=(suggestion,))
|
||||
|
||||
@utils.check_messages("wrong-exception-operation")
def visit_compare(self, node):
    """Warn about comparisons used as an except clause,
    e.g. ``except (V < A):``."""
    if not isinstance(node.parent, astroid.ExceptHandler):
        return
    # except (V < A)
    operands = ", ".join(operand.as_string() for _, operand in node.ops)
    suggestion = "Did you mean '(%s, %s)' instead?" % (
        node.left.as_string(),
        operands,
    )
    self.add_message("wrong-exception-operation", node=node, args=(suggestion,))
|
||||
|
||||
@utils.check_messages(
    "bare-except",
    "broad-except",
    "try-except-raise",
    "binary-op-exception",
    "bad-except-order",
    "catching-non-exception",
    "duplicate-except",
)
def visit_tryexcept(self, node):
    """check for empty except"""
    self._check_try_except_raise(node)
    # Exception classes seen in earlier handlers; used to detect
    # bad-except-order and duplicate-except across the handler list.
    exceptions_classes = []
    nb_handlers = len(node.handlers)
    for index, handler in enumerate(node.handlers):
        if handler.type is None:
            # A handler that immediately re-raises is a deliberate
            # filter, so a bare except is tolerated there.
            if not _is_raising(handler.body):
                self.add_message("bare-except", node=handler)

            # check if an "except:" is followed by some other
            # except
            if index < (nb_handlers - 1):
                msg = "empty except clause should always appear last"
                self.add_message("bad-except-order", node=node, args=msg)

        elif isinstance(handler.type, astroid.BoolOp):
            self.add_message(
                "binary-op-exception", node=handler, args=handler.type.op
            )
        else:
            try:
                excs = list(_annotated_unpack_infer(handler.type))
            except astroid.InferenceError:
                continue

            for part, exc in excs:
                if exc is astroid.Uninferable:
                    continue
                if isinstance(exc, astroid.Instance) and utils.inherit_from_std_ex(
                    exc
                ):
                    # pylint: disable=protected-access
                    exc = exc._proxied

                self._check_catching_non_exception(handler, exc, part)

                if not isinstance(exc, astroid.ClassDef):
                    continue

                exc_ancestors = [
                    anc
                    for anc in exc.ancestors()
                    if isinstance(anc, astroid.ClassDef)
                ]

                # A handler for an ancestor class before this one would
                # already have caught this exception.
                for previous_exc in exceptions_classes:
                    if previous_exc in exc_ancestors:
                        msg = "%s is an ancestor class of %s" % (
                            previous_exc.name,
                            exc.name,
                        )
                        self.add_message(
                            "bad-except-order", node=handler.type, args=msg
                        )
                if (
                    exc.name in self.config.overgeneral_exceptions
                    and exc.root().name == utils.EXCEPTIONS_MODULE
                    and not _is_raising(handler.body)
                ):
                    self.add_message(
                        "broad-except", args=exc.name, node=handler.type
                    )

                if exc in exceptions_classes:
                    self.add_message(
                        "duplicate-except", args=exc.name, node=handler.type
                    )

            exceptions_classes += [exc for _, exc in excs]
|
||||
|
||||
|
||||
def register(linter):
    """Plugin entry point: register ExceptionsChecker with *linter*."""
    checker = ExceptionsChecker(linter)
    linter.register_checker(checker)
|
||||
1392
venv/lib/python3.8/site-packages/pylint/checkers/format.py
Normal file
1392
venv/lib/python3.8/site-packages/pylint/checkers/format.py
Normal file
File diff suppressed because it is too large
Load Diff
991
venv/lib/python3.8/site-packages/pylint/checkers/imports.py
Normal file
991
venv/lib/python3.8/site-packages/pylint/checkers/imports.py
Normal file
@@ -0,0 +1,991 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012-2014 Google, Inc.
|
||||
# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015-2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
|
||||
# Copyright (c) 2015 Cezar <celnazli@bitdefender.com>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2015 Noam Yorav-Raphael <noamraph@gmail.com>
|
||||
# Copyright (c) 2015 James Morgensen <james.morgensen@gmail.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Jared Garst <cultofjared@gmail.com>
|
||||
# Copyright (c) 2016 Maik Röder <maikroeder@gmail.com>
|
||||
# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
|
||||
# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2017 Michka Popoff <michkapopoff@gmail.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Erik Wright <erik.wright@shopify.com>
|
||||
# Copyright (c) 2018 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 Hornwitser <github@hornwitser.no>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2018 Natalie Serebryakova <natalie.serebryakova@Natalies-MacBook-Pro.local>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Marianna Polatoglou <mpolatoglou@bloomberg.net>
|
||||
# Copyright (c) 2019 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2019 Nick Smith <clickthisnick@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Paul Renvoisé <renvoisepaul@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""imports checkers for Python code"""
|
||||
|
||||
import collections
|
||||
import copy
|
||||
import os
|
||||
import sys
|
||||
from distutils import sysconfig
|
||||
|
||||
import astroid
|
||||
import isort
|
||||
from astroid import modutils
|
||||
from astroid.decorators import cached
|
||||
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.checkers.utils import (
|
||||
check_messages,
|
||||
is_from_fallback_block,
|
||||
node_ignores_exception,
|
||||
)
|
||||
from pylint.exceptions import EmptyReportError
|
||||
from pylint.graph import DotBackend, get_cycles
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
from pylint.reporters.ureports.nodes import Paragraph, VerbatimText
|
||||
from pylint.utils import get_global_option
|
||||
|
||||
|
||||
def _qualified_names(modname):
|
||||
"""Split the names of the given module into subparts
|
||||
|
||||
For example,
|
||||
_qualified_names('pylint.checkers.ImportsChecker')
|
||||
returns
|
||||
['pylint', 'pylint.checkers', 'pylint.checkers.ImportsChecker']
|
||||
"""
|
||||
names = modname.split(".")
|
||||
return [".".join(names[0 : i + 1]) for i in range(len(names))]
|
||||
|
||||
|
||||
def _get_import_name(importnode, modname):
    """Return the absolute module name for *importnode*.

    A relative ``from`` import is resolved against its enclosing module,
    which can be useful for debugging; anything else is returned
    unchanged.
    """
    if not isinstance(importnode, astroid.ImportFrom) or not importnode.level:
        return modname
    root = importnode.root()
    if isinstance(root, astroid.Module):
        return root.relative_to_absolute_name(modname, level=importnode.level)
    return modname
|
||||
|
||||
|
||||
def _get_first_import(node, context, name, base, level, alias):
    """return the node where [base.]<name> is imported or None if not found"""
    # Full dotted name being imported by `node`, e.g. "base.name".
    fullname = "%s.%s" % (base, name) if base else name

    first = None
    found = False
    # Scan the statements of *context* for an earlier import of the same
    # name; `node` itself and statements after it (in the same scope) are
    # skipped.
    for first in context.body:
        if first is node:
            continue
        if first.scope() is node.scope() and first.fromlineno > node.fromlineno:
            continue
        if isinstance(first, astroid.Import):
            if any(fullname == iname[0] for iname in first.names):
                found = True
                break
        elif isinstance(first, astroid.ImportFrom):
            # Only compare imports at the same relative-import level.
            if level == first.level:
                for imported_name, imported_alias in first.names:
                    # Exact dotted match: `from a import b` vs "a.b".
                    if fullname == "%s.%s" % (first.modname, imported_name):
                        found = True
                        break
                    # Same bare name imported twice, unless either side
                    # was renamed with an alias.
                    if (
                        name != "*"
                        and name == imported_name
                        and not (alias or imported_alias)
                    ):
                        found = True
                        break
                if found:
                    break
    # Imports placed in mutually exclusive branches (e.g. try/except
    # fallback blocks) do not count as re-imports.
    if found and not astroid.are_exclusive(first, node):
        return first
    return None
|
||||
|
||||
|
||||
def _ignore_import_failure(node, modname, ignored_modules):
    """Tell whether an import failure for *modname* should be suppressed.

    True when *modname* (or any of its ancestor packages) is in the
    ignored-modules list, or when the node explicitly handles ImportError.
    """
    if any(part in ignored_modules for part in _qualified_names(modname)):
        return True
    return node_ignores_exception(node, ImportError)
|
||||
|
||||
|
||||
# utilities to represents import dependencies as tree and dot graph ###########
|
||||
|
||||
|
||||
def _make_tree_defs(mod_files_list):
|
||||
"""get a list of 2-uple (module, list_of_files_which_import_this_module),
|
||||
it will return a dictionary to represent this as a tree
|
||||
"""
|
||||
tree_defs = {}
|
||||
for mod, files in mod_files_list:
|
||||
node = (tree_defs, ())
|
||||
for prefix in mod.split("."):
|
||||
node = node[0].setdefault(prefix, [{}, []])
|
||||
node[1] += files
|
||||
return tree_defs
|
||||
|
||||
|
||||
def _repr_tree_defs(data, indent_str=None):
|
||||
"""return a string which represents imports as a tree"""
|
||||
lines = []
|
||||
nodes = data.items()
|
||||
for i, (mod, (sub, files)) in enumerate(sorted(nodes, key=lambda x: x[0])):
|
||||
if not files:
|
||||
files = ""
|
||||
else:
|
||||
files = "(%s)" % ",".join(sorted(files))
|
||||
if indent_str is None:
|
||||
lines.append("%s %s" % (mod, files))
|
||||
sub_indent_str = " "
|
||||
else:
|
||||
lines.append(r"%s\-%s %s" % (indent_str, mod, files))
|
||||
if i == len(nodes) - 1:
|
||||
sub_indent_str = "%s " % indent_str
|
||||
else:
|
||||
sub_indent_str = "%s| " % indent_str
|
||||
if sub:
|
||||
lines.append(_repr_tree_defs(sub, sub_indent_str))
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def _dependencies_graph(filename, dep_info):
    """Write the dependency information as a dot (graphviz) file."""
    emitted = set()
    printer = DotBackend(filename[:-4], rankdir="LR")
    printer.emit('URL="." node[shape="box"]')
    # First pass: declare every module node exactly once.
    for modname, dependencies in sorted(dep_info.items()):
        emitted.add(modname)
        printer.emit_node(modname)
        for depmodname in dependencies:
            if depmodname not in emitted:
                emitted.add(depmodname)
                printer.emit_node(depmodname)
    # Second pass: emit an edge importer -> importee for every dependency.
    for depmodname, dependencies in sorted(dep_info.items()):
        for modname in dependencies:
            printer.emit_edge(modname, depmodname)
    printer.generate(filename)
|
||||
|
||||
|
||||
def _make_graph(filename, dep_info, sect, gtype):
    """Generate a dependency graph file and note it in the report section."""
    _dependencies_graph(filename, dep_info)
    message = "%simports graph has been written to %s" % (gtype, filename)
    sect.append(Paragraph(message))
|
||||
|
||||
|
||||
# the import checker itself ###################################################
|
||||
|
||||
# Message catalog for this checker:
# msgid -> (template, symbol, description[, extra options such as old_names]).
MSGS = {
    "E0401": (
        "Unable to import %s",
        "import-error",
        "Used when pylint has been unable to import a module.",
        {"old_names": [("F0401", "old-import-error")]},
    ),
    "E0402": (
        "Attempted relative import beyond top-level package",
        "relative-beyond-top-level",
        "Used when a relative import tries to access too many levels "
        "in the current package.",
    ),
    "R0401": (
        "Cyclic import (%s)",
        "cyclic-import",
        "Used when a cyclic import between two or more modules is detected.",
    ),
    "W0401": (
        "Wildcard import %s",
        "wildcard-import",
        "Used when `from module import *` is detected.",
    ),
    "W0402": (
        "Uses of a deprecated module %r",
        "deprecated-module",
        "Used a module marked as deprecated is imported.",
    ),
    "W0404": (
        "Reimport %r (imported line %s)",
        "reimported",
        "Used when a module is reimported multiple times.",
    ),
    "W0406": (
        "Module import itself",
        "import-self",
        "Used when a module is importing itself.",
    ),
    "W0407": (
        "Prefer importing %r instead of %r",
        "preferred-module",
        "Used when a module imported has a preferred replacement module.",
    ),
    "W0410": (
        "__future__ import is not the first non docstring statement",
        "misplaced-future",
        "Python 2.5 and greater require __future__ import to be the "
        "first non docstring statement in the module.",
    ),
    "C0410": (
        "Multiple imports on one line (%s)",
        "multiple-imports",
        "Used when import statement importing multiple modules is detected.",
    ),
    "C0411": (
        "%s should be placed before %s",
        "wrong-import-order",
        "Used when PEP8 import order is not respected (standard imports "
        "first, then third-party libraries, then local imports)",
    ),
    "C0412": (
        "Imports from package %s are not grouped",
        "ungrouped-imports",
        "Used when imports are not grouped by packages",
    ),
    "C0413": (
        'Import "%s" should be placed at the top of the module',
        "wrong-import-position",
        "Used when code and imports are mixed",
    ),
    "C0414": (
        "Import alias does not rename original package",
        "useless-import-alias",
        "Used when an import alias is same as original package."
        "e.g using import numpy as numpy instead of import numpy as np",
    ),
    "C0415": (
        "Import outside toplevel (%s)",
        "import-outside-toplevel",
        "Used when an import statement is used anywhere other than the module "
        "toplevel. Move this import to the top of the file.",
    ),
}


# Defaults for the isort-based import categorization options below.
DEFAULT_STANDARD_LIBRARY = ()
DEFAULT_KNOWN_THIRD_PARTY = ("enchant",)
DEFAULT_PREFERRED_MODULES = ()
|
||||
|
||||
|
||||
class ImportsChecker(BaseChecker):
    """checks for
    * external modules dependencies
    * relative / wildcard imports
    * cyclic imports
    * uses of deprecated modules
    * uses of modules instead of preferred modules
    """

    __implements__ = IAstroidChecker

    # Checker identity used in configuration and reports.
    name = "imports"
    msgs = MSGS
    priority = -2
    # Default value for the deprecated-modules option below.
    deprecated_modules = ("optparse", "tkinter.tix")

    # (option-name, option-spec) pairs consumed by pylint's config machinery.
    options = (
        (
            "deprecated-modules",
            {
                "default": deprecated_modules,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Deprecated modules which should not be used,"
                " separated by a comma.",
            },
        ),
        (
            "preferred-modules",
            {
                "default": DEFAULT_PREFERRED_MODULES,
                "type": "csv",
                "metavar": "<module:preferred-module>",
                "help": "Couples of modules and preferred modules,"
                " separated by a comma.",
            },
        ),
        (
            "import-graph",
            {
                "default": "",
                "type": "string",
                "metavar": "<file.dot>",
                "help": "Create a graph of every (i.e. internal and"
                " external) dependencies in the given file"
                " (report RP0402 must not be disabled).",
            },
        ),
        (
            "ext-import-graph",
            {
                "default": "",
                "type": "string",
                "metavar": "<file.dot>",
                "help": "Create a graph of external dependencies in the"
                " given file (report RP0402 must not be disabled).",
            },
        ),
        (
            "int-import-graph",
            {
                "default": "",
                "type": "string",
                "metavar": "<file.dot>",
                "help": "Create a graph of internal dependencies in the"
                " given file (report RP0402 must not be disabled).",
            },
        ),
        (
            "known-standard-library",
            {
                "default": DEFAULT_STANDARD_LIBRARY,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Force import order to recognize a module as part of "
                "the standard compatibility libraries.",
            },
        ),
        (
            "known-third-party",
            {
                "default": DEFAULT_KNOWN_THIRD_PARTY,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Force import order to recognize a module as part of "
                "a third party library.",
            },
        ),
        (
            "allow-any-import-level",
            {
                "default": (),
                "type": "csv",
                "metavar": "<modules>",
                "help": (
                    "List of modules that can be imported at any level, not just "
                    "the top level one."
                ),
            },
        ),
        (
            "analyse-fallback-blocks",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Analyse import fallback blocks. This can be used to "
                "support both Python 2 and 3 compatible code, which "
                "means that the block might have code that exists "
                "only in one or another interpreter, leading to false "
                "positives when analysed.",
            },
        ),
        (
            "allow-wildcard-with-all",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Allow wildcard imports from modules that define __all__.",
            },
        ),
    )
|
||||
|
||||
def __init__(self, linter=None):
    """Initialize checker state and register the dependency reports."""
    BaseChecker.__init__(self, linter)
    self.stats = None  # shared linter stats dict, bound in open()
    self.import_graph = None  # module -> set of imported modules, built in open()
    self._imports_stack = []  # (node, importedname) pairs for the current module
    self._first_non_import_node = None
    self._module_pkg = {}  # mapping of modules to the pkg they belong in
    self._allow_any_import_level = set()
    # Reports exposed by this checker (id, title, callback).
    self.reports = (
        ("RP0401", "External dependencies", self._report_external_dependencies),
        ("RP0402", "Modules dependencies graph", self._report_dependencies_graph),
    )

    self._site_packages = self._compute_site_packages()
|
||||
|
||||
@staticmethod
def _compute_site_packages():
    """Return the set of normalized site/dist-packages paths for this interpreter."""

    def _normalized_path(path):
        return os.path.normcase(os.path.abspath(path))

    # Both the virtualenv prefix (if any) and the real interpreter prefix.
    prefixes = [p for p in (getattr(sys, "real_prefix", None), sys.prefix) if p]
    paths = {
        _normalized_path(sysconfig.get_python_lib(prefix=prefix))
        for prefix in prefixes
    }

    # Handle Debian's derivatives /usr/local.
    if os.path.isfile("/etc/debian_version"):
        for prefix in prefixes:
            libpython = os.path.join(
                prefix,
                "local",
                "lib",
                "python" + sysconfig.get_python_version(),
                "dist-packages",
            )
            paths.add(libpython)
    return paths
|
||||
|
||||
def open(self):
    """called before visiting project (i.e set of modules)"""
    self.linter.add_stats(dependencies={})
    self.linter.add_stats(cycles=[])
    self.stats = self.linter.stats
    self.import_graph = collections.defaultdict(set)
    self._module_pkg = {}  # mapping of modules to the pkg they belong in
    # Edges excluded from cyclic-import detection (message disabled locally).
    self._excluded_edges = collections.defaultdict(set)
    self._ignored_modules = get_global_option(self, "ignored-modules", default=[])
    # Build a mapping {'module': 'preferred-module'}
    # NOTE(review): assumes each entry holds exactly one ":"; a value such
    # as "a:b:c" would make dict() raise — confirm option validation upstream.
    self.preferred_modules = dict(
        module.split(":")
        for module in self.config.preferred_modules
        if ":" in module
    )
    self._allow_any_import_level = set(self.config.allow_any_import_level)
|
||||
|
||||
def _import_graph_without_ignored_edges(self):
    """Return a deep copy of the import graph minus explicitly excluded edges."""
    graph = copy.deepcopy(self.import_graph)
    for source in graph:
        graph[source].difference_update(self._excluded_edges[source])
    return graph
|
||||
|
||||
def close(self):
    """Called once all project modules have been visited.

    Reports every import cycle found in the (filtered) import graph.
    """
    if not self.linter.is_message_enabled("cyclic-import"):
        return
    graph = self._import_graph_without_ignored_edges()
    for cycle in get_cycles(graph, vertices=list(graph)):
        self.add_message("cyclic-import", args=" -> ".join(cycle))
|
||||
|
||||
@check_messages(*MSGS)
def visit_import(self, node):
    """triggered when an import statement is seen"""
    self._check_reimport(node)
    self._check_import_as_rename(node)
    self._check_toplevel(node)

    # One Import node can bind several names: `import a, b`.
    names = [name for name, _ in node.names]
    if len(names) >= 2:
        self.add_message("multiple-imports", args=", ".join(names), node=node)

    for name in names:
        self._check_deprecated_module(node, name)
        self._check_preferred_module(node, name)
        imported_module = self._get_imported_module(node, name)
        if isinstance(node.parent, astroid.Module):
            # Allow imports nested
            self._check_position(node)
        if isinstance(node.scope(), astroid.Module):
            self._record_import(node, imported_module)

        # Module could not be resolved: messages were already emitted by
        # _get_imported_module, nothing to add to the dependency graph.
        if imported_module is None:
            continue

        self._add_imported_module(node, imported_module.name)
|
||||
|
||||
@check_messages(*MSGS)
def visit_importfrom(self, node):
    """triggered when a from statement is seen"""
    basename = node.modname
    imported_module = self._get_imported_module(node, basename)

    self._check_import_as_rename(node)
    self._check_misplaced_future(node)
    self._check_deprecated_module(node, basename)
    self._check_preferred_module(node, basename)
    self._check_wildcard_imports(node, imported_module)
    self._check_same_line_imports(node)
    self._check_reimport(node, basename=basename, level=node.level)
    self._check_toplevel(node)

    if isinstance(node.parent, astroid.Module):
        # Allow imports nested
        self._check_position(node)
    if isinstance(node.scope(), astroid.Module):
        self._record_import(node, imported_module)
    # Unresolvable module: messages were already emitted above.
    if imported_module is None:
        return
    for name, _ in node.names:
        if name != "*":
            self._add_imported_module(node, "%s.%s" % (imported_module.name, name))
        else:
            self._add_imported_module(node, imported_module.name)
|
||||
|
||||
@check_messages(*MSGS)
def leave_module(self, node):
    """Check import grouping once the whole module has been visited."""
    # Check imports are grouped by category (standard, 3rd party, local)
    std_imports, ext_imports, loc_imports = self._check_imports_order(node)

    # Check that imports are grouped by package within a given category
    met_import = set()  # set for 'import x' style
    met_from = set()  # set for 'from x import y' style
    current_package = None
    for import_node, import_name in std_imports + ext_imports + loc_imports:
        if not self.linter.is_message_enabled(
            "ungrouped-imports", import_node.fromlineno
        ):
            continue
        # Track `import x` and `from x import y` styles separately.
        if isinstance(import_node, astroid.node_classes.ImportFrom):
            met = met_from
        else:
            met = met_import
        package, _, _ = import_name.partition(".")
        # A package seen before, interrupted by another package, is ungrouped.
        if current_package and current_package != package and package in met:
            self.add_message("ungrouped-imports", node=import_node, args=package)
        current_package = package
        met.add(package)

    # Reset per-module state for the next module.
    self._imports_stack = []
    self._first_non_import_node = None
|
||||
|
||||
def compute_first_non_import_node(self, node):
    """Remember the first module-level statement that is not an import."""
    if not self.linter.is_message_enabled("wrong-import-position", node.fromlineno):
        return
    # if the node does not contain an import instruction, and if it is the
    # first node of the module, keep a track of it (all the import positions
    # of the module will be compared to the position of this first
    # instruction)
    if self._first_non_import_node:
        return
    if not isinstance(node.parent, astroid.Module):
        return
    # try/except and try/finally blocks that contain imports are treated
    # as import statements themselves (conditional-import idiom).
    nested_allowed = [astroid.TryExcept, astroid.TryFinally]
    is_nested_allowed = [
        allowed for allowed in nested_allowed if isinstance(node, allowed)
    ]
    if is_nested_allowed and any(
        node.nodes_of_class((astroid.Import, astroid.ImportFrom))
    ):
        return
    if isinstance(node, astroid.Assign):
        # Add compatibility for module level dunder names
        # https://www.python.org/dev/peps/pep-0008/#module-level-dunder-names
        valid_targets = [
            isinstance(target, astroid.AssignName)
            and target.name.startswith("__")
            and target.name.endswith("__")
            for target in node.targets
        ]
        if all(valid_targets):
            return
    self._first_non_import_node = node

# All of these statement kinds mark the end of the module's import section.
visit_tryfinally = (
    visit_tryexcept
) = (
    visit_assignattr
) = (
    visit_assign
) = (
    visit_ifexp
) = visit_comprehension = visit_expr = visit_if = compute_first_non_import_node
|
||||
|
||||
def visit_functiondef(self, node):
    """Track definitions/loops as potential first non-import statements."""
    if not self.linter.is_message_enabled("wrong-import-position", node.fromlineno):
        return
    # If it is the first non import instruction of the module, record it.
    if self._first_non_import_node:
        return

    # Check if the node belongs to an `If` or a `Try` block. If they
    # contain imports, skip recording this node.
    if not isinstance(node.parent.scope(), astroid.Module):
        return

    # Walk up to the outermost statement containing this definition.
    root = node
    while not isinstance(root.parent, astroid.Module):
        root = root.parent

    if isinstance(root, (astroid.If, astroid.TryFinally, astroid.TryExcept)):
        if any(root.nodes_of_class((astroid.Import, astroid.ImportFrom))):
            return

    self._first_non_import_node = node

# Class, for and while statements end the import section the same way.
visit_classdef = visit_for = visit_while = visit_functiondef
|
||||
|
||||
def _check_misplaced_future(self, node):
    """Emit misplaced-future when a __future__ import is not the first
    non-docstring statement of the module."""
    if node.modname != "__future__":
        return
    prev = node.previous_sibling()
    if not prev:
        # First statement: nothing to complain about.
        return
    # Consecutive __future__ imports are allowed.
    if isinstance(prev, astroid.ImportFrom) and prev.modname == "__future__":
        return
    self.add_message("misplaced-future", node=node)
|
||||
|
||||
def _check_same_line_imports(self, node):
    """Emit reimported for names duplicated within one import statement."""
    occurrences = collections.Counter(name for name, _ in node.names)
    for name, count in occurrences.items():
        if count > 1:
            self.add_message("reimported", node=node, args=(name, node.fromlineno))
|
||||
|
||||
def _check_position(self, node):
    """Check that the import `node` is correctly placed.

    Emit wrong-import-position when a non-import statement has already
    been recorded before this import.
    """
    if not self._first_non_import_node:
        return
    self.add_message("wrong-import-position", node=node, args=node.as_string())
|
||||
|
||||
def _record_import(self, node, importedmodnode):
    """Record the package `node` imports from"""
    if isinstance(node, astroid.ImportFrom):
        importedname = node.modname
    else:
        # Fall back to the first literal name when the module could not
        # be resolved.
        importedname = importedmodnode.name if importedmodnode else None
    if not importedname:
        importedname = node.names[0][0].split(".")[0]

    if isinstance(node, astroid.ImportFrom) and (node.level or 0) >= 1:
        # We need the importedname with first point to detect local package
        # Example of node:
        # 'from .my_package1 import MyClass1'
        # the output should be '.my_package1' instead of 'my_package1'
        # Example of node:
        # 'from . import my_package2'
        # the output should be '.my_package2' instead of '{pyfile}'
        importedname = "." + importedname

    self._imports_stack.append((node, importedname))
|
||||
|
||||
@staticmethod
def _is_fallback_import(node, imports):
    """Tell whether *node* is mutually exclusive (e.g. try/except fallback)
    with any of the recorded (import_node, package) pairs."""
    return any(
        astroid.are_exclusive(import_node, node) for import_node, _ in imports
    )
|
||||
|
||||
def _check_imports_order(self, _module_node):
    """Checks imports of module `node` are grouped by category

    Imports must follow this order: standard, 3rd party, local

    Returns (std_imports, external_imports, local_imports), each a list
    of (node, package) pairs.
    """
    std_imports = []
    third_party_imports = []
    first_party_imports = []
    # need of a list that holds third or first party ordered import
    external_imports = []
    local_imports = []
    # *_not_ignored lists only keep top-level imports for which
    # wrong-import-order is enabled; they are the comparison baseline.
    third_party_not_ignored = []
    first_party_not_ignored = []
    local_not_ignored = []
    # isort does the actual categorization (FUTURE/STDLIB/THIRDPARTY/...).
    isort_obj = isort.SortImports(
        file_contents="",
        known_third_party=self.config.known_third_party,
        known_standard_library=self.config.known_standard_library,
    )
    for node, modname in self._imports_stack:
        if modname.startswith("."):
            # Relative import: keep the leading dot to mark it local.
            package = "." + modname.split(".")[1]
        else:
            package = modname.split(".")[0]
        nested = not isinstance(node.parent, astroid.Module)
        ignore_for_import_order = not self.linter.is_message_enabled(
            "wrong-import-order", node.fromlineno
        )
        import_category = isort_obj.place_module(package)
        node_and_package_import = (node, package)
        if import_category in ("FUTURE", "STDLIB"):
            std_imports.append(node_and_package_import)
            wrong_import = (
                third_party_not_ignored
                or first_party_not_ignored
                or local_not_ignored
            )
            # Imports in exclusive branches (try/except fallbacks) don't
            # count as misordered.
            if self._is_fallback_import(node, wrong_import):
                continue
            if wrong_import and not nested:
                self.add_message(
                    "wrong-import-order",
                    node=node,
                    args=(
                        'standard import "%s"' % node.as_string(),
                        '"%s"' % wrong_import[0][0].as_string(),
                    ),
                )
        elif import_category == "THIRDPARTY":
            third_party_imports.append(node_and_package_import)
            external_imports.append(node_and_package_import)
            if not nested and not ignore_for_import_order:
                third_party_not_ignored.append(node_and_package_import)
            wrong_import = first_party_not_ignored or local_not_ignored
            if wrong_import and not nested:
                self.add_message(
                    "wrong-import-order",
                    node=node,
                    args=(
                        'third party import "%s"' % node.as_string(),
                        '"%s"' % wrong_import[0][0].as_string(),
                    ),
                )
        elif import_category == "FIRSTPARTY":
            first_party_imports.append(node_and_package_import)
            external_imports.append(node_and_package_import)
            if not nested and not ignore_for_import_order:
                first_party_not_ignored.append(node_and_package_import)
            wrong_import = local_not_ignored
            if wrong_import and not nested:
                self.add_message(
                    "wrong-import-order",
                    node=node,
                    args=(
                        'first party import "%s"' % node.as_string(),
                        '"%s"' % wrong_import[0][0].as_string(),
                    ),
                )
        elif import_category == "LOCALFOLDER":
            local_imports.append((node, package))
            if not nested and not ignore_for_import_order:
                local_not_ignored.append((node, package))
    return std_imports, external_imports, local_imports
|
||||
|
||||
def _get_imported_module(self, importnode, modname):
    """Resolve *modname* to an astroid Module, or None on failure.

    On failure the appropriate message (relative-beyond-top-level,
    syntax-error or import-error) is emitted and None is returned
    implicitly by falling through the except handlers.
    """
    try:
        return importnode.do_import_module(modname)
    except astroid.TooManyLevelsError:
        if _ignore_import_failure(importnode, modname, self._ignored_modules):
            return None
        self.add_message("relative-beyond-top-level", node=importnode)
    except astroid.AstroidSyntaxError as exc:
        message = "Cannot import {!r} due to syntax error {!r}".format(
            modname, str(exc.error)  # pylint: disable=no-member; false positive
        )
        self.add_message("syntax-error", line=importnode.lineno, args=message)

    except astroid.AstroidBuildingException:
        if not self.linter.is_message_enabled("import-error"):
            return None
        if _ignore_import_failure(importnode, modname, self._ignored_modules):
            return None
        # Imports inside Python 2/3 fallback blocks are ignored unless
        # analyse-fallback-blocks is enabled.
        if not self.config.analyse_fallback_blocks and is_from_fallback_block(
            importnode
        ):
            return None

        dotted_modname = _get_import_name(importnode, modname)
        self.add_message("import-error", args=repr(dotted_modname), node=importnode)
|
||||
|
||||
def _add_imported_module(self, node, importedmodname):
    """notify an imported module, used to analyze dependencies"""
    module_file = node.root().file
    context_name = node.root().name
    base = os.path.splitext(os.path.basename(module_file))[0]

    try:
        # Strip any trailing object name: keep only the module part.
        importedmodname = modutils.get_module_part(importedmodname, module_file)
    except ImportError:
        # Best effort: keep the raw name when the module can't be located.
        pass

    if context_name == importedmodname:
        self.add_message("import-self", node=node)

    elif not modutils.is_standard_module(importedmodname):
        # if this is not a package __init__ module
        if base != "__init__" and context_name not in self._module_pkg:
            # record the module's parent, or the module itself if this is
            # a top level module, as the package it belongs to
            self._module_pkg[context_name] = context_name.rsplit(".", 1)[0]

        # handle dependencies
        importedmodnames = self.stats["dependencies"].setdefault(
            importedmodname, set()
        )
        if context_name not in importedmodnames:
            importedmodnames.add(context_name)

        # update import graph
        self.import_graph[context_name].add(importedmodname)
        # Edges where cyclic-import is locally disabled are excluded from
        # cycle detection in close().
        if not self.linter.is_message_enabled("cyclic-import", line=node.lineno):
            self._excluded_edges[context_name].add(importedmodname)
|
||||
|
||||
def _check_deprecated_module(self, node, mod_path):
    """Emit deprecated-module if *mod_path* is (or lives inside) a module
    listed in the deprecated-modules option."""
    for deprecated in self.config.deprecated_modules:
        if mod_path == deprecated or mod_path.startswith(deprecated + "."):
            self.add_message("deprecated-module", node=node, args=mod_path)
|
||||
|
||||
def _check_preferred_module(self, node, mod_path):
    """Emit preferred-module if *mod_path* has a configured replacement."""
    replacement = self.preferred_modules.get(mod_path)
    if replacement is None:
        return
    self.add_message(
        "preferred-module",
        node=node,
        args=(replacement, mod_path),
    )
|
||||
|
||||
def _check_import_as_rename(self, node):
    """Emit useless-import-alias for aliases like `import x as x`."""
    for real_name, alias in node.names:
        if not real_name or not alias:
            # Unaliased name: nothing left to check on this statement.
            return

        parts = real_name.rsplit(".")
        # consider only following cases
        # import x as x
        # and ignore following
        # import x.y.z as z
        if parts[-1] == alias and len(parts) == 1:
            self.add_message("useless-import-alias", node=node)
|
||||
|
||||
def _check_reimport(self, node, basename=None, level=None):
    """check if the import is necessary (i.e. not already done)"""
    if not self.linter.is_message_enabled("reimported"):
        return

    # Look for earlier imports both in the enclosing frame and, if
    # different, at module level.
    frame = node.frame()
    root = node.root()
    contexts = [(frame, level)]
    if root is not frame:
        contexts.append((root, None))

    for known_context, known_level in contexts:
        for name, alias in node.names:
            first = _get_first_import(
                node, known_context, name, basename, known_level, alias
            )
            if first is not None:
                self.add_message(
                    "reimported", node=node, args=(name, first.fromlineno)
                )
|
||||
|
||||
def _report_external_dependencies(self, sect, _, _dummy):
    """Append a verbatim tree of external dependencies to *sect*."""
    dep_info = _make_tree_defs(self._external_dependencies_info().items())
    if not dep_info:
        # Nothing to show: signal pylint to omit this report.
        raise EmptyReportError()
    sect.append(VerbatimText(_repr_tree_defs(dep_info)))
|
||||
|
||||
def _report_dependencies_graph(self, sect, _, _dummy):
    """Write the configured dependency graphs as dot (graphviz) files."""
    dep_info = self.stats["dependencies"]
    requested = (
        self.config.import_graph,
        self.config.ext_import_graph,
        self.config.int_import_graph,
    )
    if not dep_info or not any(requested):
        # No data or no graph requested: omit this report.
        raise EmptyReportError()
    if self.config.import_graph:
        _make_graph(self.config.import_graph, dep_info, sect, "")
    if self.config.ext_import_graph:
        _make_graph(
            self.config.ext_import_graph,
            self._external_dependencies_info(),
            sect,
            "external ",
        )
    if self.config.int_import_graph:
        _make_graph(
            self.config.int_import_graph,
            self._internal_dependencies_info(),
            sect,
            "internal ",
        )
|
||||
|
||||
def _filter_dependencies_graph(self, internal):
    """build the internal or the external dependency graph"""
    graph = collections.defaultdict(set)
    for importee, importers in self.stats["dependencies"].items():
        for importer in importers:
            package = self._module_pkg.get(importer, importer)
            is_inside = importee.startswith(package)
            # Keep in-package edges for the internal graph and
            # out-of-package edges for the external one.
            if is_inside == internal:
                graph[importee].add(importer)
    return graph
|
||||
|
||||
@cached
def _external_dependencies_info(self):
    """Build (and cache, via @cached) the external dependency graph."""
    return self._filter_dependencies_graph(internal=False)
|
||||
|
||||
@cached
def _internal_dependencies_info(self):
    """Build (and cache, via @cached) the internal dependency graph."""
    return self._filter_dependencies_graph(internal=True)
|
||||
|
||||
def _check_wildcard_imports(self, node, imported_module):
    """Emit wildcard-import for `from x import *` unless explicitly allowed."""
    if node.root().package:
        # Skip the check if in __init__.py issue #2026
        return

    allowed = self._wildcard_import_is_allowed(imported_module)
    for name, _ in node.names:
        if name == "*" and not allowed:
            self.add_message("wildcard-import", args=node.modname, node=node)
|
||||
|
||||
def _wildcard_import_is_allowed(self, imported_module):
    """Wildcard imports are tolerated only when allow-wildcard-with-all is
    on and the imported module defines __all__."""
    if not self.config.allow_wildcard_with_all:
        return False
    if imported_module is None:
        return False
    return "__all__" in imported_module.locals
|
||||
|
||||
def _check_toplevel(self, node):
    """Emit import-outside-toplevel for imports not made at module scope."""
    # If the scope of the import is a module, then obviously it is
    # not outside the module toplevel.
    if isinstance(node.scope(), astroid.Module):
        return

    if isinstance(node, astroid.ImportFrom):
        module_names = [
            "{}.{}".format(node.modname, name) for name, _ in node.names
        ]
    else:
        module_names = [name for name, _ in node.names]

    # Drop names whitelisted through the allow-any-import-level option.
    scoped_imports = [
        name for name in module_names if name not in self._allow_any_import_level
    ]

    if scoped_imports:
        self.add_message(
            "import-outside-toplevel", args=", ".join(scoped_imports), node=node
        )
|
||||
|
||||
|
||||
def register(linter):
    """Auto-registration hook: attach the imports checker to *linter*."""
    checker = ImportsChecker(linter)
    linter.register_checker(checker)
|
||||
415
venv/lib/python3.8/site-packages/pylint/checkers/logging.py
Normal file
415
venv/lib/python3.8/site-packages/pylint/checkers/logging.py
Normal file
@@ -0,0 +1,415 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2009, 2012, 2014 Google, Inc.
|
||||
# Copyright (c) 2012 Mike Bryant <leachim@leachim.info>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016, 2019-2020 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2016 Chris Murray <chris@chrismurray.scot>
|
||||
# Copyright (c) 2017 guillaume2 <guillaume.peillex@gmail.col>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2018 Alan Chan <achan961117@gmail.com>
|
||||
# Copyright (c) 2018 Yury Gribov <tetra2005@gmail.com>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Mariatta Wijaya <mariatta@python.org>
|
||||
# Copyright (c) 2019 Djailla <bastien.vallet@gmail.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2019 Svet <svet@hyperscience.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""checker for use of Python logging
|
||||
"""
|
||||
import string
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint import checkers, interfaces
|
||||
from pylint.checkers import utils
|
||||
from pylint.checkers.utils import check_messages
|
||||
|
||||
MSGS = {
|
||||
"W1201": (
|
||||
"Use %s formatting in logging functions",
|
||||
"logging-not-lazy",
|
||||
"Used when a logging statement has a call form of "
|
||||
'"logging.<logging method>(format_string % (format_args...))". '
|
||||
"Use another type of string formatting instead. "
|
||||
"You can use % formatting but leave interpolation to "
|
||||
"the logging function by passing the parameters as arguments. "
|
||||
"If logging-fstring-interpolation is disabled then "
|
||||
"you can use fstring formatting. "
|
||||
"If logging-format-interpolation is disabled then "
|
||||
"you can use str.format.",
|
||||
),
|
||||
"W1202": (
|
||||
"Use %s formatting in logging functions",
|
||||
"logging-format-interpolation",
|
||||
"Used when a logging statement has a call form of "
|
||||
'"logging.<logging method>(format_string.format(format_args...))". '
|
||||
"Use another type of string formatting instead. "
|
||||
"You can use % formatting but leave interpolation to "
|
||||
"the logging function by passing the parameters as arguments. "
|
||||
"If logging-fstring-interpolation is disabled then "
|
||||
"you can use fstring formatting. "
|
||||
"If logging-not-lazy is disabled then "
|
||||
"you can use % formatting as normal.",
|
||||
),
|
||||
"W1203": (
|
||||
"Use %s formatting in logging functions",
|
||||
"logging-fstring-interpolation",
|
||||
"Used when a logging statement has a call form of "
|
||||
'"logging.<logging method>(f"...")".'
|
||||
"Use another type of string formatting instead. "
|
||||
"You can use % formatting but leave interpolation to "
|
||||
"the logging function by passing the parameters as arguments. "
|
||||
"If logging-format-interpolation is disabled then "
|
||||
"you can use str.format. "
|
||||
"If logging-not-lazy is disabled then "
|
||||
"you can use % formatting as normal.",
|
||||
),
|
||||
"E1200": (
|
||||
"Unsupported logging format character %r (%#02x) at index %d",
|
||||
"logging-unsupported-format",
|
||||
"Used when an unsupported format character is used in a logging "
|
||||
"statement format string.",
|
||||
),
|
||||
"E1201": (
|
||||
"Logging format string ends in middle of conversion specifier",
|
||||
"logging-format-truncated",
|
||||
"Used when a logging statement format string terminates before "
|
||||
"the end of a conversion specifier.",
|
||||
),
|
||||
"E1205": (
|
||||
"Too many arguments for logging format string",
|
||||
"logging-too-many-args",
|
||||
"Used when a logging format string is given too many arguments.",
|
||||
),
|
||||
"E1206": (
|
||||
"Not enough arguments for logging format string",
|
||||
"logging-too-few-args",
|
||||
"Used when a logging format string is given too few arguments.",
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
# Names of the logging module's level-specific convenience methods whose
# format-string usage this checker validates (logging.log() is handled
# separately because its format string sits after the level argument).
CHECKED_CONVENIENCE_FUNCTIONS = {
    "critical",
    "debug",
    "error",
    "exception",
    "fatal",
    "info",
    "warn",
    "warning",
}
|
||||
|
||||
|
||||
def is_method_call(func, types=(), methods=()):
    """Determines if a BoundMethod node represents a method call.

    Args:
        func (astroid.BoundMethod): The BoundMethod AST node to check.
        types (Optional[String]): Optional sequence of caller type names to restrict check.
        methods (Optional[String]): Optional sequence of method names to restrict check.

    Returns:
        bool: true if the node represents a method call for the given type and
        method names, False otherwise.
    """
    if not isinstance(func, astroid.BoundMethod):
        return False
    if not isinstance(func.bound, astroid.Instance):
        return False
    # An empty `types`/`methods` sequence means "no restriction".
    if types and func.bound.name not in types:
        return False
    if methods and func.name not in methods:
        return False
    return True
|
||||
|
||||
|
||||
class LoggingChecker(checkers.BaseChecker):
    """Checks use of the logging module."""

    __implements__ = interfaces.IAstroidChecker
    name = "logging"
    msgs = MSGS

    options = (
        (
            "logging-modules",
            {
                "default": ("logging",),
                "type": "csv",
                "metavar": "<comma separated list>",
                "help": "Logging modules to check that the string format "
                "arguments are in logging function parameter format.",
            },
        ),
        (
            "logging-format-style",
            {
                "default": "old",
                "type": "choice",
                "metavar": "<old (%) or new ({)>",
                "choices": ["old", "new"],
                "help": "The type of string formatting that logging methods do. "
                "`old` means using % formatting, `new` is for `{}` formatting.",
            },
        ),
    )

    def visit_module(self, node):  # pylint: disable=unused-argument
        """Clears any state left in this checker from last module checked."""
        # The code being checked can just as easily "import logging as foo",
        # so it is necessary to process the imports and store in this field
        # what name the logging module is actually given.
        self._logging_names = set()
        logging_mods = self.config.logging_modules

        self._format_style = self.config.logging_format_style

        self._logging_modules = set(logging_mods)
        # Maps package name -> submodule name for dotted logging modules
        # (e.g. "a.logging" -> {"a": "logging"}), used by visit_importfrom.
        self._from_imports = {}
        for logging_mod in logging_mods:
            parts = logging_mod.rsplit(".", 1)
            if len(parts) > 1:
                self._from_imports[parts[0]] = parts[1]

    def visit_importfrom(self, node):
        """Checks to see if a module uses a non-Python logging module."""
        try:
            logging_name = self._from_imports[node.modname]
            for module, as_name in node.names:
                if module == logging_name:
                    self._logging_names.add(as_name or module)
        except KeyError:
            # Not a package we were told to treat as a logging module.
            pass

    def visit_import(self, node):
        """Checks to see if this module uses Python's built-in logging."""
        for module, as_name in node.names:
            if module in self._logging_modules:
                self._logging_names.add(as_name or module)

    @check_messages(*MSGS)
    def visit_call(self, node):
        """Checks calls to logging methods."""

        def is_logging_name():
            # e.g. `logging.warning(...)` where "logging" is one of the
            # names recorded by visit_import / visit_importfrom.
            return (
                isinstance(node.func, astroid.Attribute)
                and isinstance(node.func.expr, astroid.Name)
                and node.func.expr.name in self._logging_names
            )

        def is_logger_class():
            # e.g. `self.logger.warning(...)` where the receiver is inferred
            # to be a logging.Logger (or a subclass) instance.
            try:
                for inferred in node.func.infer():
                    if isinstance(inferred, astroid.BoundMethod):
                        parent = inferred._proxied.parent
                        if isinstance(parent, astroid.ClassDef) and (
                            parent.qname() == "logging.Logger"
                            or any(
                                ancestor.qname() == "logging.Logger"
                                for ancestor in parent.ancestors()
                            )
                        ):
                            return True, inferred._proxied.name
            except astroid.exceptions.InferenceError:
                pass
            return False, None

        if is_logging_name():
            name = node.func.attrname
        else:
            result, name = is_logger_class()
            if not result:
                return
        self._check_log_method(node, name)

    def _check_log_method(self, node, name):
        """Checks calls to logging.log(level, format, *format_args)."""
        if name == "log":
            if node.starargs or node.kwargs or len(node.args) < 2:
                # Either a malformed call, star args, or double-star args. Beyond
                # the scope of this checker.
                return
            format_pos = 1
        elif name in CHECKED_CONVENIENCE_FUNCTIONS:
            if node.starargs or node.kwargs or not node.args:
                # Either no args, star args, or double-star args. Beyond the
                # scope of this checker.
                return
            format_pos = 0
        else:
            return

        if isinstance(node.args[format_pos], astroid.BinOp):
            binop = node.args[format_pos]
            # `fmt % args` passed eagerly to the logging call.
            emit = binop.op == "%"
            if binop.op == "+":
                # `"a" + x`: only warn if at least one operand is a literal str.
                total_number_of_strings = sum(
                    1
                    for operand in (binop.left, binop.right)
                    if self._is_operand_literal_str(utils.safe_infer(operand))
                )
                emit = total_number_of_strings > 0
            if emit:
                self.add_message(
                    "logging-not-lazy", node=node, args=(self._helper_string(node),),
                )
        elif isinstance(node.args[format_pos], astroid.Call):
            self._check_call_func(node.args[format_pos])
        elif isinstance(node.args[format_pos], astroid.Const):
            self._check_format_string(node, format_pos)
        elif isinstance(node.args[format_pos], astroid.JoinedStr):
            # f-string passed directly to the logging call.
            self.add_message(
                "logging-fstring-interpolation",
                node=node,
                args=(self._helper_string(node),),
            )

    def _helper_string(self, node):
        """Create a string that lists the valid types of formatting for this node."""
        valid_types = ["lazy %"]

        # BUG FIX: the original queried the non-existent message symbol
        # "logging-fstring-formatting"; the f-string message defined in
        # MSGS is "logging-fstring-interpolation" (W1203).
        if not self.linter.is_message_enabled(
            "logging-fstring-interpolation", node.fromlineno
        ):
            valid_types.append("fstring")
        if not self.linter.is_message_enabled(
            "logging-format-interpolation", node.fromlineno
        ):
            valid_types.append(".format()")
        if not self.linter.is_message_enabled("logging-not-lazy", node.fromlineno):
            valid_types.append("%")

        return " or ".join(valid_types)

    @staticmethod
    def _is_operand_literal_str(operand):
        """
        Return True if the operand in argument is a literal string
        """
        return isinstance(operand, astroid.Const) and operand.name == "str"

    def _check_call_func(self, node):
        """Checks that function call is not format_string.format().

        Args:
          node (astroid.node_classes.Call):
            Call AST node to be checked.
        """
        func = utils.safe_infer(node.func)
        types = ("str", "unicode")
        methods = ("format",)
        # str.format() calls with complex format specs are left alone: they
        # cannot always be rewritten as %-style lazy formatting.
        if is_method_call(func, types, methods) and not is_complex_format_str(
            func.bound
        ):
            self.add_message(
                "logging-format-interpolation",
                node=node,
                args=(self._helper_string(node),),
            )

    def _check_format_string(self, node, format_arg):
        """Checks that format string tokens match the supplied arguments.

        Args:
          node (astroid.node_classes.NodeNG): AST node to be checked.
          format_arg (int): Index of the format string in the node arguments.
        """
        num_args = _count_supplied_tokens(node.args[format_arg + 1 :])
        if not num_args:
            # If no args were supplied the string is not interpolated and can contain
            # formatting characters - it's used verbatim. Don't check any further.
            return

        format_string = node.args[format_arg].value
        required_num_args = 0
        if isinstance(format_string, bytes):
            format_string = format_string.decode()
        if isinstance(format_string, str):
            try:
                if self._format_style == "old":
                    keyword_args, required_num_args, _, _ = utils.parse_format_string(
                        format_string
                    )
                    if keyword_args:
                        # Keyword checking on logging strings is complicated by
                        # special keywords - out of scope.
                        return
                elif self._format_style == "new":
                    (
                        keyword_arguments,
                        implicit_pos_args,
                        explicit_pos_args,
                    ) = utils.parse_format_method_string(format_string)

                    keyword_args_cnt = len(
                        {k for k, l in keyword_arguments if not isinstance(k, int)}
                    )
                    required_num_args = (
                        keyword_args_cnt + implicit_pos_args + explicit_pos_args
                    )
            except utils.UnsupportedFormatCharacter as ex:
                char = format_string[ex.index]
                self.add_message(
                    "logging-unsupported-format",
                    node=node,
                    args=(char, ord(char), ex.index),
                )
                return
            except utils.IncompleteFormatString:
                self.add_message("logging-format-truncated", node=node)
                return
        if num_args > required_num_args:
            self.add_message("logging-too-many-args", node=node)
        elif num_args < required_num_args:
            self.add_message("logging-too-few-args", node=node)
|
||||
|
||||
|
||||
def is_complex_format_str(node):
    """Checks if node represents a string with complex formatting specs.

    Args:
        node (astroid.node_classes.NodeNG): AST node to check
    Returns:
        bool: True if inferred string uses complex formatting, False otherwise
    """
    inferred = utils.safe_infer(node)
    is_literal_str = isinstance(inferred, astroid.Const) and isinstance(
        inferred.value, str
    )
    if not is_literal_str:
        # Also covers failed inference: be conservative and treat anything
        # that is not a literal str constant as "complex".
        return True
    try:
        fields = string.Formatter().parse(inferred.value)
        return any(format_spec for _, _, format_spec, _ in fields)
    except ValueError:
        # This format string is invalid
        return False
|
||||
|
||||
|
||||
def _count_supplied_tokens(args):
    """Counts the number of tokens in an args list.

    The Python log functions allow for special keyword arguments: func,
    exc_info and extra. To handle these cases correctly, we only count
    arguments that aren't keywords.

    Args:
      args (list): AST nodes that are arguments for a log format string.

    Returns:
      int: Number of AST nodes that aren't keywords.
    """
    positional = [arg for arg in args if not isinstance(arg, astroid.Keyword)]
    return len(positional)
|
||||
|
||||
|
||||
def register(linter):
    """Auto-registration hook: attach the logging checker to *linter*."""
    checker = LoggingChecker(linter)
    linter.register_checker(checker)
|
||||
199
venv/lib/python3.8/site-packages/pylint/checkers/misc.py
Normal file
199
venv/lib/python3.8/site-packages/pylint/checkers/misc.py
Normal file
@@ -0,0 +1,199 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006, 2009-2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012-2014 Google, Inc.
|
||||
# Copyright (c) 2014-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Alexandru Coman <fcoman@bitdefender.com>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2016 glegoux <gilles.legoux@gmail.com>
|
||||
# Copyright (c) 2017-2019 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
|
||||
# Copyright (c) 2018 Rogalski, Lukasz <lukasz.rogalski@intel.com>
|
||||
# Copyright (c) 2018 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2019-2020 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2020 Benny <benny.mueller91@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
|
||||
"""Check source code is ascii only or has an encoding declaration (PEP 263)"""
|
||||
|
||||
import re
|
||||
import tokenize
|
||||
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.interfaces import IRawChecker, ITokenChecker
|
||||
from pylint.message import MessagesHandlerMixIn
|
||||
from pylint.utils.pragma_parser import OPTION_PO, PragmaParserError, parse_pragma
|
||||
|
||||
|
||||
class ByIdManagedMessagesChecker(BaseChecker):

    """checks for messages that are enabled or disabled by id instead of symbol."""

    __implements__ = IRawChecker

    # configuration section name
    name = "miscellaneous"
    msgs = {
        "I0023": (
            "%s",
            "use-symbolic-message-instead",
            "Used when a message is enabled or disabled by id.",
        )
    }

    options = ()

    def process_module(self, module):
        """inspect the source file to find messages activated or deactivated by id."""
        managed_msgs = MessagesHandlerMixIn.get_by_id_managed_msgs()
        for mod_name, msg_id, msg_symbol, lineno, is_disabled in managed_msgs:
            if mod_name != module.name:
                continue
            # Same wording as before, with the enable/disable verb selected
            # instead of duplicating the whole template.
            mode = "disable" if is_disabled else "enable"
            txt = "Id '{ident}' is used to {mode} '{symbol}' message emission".format(
                ident=msg_id, mode=mode, symbol=msg_symbol
            )
            self.add_message("use-symbolic-message-instead", line=lineno, args=txt)
        # The list is global state shared with the linter; reset it so the
        # next module starts clean.
        MessagesHandlerMixIn.clear_by_id_managed_msgs()
|
||||
|
||||
|
||||
class EncodingChecker(BaseChecker):

    """checks for:
    * warning notes in the code like FIXME, XXX
    * encoding issues.
    """

    __implements__ = (IRawChecker, ITokenChecker)

    # configuration section name
    name = "miscellaneous"
    msgs = {
        "W0511": (
            "%s",
            "fixme",
            "Used when a warning note as FIXME or XXX is detected.",
        )
    }

    options = (
        (
            "notes",
            {
                "type": "csv",
                "metavar": "<comma separated values>",
                "default": ("FIXME", "XXX", "TODO"),
                "help": (
                    "List of note tags to take in consideration, "
                    "separated by a comma."
                ),
            },
        ),
        (
            "notes-rgx",
            {
                "type": "string",
                "metavar": "<regexp>",
                "help": "Regular expression of note tags to take in consideration.",
            },
        ),
    )

    def open(self):
        super().open()

        # Build one case-insensitive pattern that matches a comment starting
        # with any configured note tag (plus the optional user regex).
        notes = "|".join(map(re.escape, self.config.notes))
        if self.config.notes_rgx:
            regex_string = r"#\s*(%s|%s)\b" % (notes, self.config.notes_rgx)
        else:
            regex_string = r"#\s*(%s)\b" % (notes)

        self._fixme_pattern = re.compile(regex_string, re.I)

    def _check_encoding(self, lineno, line, file_encoding):
        """Try to decode *line* (bytes) with the module's declared encoding.

        Returns the decoded str, or None when the line cannot be decoded;
        emits syntax-error when the declared encoding itself is unknown.
        """
        try:
            return line.decode(file_encoding)
        except UnicodeDecodeError:
            pass
        except LookupError:
            # BUG FIX: *line* is bytes (module.stream() is binary), so the
            # original str arguments (`line.startswith("#")`,
            # `"coding" in line`) raised TypeError here instead of emitting
            # the message. Compare against bytes / str() representations.
            if (
                line.startswith(b"#")
                and "coding" in str(line)
                and file_encoding in str(line)
            ):
                self.add_message(
                    "syntax-error",
                    line=lineno,
                    args='Cannot decode using encoding "{}",'
                    " bad encoding".format(file_encoding),
                )
        return None

    def process_module(self, module):
        """inspect the source file to find encoding problem"""
        if module.file_encoding:
            encoding = module.file_encoding
        else:
            # No declared encoding: PEP 263 mandates ASCII as the default.
            encoding = "ascii"

        with module.stream() as stream:
            for lineno, line in enumerate(stream):
                self._check_encoding(lineno + 1, line, encoding)

    def process_tokens(self, tokens):
        """inspect the source to find fixme problems"""
        if not self.config.notes:
            return
        comments = (
            token_info for token_info in tokens if token_info.type == tokenize.COMMENT
        )
        for comment in comments:
            comment_text = comment.string[1:].lstrip()  # trim '#' and whitespaces

            # handle pylint disable clauses
            disable_option_match = OPTION_PO.search(comment_text)
            if disable_option_match:
                try:
                    values = []
                    try:
                        for pragma_repr in (
                            p_rep
                            for p_rep in parse_pragma(disable_option_match.group(2))
                            if p_rep.action == "disable"
                        ):
                            values.extend(pragma_repr.messages)
                    except PragmaParserError:
                        # Printing useful information dealing with this error is done in the lint package
                        pass
                    values = [_val.upper() for _val in values]
                    if set(values) & set(self.config.notes):
                        # The note tag itself is being disabled inline; skip it.
                        continue
                except ValueError:
                    self.add_message(
                        "bad-inline-option",
                        args=disable_option_match.group(1).strip(),
                        line=comment.start[0],
                    )
                    continue

            # emit warnings if necessary
            match = self._fixme_pattern.search("#" + comment_text.lower())
            if match:
                note = match.group(1)
                self.add_message(
                    "fixme",
                    col_offset=comment.string.lower().index(note.lower()),
                    args=comment_text,
                    line=comment.start[0],
                )
|
||||
|
||||
|
||||
def register(linter):
    """Auto-registration hook: attach both misc checkers to *linter*."""
    for checker_class in (EncodingChecker, ByIdManagedMessagesChecker):
        linter.register_checker(checker_class(linter))
|
||||
133
venv/lib/python3.8/site-packages/pylint/checkers/newstyle.py
Normal file
133
venv/lib/python3.8/site-packages/pylint/checkers/newstyle.py
Normal file
@@ -0,0 +1,133 @@
|
||||
# Copyright (c) 2006, 2008-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012-2014 Google, Inc.
|
||||
# Copyright (c) 2013-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2018 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 Natalie Serebryakova <natalie.serebryakova@Natalies-MacBook-Pro.local>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Robert Schweizer <robert_schweizer@gmx.de>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""check for new / old style related problems
|
||||
"""
|
||||
import astroid
|
||||
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.checkers.utils import check_messages, has_known_bases, node_frame_class
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
|
||||
# Message definitions for this checker, keyed by pylint message id:
# (template, symbolic name, description).
MSGS = {
    "E1003": (
        "Bad first argument %r given to super()",
        "bad-super-call",
        "Used when another argument than the current class is given as "
        "first argument of the super builtin.",
    )
}
|
||||
|
||||
|
||||
class NewStyleConflictChecker(BaseChecker):
    """checks for usage of new style capabilities on old style classes and
    other new/old styles conflicts problems
    * use of property, __slots__, super
    * "super" usage
    """

    __implements__ = (IAstroidChecker,)

    # configuration section name
    name = "newstyle"
    # messages
    msgs = MSGS
    priority = -2
    # configuration options
    options = ()

    @check_messages("bad-super-call")
    def visit_functiondef(self, node):
        """check use of super"""
        # ignore actual functions or method within a new style class
        if not node.is_method():
            return
        klass = node.parent.frame()
        # Inspect every `<something>.attr(...)` call inside this method and
        # keep only those whose receiver is a super(...) call.
        for stmt in node.nodes_of_class(astroid.Call):
            if node_frame_class(stmt) != node_frame_class(node):
                # Don't look down in other scopes.
                continue

            expr = stmt.func
            if not isinstance(expr, astroid.Attribute):
                continue

            call = expr.expr
            # skip the test if using super
            if not (
                isinstance(call, astroid.Call)
                and isinstance(call.func, astroid.Name)
                and call.func.name == "super"
            ):
                continue

            # super should not be used on an old style class
            if klass.newstyle or not has_known_bases(klass):
                # super first arg should not be the class
                if not call.args:
                    # zero-argument super() — always fine.
                    continue

                # calling super(type(self), self) can lead to recursion loop
                # in derived classes
                arg0 = call.args[0]
                if (
                    isinstance(arg0, astroid.Call)
                    and isinstance(arg0.func, astroid.Name)
                    and arg0.func.name == "type"
                ):
                    self.add_message("bad-super-call", node=call, args=("type",))
                    continue

                # calling super(self.__class__, self) can lead to recursion loop
                # in derived classes
                if (
                    len(call.args) >= 2
                    and isinstance(call.args[1], astroid.Name)
                    and call.args[1].name == "self"
                    and isinstance(arg0, astroid.Attribute)
                    and arg0.attrname == "__class__"
                ):
                    self.add_message(
                        "bad-super-call", node=call, args=("self.__class__",)
                    )
                    continue

                try:
                    supcls = call.args and next(call.args[0].infer(), None)
                except astroid.InferenceError:
                    continue

                # The first argument to super() should be the class the
                # method is defined in; anything else is suspicious.
                if klass is not supcls:
                    name = None
                    # if supcls is not Uninferable, then supcls was inferred
                    # and use its name. Otherwise, try to look
                    # for call.args[0].name
                    if supcls:
                        name = supcls.name
                    elif call.args and hasattr(call.args[0], "name"):
                        name = call.args[0].name
                    if name:
                        self.add_message("bad-super-call", node=call, args=(name,))

    # async methods get exactly the same treatment
    visit_asyncfunctiondef = visit_functiondef
|
||||
|
||||
|
||||
def register(linter):
    """Auto-registration hook: attach the new/old-style checker to *linter*."""
    checker = NewStyleConflictChecker(linter)
    linter.register_checker(checker)
|
||||
1425
venv/lib/python3.8/site-packages/pylint/checkers/python3.py
Normal file
1425
venv/lib/python3.8/site-packages/pylint/checkers/python3.py
Normal file
File diff suppressed because it is too large
Load Diff
121
venv/lib/python3.8/site-packages/pylint/checkers/raw_metrics.py
Normal file
121
venv/lib/python3.8/site-packages/pylint/checkers/raw_metrics.py
Normal file
@@ -0,0 +1,121 @@
|
||||
# Copyright (c) 2007, 2010, 2013, 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013 Google, Inc.
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015 Mike Frysinger <vapier@gentoo.org>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
""" Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
|
||||
http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
|
||||
Raw metrics checker
|
||||
"""
|
||||
|
||||
import tokenize
|
||||
from typing import Any
|
||||
|
||||
from pylint.checkers import BaseTokenChecker
|
||||
from pylint.exceptions import EmptyReportError
|
||||
from pylint.interfaces import ITokenChecker
|
||||
from pylint.reporters.ureports.nodes import Table
|
||||
|
||||
|
||||
def report_raw_stats(sect, stats, _):
    """Fill *sect* with a table of code / docstring / comment / empty line counts.

    Percentages are relative to the total analyzed line count; raises
    EmptyReportError when nothing was analyzed.
    """
    total_lines = stats["total_lines"]
    if not total_lines:
        raise EmptyReportError()
    sect.description = "%s lines have been analyzed" % total_lines
    # Flat cell list; the Table node reshapes it into rows of 5 columns.
    cells = ["type", "number", "%", "previous", "difference"]
    for node_type in ("code", "docstring", "comment", "empty"):
        total = stats[node_type + "_lines"]
        percent = float(total * 100) / total_lines
        cells.extend((node_type, str(total), "%.2f" % percent, "NC", "NC"))
    sect.append(Table(children=tuple(cells), cols=5, rheaders=1))
|
||||
|
||||
|
||||
class RawMetricsChecker(BaseTokenChecker):
    """does not check anything but gives some raw metrics :
    * total number of lines
    * total number of code lines
    * total number of docstring lines
    * total number of comments lines
    * total number of empty lines
    """

    __implements__ = (ITokenChecker,)

    # configuration section name
    name = "metrics"
    # configuration options
    options = ()
    # messages (none — this checker only produces the RP0701 report)
    msgs = {}  # type: Any
    # reports
    reports = (("RP0701", "Raw metrics", report_raw_stats),)

    def __init__(self, linter):
        BaseTokenChecker.__init__(self, linter)
        # per-run line counters; populated by open()
        self.stats = None

    def open(self):
        """init statistics"""
        self.stats = self.linter.add_stats(
            total_lines=0,
            code_lines=0,
            empty_lines=0,
            docstring_lines=0,
            comment_lines=0,
        )

    def process_tokens(self, tokens):
        """update stats"""
        i = 0
        tokens = list(tokens)
        # Classify the token stream one physical line at a time; get_type()
        # returns the index of the first token of the next line together
        # with the number of lines consumed and their classification.
        while i < len(tokens):
            i, lines_number, line_type = get_type(tokens, i)
            self.stats["total_lines"] += lines_number
            self.stats[line_type] += lines_number
|
||||
|
||||
|
||||
# Token kinds that carry no line-classification information on their own.
JUNK = (tokenize.NL, tokenize.INDENT, tokenize.NEWLINE, tokenize.ENDMARKER)


def get_type(tokens, start_index):
    """return the line type : docstring, comment, code, empty"""
    index = start_index
    start = tokens[index][2]
    end = start
    line_type = None
    # Walk every token that begins on the same physical line as the first
    # one; the first non-junk token decides the line's classification.
    while index < len(tokens) and tokens[index][2][0] == start[0]:
        token_kind = tokens[index][0]
        end = tokens[index][3]
        if line_type is None:
            if token_kind == tokenize.STRING:
                line_type = "docstring_lines"
            elif token_kind == tokenize.COMMENT:
                line_type = "comment_lines"
            elif token_kind not in JUNK:
                line_type = "code_lines"
        index += 1
    if line_type is None:
        line_type = "empty_lines"
    elif index < len(tokens) and tokens[index][0] == tokenize.NEWLINE:
        # Consume the logical-line terminator so the caller starts cleanly
        # on the next line.
        index += 1
    return index, end[0] - start[0] + 1, line_type
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto-register this checker with *linter*."""
    # Called by pylint's plugin machinery when the module is loaded.
    linter.register_checker(RawMetricsChecker(linter))
|
||||
1547
venv/lib/python3.8/site-packages/pylint/checkers/refactoring.py
Normal file
1547
venv/lib/python3.8/site-packages/pylint/checkers/refactoring.py
Normal file
File diff suppressed because it is too large
Load Diff
455
venv/lib/python3.8/site-packages/pylint/checkers/similar.py
Normal file
455
venv/lib/python3.8/site-packages/pylint/checkers/similar.py
Normal file
@@ -0,0 +1,455 @@
|
||||
# Copyright (c) 2006, 2008-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012 Ry4an Brase <ry4an-hg@ry4an.org>
|
||||
# Copyright (c) 2012 Google, Inc.
|
||||
# Copyright (c) 2012 Anthony VEREZ <anthony.verez.external@cassidian.com>
|
||||
# Copyright (c) 2014-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2017, 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
|
||||
# Copyright (c) 2018 Scott Worley <scottworley@scottworley.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Taewon D. Kim <kimt33@mcmaster.ca>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
# pylint: disable=redefined-builtin
|
||||
"""a similarities / code duplication command line tool and pylint checker
|
||||
"""
|
||||
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from getopt import getopt
|
||||
from itertools import groupby
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint.checkers import BaseChecker, table_lines_from_stats
|
||||
from pylint.interfaces import IRawChecker
|
||||
from pylint.reporters.ureports.nodes import Table
|
||||
from pylint.utils import decoding_stream
|
||||
|
||||
|
||||
class Similar:
    """finds copy-pasted lines of code in a project"""

    def __init__(
        self,
        min_lines=4,
        ignore_comments=False,
        ignore_docstrings=False,
        ignore_imports=False,
    ):
        # Minimum number of successive equal lines before a match counts.
        self.min_lines = min_lines
        self.ignore_comments = ignore_comments
        self.ignore_docstrings = ignore_docstrings
        self.ignore_imports = ignore_imports
        # One LineSet per appended stream.
        self.linesets = []

    def append_stream(self, streamid, stream, encoding=None):
        """append a file to search for similarities"""
        if encoding is None:
            readlines = stream.readlines
        else:
            readlines = decoding_stream(stream, encoding).readlines
        try:
            self.linesets.append(
                LineSet(
                    streamid,
                    readlines(),
                    self.ignore_comments,
                    self.ignore_docstrings,
                    self.ignore_imports,
                )
            )
        except UnicodeDecodeError:
            # Undecodable files are skipped silently instead of aborting
            # the whole analysis.
            pass

    def run(self):
        """start looking for similarities and display results on stdout"""
        self._display_sims(self._compute_sims())

    def _compute_sims(self):
        """compute similarities in appended files"""
        # Group raw matches by length; couples that share an endpoint are
        # merged into one set so each duplication is reported only once.
        no_duplicates = defaultdict(list)
        for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
            duplicate = no_duplicates[num]
            for couples in duplicate:
                if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
                    couples.add((lineset1, idx1))
                    couples.add((lineset2, idx2))
                    break
            else:
                duplicate.append({(lineset1, idx1), (lineset2, idx2)})
        sims = []
        for num, ensembles in no_duplicates.items():
            for couples in ensembles:
                sims.append((num, couples))
        # Longest similarities first.
        sims.sort()
        sims.reverse()
        return sims

    def _display_sims(self, sims):
        """display computed similarities on stdout"""
        nb_lignes_dupliquees = 0
        for num, couples in sims:
            print()
            print(num, "similar lines in", len(couples), "files")
            couples = sorted(couples)
            lineset = idx = None
            for lineset, idx in couples:
                print("==%s:%s" % (lineset.name, idx))
            if lineset:
                # Print the duplicated block once, taken from the last couple.
                for line in lineset._real_lines[idx : idx + num]:
                    print(" ", line.rstrip())
            nb_lignes_dupliquees += num * (len(couples) - 1)
        nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
        print(
            "TOTAL lines=%s duplicates=%s percent=%.2f"
            % (
                nb_total_lignes,
                nb_lignes_dupliquees,
                nb_lignes_dupliquees * 100.0 / nb_total_lignes,
            )
        )

    def _find_common(self, lineset1, lineset2):
        """find similarities in the two given linesets"""
        lines1 = lineset1.enumerate_stripped
        lines2 = lineset2.enumerate_stripped
        find = lineset2.find
        index1 = 0
        min_lines = self.min_lines
        while index1 < len(lineset1):
            skip = 1
            num = 0
            # For each occurrence of the current line in the other set, try
            # to extend the match line by line.
            for index2 in find(lineset1[index1]):
                non_blank = 0
                for num, ((_, line1), (_, line2)) in enumerate(
                    zip(lines1(index1), lines2(index2))
                ):
                    if line1 != line2:
                        # Only matches with > min_lines non-blank lines count.
                        if non_blank > min_lines:
                            yield num, lineset1, index1, lineset2, index2
                        skip = max(skip, num)
                        break
                    if line1:
                        non_blank += 1
                else:
                    # we may have reach the end
                    num += 1
                    if non_blank > min_lines:
                        yield num, lineset1, index1, lineset2, index2
                    skip = max(skip, num)
            # Jump past the longest match found from this start line.
            index1 += skip

    def _iter_sims(self):
        """iterate on similarities among all files, by making a cartesian
        product
        """
        for idx, lineset in enumerate(self.linesets[:-1]):
            for lineset2 in self.linesets[idx + 1 :]:
                yield from self._find_common(lineset, lineset2)
|
||||
|
||||
|
||||
def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
    """Return lines with leading/trailing whitespace and any ignored code
    features removed.

    :param lines: source lines of one file (with newlines)
    :param ignore_comments: strip trailing "#" comments
    :param ignore_docstrings: blank out lines that belong to a docstring
    :param ignore_imports: blank out import statements
    :returns: list of stripped line strings, same length as *lines*
    """
    if ignore_imports:
        # Parse once and record, per line, whether every statement starting
        # on that line is an import; continuation lines of a multi-line
        # import inherit the flag via ``current_line_is_import``.
        tree = astroid.parse("".join(lines))
        node_is_import_by_lineno = (
            (node.lineno, isinstance(node, (astroid.Import, astroid.ImportFrom)))
            for node in tree.body
        )
        line_begins_import = {
            lineno: all(is_import for _, is_import in node_is_import_group)
            for lineno, node_is_import_group in groupby(
                node_is_import_by_lineno, key=lambda x: x[0]
            )
        }
        current_line_is_import = False

    strippedlines = []
    docstring = None
    for lineno, line in enumerate(lines, start=1):
        line = line.strip()
        if ignore_docstrings:
            if not docstring:
                if line.startswith('"""') or line.startswith("'''"):
                    docstring = line[:3]
                    line = line[3:]
                elif line.startswith('r"""') or line.startswith("r'''"):
                    # Raw docstring: the closing delimiter carries no "r"
                    # prefix, so remember only the quote characters.
                    # (Bug fix: the old code stored ``line[:3]`` == 'r""'
                    # here, which never matched the closing quotes, so the
                    # rest of the file was blanked out.)
                    docstring = line[1:4]
                    line = line[4:]
            if docstring:
                if line.endswith(docstring):
                    docstring = None
                line = ""
        if ignore_imports:
            current_line_is_import = line_begins_import.get(
                lineno, current_line_is_import
            )
            if current_line_is_import:
                line = ""
        if ignore_comments:
            line = line.split("#", 1)[0].strip()
        strippedlines.append(line)
    return strippedlines
|
||||
|
||||
|
||||
class LineSet:
    """Holds and indexes all the lines of a single source file."""

    def __init__(
        self,
        name,
        lines,
        ignore_comments=False,
        ignore_docstrings=False,
        ignore_imports=False,
    ):
        self.name = name
        self._real_lines = lines
        # Pre-compute the normalized view used for all comparisons.
        self._stripped_lines = stripped_lines(
            lines, ignore_comments, ignore_docstrings, ignore_imports
        )
        self._index = self._mk_index()

    def __str__(self):
        return "<Lineset for %s>" % self.name

    def __len__(self):
        return len(self._real_lines)

    def __getitem__(self, index):
        return self._stripped_lines[index]

    def __lt__(self, other):
        return self.name < other.name

    def __hash__(self):
        # Identity hash: two LineSets are distinct even for equal content.
        return id(self)

    def enumerate_stripped(self, start_at=0):
        """Yield (index, stripped_line) pairs, beginning at *start_at*
        (default 0).
        """
        if start_at:
            tail = self._stripped_lines[start_at:]
        else:
            tail = self._stripped_lines
        yield from enumerate(tail, start=start_at)

    def find(self, stripped_line):
        """Return the positions of *stripped_line* in this set (empty if
        absent).
        """
        return self._index.get(stripped_line, ())

    def _mk_index(self):
        """Map every non-empty stripped line to the rows where it occurs."""
        index = defaultdict(list)
        for row, content in enumerate(self._stripped_lines):
            if not content:
                continue
            index[content].append(row)
        return index
|
||||
|
||||
|
||||
# Message definitions for the duplicate-code checker.
MSGS = {
    "R0801": (
        "Similar lines in %s files\n%s",
        "duplicate-code",
        "Indicates that a set of similar lines has been detected "
        "among multiple file. This usually means that the code should "
        "be refactored to avoid this duplication.",
    )
}


def report_similarities(sect, stats, old_stats):
    """make a layout with some stats about duplication"""
    # 4-column table: metric name, current value, previous run's value and
    # the difference, for the two duplication statistics.
    lines = ["", "now", "previous", "difference"]
    lines += table_lines_from_stats(
        stats, old_stats, ("nb_duplicated_lines", "percent_duplicated_lines")
    )
    sect.append(Table(children=lines, cols=4, rheaders=1, cheaders=1))
|
||||
|
||||
|
||||
# wrapper to get a pylint checker from the similar class
class SimilarChecker(BaseChecker, Similar):
    """checks for similarities and duplicated code. This computation may be
    memory / CPU intensive, so you should disable it if you experiment some
    problems.
    """

    __implements__ = (IRawChecker,)
    # configuration section name
    name = "similarities"
    # messages
    msgs = MSGS
    # configuration options
    # for available dict keys/values see the optik parser 'add_option' method
    options = (
        (
            "min-similarity-lines", # type: ignore
            {
                "default": 4,
                "type": "int",
                "metavar": "<int>",
                "help": "Minimum lines number of a similarity.",
            },
        ),
        (
            "ignore-comments",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Ignore comments when computing similarities.",
            },
        ),
        (
            "ignore-docstrings",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Ignore docstrings when computing similarities.",
            },
        ),
        (
            "ignore-imports",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Ignore imports when computing similarities.",
            },
        ),
    )
    # reports
    reports = (("RP0801", "Duplication", report_similarities),) # type: ignore

    def __init__(self, linter=None):
        # Defaults mirror the option defaults above; set_option() keeps the
        # Similar configuration in sync with the pylint configuration.
        BaseChecker.__init__(self, linter)
        Similar.__init__(
            self, min_lines=4, ignore_comments=True, ignore_docstrings=True
        )
        self.stats = None

    def set_option(self, optname, value, action=None, optdict=None):
        """method called to set an option (registered in the options list)

        overridden to report options setting to Similar
        """
        BaseChecker.set_option(self, optname, value, action, optdict)
        if optname == "min-similarity-lines":
            self.min_lines = self.config.min_similarity_lines
        elif optname == "ignore-comments":
            self.ignore_comments = self.config.ignore_comments
        elif optname == "ignore-docstrings":
            self.ignore_docstrings = self.config.ignore_docstrings
        elif optname == "ignore-imports":
            self.ignore_imports = self.config.ignore_imports

    def open(self):
        """init the checkers: reset linesets and statistics information"""
        self.linesets = []
        self.stats = self.linter.add_stats(
            nb_duplicated_lines=0, percent_duplicated_lines=0
        )

    def process_module(self, node):
        """process a module

        the module's content is accessible via the stream object

        stream must implement the readlines method
        """
        with node.stream() as stream:
            self.append_stream(self.linter.current_name, stream, node.file_encoding)

    def close(self):
        """compute and display similarities on closing (i.e. end of parsing)"""
        total = sum(len(lineset) for lineset in self.linesets)
        duplicated = 0
        stats = self.stats
        for num, couples in self._compute_sims():
            # Build one R0801 message listing the locations followed by the
            # duplicated block itself.
            msg = []
            lineset = idx = None
            for lineset, idx in couples:
                msg.append("==%s:%s" % (lineset.name, idx))
            msg.sort()

            if lineset:
                for line in lineset._real_lines[idx : idx + num]:
                    msg.append(line.rstrip())

            self.add_message("R0801", args=(len(couples), "\n".join(msg)))
            duplicated += num * (len(couples) - 1)
        stats["nb_duplicated_lines"] = duplicated
        # ``total and ...`` avoids a ZeroDivisionError on empty input.
        stats["percent_duplicated_lines"] = total and duplicated * 100.0 / total
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto-register this checker with *linter*."""
    # Called by pylint's plugin machinery when the module is loaded.
    linter.register_checker(SimilarChecker(linter))
|
||||
|
||||
|
||||
def usage(status=0):
    """Display command line usage information and exit with *status*."""
    print("finds copy pasted blocks in a set of files")
    print()
    print(
        "Usage: symilar [-d|--duplicates min_duplicated_lines] \
[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1..."
    )
    sys.exit(status)
|
||||
|
||||
|
||||
def Run(argv=None):
    """standalone command line access point

    Parses *argv* (default: sys.argv[1:]), runs the Similar tool on the
    given files and exits the process.
    """
    if argv is None:
        argv = sys.argv[1:]

    # Short options mirror the long ones: -h/--help, -d/--duplicates,
    # -i/--ignore-comments.  Bug fix: "-d" takes a value, so it needs a
    # trailing colon in the getopt spec -- with the old "hdi" spec,
    # ``-d N`` left val empty and int("") raised ValueError.
    s_opts = "hd:i"
    l_opts = (
        "help",
        "duplicates=",
        "ignore-comments",
        "ignore-imports",
        "ignore-docstrings",
    )
    min_lines = 4
    ignore_comments = False
    ignore_docstrings = False
    ignore_imports = False
    opts, args = getopt(argv, s_opts, l_opts)
    for opt, val in opts:
        if opt in ("-d", "--duplicates"):
            min_lines = int(val)
        elif opt in ("-h", "--help"):
            usage()
        elif opt in ("-i", "--ignore-comments"):
            ignore_comments = True
        elif opt in ("--ignore-docstrings",):
            ignore_docstrings = True
        elif opt in ("--ignore-imports",):
            ignore_imports = True
    if not args:
        # No files given: print usage and exit with an error status.
        usage(1)
    sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports)
    for filename in args:
        with open(filename) as stream:
            sim.append_stream(filename, stream)
    sim.run()
    sys.exit(0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this module directly as the "symilar" tool.
    Run()
|
||||
415
venv/lib/python3.8/site-packages/pylint/checkers/spelling.py
Normal file
415
venv/lib/python3.8/site-packages/pylint/checkers/spelling.py
Normal file
@@ -0,0 +1,415 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2014-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
|
||||
# Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2015 Pavel Roskin <proski@gnu.org>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016-2017 Pedro Algarvio <pedro@algarvio.me>
|
||||
# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
|
||||
# Copyright (c) 2018, 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Peter Kolbus <peter.kolbus@gmail.com>
|
||||
# Copyright (c) 2019 agutole <toldo_carp@hotmail.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checker for spelling errors in comments and docstrings.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import tokenize
|
||||
|
||||
from pylint.checkers import BaseTokenChecker
|
||||
from pylint.checkers.utils import check_messages
|
||||
from pylint.interfaces import IAstroidChecker, ITokenChecker
|
||||
|
||||
# enchant is an optional dependency: without it the spelling checker stays
# disabled, but the module must still import cleanly, so stub out the two
# base classes the filter/chunker definitions below subclass.
try:
    import enchant
    from enchant.tokenize import ( # type: ignore
        get_tokenizer,
        Chunker,
        Filter,
        EmailFilter,
        URLFilter,
        WikiWordFilter,
    )
except ImportError:
    enchant = None
    # pylint: disable=no-init
    class Filter: # type: ignore
        """Fallback stub used when python-enchant is not installed."""

        def _skip(self, word):
            raise NotImplementedError

    class Chunker: # type: ignore
        """Fallback stub used when python-enchant is not installed."""

        pass
|
||||
|
||||
|
||||
# Build the help text for the --spelling-dict option: list the installed
# enchant dictionaries, or explain how to enable the feature.
if enchant is not None:
    br = enchant.Broker()
    dicts = br.list_dicts()
    dict_choices = [""] + [d[0] for d in dicts]
    # Each entry is "tag (provider name)".
    dicts = ["%s (%s)" % (d[0], d[1].name) for d in dicts]
    dicts = ", ".join(dicts)
    instr = ""
else:
    dicts = "none"
    dict_choices = [""]
    instr = " To make it work, install the python-enchant package."
|
||||
|
||||
|
||||
class WordsWithDigigtsFilter(Filter):
    """Skip words that contain at least one digit.

    Such tokens are identifiers or codes, not natural-language words.
    """

    def _skip(self, word):
        return any(char.isdigit() for char in word)
|
||||
|
||||
|
||||
class WordsWithUnderscores(Filter):
    """Skip words containing an underscore.

    They are probably function parameter names.
    """

    def _skip(self, word):
        return word.count("_") > 0
|
||||
|
||||
|
||||
class CamelCasedWord(Filter):
    r"""Filter skipping over camelCasedWords.

    Skips any word matching the regular expression

        ^([a-z]+([\d]|[A-Z])(?:\w+)?)

    i.e. a lowercase run followed by a digit or a capital letter.
    """
    _pattern = re.compile(r"^([a-z]+([\d]|[A-Z])(?:\w+)?)")

    def _skip(self, word):
        return self._pattern.match(word) is not None
|
||||
|
||||
|
||||
class SphinxDirectives(Filter):
    r"""Filter skipping over Sphinx directives such as :class:`BaseQuery`.

    Skips any word matching

        ^:([a-z]+):`([^`]+)(`)?

    The final backtick in the pattern is optional because enchant strips
    it out before the filter sees the word.
    """
    _pattern = re.compile(r"^:([a-z]+):`([^`]+)(`)?")

    def _skip(self, word):
        return self._pattern.match(word) is not None
|
||||
|
||||
|
||||
class ForwardSlashChunkder(Chunker):
    """
    This chunker allows splitting words like 'before/after' into 'before' and 'after'
    """

    def next(self):
        # Chunker protocol: return the next (text, offset) chunk or raise
        # StopIteration.  self._text / self._offset appear to be managed by
        # the enchant base class -- NOTE(review): confirm against
        # enchant.tokenize.Chunker.
        while True:
            if not self._text:
                raise StopIteration()
            if "/" not in self._text:
                # No slash left: emit the remaining text as one chunk.
                text = self._text
                self._offset = 0
                self._text = ""
                return (text, 0)
            pre_text, post_text = self._text.split("/", 1)
            self._text = post_text
            self._offset = 0
            if (
                not pre_text
                or not post_text
                or not pre_text[-1].isalpha()
                or not post_text[0].isalpha()
            ):
                # Slash not between two alphabetic words (e.g. a path or an
                # option string): keep the whole thing as one chunk.
                self._text = ""
                self._offset = 0
                return (pre_text + "/" + post_text, 0)
            return (pre_text, 0)

    def _next(self):
        # NOTE(review): this helper looks like unused dead code -- nothing
        # in this module calls it, and the loop rewrites self._text without
        # ever returning the rewritten value.  Candidate for removal.
        while True:
            if "/" not in self._text:
                return (self._text, 0)
            pre_text, post_text = self._text.split("/", 1)
            if not pre_text or not post_text:
                break
            if not pre_text[-1].isalpha() or not post_text[0].isalpha():
                raise StopIteration()
            self._text = pre_text + " " + post_text
        raise StopIteration()
|
||||
|
||||
|
||||
class SpellingChecker(BaseTokenChecker):
    """Check spelling in comments and docstrings"""

    __implements__ = (ITokenChecker, IAstroidChecker)
    name = "spelling"
    msgs = {
        "C0401": (
            "Wrong spelling of a word '%s' in a comment:\n%s\n"
            "%s\nDid you mean: '%s'?",
            "wrong-spelling-in-comment",
            "Used when a word in comment is not spelled correctly.",
        ),
        "C0402": (
            "Wrong spelling of a word '%s' in a docstring:\n%s\n"
            "%s\nDid you mean: '%s'?",
            "wrong-spelling-in-docstring",
            "Used when a word in docstring is not spelled correctly.",
        ),
        "C0403": (
            "Invalid characters %r in a docstring",
            "invalid-characters-in-docstring",
            "Used when a word in docstring cannot be checked by enchant.",
        ),
    }
    options = (
        (
            "spelling-dict",
            {
                "default": "",
                "type": "choice",
                "metavar": "<dict name>",
                "choices": dict_choices,
                "help": "Spelling dictionary name. "
                "Available dictionaries: %s.%s" % (dicts, instr),
            },
        ),
        (
            "spelling-ignore-words",
            {
                "default": "",
                "type": "string",
                "metavar": "<comma separated words>",
                "help": "List of comma separated words that " "should not be checked.",
            },
        ),
        (
            "spelling-private-dict-file",
            {
                "default": "",
                "type": "string",
                "metavar": "<path to file>",
                "help": "A path to a file that contains the private "
                "dictionary; one word per line.",
            },
        ),
        (
            "spelling-store-unknown-words",
            {
                "default": "n",
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Tells whether to store unknown words to the "
                "private dictionary (see the "
                "--spelling-private-dict-file option) instead of "
                "raising a message.",
            },
        ),
        (
            "max-spelling-suggestions",
            {
                "default": 4,
                "type": "int",
                "metavar": "N",
                "help": "Limits count of emitted suggestions for " "spelling mistakes.",
            },
        ),
    )

    def open(self):
        """Set up the spell checker; disabled unless enchant is installed
        and a dictionary was configured.
        """
        self.initialized = False
        self.private_dict_file = None

        if enchant is None:
            return
        dict_name = self.config.spelling_dict
        if not dict_name:
            return

        self.ignore_list = [
            w.strip() for w in self.config.spelling_ignore_words.split(",")
        ]
        # "param" appears in docstring in param description and
        # "pylint" appears in comments in pylint pragmas.
        self.ignore_list.extend(["param", "pylint"])

        # Expand tilde to allow e.g. spelling-private-dict-file = ~/.pylintdict
        if self.config.spelling_private_dict_file:
            self.config.spelling_private_dict_file = os.path.expanduser(
                self.config.spelling_private_dict_file
            )

        if self.config.spelling_private_dict_file:
            # A personal word list augments the main dictionary; the file is
            # kept open in append mode so unknown words can be stored.
            self.spelling_dict = enchant.DictWithPWL(
                dict_name, self.config.spelling_private_dict_file
            )
            self.private_dict_file = open(self.config.spelling_private_dict_file, "a")
        else:
            self.spelling_dict = enchant.Dict(dict_name)

        if self.config.spelling_store_unknown_words:
            self.unknown_words = set()

        self.tokenizer = get_tokenizer(
            dict_name,
            chunkers=[ForwardSlashChunkder],
            filters=[
                EmailFilter,
                URLFilter,
                WikiWordFilter,
                WordsWithDigigtsFilter,
                WordsWithUnderscores,
                CamelCasedWord,
                SphinxDirectives,
            ],
        )
        self.initialized = True

    def close(self):
        """Close the private dictionary file, if one was opened."""
        if self.private_dict_file:
            self.private_dict_file.close()

    def _check_spelling(self, msgid, line, line_num):
        """Spell-check one line and emit *msgid* for each unknown word."""
        original_line = line
        try:
            # Width of the leading whitespace, used to translate token
            # offsets (relative to the stripped line) back to columns.
            initial_space = re.search(r"^[^\S]\s*", line).regs[0][1]
        except (IndexError, AttributeError):
            initial_space = 0
        if line.strip().startswith("#"):
            line = line.strip()[1:]
            starts_with_comment = True
        else:
            starts_with_comment = False
        for word, word_start_at in self.tokenizer(line.strip()):
            word_start_at += initial_space
            lower_cased_word = word.casefold()

            # Skip words from ignore list.
            if word in self.ignore_list or lower_cased_word in self.ignore_list:
                continue

            # Strip starting u' from unicode literals and r' from raw strings.
            if word.startswith(("u'", 'u"', "r'", 'r"')) and len(word) > 2:
                word = word[2:]
                lower_cased_word = lower_cased_word[2:]

            # If it is a known word, then continue.
            try:
                if self.spelling_dict.check(lower_cased_word):
                    # The lower cased version of word passed spell checking
                    continue

                # If we reached this far, it means there was a spelling mistake.
                # Let's retry with the original work because 'unicode' is a
                # spelling mistake but 'Unicode' is not
                if self.spelling_dict.check(word):
                    continue
            except enchant.errors.Error:
                self.add_message(
                    "invalid-characters-in-docstring", line=line_num, args=(word,)
                )
                continue

            # Store word to private dict or raise a message.
            if self.config.spelling_store_unknown_words:
                if lower_cased_word not in self.unknown_words:
                    self.private_dict_file.write("%s\n" % lower_cased_word)
                    self.unknown_words.add(lower_cased_word)
            else:
                # Present up to N suggestions.
                suggestions = self.spelling_dict.suggest(word)
                del suggestions[self.config.max_spelling_suggestions :]

                line_segment = line[word_start_at:]
                match = re.search(r"(\W|^)(%s)(\W|$)" % word, line_segment)
                if match:
                    # Start position of second group in regex.
                    col = match.regs[2][0]
                else:
                    col = line_segment.index(word)

                col += word_start_at

                if starts_with_comment:
                    # Account for the stripped leading "#".
                    col += 1
                indicator = (" " * col) + ("^" * len(word))

                self.add_message(
                    msgid,
                    line=line_num,
                    args=(
                        word,
                        original_line,
                        indicator,
                        "'{}'".format("' or '".join(suggestions)),
                    ),
                )

    def process_tokens(self, tokens):
        """Spell-check all comment tokens in the stream."""
        if not self.initialized:
            return

        # Process tokens and look for comments.
        for (tok_type, token, (start_row, _), _, _) in tokens:
            if tok_type == tokenize.COMMENT:
                if start_row == 1 and token.startswith("#!/"):
                    # Skip shebang lines
                    continue
                if token.startswith("# pylint:"):
                    # Skip pylint enable/disable comments
                    continue
                self._check_spelling("wrong-spelling-in-comment", token, start_row)

    @check_messages("wrong-spelling-in-docstring")
    def visit_module(self, node):
        """Spell-check the module docstring."""
        if not self.initialized:
            return
        self._check_docstring(node)

    @check_messages("wrong-spelling-in-docstring")
    def visit_classdef(self, node):
        """Spell-check a class docstring."""
        if not self.initialized:
            return
        self._check_docstring(node)

    @check_messages("wrong-spelling-in-docstring")
    def visit_functiondef(self, node):
        """Spell-check a function docstring."""
        if not self.initialized:
            return
        self._check_docstring(node)

    visit_asyncfunctiondef = visit_functiondef

    def _check_docstring(self, node):
        """check the node has any spelling errors"""
        docstring = node.doc
        if not docstring:
            return

        start_line = node.lineno + 1

        # Go through lines of docstring
        for idx, line in enumerate(docstring.splitlines()):
            self._check_spelling("wrong-spelling-in-docstring", line, start_line + idx)
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto-register this checker with *linter*."""
    # Called by pylint's plugin machinery when the module is loaded.
    linter.register_checker(SpellingChecker(linter))
|
||||
458
venv/lib/python3.8/site-packages/pylint/checkers/stdlib.py
Normal file
458
venv/lib/python3.8/site-packages/pylint/checkers/stdlib.py
Normal file
@@ -0,0 +1,458 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2014-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Cosmin Poieana <cmin@ropython.org>
|
||||
# Copyright (c) 2014 Vlad Temian <vladtemian@gmail.com>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Cezar <celnazli@bitdefender.com>
|
||||
# Copyright (c) 2015 Chris Rebert <code@rebertia.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Jared Garst <cultofjared@gmail.com>
|
||||
# Copyright (c) 2017 Renat Galimov <renat2017@gmail.com>
|
||||
# Copyright (c) 2017 Martin <MartinBasti@users.noreply.github.com>
|
||||
# Copyright (c) 2017 Christopher Zurcher <zurcher@users.noreply.github.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2018 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 Banjamin Freeman <befreeman@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
|
||||
# Copyright (c) 2019 Julien Palard <julien@palard.fr>
|
||||
# Copyright (c) 2019 laike9m <laike9m@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Robert Schweizer <robert_schweizer@gmx.de>
|
||||
# Copyright (c) 2019 fadedDexofan <fadedDexofan@gmail.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checkers for various standard library functions."""
|
||||
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
from astroid.bases import Instance
|
||||
from astroid.node_classes import Const
|
||||
|
||||
from pylint.checkers import BaseChecker, utils
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
|
||||
# Qualified names and builtins matched by the checks below.
OPEN_FILES = {"open", "file"}
UNITTEST_CASE = "unittest.case"
THREADING_THREAD = "threading.Thread"
COPY_COPY = "copy.copy"
OS_ENVIRON = "os._Environ"
ENV_GETTERS = {"os.getenv"}
SUBPROCESS_POPEN = "subprocess.Popen"
SUBPROCESS_RUN = "subprocess.run"
# Module that actually provides the open() implementation.
OPEN_MODULE = "_io"
|
||||
|
||||
|
||||
def _check_mode_str(mode):
|
||||
# check type
|
||||
if not isinstance(mode, str):
|
||||
return False
|
||||
# check syntax
|
||||
modes = set(mode)
|
||||
_mode = "rwatb+Ux"
|
||||
creating = "x" in modes
|
||||
if modes - set(_mode) or len(mode) > len(modes):
|
||||
return False
|
||||
# check logic
|
||||
reading = "r" in modes
|
||||
writing = "w" in modes
|
||||
appending = "a" in modes
|
||||
text = "t" in modes
|
||||
binary = "b" in modes
|
||||
if "U" in modes:
|
||||
if writing or appending or creating:
|
||||
return False
|
||||
reading = True
|
||||
if text and binary:
|
||||
return False
|
||||
total = reading + writing + appending + creating
|
||||
if total > 1:
|
||||
return False
|
||||
if not (reading or writing or appending or creating):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class StdlibChecker(BaseChecker):
    """Checker for suspicious usages of standard-library functions.

    Covers bad ``open`` modes, redundant unittest asserts, deprecated
    methods, ``threading.Thread`` misuse, ``copy.copy(os.environ)``,
    env-var getter argument types and risky ``subprocess`` usage.
    """

    __implements__ = (IAstroidChecker,)
    name = "stdlib"

    msgs = {
        "W1501": (
            '"%s" is not a valid mode for open.',
            "bad-open-mode",
            "Python supports: r, w, a[, x] modes with b, +, "
            "and U (only with r) options. "
            "See http://docs.python.org/2/library/functions.html#open",
        ),
        "W1502": (
            "Using datetime.time in a boolean context.",
            "boolean-datetime",
            "Using datetime.time in a boolean context can hide "
            "subtle bugs when the time they represent matches "
            "midnight UTC. This behaviour was fixed in Python 3.5. "
            "See http://bugs.python.org/issue13936 for reference.",
            # Only emitted for interpreters older than 3.5.
            {"maxversion": (3, 5)},
        ),
        "W1503": (
            "Redundant use of %s with constant value %r",
            "redundant-unittest-assert",
            "The first argument of assertTrue and assertFalse is "
            "a condition. If a constant is passed as parameter, that "
            "condition will be always true. In this case a warning "
            "should be emitted.",
        ),
        "W1505": (
            "Using deprecated method %s()",
            "deprecated-method",
            "The method is marked as deprecated and will be removed in "
            "a future version of Python. Consider looking for an "
            "alternative in the documentation.",
        ),
        "W1506": (
            "threading.Thread needs the target function",
            "bad-thread-instantiation",
            "The warning is emitted when a threading.Thread class "
            "is instantiated without the target function being passed. "
            "By default, the first parameter is the group param, not the target param. ",
        ),
        "W1507": (
            "Using copy.copy(os.environ). Use os.environ.copy() instead. ",
            "shallow-copy-environ",
            "os.environ is not a dict object but proxy object, so "
            "shallow copy has still effects on original object. "
            "See https://bugs.python.org/issue15373 for reference. ",
        ),
        "E1507": (
            "%s does not support %s type argument",
            "invalid-envvar-value",
            "Env manipulation functions support only string type arguments. "
            "See https://docs.python.org/3/library/os.html#os.getenv. ",
        ),
        "W1508": (
            "%s default type is %s. Expected str or None.",
            "invalid-envvar-default",
            "Env manipulation functions return None or str values. "
            "Supplying anything different as a default may cause bugs. "
            "See https://docs.python.org/3/library/os.html#os.getenv. ",
        ),
        "W1509": (
            "Using preexec_fn keyword which may be unsafe in the presence "
            "of threads",
            "subprocess-popen-preexec-fn",
            "The preexec_fn parameter is not safe to use in the presence "
            "of threads in your application. The child process could "
            "deadlock before exec is called. If you must use it, keep it "
            "trivial! Minimize the number of libraries you call into."
            "https://docs.python.org/3/library/subprocess.html#popen-constructor",
        ),
        "W1510": (
            "Using subprocess.run without explicitly set `check` is not recommended.",
            "subprocess-run-check",
            "The check parameter should always be used with explicitly set "
            "`check` keyword to make clear what the error-handling behavior is."
            "https://docs.python.org/3/library/subprocess.html#subprocess.run",
        ),
    }

    # Deprecated names, keyed by major Python version.  Key 0 maps to a
    # flat set of names deprecated regardless of version; keys 2 and 3
    # map a (major, minor, patch) "deprecated since" tuple to the set of
    # names deprecated from that interpreter version on
    # (see _check_deprecated_method).
    deprecated = {
        0: {
            "cgi.parse_qs",
            "cgi.parse_qsl",
            "ctypes.c_buffer",
            "distutils.command.register.register.check_metadata",
            "distutils.command.sdist.sdist.check_metadata",
            "tkinter.Misc.tk_menuBar",
            "tkinter.Menu.tk_bindForTraversal",
        },
        2: {
            (2, 6, 0): {
                "commands.getstatus",
                "os.popen2",
                "os.popen3",
                "os.popen4",
                "macostools.touched",
            },
            (2, 7, 0): {
                "unittest.case.TestCase.assertEquals",
                "unittest.case.TestCase.assertNotEquals",
                "unittest.case.TestCase.assertAlmostEquals",
                "unittest.case.TestCase.assertNotAlmostEquals",
                "unittest.case.TestCase.assert_",
                "xml.etree.ElementTree.Element.getchildren",
                "xml.etree.ElementTree.Element.getiterator",
                "xml.etree.ElementTree.XMLParser.getiterator",
                "xml.etree.ElementTree.XMLParser.doctype",
            },
        },
        3: {
            (3, 0, 0): {
                "inspect.getargspec",
                "failUnlessEqual",
                "assertEquals",
                "failIfEqual",
                "assertNotEquals",
                "failUnlessAlmostEqual",
                "assertAlmostEquals",
                "failIfAlmostEqual",
                "assertNotAlmostEquals",
                "failUnless",
                "assert_",
                "failUnlessRaises",
                "failIf",
                "assertRaisesRegexp",
                "assertRegexpMatches",
                "assertNotRegexpMatches",
            },
            (3, 1, 0): {
                "base64.encodestring",
                "base64.decodestring",
                "ntpath.splitunc",
            },
            (3, 2, 0): {
                "cgi.escape",
                "configparser.RawConfigParser.readfp",
                "xml.etree.ElementTree.Element.getchildren",
                "xml.etree.ElementTree.Element.getiterator",
                "xml.etree.ElementTree.XMLParser.getiterator",
                "xml.etree.ElementTree.XMLParser.doctype",
            },
            (3, 3, 0): {
                "inspect.getmoduleinfo",
                "logging.warn",
                "logging.Logger.warn",
                "logging.LoggerAdapter.warn",
                "nntplib._NNTPBase.xpath",
                "platform.popen",
            },
            (3, 4, 0): {
                "importlib.find_loader",
                "plistlib.readPlist",
                "plistlib.writePlist",
                "plistlib.readPlistFromBytes",
                "plistlib.writePlistToBytes",
            },
            (3, 4, 4): {"asyncio.tasks.async"},
            (3, 5, 0): {
                "fractions.gcd",
                "inspect.formatargspec",
                "inspect.getcallargs",
                "platform.linux_distribution",
                "platform.dist",
            },
            (3, 6, 0): {"importlib._bootstrap_external.FileLoader.load_module"},
        },
    }

    def _check_bad_thread_instantiation(self, node):
        """Emit bad-thread-instantiation for Thread(...) calls whose only
        positional argument (if any) would bind to ``group``, not ``target``."""
        if not node.kwargs and not node.keywords and len(node.args) <= 1:
            self.add_message("bad-thread-instantiation", node=node)

    def _check_for_preexec_fn_in_popen(self, node):
        """Warn when subprocess.Popen is given the preexec_fn keyword."""
        if node.keywords:
            for keyword in node.keywords:
                if keyword.arg == "preexec_fn":
                    self.add_message("subprocess-popen-preexec-fn", node=node)

    def _check_for_check_kw_in_run(self, node):
        """Warn when subprocess.run is called without an explicit ``check``."""
        kwargs = {keyword.arg for keyword in (node.keywords or ())}
        if "check" not in kwargs:
            self.add_message("subprocess-run-check", node=node)

    def _check_shallow_copy_environ(self, node):
        """Warn on copy.copy(os.environ): the first argument inferring to
        os._Environ means the shallow copy still aliases the real environment."""
        arg = utils.get_argument_from_call(node, position=0)
        for inferred in arg.inferred():
            if inferred.qname() == OS_ENVIRON:
                self.add_message("shallow-copy-environ", node=node)
                break

    @utils.check_messages(
        "bad-open-mode",
        "redundant-unittest-assert",
        "deprecated-method",
        "bad-thread-instantiation",
        "shallow-copy-environ",
        "invalid-envvar-value",
        "invalid-envvar-default",
        "subprocess-popen-preexec-fn",
        "subprocess-run-check",
    )
    def visit_call(self, node):
        """Visit a Call node and dispatch to the specific checks based on
        what the called object infers to."""
        try:
            for inferred in node.func.infer():
                if inferred is astroid.Uninferable:
                    continue
                if inferred.root().name == OPEN_MODULE:
                    # Builtin open/file live in the _io module.
                    if getattr(node.func, "name", None) in OPEN_FILES:
                        self._check_open_mode(node)
                elif inferred.root().name == UNITTEST_CASE:
                    self._check_redundant_assert(node, inferred)
                elif isinstance(inferred, astroid.ClassDef):
                    if inferred.qname() == THREADING_THREAD:
                        self._check_bad_thread_instantiation(node)
                    elif inferred.qname() == SUBPROCESS_POPEN:
                        self._check_for_preexec_fn_in_popen(node)
                elif isinstance(inferred, astroid.FunctionDef):
                    name = inferred.qname()
                    if name == COPY_COPY:
                        self._check_shallow_copy_environ(node)
                    elif name in ENV_GETTERS:
                        self._check_env_function(node, inferred)
                    elif name == SUBPROCESS_RUN:
                        self._check_for_check_kw_in_run(node)
                # Run for every successfully inferred callee.
                self._check_deprecated_method(node, inferred)
        except astroid.InferenceError:
            return

    @utils.check_messages("boolean-datetime")
    def visit_unaryop(self, node):
        """Check ``not <expr>`` for a datetime.time operand."""
        if node.op == "not":
            self._check_datetime(node.operand)

    @utils.check_messages("boolean-datetime")
    def visit_if(self, node):
        """Check an ``if`` test for a datetime.time value."""
        self._check_datetime(node.test)

    @utils.check_messages("boolean-datetime")
    def visit_ifexp(self, node):
        """Check a conditional expression's test for a datetime.time value."""
        self._check_datetime(node.test)

    @utils.check_messages("boolean-datetime")
    def visit_boolop(self, node):
        """Check each operand of and/or for a datetime.time value."""
        for value in node.values:
            self._check_datetime(value)

    def _check_deprecated_method(self, node, inferred):
        """Emit deprecated-method when the called name appears in the
        ``deprecated`` tables for the running interpreter version."""
        py_vers = sys.version_info[0]

        if isinstance(node.func, astroid.Attribute):
            func_name = node.func.attrname
        elif isinstance(node.func, astroid.Name):
            func_name = node.func.name
        else:
            # Not interested in other nodes.
            return

        # Reject nodes which aren't of interest to us.
        acceptable_nodes = (
            astroid.BoundMethod,
            astroid.UnboundMethod,
            astroid.FunctionDef,
        )
        if not isinstance(inferred, acceptable_nodes):
            return

        qname = inferred.qname()
        # Names under key 0 are deprecated for every version.
        if any(name in self.deprecated[0] for name in (qname, func_name)):
            self.add_message("deprecated-method", node=node, args=(func_name,))
        else:
            # Otherwise, match against entries whose "since" version has
            # been reached by the running interpreter.
            for since_vers, func_list in self.deprecated[py_vers].items():
                if since_vers <= sys.version_info and any(
                    name in func_list for name in (qname, func_name)
                ):
                    self.add_message("deprecated-method", node=node, args=(func_name,))
                    break

    def _check_redundant_assert(self, node, infer):
        """Emit redundant-unittest-assert when assertTrue/assertFalse is
        given a constant first argument."""
        if (
            isinstance(infer, astroid.BoundMethod)
            and node.args
            and isinstance(node.args[0], astroid.Const)
            and infer.name in ["assertTrue", "assertFalse"]
        ):
            self.add_message(
                "redundant-unittest-assert",
                args=(infer.name, node.args[0].value),
                node=node,
            )

    def _check_datetime(self, node):
        """Emit boolean-datetime if *node* infers to a datetime.time
        instance used in a boolean context."""
        try:
            inferred = next(node.infer())
        except astroid.InferenceError:
            return
        if isinstance(inferred, Instance) and inferred.qname() == "datetime.time":
            self.add_message("boolean-datetime", node=node)

    def _check_open_mode(self, node):
        """Check that the mode argument of an open or file call is valid."""
        try:
            mode_arg = utils.get_argument_from_call(node, position=1, keyword="mode")
        except utils.NoSuchArgumentError:
            return
        if mode_arg:
            mode_arg = utils.safe_infer(mode_arg)
            if isinstance(mode_arg, astroid.Const) and not _check_mode_str(
                mode_arg.value
            ):
                self.add_message("bad-open-mode", node=node, args=mode_arg.value)

    def _check_env_function(self, node, infer):
        """Validate the name and default arguments of an env getter call
        (e.g. os.getenv) — both must be strings (default may be None)."""
        env_name_kwarg = "key"
        env_value_kwarg = "default"
        if node.keywords:
            kwargs = {keyword.arg: keyword.value for keyword in node.keywords}
        else:
            kwargs = None
        # Locate the env-var name argument, positional or keyword.
        if node.args:
            env_name_arg = node.args[0]
        elif kwargs and env_name_kwarg in kwargs:
            env_name_arg = kwargs[env_name_kwarg]
        else:
            env_name_arg = None

        if env_name_arg:
            self._check_invalid_envvar_value(
                node=node,
                message="invalid-envvar-value",
                call_arg=utils.safe_infer(env_name_arg),
                infer=infer,
                allow_none=False,
            )

        # Locate the default-value argument, positional or keyword.
        if len(node.args) == 2:
            env_value_arg = node.args[1]
        elif kwargs and env_value_kwarg in kwargs:
            env_value_arg = kwargs[env_value_kwarg]
        else:
            env_value_arg = None

        if env_value_arg:
            self._check_invalid_envvar_value(
                node=node,
                infer=infer,
                message="invalid-envvar-default",
                call_arg=utils.safe_infer(env_value_arg),
                allow_none=True,
            )

    def _check_invalid_envvar_value(self, node, infer, message, call_arg, allow_none):
        """Emit *message* when *call_arg* is a constant that is neither a
        str nor (when *allow_none*) None, or is not a constant at all."""
        if call_arg in (astroid.Uninferable, None):
            return

        name = infer.qname()
        if isinstance(call_arg, Const):
            emit = False
            if call_arg.value is None:
                emit = not allow_none
            elif not isinstance(call_arg.value, str):
                emit = True
            if emit:
                self.add_message(message, node=node, args=(name, call_arg.pytype()))
        else:
            # Non-constant argument: cannot be proven to be a string.
            self.add_message(message, node=node, args=(name, call_arg.pytype()))
|
||||
|
||||
|
||||
def register(linter):
    """Required entry point so pylint can auto-register this checker."""
    checker = StdlibChecker(linter)
    linter.register_checker(checker)
|
||||
952
venv/lib/python3.8/site-packages/pylint/checkers/strings.py
Normal file
952
venv/lib/python3.8/site-packages/pylint/checkers/strings.py
Normal file
@@ -0,0 +1,952 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2009 Charles Hebert <charles.hebert@logilab.fr>
|
||||
# Copyright (c) 2010-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
|
||||
# Copyright (c) 2012-2014 Google, Inc.
|
||||
# Copyright (c) 2013-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016, 2018 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2016 Peter Dawyndt <Peter.Dawyndt@UGent.be>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2018, 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2018-2019 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 Alan Chan <achan961117@gmail.com>
|
||||
# Copyright (c) 2018 Yury Gribov <tetra2005@gmail.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2019 Wes Turner <westurner@google.com>
|
||||
# Copyright (c) 2019 Djailla <bastien.vallet@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony <tanant@users.noreply.github.com>
|
||||
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checker for string formatting operations.
|
||||
"""
|
||||
|
||||
import builtins
|
||||
import collections
|
||||
import numbers
|
||||
import re
|
||||
import tokenize
|
||||
import typing
|
||||
from typing import Iterable
|
||||
|
||||
import astroid
|
||||
from astroid.arguments import CallSite
|
||||
from astroid.node_classes import Const
|
||||
|
||||
from pylint.checkers import BaseChecker, BaseTokenChecker, utils
|
||||
from pylint.checkers.utils import check_messages
|
||||
from pylint.interfaces import IAstroidChecker, IRawChecker, ITokenChecker
|
||||
|
||||
# qnames under which an inferred node counts as a string (Py2 and Py3).
_AST_NODE_STR_TYPES = ("__builtin__.unicode", "__builtin__.str", "builtins.str")
# Prefixes for both strings and bytes literals per
# https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
_PREFIXES = {
    "r",
    "u",
    "R",
    "U",
    "f",
    "F",
    "fr",
    "Fr",
    "fR",
    "FR",
    "rf",
    "rF",
    "Rf",
    "RF",
    "b",
    "B",
    "br",
    "Br",
    "bR",
    "BR",
    "rb",
    "rB",
    "Rb",
    "RB",
}
# Match triple-quoted string openers, optionally preceded by any prefix.
SINGLE_QUOTED_REGEX = re.compile("(%s)?'''" % "|".join(_PREFIXES))
DOUBLE_QUOTED_REGEX = re.compile('(%s)?"""' % "|".join(_PREFIXES))
# Match the opening quote delimiter (single or double) of any literal.
QUOTE_DELIMITER_REGEX = re.compile("(%s)?(\"|')" % "|".join(_PREFIXES), re.DOTALL)

# Message definitions shared by the checkers in this module:
# id -> (template, symbolic name, description).
MSGS = {
    "E1300": (
        "Unsupported format character %r (%#02x) at index %d",
        "bad-format-character",
        "Used when an unsupported format character is used in a format string.",
    ),
    "E1301": (
        "Format string ends in middle of conversion specifier",
        "truncated-format-string",
        "Used when a format string terminates before the end of a "
        "conversion specifier.",
    ),
    "E1302": (
        "Mixing named and unnamed conversion specifiers in format string",
        "mixed-format-string",
        "Used when a format string contains both named (e.g. '%(foo)d') "
        "and unnamed (e.g. '%d') conversion specifiers. This is also "
        "used when a named conversion specifier contains * for the "
        "minimum field width and/or precision.",
    ),
    "E1303": (
        "Expected mapping for format string, not %s",
        "format-needs-mapping",
        "Used when a format string that uses named conversion specifiers "
        "is used with an argument that is not a mapping.",
    ),
    "W1300": (
        "Format string dictionary key should be a string, not %s",
        "bad-format-string-key",
        "Used when a format string that uses named conversion specifiers "
        "is used with a dictionary whose keys are not all strings.",
    ),
    "W1301": (
        "Unused key %r in format string dictionary",
        "unused-format-string-key",
        "Used when a format string that uses named conversion specifiers "
        "is used with a dictionary that contains keys not required by the "
        "format string.",
    ),
    "E1304": (
        "Missing key %r in format string dictionary",
        "missing-format-string-key",
        "Used when a format string that uses named conversion specifiers "
        "is used with a dictionary that doesn't contain all the keys "
        "required by the format string.",
    ),
    "E1305": (
        "Too many arguments for format string",
        "too-many-format-args",
        "Used when a format string that uses unnamed conversion "
        "specifiers is given too many arguments.",
    ),
    "E1306": (
        "Not enough arguments for format string",
        "too-few-format-args",
        "Used when a format string that uses unnamed conversion "
        "specifiers is given too few arguments",
    ),
    "E1307": (
        "Argument %r does not match format type %r",
        "bad-string-format-type",
        "Used when a type required by format string "
        "is not suitable for actual argument type",
    ),
    "E1310": (
        "Suspicious argument in %s.%s call",
        "bad-str-strip-call",
        "The argument to a str.{l,r,}strip call contains a duplicate character, ",
    ),
    "W1302": (
        "Invalid format string",
        "bad-format-string",
        "Used when a PEP 3101 format string is invalid.",
    ),
    "W1303": (
        "Missing keyword argument %r for format string",
        "missing-format-argument-key",
        "Used when a PEP 3101 format string that uses named fields "
        "doesn't receive one or more required keywords.",
    ),
    "W1304": (
        "Unused format argument %r",
        "unused-format-string-argument",
        "Used when a PEP 3101 format string that uses named "
        "fields is used with an argument that "
        "is not required by the format string.",
    ),
    "W1305": (
        "Format string contains both automatic field numbering "
        "and manual field specification",
        "format-combined-specification",
        "Used when a PEP 3101 format string contains both automatic "
        "field numbering (e.g. '{}') and manual field "
        "specification (e.g. '{0}').",
    ),
    "W1306": (
        "Missing format attribute %r in format specifier %r",
        "missing-format-attribute",
        "Used when a PEP 3101 format string uses an "
        "attribute specifier ({0.length}), but the argument "
        "passed for formatting doesn't have that attribute.",
    ),
    "W1307": (
        "Using invalid lookup key %r in format specifier %r",
        "invalid-format-index",
        "Used when a PEP 3101 format string uses a lookup specifier "
        "({a[1]}), but the argument passed for formatting "
        "doesn't contain or doesn't have that key as an attribute.",
    ),
    "W1308": (
        "Duplicate string formatting argument %r, consider passing as named argument",
        "duplicate-string-formatting-argument",
        "Used when we detect that a string formatting is "
        "repeating an argument instead of using named string arguments",
    ),
    "W1309": (
        "Using an f-string that does not have any interpolated variables",
        "f-string-without-interpolation",
        "Used when we detect an f-string that does not use any interpolation variables, "
        "in which case it can be either a normal string or a bug in the code.",
    ),
}

# Node classes that, as the RHS of "%", are definitely not a mapping.
OTHER_NODES = (
    astroid.Const,
    astroid.List,
    astroid.Lambda,
    astroid.FunctionDef,
    astroid.ListComp,
    astroid.SetComp,
    astroid.GeneratorExp,
)

# Fully-qualified names of the builtin scalar types, e.g. "builtins.str".
BUILTINS_STR = builtins.__name__ + ".str"
BUILTINS_FLOAT = builtins.__name__ + ".float"
BUILTINS_INT = builtins.__name__ + ".int"
|
||||
|
||||
|
||||
def get_access_path(key, parts):
    """Render the access path described by *parts* rooted at *key*.

    Each item of *parts* is an ``(is_attribute, specifier)`` pair;
    attributes render as ``.spec`` and lookups as ``[spec]``
    (e.g. ``a.b.c[0][1]``).
    """
    rendered = [
        ".{}".format(spec) if is_attr else "[{!r}]".format(spec)
        for is_attr, spec in parts
    ]
    return str(key) + "".join(rendered)
|
||||
|
||||
|
||||
def arg_matches_format_type(arg_type, format_type):
    """Return whether a value of *arg_type* can be used with the
    %-conversion character *format_type*.

    Anything not inferred to a concrete builtin instance is assumed
    to be compatible.
    """
    # %s and %r accept any object.
    if format_type in "sr":
        return True
    # Only inferred instances can be narrowed further.
    if not isinstance(arg_type, astroid.Instance):
        return True
    pytype = arg_type.pytype()
    if pytype == BUILTINS_STR:
        # A str only fits the character conversion.
        return format_type == "c"
    if pytype == BUILTINS_FLOAT:
        # Floats fit the numeric/float conversions only.
        return format_type in "deEfFgGn%"
    if pytype == BUILTINS_INT:
        # Integers allow all types.
        return True
    return False
|
||||
|
||||
|
||||
class StringFormatChecker(BaseChecker):
|
||||
"""Checks string formatting operations to ensure that the format string
|
||||
is valid and the arguments match the format string.
|
||||
"""
|
||||
|
||||
__implements__ = (IAstroidChecker,)
|
||||
name = "string"
|
||||
msgs = MSGS
|
||||
|
||||
# pylint: disable=too-many-branches
|
||||
@check_messages(
|
||||
"bad-format-character",
|
||||
"truncated-format-string",
|
||||
"mixed-format-string",
|
||||
"bad-format-string-key",
|
||||
"missing-format-string-key",
|
||||
"unused-format-string-key",
|
||||
"bad-string-format-type",
|
||||
"format-needs-mapping",
|
||||
"too-many-format-args",
|
||||
"too-few-format-args",
|
||||
"bad-string-format-type",
|
||||
)
|
||||
def visit_binop(self, node):
|
||||
if node.op != "%":
|
||||
return
|
||||
left = node.left
|
||||
args = node.right
|
||||
|
||||
if not (isinstance(left, astroid.Const) and isinstance(left.value, str)):
|
||||
return
|
||||
format_string = left.value
|
||||
try:
|
||||
(
|
||||
required_keys,
|
||||
required_num_args,
|
||||
required_key_types,
|
||||
required_arg_types,
|
||||
) = utils.parse_format_string(format_string)
|
||||
except utils.UnsupportedFormatCharacter as exc:
|
||||
formatted = format_string[exc.index]
|
||||
self.add_message(
|
||||
"bad-format-character",
|
||||
node=node,
|
||||
args=(formatted, ord(formatted), exc.index),
|
||||
)
|
||||
return
|
||||
except utils.IncompleteFormatString:
|
||||
self.add_message("truncated-format-string", node=node)
|
||||
return
|
||||
if required_keys and required_num_args:
|
||||
# The format string uses both named and unnamed format
|
||||
# specifiers.
|
||||
self.add_message("mixed-format-string", node=node)
|
||||
elif required_keys:
|
||||
# The format string uses only named format specifiers.
|
||||
# Check that the RHS of the % operator is a mapping object
|
||||
# that contains precisely the set of keys required by the
|
||||
# format string.
|
||||
if isinstance(args, astroid.Dict):
|
||||
keys = set()
|
||||
unknown_keys = False
|
||||
for k, _ in args.items:
|
||||
if isinstance(k, astroid.Const):
|
||||
key = k.value
|
||||
if isinstance(key, str):
|
||||
keys.add(key)
|
||||
else:
|
||||
self.add_message(
|
||||
"bad-format-string-key", node=node, args=key
|
||||
)
|
||||
else:
|
||||
# One of the keys was something other than a
|
||||
# constant. Since we can't tell what it is,
|
||||
# suppress checks for missing keys in the
|
||||
# dictionary.
|
||||
unknown_keys = True
|
||||
if not unknown_keys:
|
||||
for key in required_keys:
|
||||
if key not in keys:
|
||||
self.add_message(
|
||||
"missing-format-string-key", node=node, args=key
|
||||
)
|
||||
for key in keys:
|
||||
if key not in required_keys:
|
||||
self.add_message(
|
||||
"unused-format-string-key", node=node, args=key
|
||||
)
|
||||
for key, arg in args.items:
|
||||
if not isinstance(key, astroid.Const):
|
||||
continue
|
||||
format_type = required_key_types.get(key.value, None)
|
||||
arg_type = utils.safe_infer(arg)
|
||||
if (
|
||||
format_type is not None
|
||||
and arg_type not in (None, astroid.Uninferable)
|
||||
and not arg_matches_format_type(arg_type, format_type)
|
||||
):
|
||||
self.add_message(
|
||||
"bad-string-format-type",
|
||||
node=node,
|
||||
args=(arg_type.pytype(), format_type),
|
||||
)
|
||||
elif isinstance(args, (OTHER_NODES, astroid.Tuple)):
|
||||
type_name = type(args).__name__
|
||||
self.add_message("format-needs-mapping", node=node, args=type_name)
|
||||
# else:
|
||||
# The RHS of the format specifier is a name or
|
||||
# expression. It may be a mapping object, so
|
||||
# there's nothing we can check.
|
||||
else:
|
||||
# The format string uses only unnamed format specifiers.
|
||||
# Check that the number of arguments passed to the RHS of
|
||||
# the % operator matches the number required by the format
|
||||
# string.
|
||||
args_elts = ()
|
||||
if isinstance(args, astroid.Tuple):
|
||||
rhs_tuple = utils.safe_infer(args)
|
||||
num_args = None
|
||||
if hasattr(rhs_tuple, "elts"):
|
||||
args_elts = rhs_tuple.elts
|
||||
num_args = len(args_elts)
|
||||
elif isinstance(args, (OTHER_NODES, (astroid.Dict, astroid.DictComp))):
|
||||
args_elts = [args]
|
||||
num_args = 1
|
||||
else:
|
||||
# The RHS of the format specifier is a name or
|
||||
# expression. It could be a tuple of unknown size, so
|
||||
# there's nothing we can check.
|
||||
num_args = None
|
||||
if num_args is not None:
|
||||
if num_args > required_num_args:
|
||||
self.add_message("too-many-format-args", node=node)
|
||||
elif num_args < required_num_args:
|
||||
self.add_message("too-few-format-args", node=node)
|
||||
for arg, format_type in zip(args_elts, required_arg_types):
|
||||
if not arg:
|
||||
continue
|
||||
arg_type = utils.safe_infer(arg)
|
||||
if arg_type not in (
|
||||
None,
|
||||
astroid.Uninferable,
|
||||
) and not arg_matches_format_type(arg_type, format_type):
|
||||
self.add_message(
|
||||
"bad-string-format-type",
|
||||
node=node,
|
||||
args=(arg_type.pytype(), format_type),
|
||||
)
|
||||
|
||||
@check_messages("f-string-without-interpolation")
|
||||
def visit_joinedstr(self, node):
|
||||
if isinstance(node.parent, astroid.FormattedValue):
|
||||
return
|
||||
for value in node.values:
|
||||
if isinstance(value, astroid.FormattedValue):
|
||||
return
|
||||
self.add_message("f-string-without-interpolation", node=node)
|
||||
|
||||
@check_messages(*MSGS)
|
||||
def visit_call(self, node):
|
||||
func = utils.safe_infer(node.func)
|
||||
if (
|
||||
isinstance(func, astroid.BoundMethod)
|
||||
and isinstance(func.bound, astroid.Instance)
|
||||
and func.bound.name in ("str", "unicode", "bytes")
|
||||
):
|
||||
if func.name in ("strip", "lstrip", "rstrip") and node.args:
|
||||
arg = utils.safe_infer(node.args[0])
|
||||
if not isinstance(arg, astroid.Const) or not isinstance(arg.value, str):
|
||||
return
|
||||
if len(arg.value) != len(set(arg.value)):
|
||||
self.add_message(
|
||||
"bad-str-strip-call",
|
||||
node=node,
|
||||
args=(func.bound.name, func.name),
|
||||
)
|
||||
elif func.name == "format":
|
||||
self._check_new_format(node, func)
|
||||
|
||||
def _detect_vacuous_formatting(self, node, positional_arguments):
|
||||
counter = collections.Counter(
|
||||
arg.name for arg in positional_arguments if isinstance(arg, astroid.Name)
|
||||
)
|
||||
for name, count in counter.items():
|
||||
if count == 1:
|
||||
continue
|
||||
self.add_message(
|
||||
"duplicate-string-formatting-argument", node=node, args=(name,)
|
||||
)
|
||||
|
||||
def _check_new_format(self, node, func):
|
||||
"""Check the new string formatting. """
|
||||
# Skip ormat nodes which don't have an explicit string on the
|
||||
# left side of the format operation.
|
||||
# We do this because our inference engine can't properly handle
|
||||
# redefinitions of the original string.
|
||||
# Note that there may not be any left side at all, if the format method
|
||||
# has been assigned to another variable. See issue 351. For example:
|
||||
#
|
||||
# fmt = 'some string {}'.format
|
||||
# fmt('arg')
|
||||
if isinstance(node.func, astroid.Attribute) and not isinstance(
|
||||
node.func.expr, astroid.Const
|
||||
):
|
||||
return
|
||||
if node.starargs or node.kwargs:
|
||||
return
|
||||
try:
|
||||
strnode = next(func.bound.infer())
|
||||
except astroid.InferenceError:
|
||||
return
|
||||
if not (isinstance(strnode, astroid.Const) and isinstance(strnode.value, str)):
|
||||
return
|
||||
try:
|
||||
call_site = CallSite.from_call(node)
|
||||
except astroid.InferenceError:
|
||||
return
|
||||
|
||||
try:
|
||||
fields, num_args, manual_pos = utils.parse_format_method_string(
|
||||
strnode.value
|
||||
)
|
||||
except utils.IncompleteFormatString:
|
||||
self.add_message("bad-format-string", node=node)
|
||||
return
|
||||
|
||||
positional_arguments = call_site.positional_arguments
|
||||
named_arguments = call_site.keyword_arguments
|
||||
named_fields = {field[0] for field in fields if isinstance(field[0], str)}
|
||||
if num_args and manual_pos:
|
||||
self.add_message("format-combined-specification", node=node)
|
||||
return
|
||||
|
||||
check_args = False
|
||||
# Consider "{[0]} {[1]}" as num_args.
|
||||
num_args += sum(1 for field in named_fields if field == "")
|
||||
if named_fields:
|
||||
for field in named_fields:
|
||||
if field and field not in named_arguments:
|
||||
self.add_message(
|
||||
"missing-format-argument-key", node=node, args=(field,)
|
||||
)
|
||||
for field in named_arguments:
|
||||
if field not in named_fields:
|
||||
self.add_message(
|
||||
"unused-format-string-argument", node=node, args=(field,)
|
||||
)
|
||||
# num_args can be 0 if manual_pos is not.
|
||||
num_args = num_args or manual_pos
|
||||
if positional_arguments or num_args:
|
||||
empty = any(True for field in named_fields if field == "")
|
||||
if named_arguments or empty:
|
||||
# Verify the required number of positional arguments
|
||||
# only if the .format got at least one keyword argument.
|
||||
# This means that the format strings accepts both
|
||||
# positional and named fields and we should warn
|
||||
# when one of the them is missing or is extra.
|
||||
check_args = True
|
||||
else:
|
||||
check_args = True
|
||||
if check_args:
|
||||
# num_args can be 0 if manual_pos is not.
|
||||
num_args = num_args or manual_pos
|
||||
if len(positional_arguments) > num_args:
|
||||
self.add_message("too-many-format-args", node=node)
|
||||
elif len(positional_arguments) < num_args:
|
||||
self.add_message("too-few-format-args", node=node)
|
||||
|
||||
self._detect_vacuous_formatting(node, positional_arguments)
|
||||
self._check_new_format_specifiers(node, fields, named_arguments)
|
||||
|
||||
    def _check_new_format_specifiers(self, node, fields, named):
        """
        Check attribute and index access in the format
        string ("{0.a}" and "{0[a]}").

        For every field of the parsed format string, resolve the argument it
        refers to (positional or keyword), then walk each attribute/index
        specifier against the inferred object, emitting
        ``missing-format-attribute`` or ``invalid-format-index`` when the
        access cannot succeed.
        """
        for key, specifiers in fields:
            # Obtain the argument. If it can't be obtained
            # or inferred, skip this check.
            if key == "":
                # {[0]} will have an unnamed argument, defaulting
                # to 0. It will not be present in `named`, so use the value
                # 0 for it.
                key = 0
            if isinstance(key, numbers.Number):
                try:
                    argname = utils.get_argument_from_call(node, key)
                except utils.NoSuchArgumentError:
                    continue
            else:
                if key not in named:
                    continue
                argname = named[key]
            if argname in (astroid.Uninferable, None):
                continue
            try:
                argument = utils.safe_infer(argname)
            except astroid.InferenceError:
                continue
            if not specifiers or not argument:
                # No need to check this key if it doesn't
                # use attribute / item access
                continue
            if argument.parent and isinstance(argument.parent, astroid.Arguments):
                # Ignore any object coming from an argument,
                # because we can't infer its value properly.
                continue
            # Walk the chain of accesses ("{0.a[1].b}"), re-inferring the
            # intermediate object after each step.
            previous = argument
            parsed = []
            for is_attribute, specifier in specifiers:
                if previous is astroid.Uninferable:
                    break
                parsed.append((is_attribute, specifier))
                if is_attribute:
                    try:
                        previous = previous.getattr(specifier)[0]
                    except astroid.NotFoundError:
                        if (
                            hasattr(previous, "has_dynamic_getattr")
                            and previous.has_dynamic_getattr()
                        ):
                            # Don't warn if the object has a custom __getattr__
                            break
                        path = get_access_path(key, parsed)
                        self.add_message(
                            "missing-format-attribute",
                            args=(specifier, path),
                            node=node,
                        )
                        break
                else:
                    warn_error = False
                    if hasattr(previous, "getitem"):
                        try:
                            previous = previous.getitem(astroid.Const(specifier))
                        except (
                            astroid.AstroidIndexError,
                            astroid.AstroidTypeError,
                            astroid.AttributeInferenceError,
                        ):
                            warn_error = True
                        except astroid.InferenceError:
                            break
                        if previous is astroid.Uninferable:
                            break
                    else:
                        try:
                            # Lookup __getitem__ in the current node,
                            # but skip further checks, because we can't
                            # retrieve the looked object
                            previous.getattr("__getitem__")
                            break
                        except astroid.NotFoundError:
                            warn_error = True
                    if warn_error:
                        path = get_access_path(key, parsed)
                        self.add_message(
                            "invalid-format-index", args=(specifier, path), node=node
                        )
                        break

                try:
                    previous = next(previous.infer())
                except astroid.InferenceError:
                    # can't check further if we can't infer it
                    break
||||
|
||||
|
||||
class StringConstantChecker(BaseTokenChecker):
    """Check string literals"""

    __implements__ = (IAstroidChecker, ITokenChecker, IRawChecker)
    name = "string"
    msgs = {
        "W1401": (
            "Anomalous backslash in string: '%s'. "
            "String constant might be missing an r prefix.",
            "anomalous-backslash-in-string",
            "Used when a backslash is in a literal string but not as an escape.",
        ),
        "W1402": (
            "Anomalous Unicode escape in byte string: '%s'. "
            "String constant might be missing an r or u prefix.",
            "anomalous-unicode-escape-in-string",
            "Used when an escape like \\u is encountered in a byte "
            "string where it has no effect.",
        ),
        "W1404": (
            "Implicit string concatenation found in %s",
            "implicit-str-concat",
            "String literals are implicitly concatenated in a "
            "literal iterable definition : "
            "maybe a comma is missing ?",
            {"old_names": [("W1403", "implicit-str-concat-in-sequence")]},
        ),
        "W1405": (
            "Quote delimiter %s is inconsistent with the rest of the file",
            "inconsistent-quotes",
            "Quote delimiters are not used consistently throughout a module "
            "(with allowances made for avoiding unnecessary escaping).",
        ),
    }
    options = (
        (
            "check-str-concat-over-line-jumps",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "This flag controls whether the "
                "implicit-str-concat should generate a warning "
                "on implicit string concatenation in sequences defined over "
                "several lines.",
            },
        ),
        (
            "check-quote-consistency",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "This flag controls whether inconsistent-quotes generates a "
                "warning when the character used as a quote delimiter is used "
                "inconsistently within a module.",
            },
        ),
    )

    # Characters that have a special meaning after a backslash in either
    # Unicode or byte strings.
    ESCAPE_CHARACTERS = "abfnrtvx\n\r\t\\'\"01234567"

    # Characters that have a special meaning after a backslash but only in
    # Unicode strings.
    UNICODE_ESCAPE_CHARACTERS = "uUN"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.string_tokens = {}  # token position -> (token value, next token)

    def process_module(self, module):
        # Remember whether the module opts into unicode_literals.
        # NOTE(review): _unicode_literals is only recorded here; no use of it
        # is visible in this chunk — confirm against the rest of the checker.
        self._unicode_literals = "unicode_literals" in module.future_imports

    def process_tokens(self, tokens):
        """Record every STRING token (keyed by its position) and check escapes."""
        encoding = "ascii"
        for i, (tok_type, token, start, _, line) in enumerate(tokens):
            if tok_type == tokenize.ENCODING:
                # this is always the first token processed
                encoding = token
            elif tok_type == tokenize.STRING:
                # 'token' is the whole un-parsed token; we can look at the start
                # of it to see whether it's a raw or unicode string etc.
                self.process_string_token(token, start[0])
                # We figure the next token, ignoring comments & newlines:
                j = i + 1
                while j < len(tokens) and tokens[j].type in (
                    tokenize.NEWLINE,
                    tokenize.NL,
                    tokenize.COMMENT,
                ):
                    j += 1
                next_token = tokens[j] if j < len(tokens) else None
                if encoding != "ascii":
                    # We convert `tokenize` character count into a byte count,
                    # to match with astroid `.col_offset`
                    start = (start[0], len(line[: start[1]].encode(encoding)))
                self.string_tokens[start] = (str_eval(token), next_token)

        if self.config.check_quote_consistency:
            self.check_for_consistent_string_delimiters(tokens)

    @check_messages("implicit-str-concat")
    def visit_list(self, node):
        self.check_for_concatenated_strings(node.elts, "list")

    @check_messages("implicit-str-concat")
    def visit_set(self, node):
        self.check_for_concatenated_strings(node.elts, "set")

    @check_messages("implicit-str-concat")
    def visit_tuple(self, node):
        self.check_for_concatenated_strings(node.elts, "tuple")

    def visit_assign(self, node):
        # A bare string assignment can also hide an implicit concatenation.
        if isinstance(node.value, astroid.Const) and isinstance(node.value.value, str):
            self.check_for_concatenated_strings([node.value], "assignment")

    def check_for_consistent_string_delimiters(
        self, tokens: Iterable[tokenize.TokenInfo]
    ) -> None:
        """Adds a message for each string using inconsistent quote delimiters.

        Quote delimiters are used inconsistently if " and ' are mixed in a module's
        shortstrings without having done so to avoid escaping an internal quote
        character.

        Args:
          tokens: The tokens to be checked against for consistent usage.
        """
        string_delimiters = collections.Counter()  # type: typing.Counter[str]

        # First, figure out which quote character predominates in the module
        for tok_type, token, _, _, _ in tokens:
            if tok_type == tokenize.STRING and _is_quote_delimiter_chosen_freely(token):
                string_delimiters[_get_quote_delimiter(token)] += 1

        if len(string_delimiters) > 1:
            # Ties are broken arbitrarily
            most_common_delimiter = string_delimiters.most_common(1)[0][0]
            for tok_type, token, start, _, _ in tokens:
                if tok_type != tokenize.STRING:
                    continue
                quote_delimiter = _get_quote_delimiter(token)
                if (
                    _is_quote_delimiter_chosen_freely(token)
                    and quote_delimiter != most_common_delimiter
                ):
                    self.add_message(
                        "inconsistent-quotes", line=start[0], args=(quote_delimiter,)
                    )

    def check_for_concatenated_strings(self, elements, iterable_type):
        """Warn when a string Const was built from adjacent string tokens."""
        for elt in elements:
            if not (isinstance(elt, Const) and elt.pytype() in _AST_NODE_STR_TYPES):
                continue
            if elt.col_offset < 0:
                # This can happen in case of escaped newlines
                continue
            if (elt.lineno, elt.col_offset) not in self.string_tokens:
                # This may happen with Latin1 encoding
                # cf. https://github.com/PyCQA/pylint/issues/2610
                continue
            matching_token, next_token = self.string_tokens[
                (elt.lineno, elt.col_offset)
            ]
            # We detect string concatenation: the AST Const is the
            # combination of 2 string tokens
            if matching_token != elt.value and next_token is not None:
                if next_token.type == tokenize.STRING and (
                    next_token.start[0] == elt.lineno
                    or self.config.check_str_concat_over_line_jumps
                ):
                    self.add_message(
                        "implicit-str-concat", line=elt.lineno, args=(iterable_type,)
                    )

    def process_string_token(self, token, start_row):
        """Split a raw STRING token into prefix/body and check its escapes."""
        quote_char = None
        index = None
        # Find the opening quote; everything before it is the prefix.
        for index, char in enumerate(token):
            if char in "'\"":
                quote_char = char
                break
        if quote_char is None:
            return

        prefix = token[:index].lower()  # markers like u, b, r.
        after_prefix = token[index:]
        if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char:
            string_body = after_prefix[3:-3]
        else:
            string_body = after_prefix[1:-1]  # Chop off quotes
        # No special checks on raw strings at the moment.
        if "r" not in prefix:
            self.process_non_raw_string_token(prefix, string_body, start_row)

    def process_non_raw_string_token(self, prefix, string_body, start_row):
        """check for bad escapes in a non-raw string.

        prefix: lowercase string of eg 'ur' string prefix markers.
        string_body: the un-parsed body of the string, not including the quote
        marks.
        start_row: integer line number in the source.
        """
        # Walk through the string; if we see a backslash then escape the next
        # character, and skip over it. If we see a non-escaped character,
        # alert, and continue.
        #
        # Accept a backslash when it escapes a backslash, or a quote, or
        # end-of-line, or one of the letters that introduce a special escape
        # sequence <http://docs.python.org/reference/lexical_analysis.html>
        #
        index = 0
        while True:
            index = string_body.find("\\", index)
            if index == -1:
                break
            # There must be a next character; having a backslash at the end
            # of the string would be a SyntaxError.
            next_char = string_body[index + 1]
            match = string_body[index : index + 2]
            if next_char in self.UNICODE_ESCAPE_CHARACTERS:
                if "u" in prefix:
                    pass
                elif "b" not in prefix:
                    pass  # unicode by default
                else:
                    self.add_message(
                        "anomalous-unicode-escape-in-string",
                        line=start_row,
                        args=(match,),
                        col_offset=index,
                    )
            elif next_char not in self.ESCAPE_CHARACTERS:
                self.add_message(
                    "anomalous-backslash-in-string",
                    line=start_row,
                    args=(match,),
                    col_offset=index,
                )
            # Whether it was a valid escape or not, backslash followed by
            # another character can always be consumed whole: the second
            # character can never be the start of a new backslash escape.
            index += 2
|
||||
|
||||
|
||||
def register(linter):
    """Auto-registration hook: attach both string checkers to *linter*."""
    for checker in (StringFormatChecker(linter), StringConstantChecker(linter)):
        linter.register_checker(checker)
|
||||
|
||||
|
||||
def str_eval(token):
    """
    Mostly replicate `ast.literal_eval(token)` manually to avoid any performance hit.
    This supports f-strings, contrary to `ast.literal_eval`.
    We have to support all string literal notations:
    https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals

    Returns the (still-escaped) body of the literal, without prefix or quotes.
    """
    # Strip every legal prefix character (any mix and case of r, u, f, b) up
    # to the opening quote.  The previous version only handled "fr"/"rf" and a
    # single leading r/u/f, so byte-string tokens such as b'x', rb'x' or
    # BR'x' were sliced incorrectly (the prefix leaked into the result).
    while token and token[0] in "rRuUfFbB":
        token = token[1:]
    if token[0:3] in ('"""', "'''"):
        return token[3:-3]
    return token[1:-1]
|
||||
|
||||
|
||||
def _is_long_string(string_token: str) -> bool:
    """Is this string token a "longstring" (is it triple-quoted)?

    Long strings are triple-quoted as defined in
    https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals

    Only the opening quotes are examined; the token is assumed to be a
    well-formed string literal, so close-quotes are not verified.

    Args:
      string_token: The string token to be parsed.

    Returns:
      A boolean representing whether or not this token matches a longstring
      regex.
    """
    matched = SINGLE_QUOTED_REGEX.match(string_token) or DOUBLE_QUOTED_REGEX.match(
        string_token
    )
    return matched is not None
|
||||
|
||||
|
||||
def _get_quote_delimiter(string_token: str) -> str:
    """Return the quote character that delimits *string_token*.

    Little validation is done beyond matching the delimiter regex.

    Args:
      string_token: The token to be parsed.

    Returns:
      A one-character string holding the first quote delimiter found.

    Raises:
      ValueError: No quote delimiter characters are present.
    """
    match = QUOTE_DELIMITER_REGEX.match(string_token)
    if match:
        return match.group(2)
    raise ValueError("string token %s is not a well-formed string" % string_token)
|
||||
|
||||
|
||||
def _is_quote_delimiter_chosen_freely(string_token: str) -> bool:
    """Was there a non-awkward option for the quote delimiter?

    Args:
      string_token: The quoted string whose delimiters are to be checked.

    Returns:
      Whether there was a choice in this token's quote character that would
      not have involved backslash-escaping an interior quote character. Long
      strings are excepted from this analysis under the assumption that their
      quote characters are set by policy.
    """
    used = _get_quote_delimiter(string_token)
    alternative = "'" if used == '"' else '"'
    # The choice was "free" only for short strings whose body does not
    # contain the other quote character.
    return bool(
        used
        and not _is_long_string(string_token)
        and alternative not in str_eval(string_token)
    )
|
||||
1862
venv/lib/python3.8/site-packages/pylint/checkers/typecheck.py
Normal file
1862
venv/lib/python3.8/site-packages/pylint/checkers/typecheck.py
Normal file
File diff suppressed because it is too large
Load Diff
1295
venv/lib/python3.8/site-packages/pylint/checkers/utils.py
Normal file
1295
venv/lib/python3.8/site-packages/pylint/checkers/utils.py
Normal file
File diff suppressed because it is too large
Load Diff
2076
venv/lib/python3.8/site-packages/pylint/checkers/variables.py
Normal file
2076
venv/lib/python3.8/site-packages/pylint/checkers/variables.py
Normal file
File diff suppressed because it is too large
Load Diff
968
venv/lib/python3.8/site-packages/pylint/config.py
Normal file
968
venv/lib/python3.8/site-packages/pylint/config.py
Normal file
@@ -0,0 +1,968 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006-2010, 2012-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2008 pyves@crater.logilab.fr <pyves@crater.logilab.fr>
|
||||
# Copyright (c) 2010 Julien Jehannet <julien.jehannet@logilab.fr>
|
||||
# Copyright (c) 2013 Google, Inc.
|
||||
# Copyright (c) 2013 John McGehee <jmcgehee@altera.com>
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Aru Sahni <arusahni@gmail.com>
|
||||
# Copyright (c) 2015 John Kirkham <jakirkham@gmail.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Erik <erik.eriksson@yahoo.com>
|
||||
# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
|
||||
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2017-2019 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2017 ahirnish <ahirnish@gmail.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2018, 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2018 Jim Robertson <jrobertson98atx@gmail.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Gary Tyler McLeod <mail@garytyler.com>
|
||||
# Copyright (c) 2018 Konstantin <Github@pheanex.de>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2019 Janne Rönkkö <jannero@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""utilities for Pylint configuration :
|
||||
|
||||
* pylintrc
|
||||
* pylint.d (PYLINTHOME)
|
||||
"""
|
||||
import collections
|
||||
import configparser
|
||||
import contextlib
|
||||
import copy
|
||||
import functools
|
||||
import optparse
|
||||
import os
|
||||
import pickle
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
from typing import Any, Dict, Tuple
|
||||
|
||||
import toml
|
||||
|
||||
from pylint import utils
|
||||
|
||||
# Location of persistent run data: $PYLINTHOME when set, otherwise
# ~/.pylint.d, falling back to ./.pylint.d when "~" cannot be expanded.
USER_HOME = os.path.expanduser("~")
if "PYLINTHOME" in os.environ:
    PYLINT_HOME = os.environ["PYLINTHOME"]
    if USER_HOME == "~":
        # expanduser failed; derive a pseudo-home from the explicit setting.
        USER_HOME = os.path.dirname(PYLINT_HOME)
elif USER_HOME == "~":
    PYLINT_HOME = ".pylint.d"
else:
    PYLINT_HOME = os.path.join(USER_HOME, ".pylint.d")
|
||||
|
||||
|
||||
def _get_pdata_path(base_name, recurs):
    """Return the stats-file path for *base_name* under PYLINT_HOME."""
    # Flatten path separators so the whole name fits in one file name.
    flattened = base_name.replace(os.sep, "_")
    return os.path.join(PYLINT_HOME, "%s%s%s" % (flattened, recurs, ".stats"))
|
||||
|
||||
|
||||
def load_results(base):
    """Return the pickled stats saved for *base*, or {} on any failure."""
    stats_file = _get_pdata_path(base, 1)
    # Any problem (missing file, corrupt pickle, ...) means "no prior data".
    try:
        with open(stats_file, "rb") as stream:
            data = pickle.load(stream)
    except Exception:  # pylint: disable=broad-except
        data = {}
    return data
|
||||
|
||||
|
||||
def save_results(results, base):
    """Pickle *results* for *base* under PYLINT_HOME.

    Failures are reported on stderr but never raised to the caller.
    """
    if not os.path.exists(PYLINT_HOME):
        try:
            os.mkdir(PYLINT_HOME)
        except OSError:
            print("Unable to create directory %s" % PYLINT_HOME, file=sys.stderr)
    stats_file = _get_pdata_path(base, 1)
    try:
        with open(stats_file, "wb") as stream:
            pickle.dump(results, stream)
    except OSError as ex:
        print("Unable to create file %s: %s" % (stats_file, ex), file=sys.stderr)
|
||||
|
||||
|
||||
def _toml_has_config(path):
    """Return True when the TOML file at *path* declares a [tool.pylint] table."""
    with open(path) as toml_handle:
        parsed = toml.load(toml_handle)
    # EAFP: the nested lookup raises KeyError when either table is absent.
    try:
        parsed["tool"]["pylint"]
    except KeyError:
        return False
    return True
|
||||
|
||||
|
||||
def _cfg_has_config(path):
|
||||
parser = configparser.ConfigParser()
|
||||
parser.read(path)
|
||||
return any(section.startswith("pylint.") for section in parser.sections())
|
||||
|
||||
|
||||
def find_default_config_files():
    """Find all possible config files.

    Yields absolute-or-raw paths, in decreasing priority order:
    current directory, enclosing package roots, $PYLINTRC or the user's
    home/.config, then /etc/pylintrc.
    """
    rc_names = ("pylintrc", ".pylintrc")
    config_names = rc_names + ("pyproject.toml", "setup.cfg")
    # 1. Files in the current working directory.  toml/cfg files only count
    #    when they actually contain a pylint section.
    for config_name in config_names:
        if os.path.isfile(config_name):
            if config_name.endswith(".toml") and not _toml_has_config(config_name):
                continue
            if config_name.endswith(".cfg") and not _cfg_has_config(config_name):
                continue

            yield os.path.abspath(config_name)

    # 2. When run from inside a package, walk up through the package parents
    #    (directories containing __init__.py) looking for rc files.
    if os.path.isfile("__init__.py"):
        curdir = os.path.abspath(os.getcwd())
        while os.path.isfile(os.path.join(curdir, "__init__.py")):
            curdir = os.path.abspath(os.path.join(curdir, ".."))
            for rc_name in rc_names:
                rc_path = os.path.join(curdir, rc_name)
                if os.path.isfile(rc_path):
                    yield rc_path

    # 3. $PYLINTRC when it points at an existing file; otherwise the user's
    #    home locations (skipped for root and when "~" cannot be expanded).
    if "PYLINTRC" in os.environ and os.path.exists(os.environ["PYLINTRC"]):
        if os.path.isfile(os.environ["PYLINTRC"]):
            yield os.environ["PYLINTRC"]
    else:
        user_home = os.path.expanduser("~")
        if user_home not in ("~", "/root"):
            home_rc = os.path.join(user_home, ".pylintrc")
            if os.path.isfile(home_rc):
                yield home_rc
            home_rc = os.path.join(user_home, ".config", "pylintrc")
            if os.path.isfile(home_rc):
                yield home_rc

    # 4. System-wide fallback.
    if os.path.isfile("/etc/pylintrc"):
        yield "/etc/pylintrc"
|
||||
|
||||
|
||||
def find_pylintrc():
    """Return the path of the first rc file found, or None when there is none."""
    return next(
        (
            config_file
            for config_file in find_default_config_files()
            if config_file.endswith("pylintrc")
        ),
        None,
    )
|
||||
|
||||
|
||||
# Resolved once at import time; None when no rc file exists.
PYLINT = find_pylintrc() if False else None  # placeholder -- see below
PYLINTRC = find_pylintrc()
del PYLINT

# Help text describing the environment variables pylint honours.  The
# template contains no %-placeholders, so the "% globals()" is a no-op kept
# for backward compatibility.
ENV_HELP = (
    """
The following environment variables are used:
* PYLINTHOME
Path to the directory where persistent data for the run will be stored. If
not found, it defaults to ~/.pylint.d/ or .pylint.d (in the current working
directory).
* PYLINTRC
Path to the configuration file. See the documentation for the method used
to search for configuration file.
"""
    % globals()  # type: ignore
)
|
||||
|
||||
|
||||
class UnsupportedAction(Exception):
    """Raised by ``set_option`` when it doesn't know how to handle an action."""
|
||||
|
||||
|
||||
def _multiple_choice_validator(choices, name, value):
    """Split *value* as CSV and check every item is one of *choices*."""
    parsed = utils._check_csv(value)
    # Reject on the first (in order) value outside the allowed set.
    invalid = [item for item in parsed if item not in choices]
    if invalid:
        msg = "option %s: invalid value: %r, should be in %s"
        raise optparse.OptionValueError(msg % (name, invalid[0], choices))
    return parsed
|
||||
|
||||
|
||||
def _choice_validator(choices, name, value):
|
||||
if value not in choices:
|
||||
msg = "option %s: invalid value: %r, should be in %s"
|
||||
raise optparse.OptionValueError(msg % (name, value, choices))
|
||||
return value
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
def _csv_validator(_, name, value):
    """Parse *value* as a comma-separated list (delegates to utils._check_csv)."""
    return utils._check_csv(value)
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def _regexp_validator(_, name, value):
|
||||
if hasattr(value, "pattern"):
|
||||
return value
|
||||
return re.compile(value)
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
def _regexp_csv_validator(_, name, value):
    """Parse *value* as CSV and compile each entry into a regular expression."""
    return [_regexp_validator(_, name, val) for val in _csv_validator(_, name, value)]
|
||||
|
||||
|
||||
def _yn_validator(opt, _, value):
|
||||
if isinstance(value, int):
|
||||
return bool(value)
|
||||
if value in ("y", "yes"):
|
||||
return True
|
||||
if value in ("n", "no"):
|
||||
return False
|
||||
msg = "option %s: invalid yn value %r, should be in (y, yes, n, no)"
|
||||
raise optparse.OptionValueError(msg % (opt, value))
|
||||
|
||||
|
||||
def _non_empty_string_validator(opt, _, value):
    """Unquote *value*, rejecting empty strings."""
    if value:
        return utils._unquote(value)
    msg = "indent string can't be empty."
    raise optparse.OptionValueError(msg)
|
||||
|
||||
|
||||
# Maps an option "type" name to a callable that converts/validates the raw
# option value.  Entries are either plain one-argument converters (e.g. int)
# or (optdict, name, value) validators; _call_validator tries both shapes.
VALIDATORS = {
    "string": utils._unquote,
    "int": int,
    "regexp": re.compile,
    "regexp_csv": _regexp_csv_validator,
    "csv": _csv_validator,
    "yn": _yn_validator,
    "choice": lambda opt, name, value: _choice_validator(opt["choices"], name, value),
    "multiple_choice": lambda opt, name, value: _multiple_choice_validator(
        opt["choices"], name, value
    ),
    "non_empty_string": _non_empty_string_validator,
}
|
||||
|
||||
|
||||
def _call_validator(opttype, optdict, option, value):
    """Run the VALIDATORS entry for *opttype* against *value*.

    Raises Exception for an unknown type and optparse.OptionValueError when
    the value cannot be converted.
    """
    if opttype not in VALIDATORS:
        raise Exception('Unsupported type "%s"' % opttype)
    try:
        return VALIDATORS[opttype](optdict, option, value)
    except TypeError:
        # Some validators are simple one-argument converters (e.g. int);
        # retry with just the value before giving up.
        try:
            return VALIDATORS[opttype](value)
        except Exception:
            raise optparse.OptionValueError(
                "%s value (%r) should be of type %s" % (option, value, opttype)
            )
|
||||
|
||||
|
||||
def _validate(value, optdict, name=""):
    """Return a validated value for an option according to its type.

    The optional *name* argument is only used for error message formatting.
    Options without a declared "type" are returned unchanged.
    """
    if "type" not in optdict:
        return value
    return _call_validator(optdict["type"], optdict, name, value)
|
||||
|
||||
|
||||
def _level_options(group, outputlevel):
|
||||
return [
|
||||
option
|
||||
for option in group.option_list
|
||||
if (getattr(option, "level", 0) or 0) <= outputlevel
|
||||
and option.help is not optparse.SUPPRESS_HELP
|
||||
]
|
||||
|
||||
|
||||
def _expand_default(self, option):
    """Patch OptionParser.expand_default with custom behaviour

    This will handle defaults to avoid overriding values in the
    configuration file.  Installed on optparse.HelpFormatter by
    _patch_optparse; replaces the default tag in an option's help text with
    the provider's current configured value instead of the static default.
    """
    if self.parser is None or not self.default_tag:
        return option.help
    # "--option-name" -> "option-name"
    optname = option._long_opts[0][2:]
    try:
        provider = self.parser.options_manager._all_options[optname]
    except KeyError:
        value = None
    else:
        optdict = provider.get_option_def(optname)
        optname = provider.option_attrname(optname, optdict)
        value = getattr(provider.config, optname, optdict)
        value = utils._format_option_value(optdict, value)
    if value is optparse.NO_DEFAULT or not value:
        value = self.NO_DEFAULT_VALUE
    return option.help.replace(self.default_tag, str(value))
|
||||
|
||||
|
||||
@contextlib.contextmanager
def _patch_optparse():
    """Temporarily install pylint's _expand_default on optparse.HelpFormatter.

    The previous implementation saved ``optparse.HelpFormatter`` (the class
    itself) instead of the ``expand_default`` method, so on exit it
    "restored" ``expand_default`` to the class object, permanently breaking
    optparse help formatting afterwards.  Save and restore the method.
    """
    orig_expand_default = optparse.HelpFormatter.expand_default
    try:
        optparse.HelpFormatter.expand_default = _expand_default
        yield
    finally:
        # Always restore the original method, even on error.
        optparse.HelpFormatter.expand_default = orig_expand_default
|
||||
|
||||
|
||||
def _multiple_choices_validating_option(opt, name, value):
    """TYPE_CHECKER adapter: validate a CSV value against opt.choices."""
    return _multiple_choice_validator(opt.choices, name, value)
|
||||
|
||||
|
||||
# pylint: disable=no-member
class Option(optparse.Option):
    """optparse.Option subclass adding pylint-specific option types.

    Extra types ("regexp", "csv", "yn", ...) are wired to the module's
    validator functions, and options gain "hide" and "level" attributes.
    """

    TYPES = optparse.Option.TYPES + (
        "regexp",
        "regexp_csv",
        "csv",
        "yn",
        "multiple_choice",
        "non_empty_string",
    )
    ATTRS = optparse.Option.ATTRS + ["hide", "level"]
    TYPE_CHECKER = copy.copy(optparse.Option.TYPE_CHECKER)
    TYPE_CHECKER["regexp"] = _regexp_validator
    TYPE_CHECKER["regexp_csv"] = _regexp_csv_validator
    TYPE_CHECKER["csv"] = _csv_validator
    TYPE_CHECKER["yn"] = _yn_validator
    TYPE_CHECKER["multiple_choice"] = _multiple_choices_validating_option
    TYPE_CHECKER["non_empty_string"] = _non_empty_string_validator

    def __init__(self, *opts, **attrs):
        optparse.Option.__init__(self, *opts, **attrs)
        if hasattr(self, "hide") and self.hide:
            # Hidden options are still accepted but left out of --help.
            self.help = optparse.SUPPRESS_HELP

    def _check_choice(self):
        # Extends optparse's built-in check to also accept "multiple_choice".
        if self.type in ("choice", "multiple_choice"):
            if self.choices is None:
                raise optparse.OptionError(
                    "must supply a list of choices for type 'choice'", self
                )
            if not isinstance(self.choices, (tuple, list)):
                raise optparse.OptionError(
                    "choices must be a list of strings ('%s' supplied)"
                    % str(type(self.choices)).split("'")[1],
                    self,
                )
        elif self.choices is not None:
            raise optparse.OptionError(
                "must not supply choices for type %r" % self.type, self
            )

    # Replace optparse's own choice check with the extended one above.
    # pylint: disable=unsupported-assignment-operation
    optparse.Option.CHECK_METHODS[2] = _check_choice  # type: ignore

    def process(self, opt, value, values, parser):
        # First, convert the value(s) to the right type. Howl if any
        # value(s) are bogus.
        value = self.convert_value(opt, value)
        if self.type == "named":
            # "named" options accumulate into a dict across occurrences.
            existent = getattr(values, self.dest)
            if existent:
                existent.update(value)
                value = existent
        # And then take whatever action is expected of us.
        # This is a separate method to make life easier for
        # subclasses to add new actions.
        return self.take_action(self.action, self.dest, opt, value, values, parser)
|
||||
|
||||
|
||||
class OptionParser(optparse.OptionParser):
    """optparse.OptionParser using pylint's Option class and level filtering."""

    def __init__(self, option_class, *args, **kwargs):
        # NOTE(review): *option_class* is accepted but ignored; the pylint
        # Option class is always used — confirm whether callers rely on this.
        optparse.OptionParser.__init__(self, option_class=Option, *args, **kwargs)

    def format_option_help(self, formatter=None):
        """Render help, showing only groups/options at the formatter's level."""
        if formatter is None:
            formatter = self.formatter
        outputlevel = getattr(formatter, "output_level", 0)
        formatter.store_option_strings(self)
        result = []
        result.append(formatter.format_heading("Options"))
        formatter.indent()
        if self.option_list:
            result.append(optparse.OptionContainer.format_option_help(self, formatter))
            result.append("\n")
        for group in self.option_groups:
            if group.level <= outputlevel and (
                group.description or _level_options(group, outputlevel)
            ):
                result.append(group.format_help(formatter))
                result.append("\n")
        formatter.dedent()
        # Drop the last "\n", or the header if no options or option groups:
        return "".join(result[:-1])

    def _match_long_opt(self, opt):
        """Disable abbreviations."""
        if opt not in self._long_opt:
            raise optparse.BadOptionError(opt)
        return opt
|
||||
|
||||
|
||||
# pylint: disable=abstract-method; by design?
class _ManHelpFormatter(optparse.HelpFormatter):
    """Help formatter that emits roff/man-page markup (``.SH``, ``.IP``,
    ``.TH`` macros) instead of plain console help."""

    def __init__(
        self, indent_increment=0, max_help_position=24, width=79, short_first=0
    ):
        optparse.HelpFormatter.__init__(
            self, indent_increment, max_help_position, width, short_first
        )

    def format_heading(self, heading):
        # Section heading as a man-page .SH macro (upper-cased by convention).
        return ".SH %s\n" % heading.upper()

    def format_description(self, description):
        # Descriptions are passed through verbatim.
        return description

    def format_option(self, option):
        """Format one option as an ``.IP`` paragraph."""
        try:
            optstring = option.option_strings
        except AttributeError:
            optstring = self.format_option_strings(option)
        if option.help:
            help_text = self.expand_default(option)
            # Collapse the help text onto one line.
            help_string = " ".join([l.strip() for l in help_text.splitlines()])
            # Escape backslashes for roff.
            help_string = help_string.replace("\\", "\\\\")
            help_string = help_string.replace("[current:", "[default:")
        else:
            help_string = ""
        return """.IP "%s"
%s
""" % (
            optstring,
            help_string,
        )

    def format_head(self, optparser, pkginfo, section=1):
        """Build the man page header: title, NAME, SYNOPSIS and (optionally)
        DESCRIPTION sections."""
        long_desc = ""
        try:
            pgm = optparser._get_prog_name()
        except AttributeError:
            # py >= 2.4.X (dunno which X exactly, at least 2)
            pgm = optparser.get_prog_name()
        short_desc = self.format_short_description(pgm, pkginfo.description)
        if hasattr(pkginfo, "long_desc"):
            long_desc = self.format_long_description(pgm, pkginfo.long_desc)
        return "%s\n%s\n%s\n%s" % (
            self.format_title(pgm, section),
            short_desc,
            self.format_synopsis(pgm),
            long_desc,
        )

    @staticmethod
    def format_title(pgm, section):
        # .TH line with today's date in ISO-like YYYY-MM-DD form.
        date = "%d-%02d-%02d" % time.localtime()[:3]
        return '.TH %s %s "%s" %s' % (pgm, section, date, pgm)

    @staticmethod
    def format_short_description(pgm, short_desc):
        return """.SH NAME
.B %s
\\- %s
""" % (
            pgm,
            short_desc.strip(),
        )

    @staticmethod
    def format_synopsis(pgm):
        return (
            """.SH SYNOPSIS
.B %s
[
.I OPTIONS
] [
.I <arguments>
]
"""
            % pgm
        )

    @staticmethod
    def format_long_description(pgm, long_desc):
        # Strip indentation, turn lone "." lines into paragraph breaks and
        # drop a leading program name so the description reads naturally.
        long_desc = "\n".join(line.lstrip() for line in long_desc.splitlines())
        long_desc = long_desc.replace("\n.\n", "\n\n")
        if long_desc.lower().startswith(pgm):
            long_desc = long_desc[len(pgm) :]
        return """.SH DESCRIPTION
.B %s
%s
""" % (
            pgm,
            long_desc.strip(),
        )

    @staticmethod
    def format_tail(pkginfo):
        """Build the trailing SEE ALSO / BUGS / AUTHOR (and optional
        COPYRIGHT) sections from package metadata."""
        tail = """.SH SEE ALSO
/usr/share/doc/pythonX.Y-%s/

.SH BUGS
Please report bugs on the project\'s mailing list:
%s

.SH AUTHOR
%s <%s>
""" % (
            getattr(pkginfo, "debian_name", "pylint"),
            pkginfo.mailinglist,
            pkginfo.author,
            pkginfo.author_email,
        )

        if hasattr(pkginfo, "copyright"):
            tail += (
                """
.SH COPYRIGHT
%s
"""
                % pkginfo.copyright
            )

        return tail
|
||||
|
||||
|
||||
class OptionsManagerMixIn:
    """Handle configuration from both a configuration file and command line options"""

    def __init__(self, usage, config_file=None, version=None):
        self.config_file = config_file
        self.reset_parsers(usage, version=version)
        # list of registered options providers
        self.options_providers = []
        # dictionary associating option name to checker
        self._all_options = collections.OrderedDict()
        # short option letter -> long option name
        self._short_options = {}
        # providers whose options use a plain optparse action (no callback)
        self._nocallback_options = {}
        # group name -> optparse.OptionGroup already created
        self._mygroups = {}
        # verbosity
        self._maxlevel = 0

    def reset_parsers(self, usage="", version=None):
        """(Re)create the configuration-file and command-line parsers."""
        # configuration file parser
        self.cfgfile_parser = configparser.ConfigParser(
            inline_comment_prefixes=("#", ";")
        )
        # command line parser
        self.cmdline_parser = OptionParser(Option, usage=usage, version=version)
        self.cmdline_parser.options_manager = self
        self._optik_option_attrs = set(self.cmdline_parser.option_class.ATTRS)

    def register_options_provider(self, provider, own_group=True):
        """register an options provider"""
        assert provider.priority <= 0, "provider's priority can't be >= 0"
        # Keep ``options_providers`` sorted by decreasing priority.
        for i in range(len(self.options_providers)):
            if provider.priority > self.options_providers[i].priority:
                self.options_providers.insert(i, provider)
                break
        else:
            self.options_providers.append(provider)
        non_group_spec_options = [
            option for option in provider.options if "group" not in option[1]
        ]
        groups = getattr(provider, "option_groups", ())
        if own_group and non_group_spec_options:
            # Ungrouped options get a group named after the provider itself.
            self.add_option_group(
                provider.name.upper(),
                provider.__doc__,
                non_group_spec_options,
                provider,
            )
        else:
            for opt, optdict in non_group_spec_options:
                self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
        for gname, gdoc in groups:
            gname = gname.upper()
            goptions = [
                option
                for option in provider.options
                if option[1].get("group", "").upper() == gname
            ]
            self.add_option_group(gname, gdoc, goptions, provider)

    def add_option_group(self, group_name, _, options, provider):
        """Register *options* under *group_name* on both the command-line
        parser and the configuration-file parser."""
        # add option group to the command line parser
        if group_name in self._mygroups:
            group = self._mygroups[group_name]
        else:
            group = optparse.OptionGroup(
                self.cmdline_parser, title=group_name.capitalize()
            )
            self.cmdline_parser.add_option_group(group)
            group.level = provider.level
            self._mygroups[group_name] = group
            # add section to the config file
            if (
                group_name != "DEFAULT"
                and group_name not in self.cfgfile_parser._sections
            ):
                self.cfgfile_parser.add_section(group_name)
        # add provider's specific options
        for opt, optdict in options:
            self.add_optik_option(provider, group, opt, optdict)

    def add_optik_option(self, provider, optikcontainer, opt, optdict):
        """Add a single option to an optparse container and index it."""
        args, optdict = self.optik_option(provider, opt, optdict)
        option = optikcontainer.add_option(*args, **optdict)
        self._all_options[opt] = provider
        self._maxlevel = max(self._maxlevel, option.level or 0)

    def optik_option(self, provider, opt, optdict):
        """get our personal option definition and return a suitable form for
        use with optik/optparse
        """
        optdict = copy.copy(optdict)
        if "action" in optdict:
            self._nocallback_options[provider] = opt
        else:
            # No explicit action: route the value through our callback so the
            # right provider receives it.
            optdict["action"] = "callback"
            optdict["callback"] = self.cb_set_provider_option
        # default is handled here and *must not* be given to optik if you
        # want the whole machinery to work
        if "default" in optdict:
            if (
                "help" in optdict
                and optdict.get("default") is not None
                and optdict["action"] not in ("store_true", "store_false")
            ):
                optdict["help"] += " [current: %default]"
            del optdict["default"]
        args = ["--" + str(opt)]
        if "short" in optdict:
            self._short_options[optdict["short"]] = opt
            args.append("-" + optdict["short"])
            del optdict["short"]
        # cleanup option definition dict before giving it to optik
        for key in list(optdict.keys()):
            if key not in self._optik_option_attrs:
                optdict.pop(key)
        return args, optdict

    def cb_set_provider_option(self, option, opt, value, parser):
        """optik callback for option setting"""
        if opt.startswith("--"):
            # remove -- on long option
            opt = opt[2:]
        else:
            # short option, get its long equivalent
            opt = self._short_options[opt[1:]]
        # trick since we can't set action='store_true' on options
        if value is None:
            value = 1
        self.global_set_option(opt, value)

    def global_set_option(self, opt, value):
        """set option on the correct option provider"""
        self._all_options[opt].set_option(opt, value)

    def generate_config(self, stream=None, skipsections=(), encoding=None):
        """write a configuration file according to the current configuration
        into the given stream or stdout
        """
        options_by_section = {}
        sections = []
        for provider in self.options_providers:
            for section, options in provider.options_by_section():
                if section is None:
                    section = provider.name
                if section in skipsections:
                    continue
                # Only keep typed, non-deprecated options.
                options = [
                    (n, d, v)
                    for (n, d, v) in options
                    if d.get("type") is not None and not d.get("deprecated")
                ]
                if not options:
                    continue
                if section not in sections:
                    sections.append(section)
                alloptions = options_by_section.setdefault(section, [])
                alloptions += options
        stream = stream or sys.stdout
        printed = False
        for section in sections:
            if printed:
                print("\n", file=stream)
            utils.format_section(
                stream, section.upper(), sorted(options_by_section[section])
            )
            printed = True

    def generate_manpage(self, pkginfo, section=1, stream=None):
        """Write a Unix manual page for the command-line parser."""
        with _patch_optparse():
            _generate_manpage(
                self.cmdline_parser,
                pkginfo,
                section,
                stream=stream or sys.stdout,
                level=self._maxlevel,
            )

    def load_provider_defaults(self):
        """initialize configuration using default values"""
        for provider in self.options_providers:
            provider.load_defaults()

    def read_config_file(self, config_file=None, verbose=None):
        """read the configuration file but do not load it (i.e. dispatching
        values to each options provider)
        """
        # Register the --long-help, --long-long-help, ... options, one per
        # verbosity level up to the maximum level seen so far.
        helplevel = 1
        while helplevel <= self._maxlevel:
            opt = "-".join(["long"] * helplevel) + "-help"
            if opt in self._all_options:
                break  # already processed

            helpfunc = functools.partial(self.helpfunc, level=helplevel)

            helpmsg = "%s verbose help." % " ".join(["more"] * helplevel)
            optdict = {"action": "callback", "callback": helpfunc, "help": helpmsg}
            provider = self.options_providers[0]
            self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
            provider.options += ((opt, optdict),)
            helplevel += 1
        if config_file is None:
            config_file = self.config_file
        if config_file is not None:
            config_file = os.path.expanduser(config_file)
            if not os.path.exists(config_file):
                raise OSError("The config file {:s} doesn't exist!".format(config_file))

        use_config_file = config_file and os.path.exists(config_file)
        if use_config_file:
            parser = self.cfgfile_parser

            if config_file.endswith(".toml"):
                # TOML config: copy [tool.pylint.*] tables into parser sections.
                with open(config_file) as fp:
                    content = toml.load(fp)

                try:
                    sections_values = content["tool"]["pylint"]
                except KeyError:
                    pass
                else:
                    for section, values in sections_values.items():
                        parser._sections[section.upper()] = values
            else:
                # Use this encoding in order to strip the BOM marker, if any.
                with open(config_file, encoding="utf_8_sig") as fp:
                    parser.read_file(fp)

                # normalize sections'title
                for sect, values in list(parser._sections.items()):
                    if sect.startswith("pylint."):
                        sect = sect[len("pylint.") :]
                    if not sect.isupper() and values:
                        parser._sections[sect.upper()] = values

        if not verbose:
            return

        if use_config_file:
            msg = "Using config file {}".format(os.path.abspath(config_file))
        else:
            msg = "No config file found, using default configuration"
        print(msg, file=sys.stderr)

    def load_config_file(self):
        """dispatch values previously read from a configuration file to each
        options provider)
        """
        parser = self.cfgfile_parser
        for section in parser.sections():
            for option, value in parser.items(section):
                try:
                    self.global_set_option(option, value)
                except (KeyError, optparse.OptionError):
                    # Unknown option in the config file: silently ignored.
                    continue

    def load_configuration(self, **kwargs):
        """override configuration according to given parameters"""
        return self.load_configuration_from_config(kwargs)

    def load_configuration_from_config(self, config):
        """Override configuration from a plain mapping of option -> value."""
        for opt, opt_value in config.items():
            # Python identifiers use "_" where option names use "-".
            opt = opt.replace("_", "-")
            provider = self._all_options[opt]
            provider.set_option(opt, opt_value)

    def load_command_line_configuration(self, args=None):
        """Override configuration according to command line parameters

        return additional arguments
        """
        with _patch_optparse():
            if args is None:
                args = sys.argv[1:]
            else:
                args = list(args)
            (options, args) = self.cmdline_parser.parse_args(args=args)
            # Copy plain (non-callback) option values onto each provider's
            # config object; callback options were dispatched during parsing.
            for provider in self._nocallback_options:
                config = provider.config
                for attr in config.__dict__.keys():
                    value = getattr(options, attr, None)
                    if value is None:
                        continue
                    setattr(config, attr, value)
            return args

    def add_help_section(self, title, description, level=0):
        """add a dummy option section for help purpose """
        group = optparse.OptionGroup(
            self.cmdline_parser, title=title.capitalize(), description=description
        )
        group.level = level
        self._maxlevel = max(self._maxlevel, level)
        self.cmdline_parser.add_option_group(group)

    def help(self, level=0):
        """return the usage string for available options """
        self.cmdline_parser.formatter.output_level = level
        with _patch_optparse():
            return self.cmdline_parser.format_help()

    def helpfunc(self, option, opt, val, p, level):  # pylint: disable=unused-argument
        """Callback for the --long-help family of options: print and exit."""
        print(self.help(level))
        sys.exit(0)
|
||||
|
||||
|
||||
class OptionsProviderMixIn:
    """Mixin to provide options to an OptionsManager"""

    # those attributes should be overridden
    priority = -1
    name = "default"
    options = ()  # type: Tuple[Tuple[str, Dict[str, Any]], ...]
    level = 0

    def __init__(self):
        # per-provider configuration values (attribute per option)
        self.config = optparse.Values()
        self.load_defaults()

    def load_defaults(self):
        """initialize the provider using default values"""
        for opt, optdict in self.options:
            action = optdict.get("action")
            if action != "callback":
                # callback action have no default
                if optdict is None:
                    optdict = self.get_option_def(opt)
                default = optdict.get("default")
                self.set_option(opt, default, action, optdict)

    def option_attrname(self, opt, optdict=None):
        """get the config attribute corresponding to opt"""
        if optdict is None:
            optdict = self.get_option_def(opt)
        # "dest" overrides the default name mangling ("-" -> "_").
        return optdict.get("dest", opt.replace("-", "_"))

    def option_value(self, opt):
        """get the current value for the given option"""
        return getattr(self.config, self.option_attrname(opt), None)

    def set_option(self, optname, value, action=None, optdict=None):
        """method called to set an option (registered in the options list)"""
        if optdict is None:
            optdict = self.get_option_def(optname)
        if value is not None:
            value = _validate(value, optdict, optname)
        if action is None:
            action = optdict.get("action", "store")
        if action == "store":
            setattr(self.config, self.option_attrname(optname, optdict), value)
        elif action in ("store_true", "count"):
            # NOTE(review): seeds the attribute with 0 (falsy) rather than the
            # given value — apparently the default-initialisation behaviour.
            setattr(self.config, self.option_attrname(optname, optdict), 0)
        elif action == "store_false":
            setattr(self.config, self.option_attrname(optname, optdict), 1)
        elif action == "append":
            optname = self.option_attrname(optname, optdict)
            _list = getattr(self.config, optname, None)
            if _list is None:
                # First value: start a list (or adopt a given list/tuple).
                if isinstance(value, (list, tuple)):
                    _list = value
                elif value is not None:
                    _list = []
                    _list.append(value)
                setattr(self.config, optname, _list)
            elif isinstance(_list, tuple):
                setattr(self.config, optname, _list + (value,))
            else:
                _list.append(value)
        elif action == "callback":
            optdict["callback"](None, optname, value, None)
        else:
            raise UnsupportedAction(action)

    def get_option_def(self, opt):
        """return the dictionary defining an option given its name"""
        assert self.options
        for option in self.options:
            if option[0] == opt:
                return option[1]
        raise optparse.OptionError(
            "no such option %s in section %r" % (opt, self.name), opt
        )

    def options_by_section(self):
        """return an iterator on options grouped by section

        (section, [list of (optname, optdict, optvalue)])
        """
        sections = {}
        for optname, optdict in self.options:
            sections.setdefault(optdict.get("group"), []).append(
                (optname, optdict, self.option_value(optname))
            )
        # Ungrouped options (key None) come first.
        if None in sections:
            yield None, sections.pop(None)
        for section, options in sorted(sections.items()):
            yield section.upper(), options

    def options_and_values(self, options=None):
        """Yield (optname, optdict, current value) triples for *options*
        (defaults to this provider's own option list)."""
        if options is None:
            options = self.options
        for optname, optdict in options:
            yield (optname, optdict, self.option_value(optname))
|
||||
|
||||
|
||||
class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn):
    """basic mixin for simple configurations which don't need the
    manager / providers model
    """

    def __init__(self, *args, **kwargs):
        if not args:
            kwargs.setdefault("usage", "")
        OptionsManagerMixIn.__init__(self, *args, **kwargs)
        OptionsProviderMixIn.__init__(self)
        # Derive option groups from the options' "group" entries unless the
        # subclass already declared some.
        if not getattr(self, "option_groups", None):
            self.option_groups = []
            for _, optdict in self.options:
                try:
                    gdef = (optdict["group"].upper(), "")
                except KeyError:
                    continue
                if gdef not in self.option_groups:
                    self.option_groups.append(gdef)
        # This object is both the manager and its single provider.
        self.register_options_provider(self, own_group=False)
|
||||
|
||||
|
||||
def _generate_manpage(optparser, pkginfo, section=1, stream=sys.stdout, level=0):
    """Render a Unix manual page for *optparser* and write it to *stream*.

    The page is assembled from a header (title/NAME/SYNOPSIS), the formatted
    option help, and a tail (SEE ALSO/BUGS/AUTHOR) built from *pkginfo*.
    """
    man_formatter = _ManHelpFormatter()
    man_formatter.output_level = level
    man_formatter.parser = optparser
    parts = (
        man_formatter.format_head(optparser, pkginfo, section),
        optparser.format_option_help(man_formatter),
        man_formatter.format_tail(pkginfo),
    )
    for part in parts:
        print(part, file=stream)
|
||||
41
venv/lib/python3.8/site-packages/pylint/constants.py
Normal file
41
venv/lib/python3.8/site-packages/pylint/constants.py
Normal file
@@ -0,0 +1,41 @@
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
import re
|
||||
|
||||
# Allow stopping after the first semicolon/hash encountered,
# so that an option can be continued with the reasons
# why it is active or disabled.
OPTION_RGX = re.compile(r"\s*#.*\bpylint:\s*([^;#]+)[;#]{0,1}")

# File extensions recognised as Python sources or compiled extension modules.
PY_EXTS = (".py", ".pyc", ".pyo", ".pyw", ".so", ".dll")

MSG_STATE_CONFIDENCE = 2
# Ordering of message categories: Error, Warning, Refactor, Convention,
# Info, Fatal.
_MSG_ORDER = "EWRCIF"
MSG_STATE_SCOPE_CONFIG = 0
MSG_STATE_SCOPE_MODULE = 1

# The line/node distinction does not apply to fatal errors and reports.
_SCOPE_EXEMPT = "FR"

# Message-id letter -> human-readable category name.
MSG_TYPES = {
    "I": "info",
    "C": "convention",
    "R": "refactor",
    "W": "warning",
    "E": "error",
    "F": "fatal",
}
# Reverse mapping: category name -> message-id letter.
MSG_TYPES_LONG = {v: k for k, v in MSG_TYPES.items()}

# Message-id letter -> exit-status bit flag.
MSG_TYPES_STATUS = {"I": 0, "C": 16, "R": 8, "W": 4, "E": 2, "F": 1}

# You probably don't want to change the MAIN_CHECKER_NAME
# This would affect rcfile generation and retro-compatibility
# on all project using [MASTER] in their rcfile.
MAIN_CHECKER_NAME = "master"
|
||||
|
||||
|
||||
class WarningScope:
    """Symbolic names for the scope a warning applies to."""

    # warning attached to a specific source line
    LINE = "line-based-msg"
    # warning attached to an AST node
    NODE = "node-based-msg"
||||
200
venv/lib/python3.8/site-packages/pylint/epylint.py
Normal file
200
venv/lib/python3.8/site-packages/pylint/epylint.py
Normal file
@@ -0,0 +1,200 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4
|
||||
# -*- vim:fenc=utf-8:ft=python:et:sw=4:ts=4:sts=4
|
||||
|
||||
# Copyright (c) 2008-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014 Jakob Normark <jakobnormark@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Manuel Vázquez Acosta <mva.led@gmail.com>
|
||||
# Copyright (c) 2014 Derek Harland <derek.harland@finq.co.nz>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015 Mihai Balint <balint.mihai@gmail.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2017 Daniela Plascencia <daplascen@gmail.com>
|
||||
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Ryan McGuire <ryan@enigmacurry.com>
|
||||
# Copyright (c) 2018 thernstig <30827238+thernstig@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Radostin Stoyanov <rst0git@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Emacs and Flymake compatible Pylint.
|
||||
|
||||
This script is for integration with emacs and is compatible with flymake mode.
|
||||
|
||||
epylint walks out of python packages before invoking pylint. This avoids
|
||||
reporting import errors that occur when a module within a package uses the
|
||||
absolute import path to get another module within this package.
|
||||
|
||||
For example:
|
||||
- Suppose a package is structured as
|
||||
|
||||
a/__init__.py
|
||||
a/b/x.py
|
||||
a/c/y.py
|
||||
|
||||
- Then if y.py imports x as "from a.b import x" the following produces pylint
|
||||
errors
|
||||
|
||||
cd a/c; pylint y.py
|
||||
|
||||
- The following obviously doesn't
|
||||
|
||||
pylint a/c/y.py
|
||||
|
||||
- As this script will be invoked by emacs within the directory of the file
|
||||
we are checking we need to go out of it to avoid these false positives.
|
||||
|
||||
|
||||
You may also use py_run to run pylint with desired options and get back (or not)
|
||||
its output.
|
||||
"""
|
||||
import os
|
||||
import os.path as osp
|
||||
import shlex
|
||||
import sys
|
||||
from io import StringIO
|
||||
from subprocess import PIPE, Popen
|
||||
|
||||
|
||||
def _get_env():
    """Extracts the environment PYTHONPATH and appends the current sys.path to
    those."""
    env = dict(os.environ)
    # Replace PYTHONPATH with the running interpreter's full sys.path so the
    # child pylint process resolves the same modules as this process.
    env["PYTHONPATH"] = os.pathsep.join(sys.path)
    return env
|
||||
|
||||
|
||||
def lint(filename, options=()):
    """Pylint the given file.

    When run from emacs we will be in the directory of a file, and passed its
    filename. If this file is part of a package and is trying to import other
    modules from within its own package or another package rooted in a directory
    below it, pylint will classify it as a failed import.

    To get around this, we traverse down the directory tree to find the root of
    the package this module is in. We then invoke pylint from this directory.

    Finally, we must correct the filenames in the output generated by pylint so
    Emacs doesn't become confused (it will expect just the original filename,
    while pylint may extend it with extra directories if we've traversed down
    the tree)

    :param filename: path of the file to lint
    :param options: extra command-line arguments forwarded to pylint
    :returns: the exit code of the pylint subprocess
    """
    # traverse downwards until we are out of a python package
    full_path = osp.abspath(filename)
    parent_path = osp.dirname(full_path)
    child_path = osp.basename(full_path)

    # Walk up while the parent directory is still a package (contains an
    # __init__.py), accumulating the traversed directories into child_path.
    # NOTE(review): the "/" root check is POSIX-specific; on Windows the loop
    # only terminates when __init__.py is absent — confirm this is intended.
    while parent_path != "/" and osp.exists(osp.join(parent_path, "__init__.py")):
        child_path = osp.join(osp.basename(parent_path), child_path)
        parent_path = osp.dirname(parent_path)

    # Start pylint
    # Ensure we use the python and pylint associated with the running epylint
    run_cmd = "import sys; from pylint.lint import Run; Run(sys.argv[1:])"
    cmd = (
        [sys.executable, "-c", run_cmd]
        + [
            "--msg-template",
            "{path}:{line}: {category} ({msg_id}, {symbol}, {obj}) {msg}",
            "-r",
            "n",
            child_path,
        ]
        + list(options)
    )
    process = Popen(
        cmd, stdout=PIPE, cwd=parent_path, env=_get_env(), universal_newlines=True
    )

    for line in process.stdout:
        # remove pylintrc warning
        if line.startswith("No config file found"):
            continue

        # modify the file name thats output to reverse the path traversal we made
        parts = line.split(":")
        if parts and parts[0] == child_path:
            line = ":".join([filename] + parts[1:])
        # NOTE(review): lines already end with "\n"; end=" " appends a
        # trailing space after each line — presumably for flymake; verify.
        print(line, end=" ")

    process.wait()
    return process.returncode
|
||||
|
||||
|
||||
def py_run(command_options="", return_std=False, stdout=None, stderr=None):
    """Run pylint from python

    ``command_options`` is a string containing ``pylint`` command line options;
    ``return_std`` (boolean) indicates return of created standard output
    and error (see below);
    ``stdout`` and ``stderr`` are 'file-like' objects in which standard output
    could be written.

    Calling agent is responsible for stdout/err management (creation, close).
    Default standard output and error are those from sys,
    or standalone ones (``subprocess.PIPE``) are used
    if they are not set and ``return_std``.

    If ``return_std`` is set to ``True``, this function returns a 2-uple
    containing standard output and error related to created process,
    as follows: ``(stdout, stderr)``.

    To silently run Pylint on a module, and get its standard output and error:
    >>> (pylint_stdout, pylint_stderr) = py_run( 'module_name.py', True)
    """
    # Detect if we use Python as executable or not, else default to `python`
    executable = sys.executable if "python" in sys.executable else "python"

    # Create command line to call pylint
    epylint_part = [executable, "-c", "from pylint import epylint;epylint.Run()"]
    # posix=False on Windows so backslash paths survive shlex splitting.
    options = shlex.split(command_options, posix=not sys.platform.startswith("win"))
    cli = epylint_part + options

    # Providing standard output and/or error if not set
    if stdout is None:
        if return_std:
            stdout = PIPE
        else:
            stdout = sys.stdout
    if stderr is None:
        if return_std:
            stderr = PIPE
        else:
            stderr = sys.stderr
    # Call pylint in a subprocess
    process = Popen(
        cli,
        shell=False,
        stdout=stdout,
        stderr=stderr,
        env=_get_env(),
        universal_newlines=True,
    )
    proc_stdout, proc_stderr = process.communicate()
    # Return standard output and error
    if return_std:
        return StringIO(proc_stdout), StringIO(proc_stderr)
    return None
|
||||
|
||||
|
||||
def Run():
    """Command-line entry point: lint the file named by ``sys.argv[1]`` and
    exit with the pylint subprocess' return code.

    Exits with status 1 when no filename is given or the file is missing.
    """
    if len(sys.argv) == 1:
        print("Usage: %s <filename> [options]" % sys.argv[0])
        sys.exit(1)
    if not osp.exists(sys.argv[1]):
        print("%s does not exist" % sys.argv[1])
        sys.exit(1)
    sys.exit(lint(sys.argv[1], sys.argv[2:]))
|
||||
|
||||
|
||||
# Allow running this module directly as a script.
if __name__ == "__main__":
    Run()
|
||||
31
venv/lib/python3.8/site-packages/pylint/exceptions.py
Normal file
31
venv/lib/python3.8/site-packages/pylint/exceptions.py
Normal file
@@ -0,0 +1,31 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2019 Thomas Hisch <t.hisch@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Exception classes raised by various operations within pylint."""
|
||||
|
||||
|
||||
class InvalidMessageError(Exception):
    """Raised when a message creation, registration or addition is rejected."""


class UnknownMessageError(Exception):
    """Raised when an unregistered message id is encountered."""


class EmptyReportError(Exception):
    """Raised when a report is empty and so should not be displayed."""


class InvalidReporterError(Exception):
    """Raised when selected reporter is invalid (e.g. not found)."""


class InvalidArgsError(ValueError):
    """Raised when passed arguments are invalid, e.g., have the wrong length."""
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,808 @@
|
||||
# Copyright (c) 2016-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016-2019 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2016 Yuri Bochkarev <baltazar.bz@gmail.com>
|
||||
# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
|
||||
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2017 Mitar <mitar.github@tnode.com>
|
||||
# Copyright (c) 2018, 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2018 Jim Robertson <jrobertson98atx@gmail.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2018 Mitchell T.H. Young <mitchelly@gmail.com>
|
||||
# Copyright (c) 2018 Adrian Chirieac <chirieacam@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Danny Hermes <daniel.j.hermes@gmail.com>
|
||||
# Copyright (c) 2019 Zeb Nicholls <zebedee.nicholls@climate-energy-college.org>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Utility methods for docstring checking."""
|
||||
|
||||
import re
|
||||
from typing import List
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint.checkers import utils
|
||||
|
||||
|
||||
def space_indentation(s):
    """The number of leading spaces in a string

    :param str s: input string

    :rtype: int
    :return: number of leading spaces
    """
    count = 0
    for char in s:
        if char != " ":
            break
        count += 1
    return count
|
||||
|
||||
|
||||
def get_setters_property_name(node):
    """Get the name of the property that the given node is a setter for.

    :param node: The setter node to get the property name for.
    :type node: astroid.FunctionDef

    :rtype: str or None
    :returns: The name of the property that the node is a setter for,
        or None if one could not be found.
    """
    if not node.decorators:
        return None

    for decorator in node.decorators.nodes:
        # Looking for the ``@<name>.setter`` decoration pattern.
        is_setter_attribute = (
            isinstance(decorator, astroid.Attribute)
            and decorator.attrname == "setter"
        )
        if is_setter_attribute and isinstance(decorator.expr, astroid.Name):
            return decorator.expr.name

    return None
|
||||
|
||||
|
||||
def get_setters_property(node):
    """Get the property node for the given setter node.

    :param node: The node to get the property for.
    :type node: astroid.FunctionDef

    :rtype: astroid.FunctionDef or None
    :returns: The node relating to the property of the given setter node,
        or None if one could not be found.
    """
    property_name = get_setters_property_name(node)
    class_node = utils.node_frame_class(node)
    if not property_name or not class_node:
        return None

    # The property shares its name with the setter; find the class
    # attribute that actually carries the @property decoration.
    for attr in class_node.getattr(node.name):
        if utils.decorated_with_property(attr):
            return attr

    return None
|
||||
|
||||
|
||||
def returns_something(return_node):
    """Check if a return node returns a value other than None.

    :param return_node: The return node to check.
    :type return_node: astroid.Return

    :rtype: bool
    :return: True if the return node returns a value other than None,
        False otherwise.
    """
    value = return_node.value
    # A bare ``return`` has no value node at all.
    if value is None:
        return False

    # An explicit ``return None`` is a Const whose value is None.
    if isinstance(value, astroid.Const):
        return value.value is not None

    return True
|
||||
|
||||
|
||||
def _get_raise_target(node):
    """Infer the callable being raised, e.g. the class in ``raise Exc(...)``.

    Returns None when the raised expression is not a call on a simple
    name or attribute (or when inference fails).
    """
    if not isinstance(node.exc, astroid.Call):
        return None

    func = node.exc.func
    if not isinstance(func, (astroid.Name, astroid.Attribute)):
        return None

    return utils.safe_infer(func)
|
||||
|
||||
|
||||
def _split_multiple_exc_types(target: str) -> List[str]:
|
||||
delimiters = r"(\s*,(?:\s*or\s)?\s*|\s+or\s+)"
|
||||
return re.split(delimiters, target)
|
||||
|
||||
|
||||
def possible_exc_types(node):
    """
    Gets all of the possible raised exception types for the given raise node.

    .. note::

        Caught exception types are ignored.


    :param node: The raise node to find exception types for.
    :type node: astroid.node_classes.NodeNG

    :returns: A list of exception types possibly raised by :param:`node`.
    :rtype: set(str)
    """
    excs = []
    if isinstance(node.exc, astroid.Name):
        # ``raise exc_name`` -- infer what the name refers to.
        inferred = utils.safe_infer(node.exc)
        if inferred:
            excs = [inferred.name]
    elif node.exc is None:
        # Bare ``raise`` -- re-raises whatever the enclosing except
        # handler caught, so walk up to find that handler.
        handler = node.parent
        while handler and not isinstance(handler, astroid.ExceptHandler):
            handler = handler.parent

        if handler and handler.type:
            inferred_excs = astroid.unpack_infer(handler.type)
            excs = (exc.name for exc in inferred_excs if exc is not astroid.Uninferable)
    else:
        # ``raise SomeCall(...)`` -- resolve what is being called.
        target = _get_raise_target(node)
        if isinstance(target, astroid.ClassDef):
            excs = [target.name]
        elif isinstance(target, astroid.FunctionDef):
            # Raising the result of a factory function: collect every
            # exception type that function can return.
            for ret in target.nodes_of_class(astroid.Return):
                if ret.frame() != target:
                    # return from inner function - ignore it
                    continue

                val = utils.safe_infer(ret.value)
                if (
                    val
                    and isinstance(val, (astroid.Instance, astroid.ClassDef))
                    and utils.inherit_from_std_ex(val)
                ):
                    excs.append(val.name)

    try:
        # Drop exceptions that are caught by an enclosing handler.
        return {exc for exc in excs if not utils.node_ignores_exception(node, exc)}
    except astroid.InferenceError:
        return set()
|
||||
|
||||
|
||||
def docstringify(docstring, default_type="default"):
    """Wrap a raw docstring in the first docstring class that recognizes it.

    Tries each known docstring format in order (Sphinx, Epytext, Google,
    Numpy); when none claims the docstring, falls back to the configured
    ``default_type`` (or the plain ``Docstring`` base class).
    """
    candidates = (
        SphinxDocstring,
        EpytextDocstring,
        GoogleDocstring,
        NumpyDocstring,
    )
    for candidate in candidates:
        wrapped = candidate(docstring)
        if wrapped.is_valid():
            return wrapped

    fallback = DOCSTRING_TYPES.get(default_type, Docstring)
    return fallback(docstring)
|
||||
|
||||
|
||||
class Docstring:
    """Base wrapper around a raw docstring.

    Subclasses implement detection and querying for one documentation
    style; this base class is the format-less fallback whose query
    methods all report that nothing was documented.
    """

    re_for_parameters_see = re.compile(
        r"""
        For\s+the\s+(other)?\s*parameters\s*,\s+see
        """,
        re.X | re.S,
    )

    supports_yields = None
    """True if the docstring supports a "yield" section.

    False if the docstring uses the returns section to document generators.
    """

    # These methods are designed to be overridden
    # pylint: disable=no-self-use
    def __init__(self, doc):
        self.doc = (doc or "").expandtabs()

    def is_valid(self):
        return False

    def exceptions(self):
        return set()

    def has_params(self):
        return False

    def has_returns(self):
        return False

    def has_rtype(self):
        return False

    def has_property_returns(self):
        return False

    def has_property_type(self):
        return False

    def has_yields(self):
        return False

    def has_yields_type(self):
        return False

    def match_param_docs(self):
        return set(), set()

    def params_documented_elsewhere(self):
        match = self.re_for_parameters_see.search(self.doc)
        return match is not None
|
||||
|
||||
|
||||
class SphinxDocstring(Docstring):
    """Parse Sphinx/reST-style docstrings (``:param x:``, ``:raises E:`` ...)."""

    # A (possibly dotted) Python name, with an optional Sphinx link prefix.
    re_type = r"""
        [~!.]?               # Optional link style prefix
        \w(?:\w|\.[^\.])*    # Valid python name
        """

    re_simple_container_type = r"""
        {type}                        # a container type
        [\(\[] [^\n\s]+ [\)\]]        # with the contents of the container
    """.format(
        type=re_type
    )

    # One or more types joined by "of", "or" or commas.
    re_multiple_simple_type = r"""
        (?:{container_type}|{type})
        (?:(?:\s+(?:of|or)\s+|\s*,\s*)(?:{container_type}|{type}))*
    """.format(
        type=re_type, container_type=re_simple_container_type
    )

    re_xref = r"""
        (?::\w+:)?                    # optional tag
        `{}`                          # what to reference
        """.format(
        re_type
    )

    re_param_raw = r"""
        :                       # initial colon
        (?:                     # Sphinx keywords
        param|parameter|
        arg|argument|
        key|keyword
        )
        \s+                     # whitespace

        (?:                     # optional type declaration
        ({type}|{container_type})
        \s+
        )?

        (\w+)                   # Parameter name
        \s*                     # whitespace
        :                       # final colon
        """.format(
        type=re_type, container_type=re_simple_container_type
    )
    re_param_in_docstring = re.compile(re_param_raw, re.X | re.S)

    re_type_raw = r"""
        :type                   # Sphinx keyword
        \s+                     # whitespace
        ({type})                # Parameter name
        \s*                     # whitespace
        :                       # final colon
        """.format(
        type=re_multiple_simple_type
    )
    re_type_in_docstring = re.compile(re_type_raw, re.X | re.S)

    re_property_type_raw = r"""
        :type:                  # Sphinx keyword
        \s+                     # whitespace
        {type}                  # type declaration
        """.format(
        type=re_multiple_simple_type
    )
    re_property_type_in_docstring = re.compile(re_property_type_raw, re.X | re.S)

    re_raise_raw = r"""
        :                       # initial colon
        (?:                     # Sphinx keyword
        raises?|
        except|exception
        )
        \s+                     # whitespace
        ({type})                # exception type
        \s*                     # whitespace
        :                       # final colon
        """.format(
        type=re_multiple_simple_type
    )
    re_raise_in_docstring = re.compile(re_raise_raw, re.X | re.S)

    re_rtype_in_docstring = re.compile(r":rtype:")

    re_returns_in_docstring = re.compile(r":returns?:")

    # Sphinx style documents generator returns in the returns section.
    supports_yields = False

    def is_valid(self):
        # A docstring "is" Sphinx-style as soon as any known field appears.
        return bool(
            self.re_param_in_docstring.search(self.doc)
            or self.re_raise_in_docstring.search(self.doc)
            or self.re_rtype_in_docstring.search(self.doc)
            or self.re_returns_in_docstring.search(self.doc)
            or self.re_property_type_in_docstring.search(self.doc)
        )

    def exceptions(self):
        """Return the set of exception-type names documented as raised."""
        types = set()

        for match in re.finditer(self.re_raise_in_docstring, self.doc):
            raise_type = match.group(1)
            types.update(_split_multiple_exc_types(raise_type))

        return types

    def has_params(self):
        if not self.doc:
            return False

        return self.re_param_in_docstring.search(self.doc) is not None

    def has_returns(self):
        if not self.doc:
            return False

        return bool(self.re_returns_in_docstring.search(self.doc))

    def has_rtype(self):
        if not self.doc:
            return False

        return bool(self.re_rtype_in_docstring.search(self.doc))

    def has_property_returns(self):
        if not self.doc:
            return False

        # The summary line is the return doc,
        # so the first line must not be a known directive.
        return not self.doc.lstrip().startswith(":")

    def has_property_type(self):
        if not self.doc:
            return False

        return bool(self.re_property_type_in_docstring.search(self.doc))

    def match_param_docs(self):
        """Return (documented param names, type-annotated param names)."""
        params_with_doc = set()
        params_with_type = set()

        for match in re.finditer(self.re_param_in_docstring, self.doc):
            name = match.group(2)
            params_with_doc.add(name)
            param_type = match.group(1)
            if param_type is not None:
                params_with_type.add(name)

        # ``:type name:`` fields also count as type documentation.
        params_with_type.update(re.findall(self.re_type_in_docstring, self.doc))
        return params_with_doc, params_with_type
|
||||
|
||||
|
||||
class EpytextDocstring(SphinxDocstring):
    """
    Epytext is similar to Sphinx. See the docs:
        http://epydoc.sourceforge.net/epytext.html
        http://epydoc.sourceforge.net/fields.html#fields

    It's used in PyCharm:
        https://www.jetbrains.com/help/pycharm/2016.1/creating-documentation-comments.html#d848203e314
        https://www.jetbrains.com/help/pycharm/2016.1/using-docstrings-to-specify-types.html
    """

    # Epytext fields use "@" where Sphinx uses ":", so the Sphinx raw
    # patterns are reused with only the first ":" swapped for "@".
    re_param_in_docstring = re.compile(
        SphinxDocstring.re_param_raw.replace(":", "@", 1), re.X | re.S
    )

    re_type_in_docstring = re.compile(
        SphinxDocstring.re_type_raw.replace(":", "@", 1), re.X | re.S
    )

    re_property_type_in_docstring = re.compile(
        SphinxDocstring.re_property_type_raw.replace(":", "@", 1), re.X | re.S
    )

    re_raise_in_docstring = re.compile(
        SphinxDocstring.re_raise_raw.replace(":", "@", 1), re.X | re.S
    )

    re_rtype_in_docstring = re.compile(
        r"""
        @                       # initial "at" symbol
        (?:                     # Epytext keyword
        rtype|returntype
        )
        :                       # final colon
        """,
        re.X | re.S,
    )

    re_returns_in_docstring = re.compile(r"@returns?:")

    def has_property_returns(self):
        if not self.doc:
            return False

        # If this is a property docstring, the summary is the return doc.
        if self.has_property_type():
            # The summary line is the return doc,
            # so the first line must not be a known directive.
            return not self.doc.lstrip().startswith("@")

        return False
|
||||
|
||||
|
||||
class GoogleDocstring(Docstring):
    """Parse Google-style docstrings (indented ``Args:``/``Returns:`` sections)."""

    re_type = SphinxDocstring.re_type

    re_xref = SphinxDocstring.re_xref

    re_container_type = r"""
        (?:{type}|{xref})             # a container type
        [\(\[] [^\n]+ [\)\]]          # with the contents of the container
    """.format(
        type=re_type, xref=re_xref
    )

    # One or more (possibly cross-referenced) types joined by
    # "of", "or" or commas.
    re_multiple_type = r"""
        (?:{container_type}|{type}|{xref})
        (?:(?:\s+(?:of|or)\s+|\s*,\s*)(?:{container_type}|{type}|{xref}))*
    """.format(
        type=re_type, xref=re_xref, container_type=re_container_type
    )

    # Section header followed by the (greedy) remainder of the docstring;
    # _parse_section() later cuts the section off by indentation.
    _re_section_template = r"""
        ^([ ]*)   {0} \s*:   \s*$     # Google parameter header
        (  .* )                       # section
        """

    re_param_section = re.compile(
        _re_section_template.format(r"(?:Args|Arguments|Parameters)"),
        re.X | re.S | re.M,
    )

    re_keyword_param_section = re.compile(
        _re_section_template.format(r"Keyword\s(?:Args|Arguments|Parameters)"),
        re.X | re.S | re.M,
    )

    re_param_line = re.compile(
        r"""
        \s*  \*{{0,2}}(\w+)             # identifier potentially with asterisks
        \s*  ( [(]
            {type}
            (?:,\s+optional)?
            [)] )? \s* :                # optional type declaration
        \s*  (.*)                       # beginning of optional description
    """.format(
            type=re_multiple_type
        ),
        re.X | re.S | re.M,
    )

    re_raise_section = re.compile(
        _re_section_template.format(r"Raises"), re.X | re.S | re.M
    )

    re_raise_line = re.compile(
        r"""
        \s*  ({type}) \s* :             # identifier
        \s*  (.*)                       # beginning of optional description
    """.format(
            type=re_multiple_type
        ),
        re.X | re.S | re.M,
    )

    re_returns_section = re.compile(
        _re_section_template.format(r"Returns?"), re.X | re.S | re.M
    )

    re_returns_line = re.compile(
        r"""
        \s* ({type}:)?                  # identifier
        \s* (.*)                        # beginning of description
    """.format(
            type=re_multiple_type
        ),
        re.X | re.S | re.M,
    )

    re_property_returns_line = re.compile(
        r"""
        ^{type}:                        # indentifier
        \s* (.*)                        # Summary line / description
    """.format(
            type=re_multiple_type
        ),
        re.X | re.S | re.M,
    )

    re_yields_section = re.compile(
        _re_section_template.format(r"Yields?"), re.X | re.S | re.M
    )

    re_yields_line = re_returns_line

    supports_yields = True

    def is_valid(self):
        # Any recognized section (or a property-style summary line)
        # marks the docstring as Google-style.
        return bool(
            self.re_param_section.search(self.doc)
            or self.re_raise_section.search(self.doc)
            or self.re_returns_section.search(self.doc)
            or self.re_yields_section.search(self.doc)
            or self.re_property_returns_line.search(self._first_line())
        )

    def has_params(self):
        if not self.doc:
            return False

        return self.re_param_section.search(self.doc) is not None

    def has_returns(self):
        # True when at least one Returns entry has a description.
        if not self.doc:
            return False

        entries = self._parse_section(self.re_returns_section)
        for entry in entries:
            match = self.re_returns_line.match(entry)
            if not match:
                continue

            return_desc = match.group(2)
            if return_desc:
                return True

        return False

    def has_rtype(self):
        # True when at least one Returns entry declares a type.
        if not self.doc:
            return False

        entries = self._parse_section(self.re_returns_section)
        for entry in entries:
            match = self.re_returns_line.match(entry)
            if not match:
                continue

            return_type = match.group(1)
            if return_type:
                return True

        return False

    def has_property_returns(self):
        # The summary line is the return doc,
        # so the first line must not be a known directive.
        first_line = self._first_line()
        return not bool(
            self.re_param_section.search(first_line)
            or self.re_raise_section.search(first_line)
            or self.re_returns_section.search(first_line)
            or self.re_yields_section.search(first_line)
        )

    def has_property_type(self):
        if not self.doc:
            return False

        return bool(self.re_property_returns_line.match(self._first_line()))

    def has_yields(self):
        # True when at least one Yields entry has a description.
        if not self.doc:
            return False

        entries = self._parse_section(self.re_yields_section)
        for entry in entries:
            match = self.re_yields_line.match(entry)
            if not match:
                continue

            yield_desc = match.group(2)
            if yield_desc:
                return True

        return False

    def has_yields_type(self):
        # True when at least one Yields entry declares a type.
        if not self.doc:
            return False

        entries = self._parse_section(self.re_yields_section)
        for entry in entries:
            match = self.re_yields_line.match(entry)
            if not match:
                continue

            yield_type = match.group(1)
            if yield_type:
                return True

        return False

    def exceptions(self):
        """Return the set of exception-type names documented in Raises."""
        types = set()

        entries = self._parse_section(self.re_raise_section)
        for entry in entries:
            match = self.re_raise_line.match(entry)
            if not match:
                continue

            exc_type = match.group(1)
            exc_desc = match.group(2)
            if exc_desc:
                types.update(_split_multiple_exc_types(exc_type))

        return types

    def match_param_docs(self):
        """Return (documented param names, type-annotated param names)."""
        params_with_doc = set()
        params_with_type = set()

        entries = self._parse_section(self.re_param_section)
        entries.extend(self._parse_section(self.re_keyword_param_section))
        for entry in entries:
            match = self.re_param_line.match(entry)
            if not match:
                continue

            param_name = match.group(1)
            param_type = match.group(2)
            param_desc = match.group(3)
            if param_type:
                params_with_type.add(param_name)

            if param_desc:
                params_with_doc.add(param_name)

        return params_with_doc, params_with_type

    def _first_line(self):
        return self.doc.lstrip().split("\n", 1)[0]

    @staticmethod
    def min_section_indent(section_match):
        # Section content must be indented deeper than its header.
        return len(section_match.group(1)) + 1

    @staticmethod
    def _is_section_header(_):
        # Google parsing does not need to detect section headers,
        # because it works off of indentation level only
        return False

    def _parse_section(self, section_re):
        """Split a section's text into per-entry strings by indentation."""
        section_match = section_re.search(self.doc)
        if section_match is None:
            return []

        min_indentation = self.min_section_indent(section_match)

        entries = []
        entry = []
        is_first = True
        for line in section_match.group(2).splitlines():
            if not line.strip():
                continue
            indentation = space_indentation(line)
            if indentation < min_indentation:
                # Dedent past the section => section is over.
                break

            # The first line after the header defines the minimum
            # indentation.
            if is_first:
                min_indentation = indentation
                is_first = False

            if indentation == min_indentation:
                if self._is_section_header(line):
                    break
                # Lines with minimum indentation must contain the beginning
                # of a new parameter documentation.
                if entry:
                    entries.append("\n".join(entry))
                    entry = []

            entry.append(line)

        if entry:
            entries.append("\n".join(entry))

        return entries
|
||||
|
||||
|
||||
class NumpyDocstring(GoogleDocstring):
    """Parse numpydoc-style docstrings (underlined ``Parameters`` sections).

    Reuses the Google parser; only the section-header and per-line
    patterns differ (numpy headers are underlined with ``-``/``=`` and
    entry descriptions start on the line after ``name : type``).
    """

    _re_section_template = r"""
        ^([ ]*)   {0}   \s*?$          # Numpy parameters header
        \s*     [-=]+   \s*?$          # underline
        (  .* )                        # section
    """

    re_param_section = re.compile(
        _re_section_template.format(r"(?:Args|Arguments|Parameters)"),
        re.X | re.S | re.M,
    )

    re_param_line = re.compile(
        r"""
        \s*  (\w+)                              # identifier
        \s*  :
        \s*  (?:({type})(?:,\s+optional)?)?     # optional type declaration
        \n                                      # description starts on a new line
        \s* (.*)                                # description
    """.format(
            type=GoogleDocstring.re_multiple_type
        ),
        re.X | re.S,
    )

    re_raise_section = re.compile(
        _re_section_template.format(r"Raises"), re.X | re.S | re.M
    )

    re_raise_line = re.compile(
        r"""
        \s* ({type})$   # type declaration
        \s* (.*)        # optional description
    """.format(
            type=GoogleDocstring.re_type
        ),
        re.X | re.S | re.M,
    )

    re_returns_section = re.compile(
        _re_section_template.format(r"Returns?"), re.X | re.S | re.M
    )

    re_returns_line = re.compile(
        r"""
        \s* (?:\w+\s+:\s+)?             # optional name
        ({type})$                       # type declaration
        \s* (.*)                        # optional description
    """.format(
            type=GoogleDocstring.re_multiple_type
        ),
        re.X | re.S | re.M,
    )

    re_yields_section = re.compile(
        _re_section_template.format(r"Yields?"), re.X | re.S | re.M
    )

    re_yields_line = re_returns_line

    supports_yields = True

    @staticmethod
    def min_section_indent(section_match):
        # Unlike Google style, numpy section content may sit at the same
        # indentation as the (underlined) header.
        return len(section_match.group(1))

    @staticmethod
    def _is_section_header(line):
        # A run of dashes is the underline of the *next* section header.
        return bool(re.match(r"\s*-+$", line))
|
||||
|
||||
|
||||
# Registry used by docstringify() to resolve the configured default style.
DOCSTRING_TYPES = {
    "sphinx": SphinxDocstring,
    "epytext": EpytextDocstring,
    "google": GoogleDocstring,
    "numpy": NumpyDocstring,
    "default": Docstring,
}
"""A map of the name of the docstring type to its class.

:type: dict(str, type)
"""
|
||||
@@ -0,0 +1,71 @@
|
||||
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checker for deprecated builtins."""
|
||||
import astroid
|
||||
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.checkers.utils import check_messages
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
|
||||
# Builtins that are blacklisted by default.
BAD_FUNCTIONS = ["map", "filter"]
# Some hints regarding the use of bad builtins.
_COMPREHENSION_HINT = "Using a list comprehension can be clearer."
BUILTIN_HINTS = {
    "map": _COMPREHENSION_HINT,
    "filter": _COMPREHENSION_HINT,
}
|
||||
|
||||
|
||||
class BadBuiltinChecker(BaseChecker):
    """Flag calls to blacklisted builtin functions (by default map/filter)."""

    __implements__ = (IAstroidChecker,)
    name = "deprecated_builtins"
    msgs = {
        "W0141": (
            "Used builtin function %s",
            "bad-builtin",
            "Used when a black listed builtin function is used (see the "
            "bad-function option). Usual black listed functions are the ones "
            "like map, or filter , where Python offers now some cleaner "
            "alternative like list comprehension.",
        )
    }

    options = (
        (
            "bad-functions",
            {
                "default": BAD_FUNCTIONS,
                "type": "csv",
                "metavar": "<builtin function names>",
                "help": "List of builtins function names that should not be "
                "used, separated by a comma",
            },
        ),
    )

    @check_messages("bad-builtin")
    def visit_call(self, node):
        """Emit bad-builtin for calls to blacklisted, un-shadowed names."""
        if isinstance(node.func, astroid.Name):
            name = node.func.name
            # ignore the name if it's not a builtin (i.e. not defined in the
            # locals nor globals scope)
            if not (name in node.frame() or name in node.root()):
                if name in self.config.bad_functions:
                    # Append the hint (if any) to the message arguments.
                    hint = BUILTIN_HINTS.get(name)
                    if hint:
                        args = "%r. %s" % (name, hint)
                    else:
                        args = repr(name)
                    self.add_message("bad-builtin", node=node, args=args)
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto register this checker.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    checker = BadBuiltinChecker(linter)
    linter.register_checker(checker)
|
||||
@@ -0,0 +1,74 @@
|
||||
# Copyright (c) 2019-2020 Tyler Thieding <tyler@thieding.com>
|
||||
# Copyright (c) 2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Looks for try/except statements with too much code in the try clause."""
|
||||
|
||||
from astroid.node_classes import For, If, While, With
|
||||
|
||||
from pylint import checkers, interfaces
|
||||
|
||||
|
||||
class BroadTryClauseChecker(checkers.BaseChecker):
    """Checks for try clauses with too many lines.

    According to PEP 8, ``try`` clauses shall contain the absolute minimum
    amount of code. This checker enforces a maximum number of statements within
    ``try`` clauses.

    """

    __implements__ = interfaces.IAstroidChecker

    # configuration section name
    name = "broad_try_clause"
    msgs = {
        "W0717": (
            "%s",
            "too-many-try-statements",
            "Try clause contains too many statements.",
        )
    }

    priority = -2
    options = (
        (
            "max-try-statements",
            {
                "default": 1,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of statements allowed in a try clause",
            },
        ),
    )

    def _count_statements(self, try_node):
        """Count statements in ``try_node.body``, recursing into
        for/if/while/with compounds (each counts as one plus its body).

        NOTE(review): other compound statements (e.g. a nested try) are
        counted as a single statement -- confirm this is intended.
        """
        statement_count = len(try_node.body)

        for body_node in try_node.body:
            if isinstance(body_node, (For, If, While, With)):
                statement_count += self._count_statements(body_node)

        return statement_count

    def visit_tryexcept(self, node):
        """Emit too-many-try-statements when the try body is too broad."""
        try_clause_statements = self._count_statements(node)
        if try_clause_statements > self.config.max_try_statements:
            msg = "try clause contains {} statements, expected at most {}".format(
                try_clause_statements, self.config.max_try_statements
            )
            self.add_message(
                "too-many-try-statements", node.lineno, node=node, args=msg
            )

    def visit_tryfinally(self, node):
        """try/finally bodies are measured the same way as try/except."""
        self.visit_tryexcept(node)
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto register this checker."""
    checker = BroadTryClauseChecker(linter)
    linter.register_checker(checker)
|
||||
@@ -0,0 +1,23 @@
|
||||
# Copyright (c) 2014-2015 Bruno Daniel <bruno.daniel@blue-yonder.com>
|
||||
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
import warnings
|
||||
|
||||
from pylint.extensions import docparams
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto register this checker.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    # This module only exists for backwards compatibility; the actual
    # checker lives in pylint.extensions.docparams.
    warnings.warn(
        "This plugin is deprecated, use pylint.extensions.docparams instead.",
        DeprecationWarning,
    )
    checker = docparams.DocstringParameterChecker(linter)
    linter.register_checker(checker)
|
||||
@@ -0,0 +1,79 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2016-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Glenn Matthews <glmatthe@cisco.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint.checkers import BaseTokenChecker
|
||||
from pylint.checkers.utils import check_messages
|
||||
from pylint.interfaces import IAstroidChecker, ITokenChecker
|
||||
|
||||
|
||||
class ElseifUsedChecker(BaseTokenChecker):
    """Checks for use of "else if" when an "elif" could be used
    """

    __implements__ = (ITokenChecker, IAstroidChecker)
    name = "else_if_used"
    msgs = {
        "R5501": (
            'Consider using "elif" instead of "else if"',
            "else-if-used",
            "Used when an else statement is immediately followed by "
            "an if statement and does not contain statements that "
            "would be unrelated to it.",
        )
    }

    def __init__(self, linter=None):
        BaseTokenChecker.__init__(self, linter)
        self._init()

    def _init(self):
        # _elifs[i] records whether the i-th "if"/"elif" token of the
        # module was spelled "elif"; _if_counter walks that list during
        # the AST visit so each astroid node maps back to its token.
        self._elifs = []
        self._if_counter = 0

    def process_tokens(self, tokens):
        # Process tokens and look for 'if' or 'elif'
        for _, token, _, _, _ in tokens:
            if token == "elif":
                self._elifs.append(True)
            elif token == "if":
                self._elifs.append(False)

    def leave_module(self, _):
        # Reset per-module state so counts don't leak between modules.
        self._init()

    def visit_ifexp(self, node):
        # Ternary expressions also produce an "if" token, so the counter
        # must advance for them too -- except inside f-strings,
        # presumably because those are not tokenized the same way
        # (NOTE(review): confirm).
        if isinstance(node.parent, astroid.FormattedValue):
            return
        self._if_counter += 1

    def visit_comprehension(self, node):
        # Each "if" clause of a comprehension consumed one "if" token.
        self._if_counter += len(node.ifs)

    @check_messages("else-if-used")
    def visit_if(self, node):
        """Flag an ``if`` that is the sole statement of an ``else`` block."""
        if isinstance(node.parent, astroid.If):
            orelse = node.parent.orelse
            # current if node must directly follow an "else"
            if orelse and orelse == [node]:
                # If the corresponding token was a real "elif", the
                # construct is already in the preferred form.
                if not self._elifs[self._if_counter]:
                    self.add_message("else-if-used", node=node)
        self._if_counter += 1
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto register this checker.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    checker = ElseifUsedChecker(linter)
    linter.register_checker(checker)
|
||||
@@ -0,0 +1,75 @@
|
||||
# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
|
||||
# Copyright (c) 2017-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Looks for comparisons to empty string."""
|
||||
|
||||
import itertools
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint import checkers, interfaces
|
||||
from pylint.checkers import utils
|
||||
|
||||
|
||||
def _is_constant_zero(node):
    """Return True if *node* is a constant literal equal to 0."""
    if not isinstance(node, astroid.Const):
        return False
    return node.value == 0
|
||||
|
||||
|
||||
class CompareToZeroChecker(checkers.BaseChecker):
    """Checks for comparisons to zero.
    Most of the time you should use the fact that integers with a value of 0 are false.
    An exception to this rule is when 0 is allowed in the program and has a
    different meaning than None!
    """

    __implements__ = (interfaces.IAstroidChecker,)

    # configuration section name
    name = "compare-to-zero"
    msgs = {
        "C2001": (
            "Avoid comparisons to zero",
            "compare-to-zero",
            "Used when Pylint detects comparison to a 0 constant.",
        )
    }

    priority = -2
    options = ()

    @utils.check_messages("compare-to-zero")
    def visit_compare(self, node):
        """Emit compare-to-zero for ``x == 0`` / ``0 != x`` style tests."""
        _operators = ["!=", "==", "is not", "is"]
        # note: astroid.Compare has the left most operand in node.left
        # while the rest are a list of tuples in node.ops
        # the format of the tuple is ('compare operator sign', node)
        # here we squash everything into `ops` to make it easier for processing later
        ops = [("", node.left)]
        ops.extend(node.ops)
        ops = list(itertools.chain(*ops))

        # Slide a 3-wide window over the flattened
        # ["", left, op1, operand1, op2, operand2, ...] sequence so that
        # chained comparisons (a == b == 0) are also covered.
        for ops_idx in range(len(ops) - 2):
            op_1 = ops[ops_idx]
            op_2 = ops[ops_idx + 1]
            op_3 = ops[ops_idx + 2]
            error_detected = False

            # 0 ?? X
            if _is_constant_zero(op_1) and op_2 in _operators:
                error_detected = True
            # X ?? 0
            elif op_2 in _operators and _is_constant_zero(op_3):
                error_detected = True

            if error_detected:
                self.add_message("compare-to-zero", node=node)
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto register this checker."""
    checker = CompareToZeroChecker(linter)
    linter.register_checker(checker)
|
||||
539
venv/lib/python3.8/site-packages/pylint/extensions/docparams.py
Normal file
539
venv/lib/python3.8/site-packages/pylint/extensions/docparams.py
Normal file
@@ -0,0 +1,539 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2014-2015 Bruno Daniel <bruno.daniel@blue-yonder.com>
|
||||
# Copyright (c) 2015-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016-2019 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
|
||||
# Copyright (c) 2016 Glenn Matthews <glmatthe@cisco.com>
|
||||
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2017 John Paraskevopoulos <io.paraskev@gmail.com>
|
||||
# Copyright (c) 2018, 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2018 Jim Robertson <jrobertson98atx@gmail.com>
|
||||
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Adam Dangoor <adamdangoor@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Pylint plugin for checking in Sphinx, Google, or Numpy style docstrings
|
||||
"""
|
||||
import astroid
|
||||
|
||||
import pylint.extensions._check_docs_utils as utils
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.checkers import utils as checker_utils
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
|
||||
|
||||
class DocstringParameterChecker(BaseChecker):
    """Checker for Sphinx, Google, or Numpy style docstrings

    * Check that all function, method and constructor parameters are mentioned
      in the params and types part of the docstring. Constructor parameters
      can be documented in either the class docstring or ``__init__`` docstring,
      but not both.
    * Check that there are no naming inconsistencies between the signature and
      the documentation, i.e. also report documented parameters that are missing
      in the signature. This is important to find cases where parameters are
      renamed only in the code, not in the documentation.
    * Check that all explicitly raised exceptions in a function are documented
      in the function docstring. Caught exceptions are ignored.

    Activate this checker by adding the line::

        load-plugins=pylint.extensions.docparams

    to the ``MASTER`` section of your ``.pylintrc``.

    :param linter: linter object
    :type linter: :class:`pylint.lint.PyLinter`
    """

    __implements__ = IAstroidChecker

    name = "parameter_documentation"
    msgs = {
        "W9005": (
            '"%s" has constructor parameters documented in class and __init__',
            "multiple-constructor-doc",
            "Please remove parameter declarations in the class or constructor.",
        ),
        "W9006": (
            '"%s" not documented as being raised',
            "missing-raises-doc",
            "Please document exceptions for all raised exception types.",
        ),
        "W9008": (
            "Redundant returns documentation",
            "redundant-returns-doc",
            "Please remove the return/rtype documentation from this method.",
        ),
        "W9010": (
            "Redundant yields documentation",
            "redundant-yields-doc",
            "Please remove the yields documentation from this method.",
        ),
        "W9011": (
            "Missing return documentation",
            "missing-return-doc",
            "Please add documentation about what this method returns.",
            {"old_names": [("W9007", "old-missing-returns-doc")]},
        ),
        "W9012": (
            "Missing return type documentation",
            "missing-return-type-doc",
            "Please document the type returned by this method.",
            # we can't use the same old_name for two different warnings
            # {'old_names': [('W9007', 'missing-returns-doc')]},
        ),
        "W9013": (
            "Missing yield documentation",
            "missing-yield-doc",
            "Please add documentation about what this generator yields.",
            {"old_names": [("W9009", "old-missing-yields-doc")]},
        ),
        "W9014": (
            "Missing yield type documentation",
            "missing-yield-type-doc",
            "Please document the type yielded by this method.",
            # we can't use the same old_name for two different warnings
            # {'old_names': [('W9009', 'missing-yields-doc')]},
        ),
        "W9015": (
            '"%s" missing in parameter documentation',
            "missing-param-doc",
            "Please add parameter declarations for all parameters.",
            {"old_names": [("W9003", "old-missing-param-doc")]},
        ),
        "W9016": (
            '"%s" missing in parameter type documentation',
            "missing-type-doc",
            "Please add parameter type declarations for all parameters.",
            {"old_names": [("W9004", "old-missing-type-doc")]},
        ),
        "W9017": (
            '"%s" differing in parameter documentation',
            "differing-param-doc",
            "Please check parameter names in declarations.",
        ),
        "W9018": (
            '"%s" differing in parameter type documentation',
            "differing-type-doc",
            "Please check parameter names in type declarations.",
        ),
    }

    options = (
        (
            "accept-no-param-doc",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Whether to accept totally missing parameter "
                "documentation in the docstring of a function that has "
                "parameters.",
            },
        ),
        (
            "accept-no-raise-doc",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Whether to accept totally missing raises "
                "documentation in the docstring of a function that "
                "raises an exception.",
            },
        ),
        (
            "accept-no-return-doc",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Whether to accept totally missing return "
                "documentation in the docstring of a function that "
                "returns a statement.",
            },
        ),
        (
            "accept-no-yields-doc",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Whether to accept totally missing yields "
                "documentation in the docstring of a generator.",
            },
        ),
        (
            "default-docstring-type",
            {
                "type": "choice",
                "default": "default",
                "choices": list(utils.DOCSTRING_TYPES),
                "help": "If the docstring type cannot be guessed "
                "the specified docstring type will be used.",
            },
        ),
    )

    priority = -2

    # Methods whose parameters may alternatively be documented on the class.
    constructor_names = {"__init__", "__new__"}
    # Implicit parameters that never require a docstring entry.
    not_needed_param_in_docstring = {"self", "cls"}
|
||||
|
||||
def visit_functiondef(self, node):
    """Called for function and method definitions (def).

    :param node: Node for a function or method definition in the AST
    :type node: :class:`astroid.scoped_nodes.Function`
    """
    doc = utils.docstringify(node.doc, self.config.default_docstring_type)
    # Run the three independent docstring checks in order: params,
    # returns, then yields.
    for check in (
        self.check_functiondef_params,
        self.check_functiondef_returns,
        self.check_functiondef_yields,
    ):
        check(node, doc)
|
||||
|
||||
def check_functiondef_params(self, node, node_doc):
    """Check parameter documentation for a function or method.

    For constructors, parameters may be documented on either the class
    docstring or the ``__init__``/``__new__`` docstring, so each side
    tolerates a missing parameter section whenever the other documents
    the parameters.
    """
    node_allow_no_param = None
    if node.name in self.constructor_names:
        class_node = checker_utils.node_frame_class(node)
        if class_node is not None:
            class_doc = utils.docstringify(
                class_node.doc, self.config.default_docstring_type
            )
            self.check_single_constructor_params(class_doc, node_doc, class_node)

            # __init__ or class docstrings can have no parameters documented
            # as long as the other documents them.
            # NOTE: the "crossed" assignments below are intentional — the
            # class docstring excuses the constructor docstring and vice versa.
            node_allow_no_param = (
                class_doc.has_params()
                or class_doc.params_documented_elsewhere()
                or None
            )
            class_allow_no_param = (
                node_doc.has_params()
                or node_doc.params_documented_elsewhere()
                or None
            )

            self.check_arguments_in_docstring(
                class_doc, node.args, class_node, class_allow_no_param
            )

    self.check_arguments_in_docstring(
        node_doc, node.args, node, node_allow_no_param
    )
|
||||
|
||||
def check_functiondef_returns(self, node, node_doc):
    """Emit redundant-returns-doc when a return is documented but the
    function never returns a value."""
    # Generators are checked by the yields logic when the docstring style
    # supports yields sections; abstract methods are exempt.
    if (node.is_generator() and not node_doc.supports_yields) or node.is_abstract():
        return

    if not (node_doc.has_returns() or node_doc.has_rtype()):
        return

    returns_value = any(
        utils.returns_something(ret_node)
        for ret_node in node.nodes_of_class(astroid.Return)
    )
    if not returns_value:
        self.add_message("redundant-returns-doc", node=node)
|
||||
|
||||
def check_functiondef_yields(self, node, node_doc):
    """Emit redundant-yields-doc when yields are documented on a
    non-generator function."""
    if not node_doc.supports_yields or node.is_abstract():
        return

    documents_yield = node_doc.has_yields() or node_doc.has_yields_type()
    if documents_yield and not node.is_generator():
        self.add_message("redundant-yields-doc", node=node)
|
||||
|
||||
def visit_raise(self, node):
    """Check that every exception type this raise may produce is documented
    in the enclosing function's docstring."""
    func_node = node.frame()
    if not isinstance(func_node, astroid.FunctionDef):
        return

    expected_excs = utils.possible_exc_types(node)

    if not expected_excs:
        return

    if not func_node.doc:
        # If this is a property setter,
        # the property should have the docstring instead.
        property_ = utils.get_setters_property(func_node)
        if property_:
            func_node = property_

    doc = utils.docstringify(func_node.doc, self.config.default_docstring_type)
    if not doc.is_valid():
        # A docstring exists but is in no recognized style: report only if
        # configuration does not accept missing raises documentation.
        if doc.doc:
            self._handle_no_raise_doc(expected_excs, func_node)
        return

    found_excs_full_names = doc.exceptions()

    # Extract just the class name, e.g. "error" from "re.error"
    found_excs_class_names = {exc.split(".")[-1] for exc in found_excs_full_names}
    missing_excs = expected_excs - found_excs_class_names
    self._add_raise_message(missing_excs, func_node)
|
||||
|
||||
def visit_return(self, node):
    """Check that a value-returning statement has return (and return-type)
    documentation in its enclosing function's docstring."""
    if not utils.returns_something(node):
        return

    func_node = node.frame()
    if not isinstance(func_node, astroid.FunctionDef):
        return

    doc = utils.docstringify(func_node.doc, self.config.default_docstring_type)
    if not doc.is_valid() and self.config.accept_no_return_doc:
        return

    is_property = checker_utils.decorated_with_property(func_node)

    has_return_doc = doc.has_returns() or (is_property and doc.has_property_returns())
    if not has_return_doc:
        self.add_message("missing-return-doc", node=func_node)

    # A return annotation in the signature counts as type documentation.
    if func_node.returns:
        return

    has_type_doc = doc.has_rtype() or (is_property and doc.has_property_type())
    if not has_type_doc:
        self.add_message("missing-return-type-doc", node=func_node)
|
||||
|
||||
def visit_yield(self, node):
    """Check that a yield is documented; docstring styles without a yields
    section may document it via the returns section instead."""
    func_node = node.frame()
    if not isinstance(func_node, astroid.FunctionDef):
        return

    doc = utils.docstringify(func_node.doc, self.config.default_docstring_type)
    if not doc.is_valid() and self.config.accept_no_yields_doc:
        return

    if doc.supports_yields:
        has_yield_doc = doc.has_yields()
        has_yield_type_doc = doc.has_yields_type()
    else:
        # Fall back to returns-style documentation.
        has_yield_doc = doc.has_returns()
        has_yield_type_doc = doc.has_rtype()

    if not has_yield_doc:
        self.add_message("missing-yield-doc", node=func_node)

    if not (has_yield_type_doc or func_node.returns):
        self.add_message("missing-yield-type-doc", node=func_node)
|
||||
|
||||
def visit_yieldfrom(self, node):
    # ``yield from`` is documented exactly like a plain ``yield``.
    self.visit_yield(node)
|
||||
|
||||
def _compare_missing_args(
    self,
    found_argument_names,
    message_id,
    not_needed_names,
    expected_argument_names,
    warning_node,
):
    """Report expected arguments that the docstring fails to document.

    :param set found_argument_names: argument names found in the docstring

    :param str message_id: pylint message id

    :param not_needed_names: names that may be omitted
    :type not_needed_names: set of str

    :param set expected_argument_names: Expected argument names
    :param NodeNG warning_node: The node to be analyzed
    """
    missing = expected_argument_names - found_argument_names - not_needed_names
    if not missing:
        return
    self.add_message(
        message_id,
        args=(", ".join(sorted(missing)),),
        node=warning_node,
    )
|
||||
|
||||
def _compare_different_args(
    self,
    found_argument_names,
    message_id,
    not_needed_names,
    expected_argument_names,
    warning_node,
):
    """Report documented arguments that do not exist in the signature.

    :param set found_argument_names: argument names found in the docstring

    :param str message_id: pylint message id

    :param not_needed_names: names that may be omitted
    :type not_needed_names: set of str

    :param set expected_argument_names: Expected argument names
    :param NodeNG warning_node: The node to be analyzed
    """
    # Equivalent to (expected ^ found) - not_needed - expected: only names
    # present in the docs but absent from the signature can survive.
    differing = found_argument_names - expected_argument_names - not_needed_names

    if not differing:
        return
    self.add_message(
        message_id,
        args=(", ".join(sorted(differing)),),
        node=warning_node,
    )
|
||||
|
||||
def check_arguments_in_docstring(
    self, doc, arguments_node, warning_node, accept_no_param_doc=None
):
    """Check that all parameters in a function, method or class constructor
    on the one hand and the parameters mentioned in the parameter
    documentation (e.g. the Sphinx tags 'param' and 'type') on the other
    hand are consistent with each other.

    * Undocumented parameters except 'self' are noticed.
    * Undocumented parameter types except for 'self' and the ``*<args>``
      and ``**<kwargs>`` parameters are noticed.
    * Parameters mentioned in the parameter documentation that don't or no
      longer exist in the function parameter list are noticed.
    * If the text "For the parameters, see" or "For the other parameters,
      see" (ignoring additional whitespace) is mentioned in the docstring,
      missing parameter documentation is tolerated.
    * If there's no Sphinx style, Google style or NumPy style parameter
      documentation at all, i.e. ``:param`` is never mentioned etc., the
      checker assumes that the parameters are documented in another format
      and the absence is tolerated.

    :param doc: Docstring for the function, method or class.
    :type doc: :class:`Docstring`

    :param arguments_node: Arguments node for the function, method or
        class constructor.
    :type arguments_node: :class:`astroid.scoped_nodes.Arguments`

    :param warning_node: The node to assign the warnings to
    :type warning_node: :class:`astroid.scoped_nodes.Node`

    :param accept_no_param_doc: Whether or not to allow no parameters
        to be documented.
        If None then this value is read from the configuration.
    :type accept_no_param_doc: bool or None
    """
    # Tolerate missing param or type declarations if there is a link to
    # another method carrying the same name.
    if not doc.doc:
        return

    if accept_no_param_doc is None:
        accept_no_param_doc = self.config.accept_no_param_doc
    tolerate_missing_params = doc.params_documented_elsewhere()

    # Collect the function arguments.
    expected_argument_names = {arg.name for arg in arguments_node.args}
    expected_argument_names.update(arg.name for arg in arguments_node.kwonlyargs)
    not_needed_type_in_docstring = self.not_needed_param_in_docstring.copy()

    # *args / **kwargs must be described but never need a type entry.
    if arguments_node.vararg is not None:
        expected_argument_names.add(arguments_node.vararg)
        not_needed_type_in_docstring.add(arguments_node.vararg)
    if arguments_node.kwarg is not None:
        expected_argument_names.add(arguments_node.kwarg)
        not_needed_type_in_docstring.add(arguments_node.kwarg)
    params_with_doc, params_with_type = doc.match_param_docs()

    # Tolerate no parameter documentation at all.
    if not params_with_doc and not params_with_type and accept_no_param_doc:
        tolerate_missing_params = True

    if not tolerate_missing_params:
        self._compare_missing_args(
            params_with_doc,
            "missing-param-doc",
            self.not_needed_param_in_docstring,
            expected_argument_names,
            warning_node,
        )

    # A type annotation in the signature counts as type documentation.
    for index, arg_name in enumerate(arguments_node.args):
        if arguments_node.annotations[index]:
            params_with_type.add(arg_name.name)
    for index, arg_name in enumerate(arguments_node.kwonlyargs):
        if arguments_node.kwonlyargs_annotations[index]:
            params_with_type.add(arg_name.name)

    if not tolerate_missing_params:
        self._compare_missing_args(
            params_with_type,
            "missing-type-doc",
            not_needed_type_in_docstring,
            expected_argument_names,
            warning_node,
        )

    self._compare_different_args(
        params_with_doc,
        "differing-param-doc",
        self.not_needed_param_in_docstring,
        expected_argument_names,
        warning_node,
    )
    self._compare_different_args(
        params_with_type,
        "differing-type-doc",
        not_needed_type_in_docstring,
        expected_argument_names,
        warning_node,
    )
|
||||
|
||||
def check_single_constructor_params(self, class_doc, init_doc, class_node):
    """Warn when constructor parameters are documented on both the class
    docstring and the __init__ docstring."""
    documented_twice = class_doc.has_params() and init_doc.has_params()
    if documented_twice:
        self.add_message(
            "multiple-constructor-doc", args=(class_node.name,), node=class_node
        )
|
||||
|
||||
def _handle_no_raise_doc(self, excs, node):
    """Report missing raises documentation unless configuration accepts
    its total absence."""
    if not self.config.accept_no_raise_doc:
        self._add_raise_message(excs, node)
|
||||
|
||||
def _add_raise_message(self, missing_excs, node):
    """
    Adds a message on :param:`node` for the missing exception type.

    :param missing_excs: A list of missing exception types.
    :type missing_excs: set(str)

    :param node: The node show the message on.
    :type node: astroid.node_classes.NodeNG
    """
    if node.is_abstract():
        # Abstract methods are expected to raise NotImplementedError, so it
        # never needs documenting. set.discard is the idiomatic equivalent
        # of try/remove/except KeyError: pass.
        missing_excs.discard("NotImplementedError")

    if not missing_excs:
        return

    self.add_message(
        "missing-raises-doc", args=(", ".join(sorted(missing_excs)),), node=node
    )
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto register this checker.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    checker = DocstringParameterChecker(linter)
    linter.register_checker(checker)
|
||||
@@ -0,0 +1,91 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2016 Luis Escobar <lescobar@vauxoo.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
import linecache
|
||||
|
||||
from pylint import checkers
|
||||
from pylint.checkers.utils import check_messages
|
||||
from pylint.interfaces import HIGH, IAstroidChecker
|
||||
|
||||
|
||||
class DocStringStyleChecker(checkers.BaseChecker):
    """Checks format of docstrings based on PEP 0257"""

    __implements__ = IAstroidChecker
    name = "docstyle"

    msgs = {
        "C0198": (
            'Bad docstring quotes in %s, expected """, given %s',
            "bad-docstring-quotes",
            "Used when a docstring does not have triple double quotes.",
        ),
        "C0199": (
            "First line empty in %s docstring",
            "docstring-first-line-empty",
            "Used when a blank line is found at the beginning of a docstring.",
        ),
    }

    @check_messages("docstring-first-line-empty", "bad-docstring-quotes")
    def visit_module(self, node):
        self._check_docstring("module", node)

    def visit_classdef(self, node):
        self._check_docstring("class", node)

    def visit_functiondef(self, node):
        ftype = "method" if node.is_method() else "function"
        self._check_docstring(ftype, node)

    visit_asyncfunctiondef = visit_functiondef

    def _check_docstring(self, node_type, node):
        """Check *node*'s docstring for a leading blank line and for
        PEP 257 triple-double-quote style.

        :param str node_type: kind of node ("module", "class", ...) used in
            the emitted message
        :param node: the astroid node carrying the docstring
        """
        docstring = node.doc
        if docstring and docstring[0] == "\n":
            self.add_message(
                "docstring-first-line-empty",
                node=node,
                args=(node_type,),
                confidence=HIGH,
            )

        # Use "linecache", instead of node.as_string(), because the latter
        # loses the original form of the docstrings.
        if docstring:
            lineno = node.fromlineno + 1
            line = linecache.getline(node.root().file, lineno).lstrip()
            if not line:
                return
            # str.startswith is clearer than ``line.find('"""') == 0``.
            if line.startswith('"""'):
                return
            if "'''" in line:
                quotes = "'''"
            elif line.startswith(('"', "'")):
                # Single or double quotes, but not the triple-double form.
                quotes = line[0]
            else:
                quotes = False
            if quotes:
                self.add_message(
                    "bad-docstring-quotes",
                    node=node,
                    args=(node_type, quotes),
                    confidence=HIGH,
                )
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto register this checker.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    checker = DocStringStyleChecker(linter)
    linter.register_checker(checker)
|
||||
@@ -0,0 +1,75 @@
|
||||
# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
|
||||
# Copyright (c) 2017-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Looks for comparisons to empty string."""
|
||||
|
||||
import itertools
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint import checkers, interfaces
|
||||
from pylint.checkers import utils
|
||||
|
||||
|
||||
def _is_constant_empty_str(node):
    """Return True if *node* is a constant equal to the empty string."""
    if not isinstance(node, astroid.Const):
        return False
    return node.value == ""
|
||||
|
||||
|
||||
class CompareToEmptyStringChecker(checkers.BaseChecker):
    """Checks for comparisons to empty string.
    Most of the times you should use the fact that empty strings are false.
    An exception to this rule is when an empty string value is allowed in the program
    and has a different meaning than None!
    """

    __implements__ = (interfaces.IAstroidChecker,)

    # configuration section name
    name = "compare-to-empty-string"
    msgs = {
        "C1901": (
            "Avoid comparisons to empty string",
            "compare-to-empty-string",
            "Used when Pylint detects comparison to an empty string constant.",
        )
    }

    priority = -2
    options = ()

    @utils.check_messages("compare-to-empty-string")
    def visit_compare(self, node):
        """Emit compare-to-empty-string for ==/!=/is/is not against ""."""
        _operators = ["!=", "==", "is not", "is"]
        # note: astroid.Compare has the left most operand in node.left
        # while the rest are a list of tuples in node.ops
        # the format of the tuple is ('compare operator sign', node)
        # here we squash everything into `ops` to make it easier for processing later
        ops = [("", node.left)]
        ops.extend(node.ops)
        ops = list(itertools.chain(*ops))

        for ops_idx in range(len(ops) - 2):
            op_1 = ops[ops_idx]
            op_2 = ops[ops_idx + 1]
            op_3 = ops[ops_idx + 2]
            error_detected = False

            # "" ?? X  (op_1 is the left-hand operand of the comparison)
            if _is_constant_empty_str(op_1) and op_2 in _operators:
                error_detected = True
            # X ?? ""  (op_3 is the right-hand operand of the comparison)
            elif op_2 in _operators and _is_constant_empty_str(op_3):
                error_detected = True

            if error_detected:
                self.add_message("compare-to-empty-string", node=node)
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto register this checker."""
    checker = CompareToEmptyStringChecker(linter)
    linter.register_checker(checker)
|
||||
199
venv/lib/python3.8/site-packages/pylint/extensions/mccabe.py
Normal file
199
venv/lib/python3.8/site-packages/pylint/extensions/mccabe.py
Normal file
@@ -0,0 +1,199 @@
|
||||
# Copyright (c) 2016-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Module to add McCabe checker class for pylint. """
|
||||
|
||||
from mccabe import PathGraph as Mccabe_PathGraph
|
||||
from mccabe import PathGraphingAstVisitor as Mccabe_PathGraphingAstVisitor
|
||||
|
||||
from pylint import checkers
|
||||
from pylint.checkers.utils import check_messages
|
||||
from pylint.interfaces import HIGH, IAstroidChecker
|
||||
|
||||
|
||||
class PathGraph(Mccabe_PathGraph):
    # Thin adapter over mccabe's PathGraph: the name/entity/lineno fields
    # are unused here; pylint only needs to keep a handle on the astroid
    # node the graph was built from (see McCabeMethodChecker.visit_module).
    def __init__(self, node):
        super().__init__(name="", entity="", lineno=1)
        self.root = node  # astroid node this complexity graph describes
|
||||
|
||||
|
||||
class PathGraphingAstVisitor(Mccabe_PathGraphingAstVisitor):
    # Adapts mccabe's stdlib-ast visitor to astroid nodes: dispatch is keyed
    # on the astroid class name and synthetic "bottom" sink nodes get unique
    # string ids from a local counter.
    def __init__(self):
        super().__init__()
        self._bottom_counter = 0  # generates unique ids for synthetic sinks

    def default(self, node, *args):
        # Fallback for node types without a dedicated visit method:
        # recurse into the children.
        for child in node.get_children():
            self.dispatch(child, *args)

    def dispatch(self, node, *args):
        """Dispatch *node* to its ``visit<ClassName>`` method, caching the
        lookup per node class."""
        self.node = node
        klass = node.__class__
        meth = self._cache.get(klass)
        if meth is None:
            class_name = klass.__name__
            meth = getattr(self.visitor, "visit" + class_name, self.default)
            self._cache[klass] = meth
        return meth(node, *args)

    def visitFunctionDef(self, node):
        """Build a path graph for a function; nested defs become part of the
        enclosing function's graph."""
        if self.graph is not None:
            # closure
            pathnode = self._append_node(node)
            self.tail = pathnode
            self.dispatch_list(node.body)
            bottom = "%s" % self._bottom_counter
            self._bottom_counter += 1
            self.graph.connect(self.tail, bottom)
            self.graph.connect(node, bottom)
            self.tail = bottom
        else:
            # Top-level function: start a fresh graph and store it by name.
            self.graph = PathGraph(node)
            self.tail = node
            self.dispatch_list(node.body)
            self.graphs["%s%s" % (self.classname, node.name)] = self.graph
            self.reset()

    visitAsyncFunctionDef = visitFunctionDef

    def visitSimpleStatement(self, node):
        # Every simple statement contributes one node to the path graph.
        self._append_node(node)

    visitAssert = visitAssign = visitAugAssign = visitDelete = visitPrint = (
        visitRaise
    ) = visitYield = visitImport = visitCall = visitSubscript = visitPass = (
        visitContinue
    ) = visitBreak = visitGlobal = visitReturn = visitExpr = visitAwait = visitSimpleStatement

    def visitWith(self, node):
        # ``with`` adds a node, then its body is walked linearly.
        self._append_node(node)
        self.dispatch_list(node.body)

    visitAsyncWith = visitWith

    def _append_node(self, node):
        # No tail means we are outside any graph (e.g. module level): ignore.
        if not self.tail:
            return None
        self.graph.connect(self.tail, node)
        self.tail = node
        return node

    def _subgraph(self, node, name, extra_blocks=()):
        """create the subgraphs representing any `if` and `for` statements"""
        if self.graph is None:
            # global loop
            self.graph = PathGraph(node)
            self._subgraph_parse(node, node, extra_blocks)
            self.graphs["%s%s" % (self.classname, name)] = self.graph
            self.reset()
        else:
            self._append_node(node)
            self._subgraph_parse(node, node, extra_blocks)

    def _subgraph_parse(self, node, pathnode, extra_blocks):
        """parse the body and any `else` block of `if` and `for` statements"""
        # Each branch restarts from the statement node; the loose end of
        # every branch is later joined to a common synthetic bottom node.
        loose_ends = []
        self.tail = node
        self.dispatch_list(node.body)
        loose_ends.append(self.tail)
        for extra in extra_blocks:
            self.tail = node
            self.dispatch_list(extra.body)
            loose_ends.append(self.tail)
        if node.orelse:
            self.tail = node
            self.dispatch_list(node.orelse)
            loose_ends.append(self.tail)
        else:
            loose_ends.append(node)
        if node:
            bottom = "%s" % self._bottom_counter
            self._bottom_counter += 1
            for end in loose_ends:
                self.graph.connect(end, bottom)
            self.tail = bottom
||||
|
||||
|
||||
class McCabeMethodChecker(checkers.BaseChecker):
    """Checks McCabe complexity cyclomatic threshold in methods and functions
    to validate a too complex code.
    """

    __implements__ = IAstroidChecker
    name = "design"

    msgs = {
        "R1260": (
            "%s is too complex. The McCabe rating is %d",
            "too-complex",
            "Used when a method or function is too complex based on "
            "McCabe Complexity Cyclomatic",
        )
    }
    options = (
        (
            "max-complexity",
            {
                "default": 10,
                "type": "int",
                "metavar": "<int>",
                "help": "McCabe complexity cyclomatic threshold",
            },
        ),
    )

    @check_messages("too-complex")
    def visit_module(self, node):
        """Visit an astroid.Module node: compute the McCabe rating of every
        graph built from it and report those above max-complexity."""
        visitor = PathGraphingAstVisitor()
        for child in node.body:
            visitor.preorder(child, visitor)

        for graph in visitor.graphs.values():
            complexity = graph.complexity()
            if complexity <= self.config.max_complexity:
                continue
            graph_root = graph.root
            if hasattr(graph_root, "name"):
                label = "'%s'" % graph_root.name
            else:
                label = "This '%s'" % graph_root.__class__.__name__.lower()
            self.add_message(
                "too-complex",
                node=graph_root,
                confidence=HIGH,
                args=(label, complexity),
            )
|
||||
|
||||
|
||||
def register(linter):
    """Auto-registration hook invoked by pylint when the plugin is loaded.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    checker = McCabeMethodChecker(linter)
    linter.register_checker(checker)
|
||||
@@ -0,0 +1,86 @@
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Looks for overlapping exceptions."""
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint import checkers, interfaces
|
||||
from pylint.checkers import utils
|
||||
from pylint.checkers.exceptions import _annotated_unpack_infer
|
||||
|
||||
|
||||
class OverlappingExceptionsChecker(checkers.BaseChecker):
    """Checks for two or more exceptions in the same exception handler
    clause that are identical or parts of the same inheritance hierarchy
    (i.e. overlapping)."""

    __implements__ = interfaces.IAstroidChecker

    name = "overlap-except"
    msgs = {
        "W0714": (
            "Overlapping exceptions (%s)",
            "overlapping-except",
            "Used when exceptions in handler overlap or are identical",
        )
    }
    priority = -2
    options = ()

    @utils.check_messages("overlapping-except")
    def visit_tryexcept(self, node):
        """Check each except clause for exceptions that overlap each other
        (identical, or one an ancestor class of another)."""
        for handler in node.handlers:
            # Bare `except:` has nothing to compare.
            if handler.type is None:
                continue
            # Boolean expressions as handler types are skipped (cannot be
            # reliably unpacked into individual exception classes).
            if isinstance(handler.type, astroid.BoolOp):
                continue
            try:
                excs = list(_annotated_unpack_infer(handler.type))
            except astroid.InferenceError:
                continue

            # (AST part, inferred exception class) pairs already seen in
            # this clause; each new exception is compared against all of them.
            handled_in_clause = []
            for part, exc in excs:
                if exc is astroid.Uninferable:
                    continue
                if isinstance(exc, astroid.Instance) and utils.inherit_from_std_ex(exc):
                    # pylint: disable=protected-access
                    exc = exc._proxied

                if not isinstance(exc, astroid.ClassDef):
                    continue

                exc_ancestors = [
                    anc for anc in exc.ancestors() if isinstance(anc, astroid.ClassDef)
                ]

                for prev_part, prev_exc in handled_in_clause:
                    prev_exc_ancestors = [
                        anc
                        for anc in prev_exc.ancestors()
                        if isinstance(anc, astroid.ClassDef)
                    ]
                    if exc == prev_exc:
                        self.add_message(
                            "overlapping-except",
                            node=handler.type,
                            args="%s and %s are the same"
                            % (prev_part.as_string(), part.as_string()),
                        )
                    elif prev_exc in exc_ancestors or exc in prev_exc_ancestors:
                        # One exception subsumes the other; report them in
                        # ancestor -> descendant order.
                        ancestor = part if exc in prev_exc_ancestors else prev_part
                        descendant = part if prev_exc in exc_ancestors else prev_part
                        self.add_message(
                            "overlapping-except",
                            node=handler.type,
                            args="%s is an ancestor class of %s"
                            % (ancestor.as_string(), descendant.as_string()),
                        )
                handled_in_clause += [(part, exc)]
|
||||
|
||||
|
||||
def register(linter):
    """Auto-registration hook invoked by pylint when the plugin is loaded."""
    checker = OverlappingExceptionsChecker(linter)
    linter.register_checker(checker)
|
||||
@@ -0,0 +1,118 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Glenn Matthews <glmatthe@cisco.com>
|
||||
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.checkers.utils import check_messages, is_none, node_type
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
|
||||
BUILTINS = "builtins"
|
||||
|
||||
|
||||
class MultipleTypesChecker(BaseChecker):
    """Checks for variable type redefinitions (NoneType excepted)

    At a function, method, class or module scope

    This rule could be improved:

    - Currently, if an attribute is set to different types in 2 methods of a
      same class, it won't be detected (see functional test)
    - One could improve the support for inference on assignment with tuples,
      ifexpr, etc. Also it would be great to have support for inference on
      str.split()
    """

    __implements__ = IAstroidChecker

    name = "multiple_types"
    msgs = {
        "R0204": (
            "Redefinition of %s type from %s to %s",
            "redefined-variable-type",
            "Used when the type of a variable changes inside a "
            "method or a function.",
        )
    }

    def visit_classdef(self, _):
        # Open a fresh assignment-tracking scope for the class body.
        self._assigns.append({})

    @check_messages("redefined-variable-type")
    def leave_classdef(self, _):
        # Close the scope and report any redefinitions found in it.
        self._check_and_add_messages()

    # Functions get the same scope handling as classes; leaving a module
    # also flushes the outermost scope.
    visit_functiondef = visit_classdef
    leave_functiondef = leave_module = leave_classdef

    def visit_module(self, _):
        # Scope stack: one dict per open scope, mapping target name to the
        # list of (assignment node, inferred type name) seen for it.
        self._assigns = [{}]

    def _check_and_add_messages(self):
        """Emit redefined-variable-type for each name in the scope being
        closed whose inferred type changed across assignments."""
        assigns = self._assigns.pop()
        for name, args in assigns.items():
            if len(args) <= 1:
                continue
            orig_node, orig_type = args[0]
            # Check if there is a type in the following nodes that would be
            # different from orig_type.
            for redef_node, redef_type in args[1:]:
                if redef_type == orig_type:
                    continue
                # if a variable is defined to several types in an if node,
                # this is not actually redefining.
                orig_parent = orig_node.parent
                redef_parent = redef_node.parent
                if isinstance(orig_parent, astroid.If):
                    if orig_parent == redef_parent:
                        if (
                            redef_node in orig_parent.orelse
                            and orig_node not in orig_parent.orelse
                        ):
                            # if/else branches of the same If: track the later
                            # assignment as the new baseline, don't report.
                            orig_node, orig_type = redef_node, redef_type
                        continue
                    elif isinstance(
                        redef_parent, astroid.If
                    ) and redef_parent in orig_parent.nodes_of_class(astroid.If):
                        # Nested If inside the original If: same reasoning.
                        orig_node, orig_type = redef_node, redef_type
                        continue
                # Strip the "builtins." prefix for readable message output.
                orig_type = orig_type.replace(BUILTINS + ".", "")
                redef_type = redef_type.replace(BUILTINS + ".", "")
                self.add_message(
                    "redefined-variable-type",
                    node=redef_node,
                    args=(name, orig_type, redef_type),
                )
                # Only the first conflicting redefinition per name is reported.
                break

    def visit_assign(self, node):
        # we don't handle multiple assignment nor slice assignment
        target = node.targets[0]
        if isinstance(target, (astroid.Tuple, astroid.Subscript)):
            return
        # ignore NoneType
        if is_none(node):
            return
        _type = node_type(node.value)
        if _type:
            self._assigns[-1].setdefault(target.as_string(), []).append(
                (node, _type.pytype())
            )
|
||||
|
||||
|
||||
def register(linter):
    """Auto-registration hook invoked by pylint when the plugin is loaded.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    checker = MultipleTypesChecker(linter)
    linter.register_checker(checker)
|
||||
193
venv/lib/python3.8/site-packages/pylint/graph.py
Normal file
193
venv/lib/python3.8/site-packages/pylint/graph.py
Normal file
@@ -0,0 +1,193 @@
|
||||
# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2019 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2019 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2020 Benjamin Graham <benwilliamgraham@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Graph manipulation utilities.
|
||||
|
||||
(dot generation adapted from pypy/translator/tool/make_dot.py)
|
||||
"""
|
||||
|
||||
import codecs
|
||||
import os
|
||||
import os.path as osp
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
|
||||
def target_info_from_filename(filename):
    """Split *filename* into (directory, base name, output format).

    e.g. ``/some/path/foo.png`` -> ``('/some/path', 'foo.png', 'png')``.
    The format is simply whatever follows the last dot in *filename*.
    """
    directory = osp.dirname(osp.abspath(filename))
    base = osp.basename(filename)
    output_format = filename.split(".")[-1]
    return directory, base, output_format
|
||||
|
||||
|
||||
class DotBackend:
    """Dot File backend.

    Accumulates graphviz "dot" source lines via :meth:`emit` and can render
    them to a file with the configured renderer binary.
    """

    def __init__(
        self,
        graphname,
        rankdir=None,
        size=None,
        ratio=None,
        charset="utf-8",
        renderer="dot",
        additional_param=None,
    ):
        # additional_param: extra top-level dot attributes, emitted as
        # "key=value" lines inside the digraph.
        if additional_param is None:
            additional_param = {}
        self.graphname = graphname
        self.renderer = renderer
        self.lines = []
        # Lazily-built full dot source; see get_source().
        self._source = None
        self.emit("digraph %s {" % normalize_node_id(graphname))
        if rankdir:
            self.emit("rankdir=%s" % rankdir)
        if ratio:
            self.emit("ratio=%s" % ratio)
        if size:
            self.emit('size="%s"' % size)
        if charset:
            assert charset.lower() in ("utf-8", "iso-8859-1", "latin1"), (
                "unsupported charset %s" % charset
            )
            self.emit('charset="%s"' % charset)
        for param in additional_param.items():
            self.emit("=".join(param))

    def get_source(self):
        """returns self._source, closing the digraph on first access"""
        if self._source is None:
            self.emit("}\n")
            self._source = "\n".join(self.lines)
            # After this point no further emit() calls are possible.
            del self.lines
        return self._source

    source = property(get_source)

    def generate(self, outputfile=None, mapfile=None):
        """Generates a graph file.

        :param str outputfile: filename and path [defaults to graphname.png]
        :param str mapfile: filename and path

        :rtype: str
        :return: a path to the generated file
        """
        name = self.graphname
        if outputfile is not None:
            _, _, target = target_info_from_filename(outputfile)
            if target != "dot":
                # Non-dot target: write dot source to a temp file first,
                # then render it with the external tool below.
                pdot, dot_sourcepath = tempfile.mkstemp(".dot", name)
                os.close(pdot)
            else:
                dot_sourcepath = outputfile
        else:
            # No output file given: default to a temporary .png target.
            target = "png"
            pdot, dot_sourcepath = tempfile.mkstemp(".dot", name)
            ppng, outputfile = tempfile.mkstemp(".png", name)
            os.close(pdot)
            os.close(ppng)
        pdot = codecs.open(dot_sourcepath, "w", encoding="utf8")
        pdot.write(self.source)
        pdot.close()
        if target != "dot":
            # Invoke the renderer (e.g. graphviz `dot`) to produce the
            # requested format; -Tcmapx additionally emits an image map.
            use_shell = sys.platform == "win32"
            if mapfile:
                subprocess.call(
                    [
                        self.renderer,
                        "-Tcmapx",
                        "-o",
                        mapfile,
                        "-T",
                        target,
                        dot_sourcepath,
                        "-o",
                        outputfile,
                    ],
                    shell=use_shell,
                )
            else:
                subprocess.call(
                    [self.renderer, "-T", target, dot_sourcepath, "-o", outputfile],
                    shell=use_shell,
                )
            os.unlink(dot_sourcepath)
        return outputfile

    def emit(self, line):
        """Adds <line> to final output."""
        self.lines.append(line)

    def emit_edge(self, name1, name2, **props):
        """emit an edge from <name1> to <name2>.
        edge properties: see http://www.graphviz.org/doc/info/attrs.html
        """
        attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
        n_from, n_to = normalize_node_id(name1), normalize_node_id(name2)
        self.emit("%s -> %s [%s];" % (n_from, n_to, ", ".join(sorted(attrs))))

    def emit_node(self, name, **props):
        """emit a node with given properties.
        node properties: see http://www.graphviz.org/doc/info/attrs.html
        """
        attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
        self.emit("%s [%s];" % (normalize_node_id(name), ", ".join(sorted(attrs))))
|
||||
|
||||
|
||||
def normalize_node_id(nid):
    """Return `nid` wrapped in double quotes, making it a valid DOT node id."""
    return f'"{nid}"'
|
||||
|
||||
|
||||
def get_cycles(graph_dict, vertices=None):
    """given a dictionary representing an ordered graph (i.e. key are vertices
    and values is a list of destination vertices representing edges), return a
    list of detected cycles
    """
    if not graph_dict:
        return ()
    cycles = []
    starts = graph_dict.keys() if vertices is None else vertices
    for start in starts:
        _get_cycles(graph_dict, [], set(), cycles, start)
    return cycles
|
||||
|
||||
|
||||
def _get_cycles(graph_dict, path, visited, result, vertice):
|
||||
"""recursive function doing the real work for get_cycles"""
|
||||
if vertice in path:
|
||||
cycle = [vertice]
|
||||
for node in path[::-1]:
|
||||
if node == vertice:
|
||||
break
|
||||
cycle.insert(0, node)
|
||||
# make a canonical representation
|
||||
start_from = min(cycle)
|
||||
index = cycle.index(start_from)
|
||||
cycle = cycle[index:] + cycle[0:index]
|
||||
# append it to result if not already in
|
||||
if cycle not in result:
|
||||
result.append(cycle)
|
||||
return
|
||||
path.append(vertice)
|
||||
try:
|
||||
for node in graph_dict[vertice]:
|
||||
# don't check already visited nodes again
|
||||
if node not in visited:
|
||||
_get_cycles(graph_dict, path, visited, result, node)
|
||||
visited.add(node)
|
||||
except KeyError:
|
||||
pass
|
||||
path.pop()
|
||||
103
venv/lib/python3.8/site-packages/pylint/interfaces.py
Normal file
103
venv/lib/python3.8/site-packages/pylint/interfaces.py
Normal file
@@ -0,0 +1,103 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2009-2010, 2012-2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2020 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Interfaces for Pylint objects"""
|
||||
from collections import namedtuple
|
||||
|
||||
# A confidence level attached to emitted warnings: `name` identifies the
# level, `description` explains how trustworthy such a warning is.
Confidence = namedtuple("Confidence", ["name", "description"])
# Warning Certainties
HIGH = Confidence("HIGH", "No false positive possible.")
INFERENCE = Confidence("INFERENCE", "Warning based on inference result.")
INFERENCE_FAILURE = Confidence(
    "INFERENCE_FAILURE", "Warning based on inference with failures."
)
UNDEFINED = Confidence("UNDEFINED", "Warning without any associated confidence level.")

# Ordered from most to least certain.
CONFIDENCE_LEVELS = [HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED]
|
||||
|
||||
|
||||
class Interface:
    """Base class for interfaces."""

    @classmethod
    def is_implemented_by(cls, instance):
        # Delegates to the module-level `implements` helper, which inspects
        # the candidate's `__implements__` attribute.
        return implements(instance, cls)
|
||||
|
||||
|
||||
def implements(obj, interface):
    """Return True if *obj* (an instance or a class) declares *interface*.

    Objects declare interfaces through their ``__implements__`` attribute,
    which may be a single interface class or a list/tuple of them.
    """
    declared = getattr(obj, "__implements__", ())
    if not isinstance(declared, (list, tuple)):
        declared = (declared,)
    return any(issubclass(candidate, interface) for candidate in declared)
|
||||
|
||||
|
||||
class IChecker(Interface):
    """This is a base interface, not designed to be used elsewhere than for
    sub interfaces definition.
    """

    def open(self):
        """Called before visiting the project (i.e. the set of modules)."""

    def close(self):
        """Called after visiting the project (i.e. the set of modules)."""
|
||||
|
||||
|
||||
class IRawChecker(IChecker):
    """Interface for checkers which need to parse the raw file."""

    def process_module(self, astroid):
        """Process a module.

        The module's content is accessible via ``astroid.stream``.
        """
|
||||
|
||||
|
||||
class ITokenChecker(IChecker):
    """Interface for checkers that need access to the token list."""

    def process_tokens(self, tokens):
        """Process a module.

        tokens is a list of all source code tokens in the file.
        """
|
||||
|
||||
|
||||
class IAstroidChecker(IChecker):
    """Interface for checkers which prefer to receive events dispatched
    according to the statement (AST node) type.
    """
|
||||
|
||||
|
||||
class IReporter(Interface):
    """A reporter collects messages and displays results encapsulated in a
    layout.
    """

    def handle_message(self, msg):
        """Handle the given message object."""

    def display_reports(self, layout):
        """Display results encapsulated in the layout tree."""
|
||||
|
||||
|
||||
__all__ = ("IRawChecker", "IAstroidChecker", "ITokenChecker", "IReporter")
|
||||
91
venv/lib/python3.8/site-packages/pylint/lint/__init__.py
Normal file
91
venv/lib/python3.8/site-packages/pylint/lint/__init__.py
Normal file
@@ -0,0 +1,91 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2008 Fabrice Douchant <Fabrice.Douchant@logilab.fr>
|
||||
# Copyright (c) 2009 Vincent
|
||||
# Copyright (c) 2009 Mads Kiilerich <mads@kiilerich.com>
|
||||
# Copyright (c) 2011-2014 Google, Inc.
|
||||
# Copyright (c) 2012 David Pursehouse <david.pursehouse@sonymobile.com>
|
||||
# Copyright (c) 2012 Kevin Jing Qiu <kevin.jing.qiu@gmail.com>
|
||||
# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
|
||||
# Copyright (c) 2012 JT Olds <jtolds@xnet5.com>
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014-2015 Michal Nowikowski <godfryd@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Alexandru Coman <fcoman@bitdefender.com>
|
||||
# Copyright (c) 2014 Daniel Harding <dharding@living180.net>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2014 Dan Goldsmith <djgoldsmith@googlemail.com>
|
||||
# Copyright (c) 2015-2016 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2015 Aru Sahni <arusahni@gmail.com>
|
||||
# Copyright (c) 2015 Steven Myint <hg@stevenmyint.com>
|
||||
# Copyright (c) 2015 Simu Toni <simutoni@gmail.com>
|
||||
# Copyright (c) 2015 Mihai Balint <balint.mihai@gmail.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
|
||||
# Copyright (c) 2016 Alan Evangelista <alanoe@linux.vnet.ibm.com>
|
||||
# Copyright (c) 2017-2019 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2017-2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2017 Daniel Miller <millerdev@gmail.com>
|
||||
# Copyright (c) 2017 Roman Ivanov <me@roivanov.com>
|
||||
# Copyright (c) 2017 Ned Batchelder <ned@nedbatchelder.com>
|
||||
# Copyright (c) 2018-2020 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2018, 2020 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2018-2019 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2018 Matus Valo <matusvalo@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Lucas Cimon <lucas.cimon@gmail.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2018 Randall Leeds <randall@bleeds.info>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Jason Owen <jason.a.owen@gmail.com>
|
||||
# Copyright (c) 2018 Gary Tyler McLeod <mail@garytyler.com>
|
||||
# Copyright (c) 2018 Yuval Langer <yuvallanger@mail.tau.ac.il>
|
||||
# Copyright (c) 2018 kapsh <kapsh@kap.sh>
|
||||
# Copyright (c) 2019 syutbai <syutbai@gmail.com>
|
||||
# Copyright (c) 2019 Thomas Hisch <t.hisch@gmail.com>
|
||||
# Copyright (c) 2019 Hugues <hugues.bruant@affirm.com>
|
||||
# Copyright (c) 2019 Janne Rönkkö <jannero@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2019 Trevor Bekolay <tbekolay@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Robert Schweizer <robert_schweizer@gmx.de>
|
||||
# Copyright (c) 2019 Andres Perez Hortal <andresperezcba@gmail.com>
|
||||
# Copyright (c) 2019 Peter Kolbus <peter.kolbus@gmail.com>
|
||||
# Copyright (c) 2019 Nicolas Dickreuter <dickreuter@gmail.com>
|
||||
# Copyright (c) 2020 anubh-v <anubhav@u.nus.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
""" pylint [options] modules_or_packages
|
||||
|
||||
Check that module(s) satisfy a coding standard (and more !).
|
||||
|
||||
pylint --help
|
||||
|
||||
Display this help message and exit.
|
||||
|
||||
pylint --help-msg <msg-id>[,<msg-id>]
|
||||
|
||||
Display help messages about given message identifiers and exit.
|
||||
"""
|
||||
import sys
|
||||
|
||||
from pylint.lint.check_parallel import check_parallel
|
||||
from pylint.lint.pylinter import PyLinter
|
||||
from pylint.lint.report_functions import (
|
||||
report_messages_by_module_stats,
|
||||
report_messages_stats,
|
||||
report_total_messages_stats,
|
||||
)
|
||||
from pylint.lint.run import Run
|
||||
from pylint.lint.utils import (
|
||||
ArgumentPreprocessingError,
|
||||
_patch_sys_path,
|
||||
fix_import_path,
|
||||
preprocess_options,
|
||||
)
|
||||
|
||||
if __name__ == "__main__":
    # Allow `python -m pylint.lint` style invocation.
    Run(sys.argv[1:])
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
116
venv/lib/python3.8/site-packages/pylint/lint/check_parallel.py
Normal file
116
venv/lib/python3.8/site-packages/pylint/lint/check_parallel.py
Normal file
@@ -0,0 +1,116 @@
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
import collections
|
||||
import functools
|
||||
|
||||
from pylint import reporters
|
||||
from pylint.lint.utils import _patch_sys_path
|
||||
from pylint.message import Message
|
||||
|
||||
try:
|
||||
import multiprocessing
|
||||
except ImportError:
|
||||
multiprocessing = None # type: ignore
|
||||
|
||||
# PyLinter object used by worker processes when checking files using multiprocessing
|
||||
# should only be used by the worker processes
|
||||
_worker_linter = None
|
||||
|
||||
|
||||
def _get_new_args(message):
|
||||
location = (
|
||||
message.abspath,
|
||||
message.path,
|
||||
message.module,
|
||||
message.obj,
|
||||
message.line,
|
||||
message.column,
|
||||
)
|
||||
return (message.msg_id, message.symbol, location, message.msg, message.confidence)
|
||||
|
||||
|
||||
def _merge_stats(stats):
|
||||
merged = {}
|
||||
by_msg = collections.Counter()
|
||||
for stat in stats:
|
||||
message_stats = stat.pop("by_msg", {})
|
||||
by_msg.update(message_stats)
|
||||
|
||||
for key, item in stat.items():
|
||||
if key not in merged:
|
||||
merged[key] = item
|
||||
elif isinstance(item, dict):
|
||||
merged[key].update(item)
|
||||
else:
|
||||
merged[key] = merged[key] + item
|
||||
|
||||
merged["by_msg"] = by_msg
|
||||
return merged
|
||||
|
||||
|
||||
def _worker_initialize(linter, arguments=None):
    """Initialize a worker process: install the inherited linter as the
    process-global ``_worker_linter`` and prepare it for checking.

    :param linter: the PyLinter instance inherited from the parent process
    :param arguments: original command-line arguments, used to patch sys.path
    """
    global _worker_linter  # pylint: disable=global-statement
    _worker_linter = linter

    # On the worker process side the messages are just collected and passed back to
    # parent process as _worker_check_file function's return value
    _worker_linter.set_reporter(reporters.CollectingReporter())
    _worker_linter.open()

    # Patch sys.path so that each argument is importable just like in single job mode
    _patch_sys_path(arguments or ())
|
||||
|
||||
|
||||
def _worker_check_single_file(file_item):
    """Lint one file in a worker process and return picklable results.

    :param file_item: (name, filepath, modname) triple describing the file
    :return: tuple (current module name, flattened messages, stats,
        msg_status) which the parent process folds back into its linter
    """
    name, filepath, modname = file_item

    _worker_linter.open()
    _worker_linter.check_single_file(name, filepath, modname)

    # Flatten the collected Message objects into plain tuples so they can
    # be pickled back to the parent process.
    msgs = [_get_new_args(m) for m in _worker_linter.reporter.messages]
    return (
        _worker_linter.current_name,
        msgs,
        _worker_linter.stats,
        _worker_linter.msg_status,
    )
|
||||
|
||||
|
||||
def check_parallel(linter, jobs, files, arguments=None):
    """Use the given linter to lint the files with given amount of workers (jobs)"""
    # The reporter does not need to be passed to worker processes, i.e. the reporter does
    # not need to be pickleable
    original_reporter = linter.reporter
    linter.reporter = None

    # The linter is inherited by all the pool's workers, i.e. the linter
    # is identical to the linter object here. This is required so that
    # a custom PyLinter object can be used.
    initializer = functools.partial(_worker_initialize, arguments=arguments)
    with multiprocessing.Pool(jobs, initializer=initializer, initargs=[linter]) as pool:
        # ..and now when the workers have inherited the linter, the actual reporter
        # can be set back here on the parent process so that results get stored into
        # correct reporter
        linter.set_reporter(original_reporter)
        linter.open()

        all_stats = []

        # Results arrive in completion order, not input order.
        for module, messages, stats, msg_status in pool.imap_unordered(
            _worker_check_single_file, files
        ):
            linter.set_current_module(module)
            for msg in messages:
                # Rehydrate the flattened tuples (see _get_new_args) into
                # Message objects for the real reporter.
                msg = Message(*msg)
                linter.reporter.handle_message(msg)

            all_stats.append(stats)
            linter.msg_status |= msg_status

    linter.stats = _merge_stats(all_stats)

    # Insert stats data to local checkers.
    for checker in linter.get_checkers():
        if checker is not linter:
            checker.stats = linter.stats
|
||||
1174
venv/lib/python3.8/site-packages/pylint/lint/pylinter.py
Normal file
1174
venv/lib/python3.8/site-packages/pylint/lint/pylinter.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,76 @@
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
import collections
|
||||
|
||||
from pylint import checkers, exceptions
|
||||
from pylint.reporters.ureports import nodes as report_nodes
|
||||
|
||||
|
||||
def report_total_messages_stats(sect, stats, previous_stats):
    """make total errors / warnings report"""
    # Header row followed by one row per message category, comparing the
    # current run's counts against the previous run's.
    lines = ["type", "number", "previous", "difference"]
    lines += checkers.table_lines_from_stats(
        stats, previous_stats, ("convention", "refactor", "warning", "error")
    )
    sect.append(report_nodes.Table(children=lines, cols=4, rheaders=1))
|
||||
|
||||
|
||||
def report_messages_stats(sect, stats, _):
    """Build the per-message-id occurrence report, most frequent first.

    Raises EmptyReportError when no message was emitted at all.
    Informational messages (ids starting with "I") are excluded.
    """
    if not stats["by_msg"]:
        # Nothing was detected: skip this report entirely.
        raise exceptions.EmptyReportError()
    counted = [
        (value, msg_id)
        for msg_id, value in stats["by_msg"].items()
        if not msg_id.startswith("I")
    ]
    lines = ("message id", "occurrences")
    for value, msg_id in sorted(counted, reverse=True):
        lines += (msg_id, str(value))
    sect.append(report_nodes.Table(children=lines, cols=2, rheaders=1))
|
||||
|
||||
|
||||
def report_messages_by_module_stats(sect, stats, _):
    """make errors / warnings by modules report"""
    if len(stats["by_module"]) == 1:
        # don't print this report when we are analysing a single module
        raise exceptions.EmptyReportError()
    # For each module, the percentage of the run's total it contributes in
    # every message category.
    by_mod = collections.defaultdict(dict)
    for m_type in ("fatal", "error", "warning", "refactor", "convention"):
        total = stats[m_type]
        for module in stats["by_module"].keys():
            mod_total = stats["by_module"][module][m_type]
            if total == 0:
                percent = 0
            else:
                percent = float((mod_total) * 100) / total
            by_mod[module][m_type] = percent
    # Sort modules by (error%, warning%, refactor%, convention%), worst first.
    sorted_result = []
    for module, mod_info in by_mod.items():
        sorted_result.append(
            (
                mod_info["error"],
                mod_info["warning"],
                mod_info["refactor"],
                mod_info["convention"],
                module,
            )
        )
    sorted_result.sort()
    sorted_result.reverse()
    lines = ["module", "error", "warning", "refactor", "convention"]
    for line in sorted_result:
        # Don't report clean modules.
        if all(entry == 0 for entry in line[:-1]):
            continue
        lines.append(line[-1])
        for val in line[:-1]:
            lines.append("%.2f" % val)
    # Only the 5 header cells remain: every module was clean.
    if len(lines) == 5:
        raise exceptions.EmptyReportError()
    sect.append(report_nodes.Table(children=lines, cols=5, rheaders=1))
|
||||
411
venv/lib/python3.8/site-packages/pylint/lint/run.py
Normal file
411
venv/lib/python3.8/site-packages/pylint/lint/run.py
Normal file
@@ -0,0 +1,411 @@
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from pylint import __pkginfo__, config, extensions, interfaces
|
||||
from pylint.lint.pylinter import PyLinter
|
||||
from pylint.lint.utils import ArgumentPreprocessingError, preprocess_options
|
||||
from pylint.utils import utils
|
||||
|
||||
try:
|
||||
import multiprocessing
|
||||
except ImportError:
|
||||
multiprocessing = None # type: ignore
|
||||
|
||||
|
||||
def _cpu_count() -> int:
|
||||
"""Use sched_affinity if available for virtualized or containerized environments."""
|
||||
sched_getaffinity = getattr(os, "sched_getaffinity", None)
|
||||
# pylint: disable=not-callable,using-constant-test
|
||||
if sched_getaffinity:
|
||||
return len(sched_getaffinity(0))
|
||||
if multiprocessing:
|
||||
return multiprocessing.cpu_count()
|
||||
return 1
|
||||
|
||||
|
||||
def cb_list_extensions(option, optname, value, parser):
    """List all the extensions under pylint.extensions"""
    extensions_dir = os.path.dirname(extensions.__file__)
    for filename in os.listdir(extensions_dir):
        # Only public Python modules count (skips __init__.py and friends).
        if filename.endswith(".py") and not filename.startswith("_"):
            module_name = filename.partition(".")[0]
            print("pylint.extensions.{}".format(module_name))
    sys.exit(0)
|
||||
|
||||
|
||||
def cb_list_confidence_levels(option, optname, value, parser):
    """Print every confidence level pylint knows about, then exit."""
    for confidence in interfaces.CONFIDENCE_LEVELS:
        # Each level is a 2-tuple; the first item is left-aligned in an
        # 18-character column, the second printed after it.
        print("%-18s: %s" % confidence)
    sys.exit(0)
|
||||
|
||||
|
||||
def cb_init_hook(optname, value):
    """exec arbitrary code to set sys.path for instance"""
    # SECURITY: the value is raw Python source taken verbatim from the
    # user's command line or rcfile; executing it is the documented
    # contract of --init-hook, so exec() is deliberate here.
    code = compile(value, "<init-hook>", "exec")
    exec(code)  # pylint: disable=exec-used
|
||||
|
||||
|
||||
class Run:
    """Helper class to use as main for pylint :

    run(*sys.argv[1:])

    Preprocesses early options, builds and configures a ``PyLinter``,
    runs the checks and (by default) exits with a status code encoding
    the categories of messages that were emitted.
    """

    LinterClass = PyLinter
    option_groups = (
        (
            "Commands",
            "Options which are actually commands. Options in this \
group are mutually exclusive.",
        ),
    )

    @staticmethod
    def _return_one(*args):  # pylint: disable=unused-argument
        # Dummy optik callback: marks an already-preprocessed option
        # (rcfile/init-hook) as handled during regular option parsing.
        return 1

    def __init__(self, args, reporter=None, do_exit=True):
        """Run pylint on *args*.

        :param args: command-line arguments (without the program name)
        :param reporter: optional reporter instance; overrides the one from
            the config file but can still be overridden on the command line
        :param do_exit: when true (the default), call ``sys.exit`` with the
            final status once checking is done
        """
        self._rcfile = None
        self._plugins = []
        self.verbose = None
        # Handle options that must be known *before* regular option parsing
        # (rcfile location, plugins to load, init-hook code, verbosity).
        try:
            preprocess_options(
                args,
                {
                    # option: (callback, takearg)
                    "init-hook": (cb_init_hook, True),
                    "rcfile": (self.cb_set_rcfile, True),
                    "load-plugins": (self.cb_add_plugins, True),
                    "verbose": (self.cb_verbose_mode, False),
                },
            )
        except ArgumentPreprocessingError as ex:
            print(ex, file=sys.stderr)
            sys.exit(32)  # 32 == usage error (see "Output status code" help)

        self.linter = linter = self.LinterClass(
            (
                (
                    "rcfile",
                    {
                        "action": "callback",
                        "callback": Run._return_one,
                        "group": "Commands",
                        "type": "string",
                        "metavar": "<file>",
                        "help": "Specify a configuration file to load.",
                    },
                ),
                (
                    "init-hook",
                    {
                        "action": "callback",
                        "callback": Run._return_one,
                        "type": "string",
                        "metavar": "<code>",
                        "level": 1,
                        "help": "Python code to execute, usually for sys.path "
                        "manipulation such as pygtk.require().",
                    },
                ),
                (
                    "help-msg",
                    {
                        "action": "callback",
                        "type": "string",
                        "metavar": "<msg-id>",
                        "callback": self.cb_help_message,
                        "group": "Commands",
                        "help": "Display a help message for the given message id and "
                        "exit. The value may be a comma separated list of message ids.",
                    },
                ),
                (
                    "list-msgs",
                    {
                        "action": "callback",
                        "metavar": "<msg-id>",
                        "callback": self.cb_list_messages,
                        "group": "Commands",
                        "level": 1,
                        "help": "Generate pylint's messages.",
                    },
                ),
                (
                    "list-msgs-enabled",
                    {
                        "action": "callback",
                        "metavar": "<msg-id>",
                        "callback": self.cb_list_messages_enabled,
                        "group": "Commands",
                        "level": 1,
                        "help": "Display a list of what messages are enabled "
                        "and disabled with the given configuration.",
                    },
                ),
                (
                    "list-groups",
                    {
                        "action": "callback",
                        "metavar": "<msg-id>",
                        "callback": self.cb_list_groups,
                        "group": "Commands",
                        "level": 1,
                        "help": "List pylint's message groups.",
                    },
                ),
                (
                    "list-conf-levels",
                    {
                        "action": "callback",
                        "callback": cb_list_confidence_levels,
                        "group": "Commands",
                        "level": 1,
                        "help": "Generate pylint's confidence levels.",
                    },
                ),
                (
                    "list-extensions",
                    {
                        "action": "callback",
                        "callback": cb_list_extensions,
                        "group": "Commands",
                        "level": 1,
                        "help": "List available extensions.",
                    },
                ),
                (
                    "full-documentation",
                    {
                        "action": "callback",
                        "metavar": "<msg-id>",
                        "callback": self.cb_full_documentation,
                        "group": "Commands",
                        "level": 1,
                        "help": "Generate pylint's full documentation.",
                    },
                ),
                (
                    "generate-rcfile",
                    {
                        "action": "callback",
                        "callback": self.cb_generate_config,
                        "group": "Commands",
                        "help": "Generate a sample configuration file according to "
                        "the current configuration. You can put other options "
                        "before this one to get them in the generated "
                        "configuration.",
                    },
                ),
                (
                    "generate-man",
                    {
                        "action": "callback",
                        "callback": self.cb_generate_manpage,
                        "group": "Commands",
                        "help": "Generate pylint's man page.",
                        "hide": True,
                    },
                ),
                (
                    "errors-only",
                    {
                        "action": "callback",
                        "callback": self.cb_error_mode,
                        "short": "E",
                        "help": "In error mode, checkers without error messages are "
                        "disabled and for others, only the ERROR messages are "
                        "displayed, and no reports are done by default.",
                    },
                ),
                (
                    "py3k",
                    {
                        "action": "callback",
                        "callback": self.cb_python3_porting_mode,
                        "help": "In Python 3 porting mode, all checkers will be "
                        "disabled and only messages emitted by the porting "
                        "checker will be displayed.",
                    },
                ),
                (
                    "verbose",
                    {
                        "action": "callback",
                        "callback": self.cb_verbose_mode,
                        "short": "v",
                        "help": "In verbose mode, extra non-checker-related info "
                        "will be displayed.",
                    },
                ),
            ),
            option_groups=self.option_groups,
            pylintrc=self._rcfile,
        )
        # register standard checkers
        linter.load_default_plugins()
        # load command line plugins
        linter.load_plugin_modules(self._plugins)
        # add some help section
        linter.add_help_section("Environment variables", config.ENV_HELP, level=1)
        # pylint: disable=bad-continuation
        linter.add_help_section(
            "Output",
            "Using the default text output, the message format is : \n"
            " \n"
            " MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE \n"
            " \n"
            "There are 5 kind of message types : \n"
            " * (C) convention, for programming standard violation \n"
            " * (R) refactor, for bad code smell \n"
            " * (W) warning, for python specific problems \n"
            " * (E) error, for probable bugs in the code \n"
            " * (F) fatal, if an error occurred which prevented pylint from doing further\n"
            "processing.\n",
            level=1,
        )
        linter.add_help_section(
            "Output status code",
            "Pylint should leave with following status code: \n"
            " * 0 if everything went fine \n"
            " * 1 if a fatal message was issued \n"
            " * 2 if an error message was issued \n"
            " * 4 if a warning message was issued \n"
            " * 8 if a refactor message was issued \n"
            " * 16 if a convention message was issued \n"
            " * 32 on usage error \n"
            " \n"
            "status 1 to 16 will be bit-ORed so you can know which different categories has\n"
            "been issued by analysing pylint output status code\n",
            level=1,
        )
        # read configuration
        linter.disable("I")
        linter.enable("c-extension-no-member")
        linter.read_config_file(verbose=self.verbose)
        config_parser = linter.cfgfile_parser
        # run init hook, if present, before loading plugins
        if config_parser.has_option("MASTER", "init-hook"):
            cb_init_hook(
                "init-hook", utils._unquote(config_parser.get("MASTER", "init-hook"))
            )
        # is there some additional plugins in the file configuration, in
        if config_parser.has_option("MASTER", "load-plugins"):
            plugins = utils._splitstrip(config_parser.get("MASTER", "load-plugins"))
            linter.load_plugin_modules(plugins)
        # now we can load file config and command line, plugins (which can
        # provide options) have been registered
        linter.load_config_file()

        if reporter:
            # if a custom reporter is provided as argument, it may be overridden
            # by file parameters, so re-set it here, but before command line
            # parsing so it's still overrideable by command line option
            linter.set_reporter(reporter)
        try:
            args = linter.load_command_line_configuration(args)
        except SystemExit as exc:
            if exc.code == 2:  # bad options
                exc.code = 32
            raise
        if not args:
            # No module/package to check was given: show help and signal
            # a usage error.
            print(linter.help())
            sys.exit(32)

        # Validate and normalize the --jobs setting (0 means "autodetect").
        if linter.config.jobs < 0:
            print(
                "Jobs number (%d) should be greater than or equal to 0"
                % linter.config.jobs,
                file=sys.stderr,
            )
            sys.exit(32)
        if linter.config.jobs > 1 or linter.config.jobs == 0:
            if multiprocessing is None:
                print(
                    "Multiprocessing library is missing, " "fallback to single process",
                    file=sys.stderr,
                )
                linter.set_option("jobs", 1)
            elif linter.config.jobs == 0:
                linter.config.jobs = _cpu_count()

        # We have loaded configuration from config file and command line. Now, we can
        # load plugin specific configuration.
        linter.load_plugin_configuration()

        linter.check(args)
        score_value = linter.generate_reports()
        if do_exit:
            if linter.config.exit_zero:
                sys.exit(0)
            else:
                # NOTE(review): strict ``>`` means a score exactly equal to
                # fail-under still exits non-zero; later pylint versions use
                # ``>=`` -- confirm which contract is intended here.
                if score_value and score_value > linter.config.fail_under:
                    sys.exit(0)
                sys.exit(self.linter.msg_status)

    def cb_set_rcfile(self, name, value):
        """callback for option preprocessing (i.e. before option parsing)"""
        self._rcfile = value

    def cb_add_plugins(self, name, value):
        """callback for option preprocessing (i.e. before option parsing)"""
        self._plugins.extend(utils._splitstrip(value))

    def cb_error_mode(self, *args, **kwargs):
        """error mode:
        * disable all but error messages
        * disable the 'miscellaneous' checker which can be safely deactivated in
          debug
        * disable reports
        * do not save execution information
        """
        self.linter.error_mode()

    def cb_generate_config(self, *args, **kwargs):
        """optik callback for sample config file generation"""
        self.linter.generate_config(skipsections=("COMMANDS",))
        sys.exit(0)

    def cb_generate_manpage(self, *args, **kwargs):
        """optik callback for man page generation"""
        self.linter.generate_manpage(__pkginfo__)
        sys.exit(0)

    def cb_help_message(self, option, optname, value, parser):
        """optik callback for printing some help about a particular message"""
        self.linter.msgs_store.help_message(utils._splitstrip(value))
        sys.exit(0)

    def cb_full_documentation(self, option, optname, value, parser):
        """optik callback for printing full documentation"""
        self.linter.print_full_documentation()
        sys.exit(0)

    def cb_list_messages(self, option, optname, value, parser):
        """optik callback for printing available messages"""
        self.linter.msgs_store.list_messages()
        sys.exit(0)

    def cb_list_messages_enabled(self, option, optname, value, parser):
        """optik callback for printing enabled/disabled messages"""
        self.linter.list_messages_enabled()
        sys.exit(0)

    def cb_list_groups(self, *args, **kwargs):
        """List all the check groups that pylint knows about

        These should be useful to know what check groups someone can disable
        or enable.
        """
        for check in self.linter.get_checker_names():
            print(check)
        sys.exit(0)

    def cb_python3_porting_mode(self, *args, **kwargs):
        """Activate only the python3 porting checker."""
        self.linter.python3_porting_mode()

    def cb_verbose_mode(self, *args, **kwargs):
        """callback for option preprocessing: turn on verbose output"""
        self.verbose = True
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user