-rw-r--r--  pylint/__init__.py  7
-rw-r--r--  pylint/__main__.py  1
-rw-r--r--  pylint/__pkginfo__.py  73
-rw-r--r--  pylint/checkers/__init__.py  31
-rw-r--r--  pylint/checkers/async.py  156
-rw-r--r--  pylint/checkers/base.py  1667
-rw-r--r--  pylint/checkers/classes.py  1079
-rw-r--r--  pylint/checkers/design_analysis.py  392
-rw-r--r--  pylint/checkers/exceptions.py  397
-rw-r--r--  pylint/checkers/format.py  897
-rw-r--r--  pylint/checkers/imports.py  596
-rw-r--r--  pylint/checkers/logging.py  216
-rw-r--r--  pylint/checkers/misc.py  113
-rw-r--r--  pylint/checkers/newstyle.py  73
-rw-r--r--  pylint/checkers/python3.py  1505
-rw-r--r--  pylint/checkers/raw_metrics.py  42
-rw-r--r--  pylint/checkers/refactoring.py  818
-rw-r--r--  pylint/checkers/similar.py  218
-rw-r--r--  pylint/checkers/spelling.py  238
-rw-r--r--  pylint/checkers/stdlib.py  338
-rw-r--r--  pylint/checkers/strings.py  452
-rw-r--r--  pylint/checkers/typecheck.py  946
-rw-r--r--  pylint/checkers/utils.py  514
-rw-r--r--  pylint/checkers/variables.py  1086
-rw-r--r--  pylint/config.py  417
-rwxr-xr-x  pylint/epylint.py  46
-rw-r--r--  pylint/exceptions.py  3
-rw-r--r--  pylint/extensions/_check_docs_utils.py  238
-rw-r--r--  pylint/extensions/bad_builtin.py  54
-rw-r--r--  pylint/extensions/check_docs.py  6
-rw-r--r--  pylint/extensions/check_elif.py  25
-rw-r--r--  pylint/extensions/comparetozero.py  25
-rw-r--r--  pylint/extensions/docparams.py  361
-rw-r--r--  pylint/extensions/docstyle.py  56
-rw-r--r--  pylint/extensions/emptystring.py  23
-rw-r--r--  pylint/extensions/mccabe.py  77
-rw-r--r--  pylint/extensions/overlapping_exceptions.py  53
-rw-r--r--  pylint/extensions/redefined_variable_type.py  46
-rw-r--r--  pylint/graph.py  78
-rw-r--r--  pylint/interfaces.py  20
-rw-r--r--  pylint/lint.py  1276
-rw-r--r--  pylint/pyreverse/diadefslib.py  31
-rw-r--r--  pylint/pyreverse/diagrams.py  68
-rw-r--r--  pylint/pyreverse/inspector.py  59
-rw-r--r--  pylint/pyreverse/main.py  190
-rw-r--r--  pylint/pyreverse/utils.py  76
-rw-r--r--  pylint/pyreverse/vcgutils.py  247
-rw-r--r--  pylint/pyreverse/writer.py  126
-rw-r--r--  pylint/reporters/__init__.py  19
-rw-r--r--  pylint/reporters/json.py  32
-rw-r--r--  pylint/reporters/text.py  121
-rw-r--r--  pylint/reporters/ureports/__init__.py  10
-rw-r--r--  pylint/reporters/ureports/nodes.py  21
-rw-r--r--  pylint/reporters/ureports/text_writer.py  30
-rw-r--r--  pylint/test/a.py  2
-rw-r--r--  pylint/test/acceptance/test_stdlib.py  19
-rw-r--r--  pylint/test/conftest.py  13
-rw-r--r--  pylint/test/test_func.py  74
-rw-r--r--  pylint/test/test_import_graph.py  38
-rw-r--r--  pylint/test/test_regr.py  87
-rw-r--r--  pylint/test/test_self.py  327
-rw-r--r--  pylint/test/unittest_checker_base.py  323
-rw-r--r--  pylint/test/unittest_checker_classes.py  48
-rw-r--r--  pylint/test/unittest_checker_exceptions.py  12
-rw-r--r--  pylint/test/unittest_checker_format.py  443
-rw-r--r--  pylint/test/unittest_checker_imports.py  75
-rw-r--r--  pylint/test/unittest_checker_logging.py  32
-rw-r--r--  pylint/test/unittest_checker_misc.py  58
-rw-r--r--  pylint/test/unittest_checker_python3.py  715
-rw-r--r--  pylint/test/unittest_checker_similar.py  51
-rw-r--r--  pylint/test/unittest_checker_spelling.py  227
-rw-r--r--  pylint/test/unittest_checker_stdlib.py  42
-rw-r--r--  pylint/test/unittest_checker_strings.py  47
-rw-r--r--  pylint/test/unittest_checker_typecheck.py  178
-rw-r--r--  pylint/test/unittest_checker_variables.py  149
-rw-r--r--  pylint/test/unittest_checkers_utils.py  74
-rw-r--r--  pylint/test/unittest_config.py  7
-rw-r--r--  pylint/test/unittest_lint.py  602
-rw-r--r--  pylint/test/unittest_pyreverse_diadefs.py  104
-rw-r--r--  pylint/test/unittest_pyreverse_inspector.py  72
-rw-r--r--  pylint/test/unittest_pyreverse_writer.py  81
-rw-r--r--  pylint/test/unittest_reporters_json.py  31
-rw-r--r--  pylint/test/unittest_reporting.py  36
-rw-r--r--  pylint/test/unittest_utils.py  327
-rw-r--r--  pylint/testutils.py  56
-rw-r--r--  pylint/utils.py  500
-rw-r--r--  pylintrc  2
87 files changed, 12199 insertions, 8342 deletions
diff --git a/pylint/__init__.py b/pylint/__init__.py
index c916259a4..a5d43d123 100644
--- a/pylint/__init__.py
+++ b/pylint/__init__.py
@@ -15,22 +15,29 @@ from .__pkginfo__ import version as __version__
def run_pylint():
"""run pylint"""
from pylint.lint import Run
+
try:
Run(sys.argv[1:])
except KeyboardInterrupt:
sys.exit(1)
+
def run_epylint():
"""run pylint"""
from pylint.epylint import Run
+
Run()
+
def run_pyreverse():
"""run pyreverse"""
from pylint.pyreverse.main import Run
+
Run(sys.argv[1:])
+
def run_symilar():
"""run symilar"""
from pylint.checkers.similar import Run
+
Run(sys.argv[1:])
diff --git a/pylint/__main__.py b/pylint/__main__.py
index f1ecf1b98..7eca11929 100644
--- a/pylint/__main__.py
+++ b/pylint/__main__.py
@@ -4,4 +4,5 @@
#!/usr/bin/env python
import pylint
+
pylint.run_pylint()
diff --git a/pylint/__pkginfo__.py b/pylint/__pkginfo__.py
index 302fd6450..d2e9590b9 100644
--- a/pylint/__pkginfo__.py
+++ b/pylint/__pkginfo__.py
@@ -23,50 +23,47 @@ from __future__ import absolute_import
from os.path import join
-modname = distname = 'pylint'
+modname = distname = "pylint"
numversion = (2, 2, 0)
-dev_version = 'dev'
-string_version = '.'.join(str(num) for num in numversion)
+dev_version = "dev"
+string_version = ".".join(str(num) for num in numversion)
-version = string_version + '-' + dev_version
+version = string_version + "-" + dev_version
-install_requires = [
- 'astroid>=2.0.0',
- 'isort >= 4.2.5',
- 'mccabe',
-]
+install_requires = ["astroid>=2.0.0", "isort >= 4.2.5", "mccabe"]
-dependency_links = [] # type: ignore
+dependency_links = [] # type: ignore
extras_require = {}
-extras_require[':sys_platform=="win32"'] = ['colorama']
+extras_require[':sys_platform=="win32"'] = ["colorama"]
-license = 'GPL'
+license = "GPL"
description = "python code static checker"
-web = 'https://github.com/PyCQA/pylint'
+web = "https://github.com/PyCQA/pylint"
mailinglist = "mailto:code-quality@python.org"
-author = 'Python Code Quality Authority'
-author_email = 'code-quality@python.org'
-
-classifiers = ['Development Status :: 6 - Mature',
- 'Environment :: Console',
- 'Intended Audience :: Developers',
- 'License :: OSI Approved :: GNU General Public License (GPL)',
- 'Operating System :: OS Independent',
- 'Programming Language :: Python',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.4',
- 'Programming Language :: Python :: 3.5',
- 'Programming Language :: Python :: 3.6',
- 'Programming Language :: Python :: 3.7',
- 'Programming Language :: Python :: 3 :: Only',
- 'Programming Language :: Python :: Implementation :: CPython',
- 'Programming Language :: Python :: Implementation :: PyPy',
- 'Topic :: Software Development :: Debuggers',
- 'Topic :: Software Development :: Quality Assurance',
- 'Topic :: Software Development :: Testing'
- ]
+author = "Python Code Quality Authority"
+author_email = "code-quality@python.org"
+
+classifiers = [
+ "Development Status :: 6 - Mature",
+ "Environment :: Console",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: GNU General Public License (GPL)",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3 :: Only",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ "Topic :: Software Development :: Debuggers",
+ "Topic :: Software Development :: Quality Assurance",
+ "Topic :: Software Development :: Testing",
+]
long_desc = """\
@@ -86,8 +83,8 @@ long_desc = """\
Pylint is shipped with "pyreverse" (UML diagram generator)
and "symilar" (an independent similarities checker)."""
-scripts = [join('bin', filename)
- for filename in ('pylint', "symilar", "epylint",
- "pyreverse")]
+scripts = [
+ join("bin", filename) for filename in ("pylint", "symilar", "epylint", "pyreverse")
+]
-include_dirs = [join('pylint', 'test')]
+include_dirs = [join("pylint", "test")]
diff --git a/pylint/checkers/__init__.py b/pylint/checkers/__init__.py
index 22eab4466..fcb1f5bf5 100644
--- a/pylint/checkers/__init__.py
+++ b/pylint/checkers/__init__.py
@@ -58,31 +58,32 @@ def table_lines_from_stats(stats, old_stats, columns):
lines = []
for m_type in columns:
new = stats[m_type]
- format = str # pylint: disable=redefined-builtin
+ format = str # pylint: disable=redefined-builtin
if isinstance(new, float):
- format = lambda num: '%.3f' % num
+ format = lambda num: "%.3f" % num
old = old_stats.get(m_type)
if old is not None:
diff_str = diff_string(old, new)
old = format(old)
else:
- old, diff_str = 'NC', 'NC'
- lines += (m_type.replace('_', ' '), format(new), old, diff_str)
+ old, diff_str = "NC", "NC"
+ lines += (m_type.replace("_", " "), format(new), old, diff_str)
return lines
class BaseChecker(OptionsProviderMixIn):
"""base class for checkers"""
+
# checker name (you may reuse an existing one)
- name = None # type: str
+ name = None # type: str
# options level (0 will be displaying in --help, 1 in --long-help)
level = 1
# ordered list of options to control the ckecker behaviour
- options = () # type: Any
+ options = () # type: Any
# messages issued by this checker
- msgs = {} # type: Any
+ msgs = {} # type: Any
# reports issued by this checker
- reports = () # type: Any
+ reports = () # type: Any
# mark this checker as enabled or not.
enabled = True
@@ -96,8 +97,15 @@ class BaseChecker(OptionsProviderMixIn):
OptionsProviderMixIn.__init__(self)
self.linter = linter
- def add_message(self, msg_id, line=None, node=None, args=None, confidence=UNDEFINED,
- col_offset=None):
+ def add_message(
+ self,
+ msg_id,
+ line=None,
+ node=None,
+ args=None,
+ confidence=UNDEFINED,
+ col_offset=None,
+ ):
"""add a message of a given type"""
self.linter.add_message(msg_id, line, node, args, confidence, col_offset)
@@ -122,4 +130,5 @@ def initialize(linter):
"""initialize linter with checkers in this package """
register_plugins(linter, __path__[0])
-__all__ = ('BaseChecker', 'initialize')
+
+__all__ = ("BaseChecker", "initialize")
diff --git a/pylint/checkers/async.py b/pylint/checkers/async.py
index f25fec9e1..881b29daf 100644
--- a/pylint/checkers/async.py
+++ b/pylint/checkers/async.py
@@ -1,76 +1,84 @@
# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2017 Derek Gustafson <degustaf@gmail.com>
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Checker for anything related to the async protocol (PEP 492)."""
-
-import sys
-
-import astroid
-from astroid import exceptions
-
-from pylint import checkers
-from pylint.checkers import utils as checker_utils
-from pylint import interfaces
-from pylint import utils
-
-
-class AsyncChecker(checkers.BaseChecker):
- __implements__ = interfaces.IAstroidChecker
- name = 'async'
- msgs = {
- 'E1700': ('Yield inside async function',
- 'yield-inside-async-function',
- 'Used when an `yield` or `yield from` statement is '
- 'found inside an async function.',
- {'minversion': (3, 5)}),
- 'E1701': ("Async context manager '%s' doesn't implement __aenter__ and __aexit__.",
- 'not-async-context-manager',
- 'Used when an async context manager is used with an object '
- 'that does not implement the async context management protocol.',
- {'minversion': (3, 5)}),
- }
-
- def open(self):
- self._ignore_mixin_members = utils.get_global_option(self, 'ignore-mixin-members')
-
- @checker_utils.check_messages('yield-inside-async-function')
- def visit_asyncfunctiondef(self, node):
- for child in node.nodes_of_class(astroid.Yield):
- if child.scope() is node and (sys.version_info[:2] == (3, 5) or
- isinstance(child, astroid.YieldFrom)):
- self.add_message('yield-inside-async-function', node=child)
-
- @checker_utils.check_messages('not-async-context-manager')
- def visit_asyncwith(self, node):
- for ctx_mgr, _ in node.items:
- infered = checker_utils.safe_infer(ctx_mgr)
- if infered is None or infered is astroid.Uninferable:
- continue
-
- if isinstance(infered, astroid.Instance):
- try:
- infered.getattr('__aenter__')
- infered.getattr('__aexit__')
- except exceptions.NotFoundError:
- if isinstance(infered, astroid.Instance):
- # If we do not know the bases of this class,
- # just skip it.
- if not checker_utils.has_known_bases(infered):
- continue
- # Just ignore mixin classes.
- if self._ignore_mixin_members:
- if infered.name[-5:].lower() == 'mixin':
- continue
- else:
- continue
-
- self.add_message('not-async-context-manager',
- node=node, args=(infered.name, ))
-
-
-def register(linter):
- """required method to auto register this checker"""
- linter.register_checker(AsyncChecker(linter))
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Checker for anything related to the async protocol (PEP 492)."""
+
+import sys
+
+import astroid
+from astroid import exceptions
+
+from pylint import checkers
+from pylint.checkers import utils as checker_utils
+from pylint import interfaces
+from pylint import utils
+
+
+class AsyncChecker(checkers.BaseChecker):
+ __implements__ = interfaces.IAstroidChecker
+ name = "async"
+ msgs = {
+ "E1700": (
+ "Yield inside async function",
+ "yield-inside-async-function",
+ "Used when an `yield` or `yield from` statement is "
+ "found inside an async function.",
+ {"minversion": (3, 5)},
+ ),
+ "E1701": (
+ "Async context manager '%s' doesn't implement __aenter__ and __aexit__.",
+ "not-async-context-manager",
+ "Used when an async context manager is used with an object "
+ "that does not implement the async context management protocol.",
+ {"minversion": (3, 5)},
+ ),
+ }
+
+ def open(self):
+ self._ignore_mixin_members = utils.get_global_option(
+ self, "ignore-mixin-members"
+ )
+
+ @checker_utils.check_messages("yield-inside-async-function")
+ def visit_asyncfunctiondef(self, node):
+ for child in node.nodes_of_class(astroid.Yield):
+ if child.scope() is node and (
+ sys.version_info[:2] == (3, 5) or isinstance(child, astroid.YieldFrom)
+ ):
+ self.add_message("yield-inside-async-function", node=child)
+
+ @checker_utils.check_messages("not-async-context-manager")
+ def visit_asyncwith(self, node):
+ for ctx_mgr, _ in node.items:
+ infered = checker_utils.safe_infer(ctx_mgr)
+ if infered is None or infered is astroid.Uninferable:
+ continue
+
+ if isinstance(infered, astroid.Instance):
+ try:
+ infered.getattr("__aenter__")
+ infered.getattr("__aexit__")
+ except exceptions.NotFoundError:
+ if isinstance(infered, astroid.Instance):
+ # If we do not know the bases of this class,
+ # just skip it.
+ if not checker_utils.has_known_bases(infered):
+ continue
+ # Just ignore mixin classes.
+ if self._ignore_mixin_members:
+ if infered.name[-5:].lower() == "mixin":
+ continue
+ else:
+ continue
+
+ self.add_message(
+ "not-async-context-manager", node=node, args=(infered.name,)
+ )
+
+
+def register(linter):
+ """required method to auto register this checker"""
+ linter.register_checker(AsyncChecker(linter))
diff --git a/pylint/checkers/base.py b/pylint/checkers/base.py
index 53b15d1be..ed6178d51 100644
--- a/pylint/checkers/base.py
+++ b/pylint/checkers/base.py
@@ -63,114 +63,119 @@ class NamingStyle:
# has multiple "accepted" forms of regular expressions,
# but we need to special-case stuff like dunder names
# in method names.
- CLASS_NAME_RGX = None # type: Pattern[str]
- MOD_NAME_RGX = None # type: Pattern[str]
- CONST_NAME_RGX = None # type: Pattern[str]
- COMP_VAR_RGX = None # type: Pattern[str]
- DEFAULT_NAME_RGX = None # type: Pattern[str]
+ CLASS_NAME_RGX = None # type: Pattern[str]
+ MOD_NAME_RGX = None # type: Pattern[str]
+ CONST_NAME_RGX = None # type: Pattern[str]
+ COMP_VAR_RGX = None # type: Pattern[str]
+ DEFAULT_NAME_RGX = None # type: Pattern[str]
CLASS_ATTRIBUTE_RGX = None # type: Pattern[str]
@classmethod
def get_regex(cls, name_type):
return {
- 'module': cls.MOD_NAME_RGX,
- 'const': cls.CONST_NAME_RGX,
- 'class': cls.CLASS_NAME_RGX,
- 'function': cls.DEFAULT_NAME_RGX,
- 'method': cls.DEFAULT_NAME_RGX,
- 'attr': cls.DEFAULT_NAME_RGX,
- 'argument': cls.DEFAULT_NAME_RGX,
- 'variable': cls.DEFAULT_NAME_RGX,
- 'class_attribute': cls.CLASS_ATTRIBUTE_RGX,
- 'inlinevar': cls.COMP_VAR_RGX,
+ "module": cls.MOD_NAME_RGX,
+ "const": cls.CONST_NAME_RGX,
+ "class": cls.CLASS_NAME_RGX,
+ "function": cls.DEFAULT_NAME_RGX,
+ "method": cls.DEFAULT_NAME_RGX,
+ "attr": cls.DEFAULT_NAME_RGX,
+ "argument": cls.DEFAULT_NAME_RGX,
+ "variable": cls.DEFAULT_NAME_RGX,
+ "class_attribute": cls.CLASS_ATTRIBUTE_RGX,
+ "inlinevar": cls.COMP_VAR_RGX,
}[name_type]
class SnakeCaseStyle(NamingStyle):
"""Regex rules for snake_case naming style."""
- CLASS_NAME_RGX = re.compile('[a-z_][a-z0-9_]+$')
- MOD_NAME_RGX = re.compile('([a-z_][a-z0-9_]*)$')
- CONST_NAME_RGX = re.compile('(([a-z_][a-z0-9_]*)|(__.*__))$')
- COMP_VAR_RGX = re.compile('[a-z_][a-z0-9_]*$')
- DEFAULT_NAME_RGX = re.compile('(([a-z_][a-z0-9_]{2,})|(_[a-z0-9_]*)|(__[a-z][a-z0-9_]+__))$')
- CLASS_ATTRIBUTE_RGX = re.compile(r'(([a-z_][a-z0-9_]{2,}|(__.*__)))$')
+
+ CLASS_NAME_RGX = re.compile("[a-z_][a-z0-9_]+$")
+ MOD_NAME_RGX = re.compile("([a-z_][a-z0-9_]*)$")
+ CONST_NAME_RGX = re.compile("(([a-z_][a-z0-9_]*)|(__.*__))$")
+ COMP_VAR_RGX = re.compile("[a-z_][a-z0-9_]*$")
+ DEFAULT_NAME_RGX = re.compile(
+ "(([a-z_][a-z0-9_]{2,})|(_[a-z0-9_]*)|(__[a-z][a-z0-9_]+__))$"
+ )
+ CLASS_ATTRIBUTE_RGX = re.compile(r"(([a-z_][a-z0-9_]{2,}|(__.*__)))$")
class CamelCaseStyle(NamingStyle):
"""Regex rules for camelCase naming style."""
- CLASS_NAME_RGX = re.compile('[a-z_][a-zA-Z0-9]+$')
- MOD_NAME_RGX = re.compile('([a-z_][a-zA-Z0-9]*)$')
- CONST_NAME_RGX = re.compile('(([a-z_][A-Za-z0-9]*)|(__.*__))$')
- COMP_VAR_RGX = re.compile('[a-z_][A-Za-z0-9]*$')
- DEFAULT_NAME_RGX = re.compile('(([a-z_][a-zA-Z0-9]{2,})|(__[a-z][a-zA-Z0-9_]+__))$')
- CLASS_ATTRIBUTE_RGX = re.compile(r'([a-z_][A-Za-z0-9]{2,}|(__.*__))$')
+
+ CLASS_NAME_RGX = re.compile("[a-z_][a-zA-Z0-9]+$")
+ MOD_NAME_RGX = re.compile("([a-z_][a-zA-Z0-9]*)$")
+ CONST_NAME_RGX = re.compile("(([a-z_][A-Za-z0-9]*)|(__.*__))$")
+ COMP_VAR_RGX = re.compile("[a-z_][A-Za-z0-9]*$")
+ DEFAULT_NAME_RGX = re.compile("(([a-z_][a-zA-Z0-9]{2,})|(__[a-z][a-zA-Z0-9_]+__))$")
+ CLASS_ATTRIBUTE_RGX = re.compile(r"([a-z_][A-Za-z0-9]{2,}|(__.*__))$")
class PascalCaseStyle(NamingStyle):
"""Regex rules for PascalCase naming style."""
- CLASS_NAME_RGX = re.compile('[A-Z_][a-zA-Z0-9]+$')
- MOD_NAME_RGX = re.compile('[A-Z_][a-zA-Z0-9]+$')
- CONST_NAME_RGX = re.compile('(([A-Z_][A-Za-z0-9]*)|(__.*__))$')
- COMP_VAR_RGX = re.compile('[A-Z_][a-zA-Z0-9]+$')
- DEFAULT_NAME_RGX = re.compile('[A-Z_][a-zA-Z0-9]{2,}$|(__[a-z][a-zA-Z0-9_]+__)$')
- CLASS_ATTRIBUTE_RGX = re.compile('[A-Z_][a-zA-Z0-9]{2,}$')
+
+ CLASS_NAME_RGX = re.compile("[A-Z_][a-zA-Z0-9]+$")
+ MOD_NAME_RGX = re.compile("[A-Z_][a-zA-Z0-9]+$")
+ CONST_NAME_RGX = re.compile("(([A-Z_][A-Za-z0-9]*)|(__.*__))$")
+ COMP_VAR_RGX = re.compile("[A-Z_][a-zA-Z0-9]+$")
+ DEFAULT_NAME_RGX = re.compile("[A-Z_][a-zA-Z0-9]{2,}$|(__[a-z][a-zA-Z0-9_]+__)$")
+ CLASS_ATTRIBUTE_RGX = re.compile("[A-Z_][a-zA-Z0-9]{2,}$")
class UpperCaseStyle(NamingStyle):
"""Regex rules for UPPER_CASE naming style."""
- CLASS_NAME_RGX = re.compile('[A-Z_][A-Z0-9_]+$')
- MOD_NAME_RGX = re.compile('[A-Z_][A-Z0-9_]+$')
- CONST_NAME_RGX = re.compile('(([A-Z_][A-Z0-9_]*)|(__.*__))$')
- COMP_VAR_RGX = re.compile('[A-Z_][A-Z0-9_]+$')
- DEFAULT_NAME_RGX = re.compile('([A-Z_][A-Z0-9_]{2,})|(__[a-z][a-zA-Z0-9_]+__)$')
- CLASS_ATTRIBUTE_RGX = re.compile('[A-Z_][A-Z0-9_]{2,}$')
+
+ CLASS_NAME_RGX = re.compile("[A-Z_][A-Z0-9_]+$")
+ MOD_NAME_RGX = re.compile("[A-Z_][A-Z0-9_]+$")
+ CONST_NAME_RGX = re.compile("(([A-Z_][A-Z0-9_]*)|(__.*__))$")
+ COMP_VAR_RGX = re.compile("[A-Z_][A-Z0-9_]+$")
+ DEFAULT_NAME_RGX = re.compile("([A-Z_][A-Z0-9_]{2,})|(__[a-z][a-zA-Z0-9_]+__)$")
+ CLASS_ATTRIBUTE_RGX = re.compile("[A-Z_][A-Z0-9_]{2,}$")
class AnyStyle(NamingStyle):
@classmethod
def get_regex(cls, name_type):
- return re.compile('.*')
+ return re.compile(".*")
-NAMING_STYLES = {'snake_case': SnakeCaseStyle, 'camelCase': CamelCaseStyle,
- 'PascalCase': PascalCaseStyle, 'UPPER_CASE': UpperCaseStyle,
- 'any': AnyStyle}
+NAMING_STYLES = {
+ "snake_case": SnakeCaseStyle,
+ "camelCase": CamelCaseStyle,
+ "PascalCase": PascalCaseStyle,
+ "UPPER_CASE": UpperCaseStyle,
+ "any": AnyStyle,
+}
# do not require a doc string on private/system methods
-NO_REQUIRED_DOC_RGX = re.compile('^_')
-REVERSED_PROTOCOL_METHOD = '__reversed__'
-SEQUENCE_PROTOCOL_METHODS = ('__getitem__', '__len__')
-REVERSED_METHODS = (SEQUENCE_PROTOCOL_METHODS,
- (REVERSED_PROTOCOL_METHOD, ))
-TYPECHECK_COMPARISON_OPERATORS = frozenset(('is', 'is not', '==',
- '!=', 'in', 'not in'))
+NO_REQUIRED_DOC_RGX = re.compile("^_")
+REVERSED_PROTOCOL_METHOD = "__reversed__"
+SEQUENCE_PROTOCOL_METHODS = ("__getitem__", "__len__")
+REVERSED_METHODS = (SEQUENCE_PROTOCOL_METHODS, (REVERSED_PROTOCOL_METHOD,))
+TYPECHECK_COMPARISON_OPERATORS = frozenset(("is", "is not", "==", "!=", "in", "not in"))
LITERAL_NODE_TYPES = (astroid.Const, astroid.Dict, astroid.List, astroid.Set)
-UNITTEST_CASE = 'unittest.case'
+UNITTEST_CASE = "unittest.case"
BUILTINS = builtins.__name__
TYPE_QNAME = "%s.type" % BUILTINS
PY33 = sys.version_info >= (3, 3)
PY3K = sys.version_info >= (3, 0)
PY35 = sys.version_info >= (3, 5)
-ABC_METACLASSES = {
- '_py_abc.ABCMeta', # Python 3.7+,
- 'abc.ABCMeta',
-}
+ABC_METACLASSES = {"_py_abc.ABCMeta", "abc.ABCMeta"} # Python 3.7+,
# Name categories that are always consistent with all naming conventions.
-EXEMPT_NAME_CATEGORIES = {'exempt', 'ignore'}
+EXEMPT_NAME_CATEGORIES = {"exempt", "ignore"}
# A mapping from builtin-qname -> symbol, to be used when generating messages
# about dangerous default values as arguments
DEFAULT_ARGUMENT_SYMBOLS = dict(
- zip(['.'.join([BUILTINS, x]) for x in ('set', 'dict', 'list')],
- ['set()', '{}', '[]'])
+ zip(
+ [".".join([BUILTINS, x]) for x in ("set", "dict", "list")],
+ ["set()", "{}", "[]"],
+ )
)
-REVERSED_COMPS = {'<': '>', '<=': '>=', '>': '<', '>=': '<='}
-COMPARISON_OPERATORS = frozenset(('==', '!=', '<', '>', '<=', '>='))
+REVERSED_COMPS = {"<": ">", "<=": ">=", ">": "<", ">=": "<="}
+COMPARISON_OPERATORS = frozenset(("==", "!=", "<", ">", "<=", ">="))
# List of methods which can be redefined
-REDEFINABLE_METHODS = frozenset((
- '__module__',
-))
+REDEFINABLE_METHODS = frozenset(("__module__",))
def _redefines_import(node):
@@ -198,8 +203,16 @@ def in_loop(node):
"""return True if the node is inside a kind of for loop"""
parent = node.parent
while parent is not None:
- if isinstance(parent, (astroid.For, astroid.ListComp, astroid.SetComp,
- astroid.DictComp, astroid.GeneratorExp)):
+ if isinstance(
+ parent,
+ (
+ astroid.For,
+ astroid.ListComp,
+ astroid.SetComp,
+ astroid.DictComp,
+ astroid.GeneratorExp,
+ ),
+ ):
return True
parent = parent.parent
return False
@@ -230,7 +243,9 @@ def _get_break_loop_node(break_node):
"""
loop_nodes = (astroid.For, astroid.While)
parent = break_node.parent
- while not isinstance(parent, loop_nodes) or break_node in getattr(parent, 'orelse', []):
+ while not isinstance(parent, loop_nodes) or break_node in getattr(
+ parent, "orelse", []
+ ):
parent = parent.parent
if parent is None:
break
@@ -250,25 +265,27 @@ def _loop_exits_early(loop):
loop_nodes = (astroid.For, astroid.While)
definition_nodes = (astroid.FunctionDef, astroid.ClassDef)
inner_loop_nodes = [
- _node for _node in loop.nodes_of_class(loop_nodes,
- skip_klass=definition_nodes)
+ _node
+ for _node in loop.nodes_of_class(loop_nodes, skip_klass=definition_nodes)
if _node != loop
]
return any(
- _node for _node in loop.nodes_of_class(astroid.Break,
- skip_klass=definition_nodes)
+ _node
+ for _node in loop.nodes_of_class(astroid.Break, skip_klass=definition_nodes)
if _get_break_loop_node(_node) not in inner_loop_nodes
)
def _is_multi_naming_match(match, node_type, confidence):
- return (match is not None and
- match.lastgroup is not None and
- match.lastgroup not in EXEMPT_NAME_CATEGORIES
- and (node_type != 'method' or confidence != interfaces.INFERENCE_FAILURE))
+ return (
+ match is not None
+ and match.lastgroup is not None
+ and match.lastgroup not in EXEMPT_NAME_CATEGORIES
+ and (node_type != "method" or confidence != interfaces.INFERENCE_FAILURE)
+ )
-BUILTIN_PROPERTY = 'builtins.property'
+BUILTIN_PROPERTY = "builtins.property"
def _get_properties(config):
@@ -281,8 +298,9 @@ def _get_properties(config):
property_names = set() # Not returning 'property', it has its own check.
if config is not None:
property_classes.update(config.property_classes)
- property_names.update((prop.rsplit('.', 1)[-1]
- for prop in config.property_classes))
+ property_names.update(
+ (prop.rsplit(".", 1)[-1] for prop in config.property_classes)
+ )
return property_classes, property_names
@@ -299,7 +317,7 @@ def _determine_function_name_type(node, config=None):
"""
property_classes, property_names = _get_properties(config)
if not node.is_method():
- return 'function'
+ return "function"
if node.decorators:
decorators = node.decorators.nodes
else:
@@ -307,18 +325,21 @@ def _determine_function_name_type(node, config=None):
for decorator in decorators:
# If the function is a property (decorated with @property
# or @abc.abstractproperty), the name type is 'attr'.
- if (isinstance(decorator, astroid.Name) or
- (isinstance(decorator, astroid.Attribute) and
- decorator.attrname in property_names)):
+ if isinstance(decorator, astroid.Name) or (
+ isinstance(decorator, astroid.Attribute)
+ and decorator.attrname in property_names
+ ):
infered = utils.safe_infer(decorator)
if infered and infered.qname() in property_classes:
- return 'attr'
+ return "attr"
# If the function is decorated using the prop_method.{setter,getter}
# form, treat it like an attribute as well.
- elif (isinstance(decorator, astroid.Attribute) and
- decorator.attrname in ('setter', 'deleter')):
- return 'attr'
- return 'method'
+ elif isinstance(decorator, astroid.Attribute) and decorator.attrname in (
+ "setter",
+ "deleter",
+ ):
+ return "attr"
+ return "method"
def _has_abstract_methods(node):
@@ -339,7 +360,7 @@ def report_by_type_stats(sect, stats, old_stats):
"""
# percentage of different types documented and/or with a bad name
nice_stats = {}
- for node_type in ('module', 'class', 'method', 'function'):
+ for node_type in ("module", "class", "method", "function"):
try:
total = stats[node_type]
except KeyError:
@@ -347,28 +368,32 @@ def report_by_type_stats(sect, stats, old_stats):
nice_stats[node_type] = {}
if total != 0:
try:
- documented = total - stats['undocumented_'+node_type]
+ documented = total - stats["undocumented_" + node_type]
percent = (documented * 100.) / total
- nice_stats[node_type]['percent_documented'] = '%.2f' % percent
+ nice_stats[node_type]["percent_documented"] = "%.2f" % percent
except KeyError:
- nice_stats[node_type]['percent_documented'] = 'NC'
+ nice_stats[node_type]["percent_documented"] = "NC"
try:
- percent = (stats['badname_'+node_type] * 100.) / total
- nice_stats[node_type]['percent_badname'] = '%.2f' % percent
+ percent = (stats["badname_" + node_type] * 100.) / total
+ nice_stats[node_type]["percent_badname"] = "%.2f" % percent
except KeyError:
- nice_stats[node_type]['percent_badname'] = 'NC'
- lines = ('type', 'number', 'old number', 'difference',
- '%documented', '%badname')
- for node_type in ('module', 'class', 'method', 'function'):
+ nice_stats[node_type]["percent_badname"] = "NC"
+ lines = ("type", "number", "old number", "difference", "%documented", "%badname")
+ for node_type in ("module", "class", "method", "function"):
new = stats[node_type]
old = old_stats.get(node_type, None)
if old is not None:
diff_str = reporters.diff_string(old, new)
else:
- old, diff_str = 'NC', 'NC'
- lines += (node_type, str(new), str(old), diff_str,
- nice_stats[node_type].get('percent_documented', '0'),
- nice_stats[node_type].get('percent_badname', '0'))
+ old, diff_str = "NC", "NC"
+ lines += (
+ node_type,
+ str(new),
+ str(old),
+ diff_str,
+ nice_stats[node_type].get("percent_documented", "0"),
+ nice_stats[node_type].get("percent_badname", "0"),
+ )
sect.append(reporter_nodes.Table(children=lines, cols=6, rheaders=1))
@@ -383,119 +408,151 @@ def redefined_by_decorator(node):
"""
if node.decorators:
for decorator in node.decorators.nodes:
- if (isinstance(decorator, astroid.Attribute) and
- getattr(decorator.expr, 'name', None) == node.name):
+ if (
+ isinstance(decorator, astroid.Attribute)
+ and getattr(decorator.expr, "name", None) == node.name
+ ):
return True
return False
class _BasicChecker(checkers.BaseChecker):
__implements__ = interfaces.IAstroidChecker
- name = 'basic'
+ name = "basic"
class BasicErrorChecker(_BasicChecker):
msgs = {
- 'E0100': ('__init__ method is a generator',
- 'init-is-generator',
- 'Used when the special class method __init__ is turned into a '
- 'generator by a yield in its body.'),
- 'E0101': ('Explicit return in __init__',
- 'return-in-init',
- 'Used when the special class method __init__ has an explicit '
- 'return value.'),
- 'E0102': ('%s already defined line %s',
- 'function-redefined',
- 'Used when a function / class / method is redefined.'),
- 'E0103': ('%r not properly in loop',
- 'not-in-loop',
- 'Used when break or continue keywords are used outside a loop.'),
- 'E0104': ('Return outside function',
- 'return-outside-function',
- 'Used when a "return" statement is found outside a function or '
- 'method.'),
- 'E0105': ('Yield outside function',
- 'yield-outside-function',
- 'Used when a "yield" statement is found outside a function or '
- 'method.'),
- 'E0106': ('Return with argument inside generator',
- 'return-arg-in-generator',
- 'Used when a "return" statement with an argument is found '
- 'outside in a generator function or method (e.g. with some '
- '"yield" statements).',
- {'maxversion': (3, 3)}),
- 'E0107': ("Use of the non-existent %s operator",
- 'nonexistent-operator',
- "Used when you attempt to use the C-style pre-increment or "
- "pre-decrement operator -- and ++, which doesn't exist in Python."),
- 'E0108': ('Duplicate argument name %s in function definition',
- 'duplicate-argument-name',
- 'Duplicate argument names in function definitions are syntax'
- ' errors.'),
- 'E0110': ('Abstract class %r with abstract methods instantiated',
- 'abstract-class-instantiated',
- 'Used when an abstract class with `abc.ABCMeta` as metaclass '
- 'has abstract methods and is instantiated.'),
- 'W0120': ('Else clause on loop without a break statement',
- 'useless-else-on-loop',
- 'Loops should only have an else clause if they can exit early '
- 'with a break statement, otherwise the statements under else '
- 'should be on the same scope as the loop itself.'),
- 'E0112': ('More than one starred expression in assignment',
- 'too-many-star-expressions',
- 'Emitted when there are more than one starred '
- 'expressions (`*x`) in an assignment. This is a SyntaxError.'),
- 'E0113': ('Starred assignment target must be in a list or tuple',
- 'invalid-star-assignment-target',
- 'Emitted when a star expression is used as a starred '
- 'assignment target.'),
- 'E0114': ('Can use starred expression only in assignment target',
- 'star-needs-assignment-target',
- 'Emitted when a star expression is not used in an '
- 'assignment target.'),
- 'E0115': ('Name %r is nonlocal and global',
- 'nonlocal-and-global',
- 'Emitted when a name is both nonlocal and global.'),
- 'E0116': ("'continue' not supported inside 'finally' clause",
- 'continue-in-finally',
- 'Emitted when the `continue` keyword is found '
- 'inside a finally clause, which is a SyntaxError.'),
- 'E0117': ("nonlocal name %s found without binding",
- 'nonlocal-without-binding',
- 'Emitted when a nonlocal variable does not have an attached '
- 'name somewhere in the parent scopes'),
- 'E0118': ("Name %r is used prior to global declaration",
- 'used-prior-global-declaration',
- 'Emitted when a name is used prior a global declaration, '
- 'which results in an error since Python 3.6.',
- {'minversion': (3, 6)}),
- }
+ "E0100": (
+ "__init__ method is a generator",
+ "init-is-generator",
+ "Used when the special class method __init__ is turned into a "
+ "generator by a yield in its body.",
+ ),
+ "E0101": (
+ "Explicit return in __init__",
+ "return-in-init",
+ "Used when the special class method __init__ has an explicit "
+ "return value.",
+ ),
+ "E0102": (
+ "%s already defined line %s",
+ "function-redefined",
+ "Used when a function / class / method is redefined.",
+ ),
+ "E0103": (
+ "%r not properly in loop",
+ "not-in-loop",
+ "Used when break or continue keywords are used outside a loop.",
+ ),
+ "E0104": (
+ "Return outside function",
+ "return-outside-function",
+ 'Used when a "return" statement is found outside a function or ' "method.",
+ ),
+ "E0105": (
+ "Yield outside function",
+ "yield-outside-function",
+ 'Used when a "yield" statement is found outside a function or ' "method.",
+ ),
+ "E0106": (
+ "Return with argument inside generator",
+ "return-arg-in-generator",
+ 'Used when a "return" statement with an argument is found '
+ "outside in a generator function or method (e.g. with some "
+ '"yield" statements).',
+ {"maxversion": (3, 3)},
+ ),
+ "E0107": (
+ "Use of the non-existent %s operator",
+ "nonexistent-operator",
+ "Used when you attempt to use the C-style pre-increment or "
+ "pre-decrement operator -- and ++, which doesn't exist in Python.",
+ ),
+ "E0108": (
+ "Duplicate argument name %s in function definition",
+ "duplicate-argument-name",
+ "Duplicate argument names in function definitions are syntax" " errors.",
+ ),
+ "E0110": (
+ "Abstract class %r with abstract methods instantiated",
+ "abstract-class-instantiated",
+ "Used when an abstract class with `abc.ABCMeta` as metaclass "
+ "has abstract methods and is instantiated.",
+ ),
+ "W0120": (
+ "Else clause on loop without a break statement",
+ "useless-else-on-loop",
+ "Loops should only have an else clause if they can exit early "
+ "with a break statement, otherwise the statements under else "
+ "should be on the same scope as the loop itself.",
+ ),
+ "E0112": (
+ "More than one starred expression in assignment",
+ "too-many-star-expressions",
+ "Emitted when there are more than one starred "
+ "expressions (`*x`) in an assignment. This is a SyntaxError.",
+ ),
+ "E0113": (
+ "Starred assignment target must be in a list or tuple",
+ "invalid-star-assignment-target",
+ "Emitted when a star expression is used as a starred " "assignment target.",
+ ),
+ "E0114": (
+ "Can use starred expression only in assignment target",
+ "star-needs-assignment-target",
+ "Emitted when a star expression is not used in an " "assignment target.",
+ ),
+ "E0115": (
+ "Name %r is nonlocal and global",
+ "nonlocal-and-global",
+ "Emitted when a name is both nonlocal and global.",
+ ),
+ "E0116": (
+ "'continue' not supported inside 'finally' clause",
+ "continue-in-finally",
+ "Emitted when the `continue` keyword is found "
+ "inside a finally clause, which is a SyntaxError.",
+ ),
+ "E0117": (
+ "nonlocal name %s found without binding",
+ "nonlocal-without-binding",
+ "Emitted when a nonlocal variable does not have an attached "
+ "name somewhere in the parent scopes",
+ ),
+ "E0118": (
+ "Name %r is used prior to global declaration",
+ "used-prior-global-declaration",
+ "Emitted when a name is used prior a global declaration, "
+ "which results in an error since Python 3.6.",
+ {"minversion": (3, 6)},
+ ),
+ }
- @utils.check_messages('function-redefined')
+ @utils.check_messages("function-redefined")
def visit_classdef(self, node):
- self._check_redefinition('class', node)
+ self._check_redefinition("class", node)
- @utils.check_messages('too-many-star-expressions',
- 'invalid-star-assignment-target')
+ @utils.check_messages("too-many-star-expressions", "invalid-star-assignment-target")
def visit_assign(self, node):
starred = list(node.targets[0].nodes_of_class(astroid.Starred))
if len(starred) > 1:
- self.add_message('too-many-star-expressions', node=node)
+ self.add_message("too-many-star-expressions", node=node)
# Check *a = b
if isinstance(node.targets[0], astroid.Starred):
- self.add_message('invalid-star-assignment-target', node=node)
+ self.add_message("invalid-star-assignment-target", node=node)
- @utils.check_messages('star-needs-assignment-target')
+ @utils.check_messages("star-needs-assignment-target")
def visit_starred(self, node):
"""Check that a Starred expression is used in an assignment target."""
if isinstance(node.parent, astroid.Call):
# f(*args) is converted to Call(args=[Starred]), so ignore
# them for this check.
return
- if PY35 and isinstance(node.parent,
- (astroid.List, astroid.Tuple,
- astroid.Set, astroid.Dict)):
+ if PY35 and isinstance(
+ node.parent, (astroid.List, astroid.Tuple, astroid.Set, astroid.Dict)
+ ):
# PEP 448 unpacking.
return
@@ -504,38 +561,49 @@ class BasicErrorChecker(_BasicChecker):
return
if stmt.value is node or stmt.value.parent_of(node):
- self.add_message('star-needs-assignment-target', node=node)
-
- @utils.check_messages('init-is-generator', 'return-in-init',
- 'function-redefined', 'return-arg-in-generator',
- 'duplicate-argument-name', 'nonlocal-and-global',
- 'used-prior-global-declaration')
+ self.add_message("star-needs-assignment-target", node=node)
+
+ @utils.check_messages(
+ "init-is-generator",
+ "return-in-init",
+ "function-redefined",
+ "return-arg-in-generator",
+ "duplicate-argument-name",
+ "nonlocal-and-global",
+ "used-prior-global-declaration",
+ )
def visit_functiondef(self, node):
self._check_nonlocal_and_global(node)
self._check_name_used_prior_global(node)
- if (not redefined_by_decorator(node) and
- not utils.is_registered_in_singledispatch_function(node)):
- self._check_redefinition(node.is_method() and 'method' or 'function', node)
+ if not redefined_by_decorator(
+ node
+ ) and not utils.is_registered_in_singledispatch_function(node):
+ self._check_redefinition(node.is_method() and "method" or "function", node)
# checks for max returns, branch, return in __init__
- returns = node.nodes_of_class(astroid.Return,
- skip_klass=(astroid.FunctionDef,
- astroid.ClassDef))
- if node.is_method() and node.name == '__init__':
+ returns = node.nodes_of_class(
+ astroid.Return, skip_klass=(astroid.FunctionDef, astroid.ClassDef)
+ )
+ if node.is_method() and node.name == "__init__":
if node.is_generator():
- self.add_message('init-is-generator', node=node)
+ self.add_message("init-is-generator", node=node)
else:
values = [r.value for r in returns]
# Are we returning anything but None from constructors
if any(v for v in values if not utils.is_none(v)):
- self.add_message('return-in-init', node=node)
+ self.add_message("return-in-init", node=node)
elif node.is_generator():
# make sure we don't mix non-None returns and yields
if not PY33:
for retnode in returns:
- if isinstance(retnode.value, astroid.Const) and \
- retnode.value.value is not None:
- self.add_message('return-arg-in-generator', node=node,
- line=retnode.fromlineno)
+ if (
+ isinstance(retnode.value, astroid.Const)
+ and retnode.value.value is not None
+ ):
+ self.add_message(
+ "return-arg-in-generator",
+ node=node,
+ line=retnode.fromlineno,
+ )
# Check for duplicate names by clustering args with same name for detailed report
arg_clusters = collections.defaultdict(list)
arguments = filter(None, [node.args.args, node.args.kwonlyargs])
@@ -548,7 +616,7 @@ class BasicErrorChecker(_BasicChecker):
if len(argument_duplicates) != 1:
for argument in argument_duplicates:
self.add_message(
- 'duplicate-argument-name',
+ "duplicate-argument-name",
line=argument.lineno,
node=argument,
args=(argument.name,),
@@ -579,65 +647,76 @@ class BasicErrorChecker(_BasicChecker):
global_lineno = corresponding_global.fromlineno
if global_lineno and global_lineno > node_name.fromlineno:
- self.add_message('used-prior-global-declaration',
- node=node_name, args=(name, ))
+ self.add_message(
+ "used-prior-global-declaration", node=node_name, args=(name,)
+ )
def _check_nonlocal_and_global(self, node):
"""Check that a name is both nonlocal and global."""
+
def same_scope(current):
return current.scope() is node
from_iter = itertools.chain.from_iterable
- nonlocals = set(from_iter(
- child.names for child in node.nodes_of_class(astroid.Nonlocal)
- if same_scope(child)))
+ nonlocals = set(
+ from_iter(
+ child.names
+ for child in node.nodes_of_class(astroid.Nonlocal)
+ if same_scope(child)
+ )
+ )
if not nonlocals:
return
- global_vars = set(from_iter(
- child.names for child in node.nodes_of_class(astroid.Global)
- if same_scope(child)))
+ global_vars = set(
+ from_iter(
+ child.names
+ for child in node.nodes_of_class(astroid.Global)
+ if same_scope(child)
+ )
+ )
for name in nonlocals.intersection(global_vars):
- self.add_message('nonlocal-and-global',
- args=(name, ), node=node)
+ self.add_message("nonlocal-and-global", args=(name,), node=node)
- @utils.check_messages('return-outside-function')
+ @utils.check_messages("return-outside-function")
def visit_return(self, node):
if not isinstance(node.frame(), astroid.FunctionDef):
- self.add_message('return-outside-function', node=node)
+ self.add_message("return-outside-function", node=node)
- @utils.check_messages('yield-outside-function')
+ @utils.check_messages("yield-outside-function")
def visit_yield(self, node):
self._check_yield_outside_func(node)
- @utils.check_messages('yield-outside-function')
+ @utils.check_messages("yield-outside-function")
def visit_yieldfrom(self, node):
self._check_yield_outside_func(node)
- @utils.check_messages('not-in-loop', 'continue-in-finally')
+ @utils.check_messages("not-in-loop", "continue-in-finally")
def visit_continue(self, node):
- self._check_in_loop(node, 'continue')
+ self._check_in_loop(node, "continue")
- @utils.check_messages('not-in-loop')
+ @utils.check_messages("not-in-loop")
def visit_break(self, node):
- self._check_in_loop(node, 'break')
+ self._check_in_loop(node, "break")
- @utils.check_messages('useless-else-on-loop')
+ @utils.check_messages("useless-else-on-loop")
def visit_for(self, node):
self._check_else_on_loop(node)
- @utils.check_messages('useless-else-on-loop')
+ @utils.check_messages("useless-else-on-loop")
def visit_while(self, node):
self._check_else_on_loop(node)
- @utils.check_messages('nonexistent-operator')
+ @utils.check_messages("nonexistent-operator")
def visit_unaryop(self, node):
"""check use of the non-existent ++ and -- operator operator"""
- if ((node.op in '+-') and
- isinstance(node.operand, astroid.UnaryOp) and
- (node.operand.op == node.op)):
- self.add_message('nonexistent-operator', node=node, args=node.op*2)
+ if (
+ (node.op in "+-")
+ and isinstance(node.operand, astroid.UnaryOp)
+ and (node.operand.op == node.op)
+ ):
+ self.add_message("nonexistent-operator", node=node, args=node.op * 2)
def _check_nonlocal_without_binding(self, node, name):
current_scope = node.scope()
@@ -646,8 +725,7 @@ class BasicErrorChecker(_BasicChecker):
break
if not isinstance(current_scope, (astroid.ClassDef, astroid.FunctionDef)):
- self.add_message('nonlocal-without-binding', args=(name, ),
- node=node)
+ self.add_message("nonlocal-without-binding", args=(name,), node=node)
return
if name not in current_scope.locals:
@@ -658,14 +736,14 @@ class BasicErrorChecker(_BasicChecker):
return
if not isinstance(current_scope, astroid.FunctionDef):
- self.add_message('nonlocal-without-binding', args=(name, ), node=node)
+ self.add_message("nonlocal-without-binding", args=(name,), node=node)
- @utils.check_messages('nonlocal-without-binding')
+ @utils.check_messages("nonlocal-without-binding")
def visit_nonlocal(self, node):
for name in node.names:
self._check_nonlocal_without_binding(node, name)
- @utils.check_messages('abstract-class-instantiated')
+ @utils.check_messages("abstract-class-instantiated")
def visit_call(self, node):
""" Check instantiating abstract class with
abc.ABCMeta as metaclass.
@@ -700,31 +778,34 @@ class BasicErrorChecker(_BasicChecker):
# Python 3.4 has `abc.ABC`, which won't be detected
# by ClassNode.metaclass()
for ancestor in infered.ancestors():
- if ancestor.qname() == 'abc.ABC':
- self.add_message('abstract-class-instantiated',
- args=(infered.name, ),
- node=node)
+ if ancestor.qname() == "abc.ABC":
+ self.add_message(
+ "abstract-class-instantiated", args=(infered.name,), node=node
+ )
break
return
if metaclass.qname() in ABC_METACLASSES:
- self.add_message('abstract-class-instantiated',
- args=(infered.name, ),
- node=node)
+ self.add_message(
+ "abstract-class-instantiated", args=(infered.name,), node=node
+ )
def _check_yield_outside_func(self, node):
if not isinstance(node.frame(), (astroid.FunctionDef, astroid.Lambda)):
- self.add_message('yield-outside-function', node=node)
+ self.add_message("yield-outside-function", node=node)
def _check_else_on_loop(self, node):
"""Check that any loop with an else clause has a break statement."""
if node.orelse and not _loop_exits_early(node):
- self.add_message('useless-else-on-loop', node=node,
- # This is not optimal, but the line previous
- # to the first statement in the else clause
- # will usually be the one that contains the else:.
- line=node.orelse[0].lineno - 1)
+ self.add_message(
+ "useless-else-on-loop",
+ node=node,
+ # This is not optimal, but the line previous
+ # to the first statement in the else clause
+ # will usually be the one that contains the else:.
+ line=node.orelse[0].lineno - 1,
+ )
def _check_in_loop(self, node, node_name):
"""check that a node is inside a for or while loop"""
@@ -736,34 +817,41 @@ class BasicErrorChecker(_BasicChecker):
if isinstance(_node, (astroid.ClassDef, astroid.FunctionDef)):
break
- if (isinstance(_node, astroid.TryFinally)
- and node in _node.finalbody
- and isinstance(node, astroid.Continue)):
- self.add_message('continue-in-finally', node=node)
+ if (
+ isinstance(_node, astroid.TryFinally)
+ and node in _node.finalbody
+ and isinstance(node, astroid.Continue)
+ ):
+ self.add_message("continue-in-finally", node=node)
_node = _node.parent
- self.add_message('not-in-loop', node=node, args=node_name)
+ self.add_message("not-in-loop", node=node, args=node_name)
def _check_redefinition(self, redeftype, node):
"""check for redefinition of a function / method / class name"""
parent_frame = node.parent.frame()
defined_self = parent_frame[node.name]
- if (defined_self is not node
- and not astroid.are_exclusive(node, defined_self)):
+ if defined_self is not node and not astroid.are_exclusive(node, defined_self):
# Additional checks for methods which are not considered
# redefined, since they are already part of the base API.
- if (isinstance(parent_frame, astroid.ClassDef)
- and node.name in REDEFINABLE_METHODS):
+ if (
+ isinstance(parent_frame, astroid.ClassDef)
+ and node.name in REDEFINABLE_METHODS
+ ):
return
dummy_variables_rgx = lint_utils.get_global_option(
- self, 'dummy-variables-rgx', default=None)
+ self, "dummy-variables-rgx", default=None
+ )
if dummy_variables_rgx and dummy_variables_rgx.match(node.name):
return
- self.add_message('function-redefined', node=node,
- args=(redeftype, defined_self.fromlineno))
+ self.add_message(
+ "function-redefined",
+ node=node,
+ args=(redeftype, defined_self.fromlineno),
+ )
class BasicChecker(_BasicChecker):
@@ -779,88 +867,116 @@ class BasicChecker(_BasicChecker):
__implements__ = interfaces.IAstroidChecker
- name = 'basic'
+ name = "basic"
msgs = {
- 'W0101': ('Unreachable code',
- 'unreachable',
- 'Used when there is some code behind a "return" or "raise" '
- 'statement, which will never be accessed.'),
- 'W0102': ('Dangerous default value %s as argument',
- 'dangerous-default-value',
- 'Used when a mutable value as list or dictionary is detected in '
- 'a default value for an argument.'),
- 'W0104': ('Statement seems to have no effect',
- 'pointless-statement',
- 'Used when a statement doesn\'t have (or at least seems to) '
- 'any effect.'),
- 'W0105': ('String statement has no effect',
- 'pointless-string-statement',
- 'Used when a string is used as a statement (which of course '
- 'has no effect). This is a particular case of W0104 with its '
- 'own message so you can easily disable it if you\'re using '
- 'those strings as documentation, instead of comments.'),
- 'W0106': ('Expression "%s" is assigned to nothing',
- 'expression-not-assigned',
- 'Used when an expression that is not a function call is assigned '
- 'to nothing. Probably something else was intended.'),
- 'W0108': ('Lambda may not be necessary',
- 'unnecessary-lambda',
- 'Used when the body of a lambda expression is a function call '
- 'on the same argument list as the lambda itself; such lambda '
- 'expressions are in all but a few cases replaceable with the '
- 'function being called in the body of the lambda.'),
- 'W0109': ("Duplicate key %r in dictionary",
- 'duplicate-key',
- 'Used when a dictionary expression binds the same key multiple '
- 'times.'),
- 'W0122': ('Use of exec',
- 'exec-used',
- 'Used when you use the "exec" statement (function for Python '
- '3), to discourage its usage. That doesn\'t '
- 'mean you cannot use it !'),
- 'W0123': ('Use of eval',
- 'eval-used',
- 'Used when you use the "eval" function, to discourage its '
- 'usage. Consider using `ast.literal_eval` for safely evaluating '
- 'strings containing Python expressions '
- 'from untrusted sources. '),
- 'W0150': ("%s statement in finally block may swallow exception",
- 'lost-exception',
- 'Used when a break or a return statement is found inside the '
- 'finally clause of a try...finally block: the exceptions raised '
- 'in the try clause will be silently swallowed instead of being '
- 're-raised.'),
- 'W0199': ('Assert called on a 2-uple. Did you mean \'assert x,y\'?',
- 'assert-on-tuple',
- 'A call of assert on a tuple will always evaluate to true if '
- 'the tuple is not empty, and will always evaluate to false if '
- 'it is.'),
- 'W0124': ('Following "as" with another context manager looks like a tuple.',
- 'confusing-with-statement',
- 'Emitted when a `with` statement component returns multiple values '
- 'and uses name binding with `as` only for a part of those values, '
- 'as in with ctx() as a, b. This can be misleading, since it\'s not '
- 'clear if the context manager returns a tuple or if the node without '
- 'a name binding is another context manager.'),
- 'W0125': ('Using a conditional statement with a constant value',
- 'using-constant-test',
- 'Emitted when a conditional statement (If or ternary if) '
- 'uses a constant value for its test. This might not be what '
- 'the user intended to do.'),
- 'E0111': ('The first reversed() argument is not a sequence',
- 'bad-reversed-sequence',
- 'Used when the first argument to reversed() builtin '
- 'isn\'t a sequence (does not implement __reversed__, '
- 'nor __getitem__ and __len__'),
- 'E0119': ('format function is not called on str',
- 'misplaced-format-function',
- 'Emitted when format function is not called on str object. '
- 'e.g doing print("value: {}").format(123) instead of '
- 'print("value: {}".format(123)). This might not be what the user '
- 'intended to do.'),
+ "W0101": (
+ "Unreachable code",
+ "unreachable",
+ 'Used when there is some code behind a "return" or "raise" '
+ "statement, which will never be accessed.",
+ ),
+ "W0102": (
+ "Dangerous default value %s as argument",
+ "dangerous-default-value",
+ "Used when a mutable value as list or dictionary is detected in "
+ "a default value for an argument.",
+ ),
+ "W0104": (
+ "Statement seems to have no effect",
+ "pointless-statement",
+ "Used when a statement doesn't have (or at least seems to) " "any effect.",
+ ),
+ "W0105": (
+ "String statement has no effect",
+ "pointless-string-statement",
+ "Used when a string is used as a statement (which of course "
+ "has no effect). This is a particular case of W0104 with its "
+ "own message so you can easily disable it if you're using "
+ "those strings as documentation, instead of comments.",
+ ),
+ "W0106": (
+ 'Expression "%s" is assigned to nothing',
+ "expression-not-assigned",
+ "Used when an expression that is not a function call is assigned "
+ "to nothing. Probably something else was intended.",
+ ),
+ "W0108": (
+ "Lambda may not be necessary",
+ "unnecessary-lambda",
+ "Used when the body of a lambda expression is a function call "
+ "on the same argument list as the lambda itself; such lambda "
+ "expressions are in all but a few cases replaceable with the "
+ "function being called in the body of the lambda.",
+ ),
+ "W0109": (
+ "Duplicate key %r in dictionary",
+ "duplicate-key",
+ "Used when a dictionary expression binds the same key multiple " "times.",
+ ),
+ "W0122": (
+ "Use of exec",
+ "exec-used",
+ 'Used when you use the "exec" statement (function for Python '
+ "3), to discourage its usage. That doesn't "
+ "mean you cannot use it !",
+ ),
+ "W0123": (
+ "Use of eval",
+ "eval-used",
+ 'Used when you use the "eval" function, to discourage its '
+ "usage. Consider using `ast.literal_eval` for safely evaluating "
+ "strings containing Python expressions "
+ "from untrusted sources. ",
+ ),
+ "W0150": (
+ "%s statement in finally block may swallow exception",
+ "lost-exception",
+ "Used when a break or a return statement is found inside the "
+ "finally clause of a try...finally block: the exceptions raised "
+ "in the try clause will be silently swallowed instead of being "
+ "re-raised.",
+ ),
+ "W0199": (
+ "Assert called on a 2-uple. Did you mean 'assert x,y'?",
+ "assert-on-tuple",
+ "A call of assert on a tuple will always evaluate to true if "
+ "the tuple is not empty, and will always evaluate to false if "
+ "it is.",
+ ),
+ "W0124": (
+ 'Following "as" with another context manager looks like a tuple.',
+ "confusing-with-statement",
+ "Emitted when a `with` statement component returns multiple values "
+ "and uses name binding with `as` only for a part of those values, "
+ "as in with ctx() as a, b. This can be misleading, since it's not "
+ "clear if the context manager returns a tuple or if the node without "
+ "a name binding is another context manager.",
+ ),
+ "W0125": (
+ "Using a conditional statement with a constant value",
+ "using-constant-test",
+ "Emitted when a conditional statement (If or ternary if) "
+ "uses a constant value for its test. This might not be what "
+ "the user intended to do.",
+ ),
+ "E0111": (
+ "The first reversed() argument is not a sequence",
+ "bad-reversed-sequence",
+ "Used when the first argument to reversed() builtin "
+ "isn't a sequence (does not implement __reversed__, "
+ "nor __getitem__ and __len__",
+ ),
+ "E0119": (
+ "format function is not called on str",
+ "misplaced-format-function",
+ "Emitted when format function is not called on str object. "
+ 'e.g doing print("value: {}").format(123) instead of '
+ 'print("value: {}".format(123)). This might not be what the user '
+ "intended to do.",
+ ),
}
- reports = (('RP0101', 'Statistics by type', report_by_type_stats),)
+ reports = (("RP0101", "Statistics by type", report_by_type_stats),)
def __init__(self, linter):
_BasicChecker.__init__(self, linter)
@@ -871,18 +987,17 @@ class BasicChecker(_BasicChecker):
"""initialize visit variables and statistics
"""
self._tryfinallys = []
- self.stats = self.linter.add_stats(module=0, function=0,
- method=0, class_=0)
+ self.stats = self.linter.add_stats(module=0, function=0, method=0, class_=0)
- @utils.check_messages('using-constant-test')
+ @utils.check_messages("using-constant-test")
def visit_if(self, node):
self._check_using_constant_test(node, node.test)
- @utils.check_messages('using-constant-test')
+ @utils.check_messages("using-constant-test")
def visit_ifexp(self, node):
self._check_using_constant_test(node, node.test)
- @utils.check_messages('using-constant-test')
+ @utils.check_messages("using-constant-test")
def visit_comprehension(self, node):
if node.ifs:
for if_test in node.ifs:
@@ -892,9 +1007,14 @@ class BasicChecker(_BasicChecker):
const_nodes = (
astroid.Module,
astroid.scoped_nodes.GeneratorExp,
- astroid.Lambda, astroid.FunctionDef, astroid.ClassDef,
- astroid.bases.Generator, astroid.UnboundMethod,
- astroid.BoundMethod, astroid.Module)
+ astroid.Lambda,
+ astroid.FunctionDef,
+ astroid.ClassDef,
+ astroid.bases.Generator,
+ astroid.UnboundMethod,
+ astroid.BoundMethod,
+ astroid.Module,
+ )
structs = (astroid.Dict, astroid.Tuple, astroid.Set)
# These nodes are excepted, since they are not constant
@@ -903,30 +1023,36 @@ class BasicChecker(_BasicChecker):
# Attribute, which is excepted because the conditional statement
# can be used to verify that the attribute was set inside a class,
# which is definitely a valid use case.
- except_nodes = (astroid.Attribute, astroid.Call,
- astroid.BinOp, astroid.BoolOp, astroid.UnaryOp,
- astroid.Subscript)
+ except_nodes = (
+ astroid.Attribute,
+ astroid.Call,
+ astroid.BinOp,
+ astroid.BoolOp,
+ astroid.UnaryOp,
+ astroid.Subscript,
+ )
inferred = None
- emit = isinstance(test, (astroid.Const, ) + structs + const_nodes)
+ emit = isinstance(test, (astroid.Const,) + structs + const_nodes)
if not isinstance(test, except_nodes):
inferred = utils.safe_infer(test)
if emit or isinstance(inferred, const_nodes):
- self.add_message('using-constant-test', node=node)
+ self.add_message("using-constant-test", node=node)
def visit_module(self, _):
"""check module name, docstring and required arguments
"""
- self.stats['module'] += 1
+ self.stats["module"] += 1
- def visit_classdef(self, node): # pylint: disable=unused-argument
+ def visit_classdef(self, node): # pylint: disable=unused-argument
"""check module name, docstring and redefinition
increment branch counter
"""
- self.stats['class'] += 1
+ self.stats["class"] += 1
- @utils.check_messages('pointless-statement', 'pointless-string-statement',
- 'expression-not-assigned')
+ @utils.check_messages(
+ "pointless-statement", "pointless-string-statement", "expression-not-assigned"
+ )
def visit_expr(self, node):
"""check for various kind of statements without effect"""
expr = node.value
@@ -936,15 +1062,20 @@ class BasicChecker(_BasicChecker):
# An attribute docstring is defined as being a string right after
# an assignment at the module level, class level or __init__ level.
scope = expr.scope()
- if isinstance(scope, (astroid.ClassDef, astroid.Module, astroid.FunctionDef)):
- if isinstance(scope, astroid.FunctionDef) and scope.name != '__init__':
+ if isinstance(
+ scope, (astroid.ClassDef, astroid.Module, astroid.FunctionDef)
+ ):
+ if isinstance(scope, astroid.FunctionDef) and scope.name != "__init__":
pass
else:
sibling = expr.previous_sibling()
- if (sibling is not None and sibling.scope() is scope and
- isinstance(sibling, (astroid.Assign, astroid.AnnAssign))):
+ if (
+ sibling is not None
+ and sibling.scope() is scope
+ and isinstance(sibling, (astroid.Assign, astroid.AnnAssign))
+ ):
return
- self.add_message('pointless-string-statement', node=node)
+ self.add_message("pointless-string-statement", node=node)
return
# ignore if this is :
# * a direct function call
@@ -952,15 +1083,16 @@ class BasicChecker(_BasicChecker):
# * a yield (which are wrapped by a discard node in _ast XXX)
# warn W0106 if we have any underlying function call (we can't predict
# side effects), else pointless-statement
- if (isinstance(expr, (astroid.Yield, astroid.Await, astroid.Call)) or
- (isinstance(node.parent, astroid.TryExcept) and
- node.parent.body == [node])):
+ if isinstance(expr, (astroid.Yield, astroid.Await, astroid.Call)) or (
+ isinstance(node.parent, astroid.TryExcept) and node.parent.body == [node]
+ ):
return
if any(expr.nodes_of_class(astroid.Call)):
- self.add_message('expression-not-assigned', node=node,
- args=expr.as_string())
+ self.add_message(
+ "expression-not-assigned", node=node, args=expr.as_string()
+ )
else:
- self.add_message('pointless-statement', node=node)
+ self.add_message("pointless-statement", node=node)
@staticmethod
def _filter_vararg(node, call_args):
@@ -968,8 +1100,10 @@ class BasicChecker(_BasicChecker):
# not passed as vararg.
for arg in call_args:
if isinstance(arg, astroid.Starred):
- if (isinstance(arg.value, astroid.Name)
- and arg.value.name != node.args.vararg):
+ if (
+ isinstance(arg.value, astroid.Name)
+ and arg.value.name != node.args.vararg
+ ):
yield arg
else:
yield arg
@@ -986,7 +1120,7 @@ class BasicChecker(_BasicChecker):
return True
return False
- @utils.check_messages('unnecessary-lambda')
+ @utils.check_messages("unnecessary-lambda")
def visit_lambda(self, node):
"""check whether or not the lambda is suspicious
"""
@@ -1005,8 +1139,9 @@ class BasicChecker(_BasicChecker):
# The body of the lambda must be a function call expression
# for the lambda to be unnecessary.
return
- if (isinstance(node.body.func, astroid.Attribute) and
- isinstance(node.body.func.expr, astroid.Call)):
+ if isinstance(node.body.func, astroid.Attribute) and isinstance(
+ node.body.func.expr, astroid.Call
+ ):
# Chained call, the intermediate call might
# return something else (but we don't check that, yet).
return
@@ -1035,32 +1170,31 @@ class BasicChecker(_BasicChecker):
if arg.name != passed_arg.name:
return
- self.add_message('unnecessary-lambda', line=node.fromlineno,
- node=node)
+ self.add_message("unnecessary-lambda", line=node.fromlineno, node=node)
- @utils.check_messages('dangerous-default-value')
+ @utils.check_messages("dangerous-default-value")
def visit_functiondef(self, node):
"""check function name, docstring, arguments, redefinition,
variable names, max locals
"""
- self.stats[node.is_method() and 'method' or 'function'] += 1
+ self.stats[node.is_method() and "method" or "function"] += 1
self._check_dangerous_default(node)
visit_asyncfunctiondef = visit_functiondef
def _check_dangerous_default(self, node):
# check for dangerous default values as arguments
- is_iterable = lambda n: isinstance(n, (astroid.List,
- astroid.Set,
- astroid.Dict))
+ is_iterable = lambda n: isinstance(n, (astroid.List, astroid.Set, astroid.Dict))
for default in node.args.defaults:
try:
value = next(default.infer())
except astroid.InferenceError:
continue
- if (isinstance(value, astroid.Instance) and
- value.qname() in DEFAULT_ARGUMENT_SYMBOLS):
+ if (
+ isinstance(value, astroid.Instance)
+ and value.qname() in DEFAULT_ARGUMENT_SYMBOLS
+ ):
if value is default:
msg = DEFAULT_ARGUMENT_SYMBOLS[value.qname()]
@@ -1075,18 +1209,18 @@ class BasicChecker(_BasicChecker):
if is_iterable(default):
msg = value.pytype()
elif isinstance(default, astroid.Call):
- msg = '%s() (%s)' % (value.name, value.qname())
+ msg = "%s() (%s)" % (value.name, value.qname())
else:
- msg = '%s (%s)' % (default.as_string(), value.qname())
+ msg = "%s (%s)" % (default.as_string(), value.qname())
else:
# this argument is a name
- msg = '%s (%s)' % (default.as_string(),
- DEFAULT_ARGUMENT_SYMBOLS[value.qname()])
- self.add_message('dangerous-default-value',
- node=node,
- args=(msg, ))
+ msg = "%s (%s)" % (
+ default.as_string(),
+ DEFAULT_ARGUMENT_SYMBOLS[value.qname()],
+ )
+ self.add_message("dangerous-default-value", node=node, args=(msg,))
- @utils.check_messages('unreachable', 'lost-exception')
+ @utils.check_messages("unreachable", "lost-exception")
def visit_return(self, node):
"""1 - check is the node has a right sibling (if so, that's some
unreachable code)
@@ -1095,16 +1229,16 @@ class BasicChecker(_BasicChecker):
"""
self._check_unreachable(node)
# Is it inside the final body of a try...finally block?
- self._check_not_in_finally(node, 'return', (astroid.FunctionDef,))
+ self._check_not_in_finally(node, "return", (astroid.FunctionDef,))
- @utils.check_messages('unreachable')
+ @utils.check_messages("unreachable")
def visit_continue(self, node):
"""check is the node has a right sibling (if so, that's some unreachable
code)
"""
self._check_unreachable(node)
- @utils.check_messages('unreachable', 'lost-exception')
+ @utils.check_messages("unreachable", "lost-exception")
def visit_break(self, node):
"""1 - check is the node has a right sibling (if so, that's some
unreachable code)
@@ -1114,24 +1248,24 @@ class BasicChecker(_BasicChecker):
# 1 - Does it have a right sibling?
self._check_unreachable(node)
# 2 - Is it inside the final body of a try...finally block?
- self._check_not_in_finally(node, 'break', (astroid.For, astroid.While,))
+ self._check_not_in_finally(node, "break", (astroid.For, astroid.While))
- @utils.check_messages('unreachable')
+ @utils.check_messages("unreachable")
def visit_raise(self, node):
"""check if the node has a right sibling (if so, that's some unreachable
code)
"""
self._check_unreachable(node)
- @utils.check_messages('exec-used')
+ @utils.check_messages("exec-used")
def visit_exec(self, node):
"""just print a warning on exec statements"""
- self.add_message('exec-used', node=node)
+ self.add_message("exec-used", node=node)
def _check_misplaced_format_function(self, call_node):
if not isinstance(call_node.func, astroid.Attribute):
return
- if call_node.func.attrname != 'format':
+ if call_node.func.attrname != "format":
return
expr = utils.safe_infer(call_node.func.expr)
@@ -1143,11 +1277,15 @@ class BasicChecker(_BasicChecker):
call_expr = call_node.func.expr
if not isinstance(call_expr, astroid.Call):
return
- if isinstance(call_expr.func, astroid.Name) and call_expr.func.name == 'print':
- self.add_message('misplaced-format-function', node=call_node)
-
- @utils.check_messages('eval-used', 'exec-used', 'bad-reversed-sequence',
- 'misplaced-format-function')
+ if (
+ isinstance(call_expr.func, astroid.Name)
+ and call_expr.func.name == "print"
+ ):
+ self.add_message("misplaced-format-function", node=call_node)
+
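Illustration (hypothetical): the call shape _check_misplaced_format_function looks for, .format() applied to the return value of print() rather than to the string itself.

    # print("value: {}").format(123)   # flagged: formats print()'s return value,
    #                                  # which is None, and fails at runtime
    print("value: {}".format(123))     # intended: format the string, then print it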
+ @utils.check_messages(
+ "eval-used", "exec-used", "bad-reversed-sequence", "misplaced-format-function"
+ )
def visit_call(self, node):
"""visit a Call node -> check if this is not a blacklisted builtin
call and check for * or ** use
@@ -1157,23 +1295,25 @@ class BasicChecker(_BasicChecker):
name = node.func.name
# ignore the name if it's not a builtin (i.e. not defined in the
# locals nor globals scope)
- if not (name in node.frame() or
- name in node.root()):
- if name == 'exec':
- self.add_message('exec-used', node=node)
- elif name == 'reversed':
+ if not (name in node.frame() or name in node.root()):
+ if name == "exec":
+ self.add_message("exec-used", node=node)
+ elif name == "reversed":
self._check_reversed(node)
- elif name == 'eval':
- self.add_message('eval-used', node=node)
+ elif name == "eval":
+ self.add_message("eval-used", node=node)
- @utils.check_messages('assert-on-tuple')
+ @utils.check_messages("assert-on-tuple")
def visit_assert(self, node):
"""check the use of an assert statement on a tuple."""
- if node.fail is None and isinstance(node.test, astroid.Tuple) and \
- len(node.test.elts) == 2:
- self.add_message('assert-on-tuple', node=node)
-
- @utils.check_messages('duplicate-key')
+ if (
+ node.fail is None
+ and isinstance(node.test, astroid.Tuple)
+ and len(node.test.elts) == 2
+ ):
+ self.add_message("assert-on-tuple", node=node)
+
+ @utils.check_messages("duplicate-key")
def visit_dict(self, node):
"""check duplicate key in dictionary"""
keys = set()
@@ -1181,14 +1321,14 @@ class BasicChecker(_BasicChecker):
if isinstance(k, astroid.Const):
key = k.value
if key in keys:
- self.add_message('duplicate-key', node=node, args=key)
+ self.add_message("duplicate-key", node=node, args=key)
keys.add(key)
def visit_tryfinally(self, node):
"""update try...finally flag"""
self._tryfinallys.append(node)
- def leave_tryfinally(self, node): # pylint: disable=unused-argument
+ def leave_tryfinally(self, node): # pylint: disable=unused-argument
"""update try...finally flag"""
self._tryfinallys.pop()
@@ -1196,7 +1336,7 @@ class BasicChecker(_BasicChecker):
"""check unreachable code"""
unreach_stmt = node.next_sibling()
if unreach_stmt is not None:
- self.add_message('unreachable', node=unreach_stmt)
+ self.add_message("unreachable", node=unreach_stmt)
def _check_not_in_finally(self, node, node_name, breaker_classes=()):
"""check that a node is not inside a finally clause of a
@@ -1210,8 +1350,8 @@ class BasicChecker(_BasicChecker):
_parent = node.parent
_node = node
while _parent and not isinstance(_parent, breaker_classes):
- if hasattr(_parent, 'finalbody') and _node in _parent.finalbody:
- self.add_message('lost-exception', node=node, args=node_name)
+ if hasattr(_parent, "finalbody") and _node in _parent.finalbody:
+ self.add_message("lost-exception", node=node, args=node_name)
return
_node = _parent
_parent = _node.parent
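Illustration (hypothetical function): both checks above in one place. The statement after return is dead code, and returning from the finally clause discards any in-flight exception.

    def read_size(path):
        try:
            with open(path) as handle:
                return len(handle.read())
                print("never runs")      # unreachable: dead code after return
        finally:
            return 0                      # lost-exception: swallows any error raised above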
@@ -1233,24 +1373,28 @@ class BasicChecker(_BasicChecker):
func = next(node.args[0].func.infer())
except astroid.InferenceError:
return
- if (getattr(func, 'name', None) == 'iter' and
- utils.is_builtin_object(func)):
- self.add_message('bad-reversed-sequence', node=node)
+ if getattr(
+ func, "name", None
+ ) == "iter" and utils.is_builtin_object(func):
+ self.add_message("bad-reversed-sequence", node=node)
return
if isinstance(argument, astroid.Instance):
- if (argument._proxied.name == 'dict' and
- utils.is_builtin_object(argument._proxied)):
- self.add_message('bad-reversed-sequence', node=node)
+ if argument._proxied.name == "dict" and utils.is_builtin_object(
+ argument._proxied
+ ):
+ self.add_message("bad-reversed-sequence", node=node)
return
- if any(ancestor.name == 'dict' and utils.is_builtin_object(ancestor)
- for ancestor in argument._proxied.ancestors()):
+ if any(
+ ancestor.name == "dict" and utils.is_builtin_object(ancestor)
+ for ancestor in argument._proxied.ancestors()
+ ):
# Mappings aren't accepted by reversed(), unless
# they provide explicitly a __reversed__ method.
try:
argument.locals[REVERSED_PROTOCOL_METHOD]
except KeyError:
- self.add_message('bad-reversed-sequence', node=node)
+ self.add_message("bad-reversed-sequence", node=node)
return
for methods in REVERSED_METHODS:
@@ -1262,12 +1406,12 @@ class BasicChecker(_BasicChecker):
else:
break
else:
- self.add_message('bad-reversed-sequence', node=node)
+ self.add_message("bad-reversed-sequence", node=node)
elif not isinstance(argument, (astroid.List, astroid.Tuple)):
# everything else is not a proper sequence for reversed()
- self.add_message('bad-reversed-sequence', node=node)
+ self.add_message("bad-reversed-sequence", node=node)
- @utils.check_messages('confusing-with-statement')
+ @utils.check_messages("confusing-with-statement")
def visit_with(self, node):
if not PY3K:
# in Python 2 a "with" statement with multiple managers corresponds
@@ -1285,33 +1429,42 @@ class BasicChecker(_BasicChecker):
pairs = node.items
if pairs:
for prev_pair, pair in zip(pairs, pairs[1:]):
- if (isinstance(prev_pair[1], astroid.AssignName) and
- (pair[1] is None and not isinstance(pair[0], astroid.Call))):
+ if isinstance(prev_pair[1], astroid.AssignName) and (
+ pair[1] is None and not isinstance(pair[0], astroid.Call)
+ ):
# don't emit a message if the second is a function call
# there's no way that can be mistaken for a name assignment
if PY3K or node.lineno == node.parent.lineno:
# if the line number doesn't match
# we assume it's a nested "with"
- self.add_message('confusing-with-statement', node=node)
+ self.add_message("confusing-with-statement", node=node)
KNOWN_NAME_TYPES = {
- "module", "const", "class", "function", "method", "attr",
- "argument", "variable", "class_attribute", "inlinevar"
+ "module",
+ "const",
+ "class",
+ "function",
+ "method",
+ "attr",
+ "argument",
+ "variable",
+ "class_attribute",
+ "inlinevar",
}
HUMAN_READABLE_TYPES = {
- 'module': 'module',
- 'const': 'constant',
- 'class': 'class',
- 'function': 'function',
- 'method': 'method',
- 'attr': 'attribute',
- 'argument': 'argument',
- 'variable': 'variable',
- 'class_attribute': 'class attribute',
- 'inlinevar': 'inline iteration',
+ "module": "module",
+ "const": "constant",
+ "class": "class",
+ "function": "function",
+ "method": "method",
+ "attr": "attribute",
+ "argument": "argument",
+ "variable": "variable",
+ "class_attribute": "class attribute",
+ "inlinevar": "inline iteration",
}
DEFAULT_NAMING_STYLES = {
@@ -1333,74 +1486,115 @@ def _create_naming_options():
for name_type in sorted(KNOWN_NAME_TYPES):
human_readable_name = HUMAN_READABLE_TYPES[name_type]
default_style = DEFAULT_NAMING_STYLES[name_type]
- name_type = name_type.replace('_', '-')
- name_options.append((
- '%s-naming-style' % (name_type,),
- {'default': default_style,
- 'type': 'choice', 'choices': list(NAMING_STYLES.keys()), 'metavar': '<style>',
- 'help': 'Naming style matching correct %s names.' % (human_readable_name,)}),)
- name_options.append((
- '%s-rgx' % (name_type,),
- {'default': None, 'type': 'regexp', 'metavar': '<regexp>',
- 'help': 'Regular expression matching correct %s names. Overrides %s-naming-style.'
- % (human_readable_name, name_type,)}))
+ name_type = name_type.replace("_", "-")
+ name_options.append(
+ (
+ "%s-naming-style" % (name_type,),
+ {
+ "default": default_style,
+ "type": "choice",
+ "choices": list(NAMING_STYLES.keys()),
+ "metavar": "<style>",
+ "help": "Naming style matching correct %s names."
+ % (human_readable_name,),
+ },
+ )
+ )
+ name_options.append(
+ (
+ "%s-rgx" % (name_type,),
+ {
+ "default": None,
+ "type": "regexp",
+ "metavar": "<regexp>",
+ "help": "Regular expression matching correct %s names. Overrides %s-naming-style."
+ % (human_readable_name, name_type),
+ },
+ )
+ )
return tuple(name_options)
class NameChecker(_BasicChecker):
msgs = {
- 'C0102': ('Black listed name "%s"',
- 'blacklisted-name',
- 'Used when the name is listed in the black list (unauthorized '
- 'names).'),
- 'C0103': ('%s name "%s" doesn\'t conform to %s',
- 'invalid-name',
- 'Used when the name doesn\'t conform to naming rules '
- 'associated to its type (constant, variable, class...).'),
- 'W0111': ('Name %s will become a keyword in Python %s',
- 'assign-to-new-keyword',
- 'Used when assignment will become invalid in future '
- 'Python release due to introducing new keyword.'),
+ "C0102": (
+ 'Black listed name "%s"',
+ "blacklisted-name",
+ "Used when the name is listed in the black list (unauthorized " "names).",
+ ),
+ "C0103": (
+ '%s name "%s" doesn\'t conform to %s',
+ "invalid-name",
+ "Used when the name doesn't conform to naming rules "
+ "associated to its type (constant, variable, class...).",
+ ),
+ "W0111": (
+ "Name %s will become a keyword in Python %s",
+ "assign-to-new-keyword",
+ "Used when assignment will become invalid in future "
+ "Python release due to introducing new keyword.",
+ ),
}
- options = (('good-names',
- {'default' : ('i', 'j', 'k', 'ex', 'Run', '_'),
- 'type' :'csv', 'metavar' : '<names>',
- 'help' : 'Good variable names which should always be accepted,'
- ' separated by a comma.'}
- ),
- ('bad-names',
- {'default' : ('foo', 'bar', 'baz', 'toto', 'tutu', 'tata'),
- 'type' :'csv', 'metavar' : '<names>',
- 'help' : 'Bad variable names which should always be refused, '
- 'separated by a comma.'}
- ),
- ('name-group',
- {'default' : (),
- 'type' :'csv', 'metavar' : '<name1:name2>',
- 'help' : ('Colon-delimited sets of names that determine each'
- ' other\'s naming style when the name regexes'
- ' allow several styles.')}
- ),
- ('include-naming-hint',
- {'default': False, 'type': 'yn', 'metavar': '<y_or_n>',
- 'help': 'Include a hint for the correct naming format with invalid-name.'}
- ),
- ('property-classes',
- {'default': ('abc.abstractproperty',),
- 'type': 'csv',
- 'metavar': '<decorator names>',
- 'help': 'List of decorators that produce properties, such as '
- 'abc.abstractproperty. Add to this list to register '
- 'other decorators that produce valid properties. '
- 'These decorators are taken in consideration only for invalid-name.'}
- ),
- ) + _create_naming_options()
-
- KEYWORD_ONSET = {
- (3, 7): {'async', 'await'}
- }
+ options = (
+ (
+ "good-names",
+ {
+ "default": ("i", "j", "k", "ex", "Run", "_"),
+ "type": "csv",
+ "metavar": "<names>",
+ "help": "Good variable names which should always be accepted,"
+ " separated by a comma.",
+ },
+ ),
+ (
+ "bad-names",
+ {
+ "default": ("foo", "bar", "baz", "toto", "tutu", "tata"),
+ "type": "csv",
+ "metavar": "<names>",
+ "help": "Bad variable names which should always be refused, "
+ "separated by a comma.",
+ },
+ ),
+ (
+ "name-group",
+ {
+ "default": (),
+ "type": "csv",
+ "metavar": "<name1:name2>",
+ "help": (
+ "Colon-delimited sets of names that determine each"
+ " other's naming style when the name regexes"
+ " allow several styles."
+ ),
+ },
+ ),
+ (
+ "include-naming-hint",
+ {
+ "default": False,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": "Include a hint for the correct naming format with invalid-name.",
+ },
+ ),
+ (
+ "property-classes",
+ {
+ "default": ("abc.abstractproperty",),
+ "type": "csv",
+ "metavar": "<decorator names>",
+ "help": "List of decorators that produce properties, such as "
+ "abc.abstractproperty. Add to this list to register "
+ "other decorators that produce valid properties. "
+ "These decorators are taken in consideration only for invalid-name.",
+ },
+ ),
+ ) + _create_naming_options()
+
+ KEYWORD_ONSET = {(3, 7): {"async", "await"}}
def __init__(self, linter):
_BasicChecker.__init__(self, linter)
@@ -1411,17 +1605,21 @@ class NameChecker(_BasicChecker):
self._name_hints = {}
def open(self):
- self.stats = self.linter.add_stats(badname_module=0,
- badname_class=0, badname_function=0,
- badname_method=0, badname_attr=0,
- badname_const=0,
- badname_variable=0,
- badname_inlinevar=0,
- badname_argument=0,
- badname_class_attribute=0)
+ self.stats = self.linter.add_stats(
+ badname_module=0,
+ badname_class=0,
+ badname_function=0,
+ badname_method=0,
+ badname_attr=0,
+ badname_const=0,
+ badname_variable=0,
+ badname_inlinevar=0,
+ badname_argument=0,
+ badname_class_attribute=0,
+ )
for group in self.config.name_group:
- for name_type in group.split(':'):
- self._name_group[name_type] = 'group_%s' % (group,)
+ for name_type in group.split(":"):
+ self._name_group[name_type] = "group_%s" % (group,)
regexps, hints = self._create_naming_rules()
self._name_regexps = regexps
@@ -1437,7 +1635,7 @@ class NameChecker(_BasicChecker):
regexps[name_type] = NAMING_STYLES[naming_style_name].get_regex(name_type)
- custom_regex_setting_name = "%s_rgx" % (name_type, )
+ custom_regex_setting_name = "%s_rgx" % (name_type,)
custom_regex = getattr(self.config, custom_regex_setting_name, None)
if custom_regex is not None:
regexps[name_type] = custom_regex
@@ -1449,12 +1647,12 @@ class NameChecker(_BasicChecker):
return regexps, hints
- @utils.check_messages('blacklisted-name', 'invalid-name')
+ @utils.check_messages("blacklisted-name", "invalid-name")
def visit_module(self, node):
- self._check_name('module', node.name.split('.')[-1], node)
+ self._check_name("module", node.name.split(".")[-1], node)
self._bad_names = {}
- def leave_module(self, node): # pylint: disable=unused-argument
+ def leave_module(self, node): # pylint: disable=unused-argument
for all_groups in self._bad_names.values():
if len(all_groups) < 2:
continue
@@ -1464,23 +1662,25 @@ class NameChecker(_BasicChecker):
groups[len(group)].append(group)
min_warnings = min(len(group), min_warnings)
if len(groups[min_warnings]) > 1:
- by_line = sorted(groups[min_warnings],
- key=lambda group: min(warning[0].lineno for warning in group))
+ by_line = sorted(
+ groups[min_warnings],
+ key=lambda group: min(warning[0].lineno for warning in group),
+ )
warnings = itertools.chain(*by_line[1:])
else:
warnings = groups[min_warnings][0]
for args in warnings:
self._raise_name_warning(*args)
- @utils.check_messages('blacklisted-name', 'invalid-name', 'assign-to-new-keyword')
+ @utils.check_messages("blacklisted-name", "invalid-name", "assign-to-new-keyword")
def visit_classdef(self, node):
self._check_assign_to_new_keyword_violation(node.name, node)
- self._check_name('class', node.name, node)
+ self._check_name("class", node.name, node)
for attr, anodes in node.instance_attrs.items():
if not any(node.instance_attr_ancestors(attr)):
- self._check_name('attr', attr, anodes[0])
+ self._check_name("attr", attr, anodes[0])
- @utils.check_messages('blacklisted-name', 'invalid-name', 'assign-to-new-keyword')
+ @utils.check_messages("blacklisted-name", "invalid-name", "assign-to-new-keyword")
def visit_functiondef(self, node):
# Do not emit any warnings if the method is just an implementation
# of a base class method.
@@ -1489,12 +1689,18 @@ class NameChecker(_BasicChecker):
if node.is_method():
if utils.overrides_a_method(node.parent.frame(), node.name):
return
- confidence = (interfaces.INFERENCE if utils.has_known_bases(node.parent.frame())
- else interfaces.INFERENCE_FAILURE)
-
- self._check_name(_determine_function_name_type(node,
- config=self.config),
- node.name, node, confidence)
+ confidence = (
+ interfaces.INFERENCE
+ if utils.has_known_bases(node.parent.frame())
+ else interfaces.INFERENCE_FAILURE
+ )
+
+ self._check_name(
+ _determine_function_name_type(node, config=self.config),
+ node.name,
+ node,
+ confidence,
+ )
# Check argument names
args = node.args.args
if args is not None:
@@ -1502,44 +1708,44 @@ class NameChecker(_BasicChecker):
visit_asyncfunctiondef = visit_functiondef
- @utils.check_messages('blacklisted-name', 'invalid-name')
+ @utils.check_messages("blacklisted-name", "invalid-name")
def visit_global(self, node):
for name in node.names:
- self._check_name('const', name, node)
+ self._check_name("const", name, node)
- @utils.check_messages('blacklisted-name', 'invalid-name', 'assign-to-new-keyword')
+ @utils.check_messages("blacklisted-name", "invalid-name", "assign-to-new-keyword")
def visit_assignname(self, node):
"""check module level assigned names"""
self._check_assign_to_new_keyword_violation(node.name, node)
frame = node.frame()
assign_type = node.assign_type()
if isinstance(assign_type, astroid.Comprehension):
- self._check_name('inlinevar', node.name, node)
+ self._check_name("inlinevar", node.name, node)
elif isinstance(frame, astroid.Module):
if isinstance(assign_type, astroid.Assign) and not in_loop(assign_type):
if isinstance(utils.safe_infer(assign_type.value), astroid.ClassDef):
- self._check_name('class', node.name, node)
+ self._check_name("class", node.name, node)
else:
if not _redefines_import(node):
# Don't emit if the name redefines an import
# in an ImportError except handler.
- self._check_name('const', node.name, node)
+ self._check_name("const", node.name, node)
elif isinstance(assign_type, astroid.ExceptHandler):
- self._check_name('variable', node.name, node)
+ self._check_name("variable", node.name, node)
elif isinstance(frame, astroid.FunctionDef):
# variables introduced via the global statement aren't in the function locals
if node.name in frame and node.name not in frame.argnames():
if not _redefines_import(node):
- self._check_name('variable', node.name, node)
+ self._check_name("variable", node.name, node)
elif isinstance(frame, astroid.ClassDef):
if not list(frame.local_attr_ancestors(node.name)):
- self._check_name('class_attribute', node.name, node)
+ self._check_name("class_attribute", node.name, node)
def _recursive_check_names(self, args, node):
"""check names in a possibly recursive list <arg>"""
for arg in args:
if isinstance(arg, astroid.AssignName):
- self._check_name('argument', arg.name, node)
+ self._check_name("argument", arg.name, node)
else:
self._recursive_check_names(arg.elts, node)
@@ -1551,20 +1757,16 @@ class NameChecker(_BasicChecker):
hint = self._name_hints[node_type]
if self.config.include_naming_hint:
hint += " (%r pattern)" % self._name_regexps[node_type].pattern
- args = (
- type_label.capitalize(),
- name,
- hint
- )
+ args = (type_label.capitalize(), name, hint)
- self.add_message('invalid-name', node=node, args=args,
- confidence=confidence)
- self.stats['badname_' + node_type] += 1
+ self.add_message("invalid-name", node=node, args=args, confidence=confidence)
+ self.stats["badname_" + node_type] += 1
def _check_name(self, node_type, name, node, confidence=interfaces.HIGH):
"""check for a name using the type's regexp"""
+
def _should_exempt_from_invalid_name(node):
- if node_type == 'variable':
+ if node_type == "variable":
inferred = utils.safe_infer(node)
if isinstance(inferred, astroid.ClassDef):
return True
@@ -1577,8 +1779,8 @@ class NameChecker(_BasicChecker):
if name in self.config.good_names:
return
if name in self.config.bad_names:
- self.stats['badname_' + node_type] += 1
- self.add_message('blacklisted-name', node=node, args=name)
+ self.stats["badname_" + node_type] += 1
+ self.add_message("blacklisted-name", node=node, args=name)
return
regexp = self._name_regexps[node_type]
match = regexp.match(name)
@@ -1597,89 +1799,112 @@ class NameChecker(_BasicChecker):
name, self.KEYWORD_ONSET
)
if keyword_first_version is not None:
- self.add_message('assign-to-new-keyword',
- node=node, args=(name, keyword_first_version),
- confidence=interfaces.HIGH)
+ self.add_message(
+ "assign-to-new-keyword",
+ node=node,
+ args=(name, keyword_first_version),
+ confidence=interfaces.HIGH,
+ )
@staticmethod
def _name_became_keyword_in_version(name, rules):
for version, keywords in rules.items():
if name in keywords and sys.version_info < version:
- return '.'.join(map(str, version))
+ return ".".join(map(str, version))
return None
class DocStringChecker(_BasicChecker):
msgs = {
- 'C0111': ('Missing %s docstring', # W0131
- 'missing-docstring',
- 'Used when a module, function, class or method has no docstring.'
- 'Some special methods like __init__ doesn\'t necessary require a '
- 'docstring.'),
- 'C0112': ('Empty %s docstring', # W0132
- 'empty-docstring',
- 'Used when a module, function, class or method has an empty '
- 'docstring (it would be too easy ;).'),
- }
- options = (('no-docstring-rgx',
- {'default' : NO_REQUIRED_DOC_RGX,
- 'type' : 'regexp', 'metavar' : '<regexp>',
- 'help' : 'Regular expression which should only match '
- 'function or class names that do not require a '
- 'docstring.'}
- ),
- ('docstring-min-length',
- {'default' : -1,
- 'type' : 'int', 'metavar' : '<int>',
- 'help': ('Minimum line length for functions/classes that'
- ' require docstrings, shorter ones are exempt.')}
- ),
- )
+ "C0111": (
+ "Missing %s docstring", # W0131
+ "missing-docstring",
+ "Used when a module, function, class or method has no docstring."
+ "Some special methods like __init__ doesn't necessary require a "
+ "docstring.",
+ ),
+ "C0112": (
+ "Empty %s docstring", # W0132
+ "empty-docstring",
+ "Used when a module, function, class or method has an empty "
+ "docstring (it would be too easy ;).",
+ ),
+ }
+ options = (
+ (
+ "no-docstring-rgx",
+ {
+ "default": NO_REQUIRED_DOC_RGX,
+ "type": "regexp",
+ "metavar": "<regexp>",
+ "help": "Regular expression which should only match "
+ "function or class names that do not require a "
+ "docstring.",
+ },
+ ),
+ (
+ "docstring-min-length",
+ {
+ "default": -1,
+ "type": "int",
+ "metavar": "<int>",
+ "help": (
+ "Minimum line length for functions/classes that"
+ " require docstrings, shorter ones are exempt."
+ ),
+ },
+ ),
+ )
def open(self):
- self.stats = self.linter.add_stats(undocumented_module=0,
- undocumented_function=0,
- undocumented_method=0,
- undocumented_class=0)
+ self.stats = self.linter.add_stats(
+ undocumented_module=0,
+ undocumented_function=0,
+ undocumented_method=0,
+ undocumented_class=0,
+ )
- @utils.check_messages('missing-docstring', 'empty-docstring')
+ @utils.check_messages("missing-docstring", "empty-docstring")
def visit_module(self, node):
- self._check_docstring('module', node)
+ self._check_docstring("module", node)
- @utils.check_messages('missing-docstring', 'empty-docstring')
+ @utils.check_messages("missing-docstring", "empty-docstring")
def visit_classdef(self, node):
if self.config.no_docstring_rgx.match(node.name) is None:
- self._check_docstring('class', node)
+ self._check_docstring("class", node)
@staticmethod
def _is_setter_or_deleter(node):
- names = {'setter', 'deleter'}
+ names = {"setter", "deleter"}
for decorator in node.decorators.nodes:
- if (isinstance(decorator, astroid.Attribute)
- and decorator.attrname in names):
+ if isinstance(decorator, astroid.Attribute) and decorator.attrname in names:
return True
return False
- @utils.check_messages('missing-docstring', 'empty-docstring')
+ @utils.check_messages("missing-docstring", "empty-docstring")
def visit_functiondef(self, node):
if self.config.no_docstring_rgx.match(node.name) is None:
- ftype = 'method' if node.is_method() else 'function'
+ ftype = "method" if node.is_method() else "function"
if node.decorators and self._is_setter_or_deleter(node):
return
if isinstance(node.parent.frame(), astroid.ClassDef):
overridden = False
- confidence = (interfaces.INFERENCE if utils.has_known_bases(node.parent.frame())
- else interfaces.INFERENCE_FAILURE)
+ confidence = (
+ interfaces.INFERENCE
+ if utils.has_known_bases(node.parent.frame())
+ else interfaces.INFERENCE_FAILURE
+ )
# check if node is from a method overridden by its ancestor
for ancestor in node.parent.frame().ancestors():
- if node.name in ancestor and \
- isinstance(ancestor[node.name], astroid.FunctionDef):
+ if node.name in ancestor and isinstance(
+ ancestor[node.name], astroid.FunctionDef
+ ):
overridden = True
break
- self._check_docstring(ftype, node,
- report_missing=not overridden,
- confidence=confidence)
+ self._check_docstring(
+ ftype, node, report_missing=not overridden, confidence=confidence
+ )
elif isinstance(node.parent.frame(), astroid.Module):
self._check_docstring(ftype, node)
else:
@@ -1687,8 +1912,9 @@ class DocStringChecker(_BasicChecker):
visit_asyncfunctiondef = visit_functiondef
- def _check_docstring(self, node_type, node, report_missing=True,
- confidence=interfaces.HIGH):
+ def _check_docstring(
+ self, node_type, node, report_missing=True, confidence=interfaces.HIGH
+ ):
"""check the node has a non empty docstring"""
docstring = node.doc
if docstring is None:
@@ -1696,55 +1922,65 @@ class DocStringChecker(_BasicChecker):
return
lines = get_node_last_lineno(node) - node.lineno
- if node_type == 'module' and not lines:
+ if node_type == "module" and not lines:
# If the module has no body, there's no reason
# to require a docstring.
return
max_lines = self.config.docstring_min_length
- if node_type != 'module' and max_lines > -1 and lines < max_lines:
+ if node_type != "module" and max_lines > -1 and lines < max_lines:
return
- self.stats['undocumented_'+node_type] += 1
- if (node.body and isinstance(node.body[0], astroid.Expr) and
- isinstance(node.body[0].value, astroid.Call)):
+ self.stats["undocumented_" + node_type] += 1
+ if (
+ node.body
+ and isinstance(node.body[0], astroid.Expr)
+ and isinstance(node.body[0].value, astroid.Call)
+ ):
# Most likely a string with a format call. Let's see.
func = utils.safe_infer(node.body[0].value.func)
- if (isinstance(func, astroid.BoundMethod)
- and isinstance(func.bound, astroid.Instance)):
+ if isinstance(func, astroid.BoundMethod) and isinstance(
+ func.bound, astroid.Instance
+ ):
# Strings in Python 3, others in Python 2.
- if PY3K and func.bound.name == 'str':
+ if PY3K and func.bound.name == "str":
return
- if func.bound.name in ('str', 'unicode', 'bytes'):
+ if func.bound.name in ("str", "unicode", "bytes"):
return
- self.add_message('missing-docstring', node=node, args=(node_type,),
- confidence=confidence)
+ self.add_message(
+ "missing-docstring", node=node, args=(node_type,), confidence=confidence
+ )
elif not docstring.strip():
- self.stats['undocumented_'+node_type] += 1
- self.add_message('empty-docstring', node=node, args=(node_type,),
- confidence=confidence)
+ self.stats["undocumented_" + node_type] += 1
+ self.add_message(
+ "empty-docstring", node=node, args=(node_type,), confidence=confidence
+ )
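Illustration (hypothetical module-level definitions): the two docstring messages handled above.

    def documented():
        """Explain what the function does."""   # fine

    def undocumented():                          # missing-docstring
        return 1

    def blank_doc():
        """ """                                  # empty-docstring: whitespace only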
class PassChecker(_BasicChecker):
"""check if the pass statement is really necessary"""
- msgs = {'W0107': ('Unnecessary pass statement',
- 'unnecessary-pass',
- 'Used when a "pass" statement that can be avoided is '
- 'encountered.'),
- }
- @utils.check_messages('unnecessary-pass')
+ msgs = {
+ "W0107": (
+ "Unnecessary pass statement",
+ "unnecessary-pass",
+ 'Used when a "pass" statement that can be avoided is ' "encountered.",
+ )
+ }
+
+ @utils.check_messages("unnecessary-pass")
def visit_pass(self, node):
- if (len(node.parent.child_sequence(node)) > 1 or
- (isinstance(node.parent, (astroid.ClassDef, astroid.FunctionDef)) and
- (node.parent.doc is not None))):
- self.add_message('unnecessary-pass', node=node)
+ if len(node.parent.child_sequence(node)) > 1 or (
+ isinstance(node.parent, (astroid.ClassDef, astroid.FunctionDef))
+ and (node.parent.doc is not None)
+ ):
+ self.add_message("unnecessary-pass", node=node)
+
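Illustration (hypothetical): the two branches of the pass check above.

    class Placeholder:
        """Reserved for future use."""
        pass                  # unnecessary-pass: the docstring already forms a body

    def not_implemented_yet():
        pass                  # fine: pass is the only statement, so it is required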
def _is_one_arg_pos_call(call):
"""Is this a call with exactly 1 argument,
where that argument is positional?
"""
- return (isinstance(call, astroid.Call)
- and len(call.args) == 1 and not call.keywords)
+ return isinstance(call, astroid.Call) and len(call.args) == 1 and not call.keywords
class ComparisonChecker(_BasicChecker):
@@ -1755,36 +1991,50 @@ class ComparisonChecker(_BasicChecker):
'<=', '>' or '>=', and right can be a variable, an attribute, a method or
a function
"""
- msgs = {'C0121': ('Comparison to %s should be %s',
- 'singleton-comparison',
- 'Used when an expression is compared to singleton '
- 'values like True, False or None.'),
- 'C0122': ('Comparison should be %s',
- 'misplaced-comparison-constant',
- 'Used when the constant is placed on the left side '
- 'of a comparison. It is usually clearer in intent to '
- 'place it in the right hand side of the comparison.'),
- 'C0123': ('Using type() instead of isinstance() for a typecheck.',
- 'unidiomatic-typecheck',
- 'The idiomatic way to perform an explicit typecheck in '
- 'Python is to use isinstance(x, Y) rather than '
- 'type(x) == Y, type(x) is Y. Though there are unusual '
- 'situations where these give different results.',
- {'old_names': [('W0154', 'unidiomatic-typecheck')]}),
- 'R0123': ('Comparison to literal',
- 'literal-comparison',
- 'Used when comparing an object to a literal, which is usually '
- 'what you do not want to do, since you can compare to a different '
- 'literal than what was expected altogether.'),
- 'R0124': ('Redundant comparison - %s',
- 'comparison-with-itself',
- 'Used when something is compared against itself.'),
- 'W0143': ('Comparing against a callable, did you omit the parenthesis?',
- 'comparison-with-callable',
- 'This message is emitted when pylint detects that a comparison with a '
- 'callable was made, which might suggest that some parenthesis were omitted, '
- 'resulting in potential unwanted behaviour.'),
- }
+
+ msgs = {
+ "C0121": (
+ "Comparison to %s should be %s",
+ "singleton-comparison",
+ "Used when an expression is compared to singleton "
+ "values like True, False or None.",
+ ),
+ "C0122": (
+ "Comparison should be %s",
+ "misplaced-comparison-constant",
+ "Used when the constant is placed on the left side "
+ "of a comparison. It is usually clearer in intent to "
+ "place it in the right hand side of the comparison.",
+ ),
+ "C0123": (
+ "Using type() instead of isinstance() for a typecheck.",
+ "unidiomatic-typecheck",
+ "The idiomatic way to perform an explicit typecheck in "
+ "Python is to use isinstance(x, Y) rather than "
+ "type(x) == Y, type(x) is Y. Though there are unusual "
+ "situations where these give different results.",
+ {"old_names": [("W0154", "unidiomatic-typecheck")]},
+ ),
+ "R0123": (
+ "Comparison to literal",
+ "literal-comparison",
+ "Used when comparing an object to a literal, which is usually "
+ "what you do not want to do, since you can compare to a different "
+ "literal than what was expected altogether.",
+ ),
+ "R0124": (
+ "Redundant comparison - %s",
+ "comparison-with-itself",
+ "Used when something is compared against itself.",
+ ),
+ "W0143": (
+ "Comparing against a callable, did you omit the parenthesis?",
+ "comparison-with-callable",
+ "This message is emitted when pylint detects that a comparison with a "
+ "callable was made, which might suggest that some parenthesis were omitted, "
+ "resulting in potential unwanted behaviour.",
+ ),
+ }
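Illustration (hypothetical snippet, invented names): comparisons that trigger several of the messages defined above.

    def is_ready():
        return True

    flag = is_ready()

    if flag == True:        # singleton-comparison: prefer `if flag:` or `flag is True`
        pass
    if flag == None:        # singleton-comparison: prefer `flag is None`
        pass
    if True == flag:        # misplaced-comparison-constant: constant on the left
        pass
    if flag == is_ready:    # comparison-with-callable: was `is_ready()` intended?
        pass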
def _check_singleton_comparison(self, singleton, root_node, negative_check=False):
if singleton.value is True:
@@ -1792,32 +2042,29 @@ class ComparisonChecker(_BasicChecker):
suggestion = "just 'expr' or 'expr is True'"
else:
suggestion = "just 'not expr' or 'expr is False'"
- self.add_message('singleton-comparison',
- node=root_node,
- args=(True, suggestion))
+ self.add_message(
+ "singleton-comparison", node=root_node, args=(True, suggestion)
+ )
elif singleton.value is False:
if not negative_check:
suggestion = "'not expr' or 'expr is False'"
else:
suggestion = "'expr' or 'expr is not False'"
- self.add_message('singleton-comparison',
- node=root_node,
- args=(False, suggestion))
+ self.add_message(
+ "singleton-comparison", node=root_node, args=(False, suggestion)
+ )
elif singleton.value is None:
if not negative_check:
suggestion = "'expr is None'"
else:
suggestion = "'expr is not None'"
- self.add_message('singleton-comparison',
- node=root_node,
- args=(None, suggestion))
+ self.add_message(
+ "singleton-comparison", node=root_node, args=(None, suggestion)
+ )
def _check_literal_comparison(self, literal, node):
"""Check if we compare to a literal, which is usually what we do not want to do."""
- nodes = (astroid.List,
- astroid.Tuple,
- astroid.Dict,
- astroid.Set)
+ nodes = (astroid.List, astroid.Tuple, astroid.Dict, astroid.Set)
is_other_literal = isinstance(literal, nodes)
is_const = False
if isinstance(literal, astroid.Const):
@@ -1827,15 +2074,14 @@ class ComparisonChecker(_BasicChecker):
is_const = isinstance(literal.value, (bytes, str, int, float))
if is_const or is_other_literal:
- self.add_message('literal-comparison', node=node)
+ self.add_message("literal-comparison", node=node)
def _check_misplaced_constant(self, node, left, right, operator):
if isinstance(right, astroid.Const):
return
operator = REVERSED_COMPS.get(operator, operator)
- suggestion = '%s %s %r' % (right.as_string(), operator, left.value)
- self.add_message('misplaced-comparison-constant', node=node,
- args=(suggestion,))
+ suggestion = "%s %s %r" % (right.as_string(), operator, left.value)
+ self.add_message("misplaced-comparison-constant", node=node, args=(suggestion,))
def _check_logical_tautology(self, node):
"""Check if identifier is compared against itself.
@@ -1849,18 +2095,20 @@ class ComparisonChecker(_BasicChecker):
left_operand = node.left
right_operand = node.ops[0][1]
operator = node.ops[0][0]
- if (isinstance(left_operand, astroid.Const)
- and isinstance(right_operand, astroid.Const)):
+ if isinstance(left_operand, astroid.Const) and isinstance(
+ right_operand, astroid.Const
+ ):
left_operand = left_operand.value
right_operand = right_operand.value
- elif (isinstance(left_operand, astroid.Name)
- and isinstance(right_operand, astroid.Name)):
+ elif isinstance(left_operand, astroid.Name) and isinstance(
+ right_operand, astroid.Name
+ ):
left_operand = left_operand.name
right_operand = right_operand.name
if left_operand == right_operand:
suggestion = "%s %s %s" % (left_operand, operator, right_operand)
- self.add_message('comparison-with-itself', node=node, args=(suggestion,))
+ self.add_message("comparison-with-itself", node=node, args=(suggestion,))
def _check_callable_comparison(self, node):
operator = node.ops[0][0]
@@ -1871,13 +2119,24 @@ class ComparisonChecker(_BasicChecker):
left_operand, right_operand = node.left, node.ops[0][1]
# this message should be emitted only when there is comparison of bare callable
# with non bare callable.
- if sum(1 for operand in (left_operand, right_operand)
- if isinstance(utils.safe_infer(operand), bare_callables)) == 1:
- self.add_message('comparison-with-callable', node=node)
-
- @utils.check_messages('singleton-comparison', 'misplaced-comparison-constant',
- 'unidiomatic-typecheck', 'literal-comparison', 'comparison-with-itself',
- 'comparison-with-callable')
+ if (
+ sum(
+ 1
+ for operand in (left_operand, right_operand)
+ if isinstance(utils.safe_infer(operand), bare_callables)
+ )
+ == 1
+ ):
+ self.add_message("comparison-with-callable", node=node)
+
+ @utils.check_messages(
+ "singleton-comparison",
+ "misplaced-comparison-constant",
+ "unidiomatic-typecheck",
+ "literal-comparison",
+ "comparison-with-itself",
+ "comparison-with-callable",
+ )
def visit_compare(self, node):
self._check_callable_comparison(node)
self._check_logical_tautology(node)
@@ -1889,19 +2148,18 @@ class ComparisonChecker(_BasicChecker):
left = node.left
operator, right = node.ops[0]
- if (operator in COMPARISON_OPERATORS
- and isinstance(left, astroid.Const)):
+ if operator in COMPARISON_OPERATORS and isinstance(left, astroid.Const):
self._check_misplaced_constant(node, left, right, operator)
- if operator == '==':
+ if operator == "==":
if isinstance(left, astroid.Const):
self._check_singleton_comparison(left, node)
elif isinstance(right, astroid.Const):
self._check_singleton_comparison(right, node)
- if operator == '!=':
+ if operator == "!=":
if isinstance(right, astroid.Const):
self._check_singleton_comparison(right, node, negative_check=True)
- if operator in ('is', 'is not'):
+ if operator in ("is", "is not"):
self._check_literal_comparison(right, node)
def _check_unidiomatic_typecheck(self, node):
@@ -1914,20 +2172,23 @@ class ComparisonChecker(_BasicChecker):
def _check_type_x_is_y(self, node, left, operator, right):
"""Check for expressions like type(x) == Y."""
left_func = utils.safe_infer(left.func)
- if not (isinstance(left_func, astroid.ClassDef)
- and left_func.qname() == TYPE_QNAME):
+ if not (
+ isinstance(left_func, astroid.ClassDef) and left_func.qname() == TYPE_QNAME
+ ):
return
- if operator in ('is', 'is not') and _is_one_arg_pos_call(right):
+ if operator in ("is", "is not") and _is_one_arg_pos_call(right):
right_func = utils.safe_infer(right.func)
- if (isinstance(right_func, astroid.ClassDef)
- and right_func.qname() == TYPE_QNAME):
+ if (
+ isinstance(right_func, astroid.ClassDef)
+ and right_func.qname() == TYPE_QNAME
+ ):
# type(x) == type(a)
right_arg = utils.safe_infer(right.args[0])
if not isinstance(right_arg, LITERAL_NODE_TYPES):
# not e.g. type(x) == type([])
return
- self.add_message('unidiomatic-typecheck', node=node)
+ self.add_message("unidiomatic-typecheck", node=node)
def register(linter):
diff --git a/pylint/checkers/classes.py b/pylint/checkers/classes.py
index 3cee65023..c386d70e6 100644
--- a/pylint/checkers/classes.py
+++ b/pylint/checkers/classes.py
@@ -44,30 +44,41 @@ from astroid.scoped_nodes import function_to_method
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers.utils import (
- PYMETHODS, SPECIAL_METHODS_PARAMS,
- overrides_a_method, check_messages, is_attr_private,
- is_attr_protected, node_frame_class, is_builtin_object,
- decorated_with_property, unimplemented_abstract_methods,
- decorated_with, class_is_abstract,
- safe_infer, has_known_bases, is_iterable, is_comprehension)
+ PYMETHODS,
+ SPECIAL_METHODS_PARAMS,
+ overrides_a_method,
+ check_messages,
+ is_attr_private,
+ is_attr_protected,
+ node_frame_class,
+ is_builtin_object,
+ decorated_with_property,
+ unimplemented_abstract_methods,
+ decorated_with,
+ class_is_abstract,
+ safe_infer,
+ has_known_bases,
+ is_iterable,
+ is_comprehension,
+)
from pylint.utils import get_global_option
if sys.version_info >= (3, 0):
- NEXT_METHOD = '__next__'
+ NEXT_METHOD = "__next__"
else:
- NEXT_METHOD = 'next'
-INVALID_BASE_CLASSES = {'bool', 'range', 'slice', 'memoryview'}
+ NEXT_METHOD = "next"
+INVALID_BASE_CLASSES = {"bool", "range", "slice", "memoryview"}
# Dealing with useless override detection, with regard
# to parameters vs arguments
_CallSignature = collections.namedtuple(
- '_CallSignature', 'args kws starred_args starred_kws')
+ "_CallSignature", "args kws starred_args starred_kws"
+)
_ParameterSignature = collections.namedtuple(
- '_ParameterSignature',
- 'args kwonlyargs varargs kwargs',
+ "_ParameterSignature", "args kwonlyargs varargs kwargs"
)
@@ -103,13 +114,13 @@ def _signature_from_call(call):
def _signature_from_arguments(arguments):
kwarg = arguments.kwarg
vararg = arguments.vararg
- args = [arg.name for arg in arguments.args if arg.name != 'self']
+ args = [arg.name for arg in arguments.args if arg.name != "self"]
kwonlyargs = [arg.name for arg in arguments.kwonlyargs]
return _ParameterSignature(args, kwonlyargs, vararg, kwarg)
def _definition_equivalent_to_call(definition, call):
- '''Check if a definition signature is equivalent to a call.'''
+ """Check if a definition signature is equivalent to a call."""
if definition.kwargs:
same_kw_variadics = definition.kwargs in call.starred_kws
else:
@@ -134,19 +145,23 @@ def _definition_equivalent_to_call(definition, call):
no_additional_kwarg_arguments = False
break
- return all((
- same_args,
- same_kwonlyargs,
- same_args_variadics,
- same_kw_variadics,
- no_additional_kwarg_arguments,
- ))
+ return all(
+ (
+ same_args,
+ same_kwonlyargs,
+ same_args_variadics,
+ same_kw_variadics,
+ no_additional_kwarg_arguments,
+ )
+ )
+
# Deal with parameters overriding in two methods.
+
def _positional_parameters(method):
positional = method.args.args
- if method.type in ('classmethod', 'method'):
+ if method.type in ("classmethod", "method"):
positional = positional[1:]
return positional
@@ -208,26 +223,38 @@ def _has_different_parameters_default_value(original, overridden):
except astroid.exceptions.NoDefault:
overridden_default = default_missing
- default_list = [arg == default_missing for arg in (original_default, overridden_default)]
+ default_list = [
+ arg == default_missing for arg in (original_default, overridden_default)
+ ]
if any(default_list) and not all(default_list):
# Only one arg has no default value
return True
- astroid_type_compared_attr = {astroid.Const: "value", astroid.ClassDef: "name",
- astroid.Tuple: "elts", astroid.List: "elts"}
- handled_types = tuple(astroid_type for astroid_type in astroid_type_compared_attr)
+ astroid_type_compared_attr = {
+ astroid.Const: "value",
+ astroid.ClassDef: "name",
+ astroid.Tuple: "elts",
+ astroid.List: "elts",
+ }
+ handled_types = tuple(
+ astroid_type for astroid_type in astroid_type_compared_attr
+ )
original_type = _get_node_type(original_default, handled_types)
if original_type:
- # We handle only astroid types that are inside the dict astroid_type_compared_attr
+ #  We handle only astroid types that are inside the dict astroid_type_compared_attr
if not isinstance(overridden_default, original_type):
- # Two args with same name but different types
+ #  Two args with same name but different types
return True
- if not _check_arg_equality(original_default, overridden_default,
- astroid_type_compared_attr[original_type]):
+ if not _check_arg_equality(
+ original_default,
+ overridden_default,
+ astroid_type_compared_attr[original_type],
+ ):
# Two args with same type but different values
return True
return False
+
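Illustration (hypothetical classes): the situation _has_different_parameters_default_value detects, the same parameter carrying a different default value in an override.

    class Base:
        def render(self, indent=4):
            return " " * indent

    class Child(Base):
        def render(self, indent=2):   # same parameter name, different default value:
            return " " * indent       # the helper above reports a difference here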
def _has_different_parameters(original, overridden, dummy_parameter_regex):
zipped = zip_longest(original, overridden)
for original_param, overridden_param in zipped:
@@ -259,13 +286,11 @@ def _different_parameters(original, overridden, dummy_parameter_regex):
overridden_parameters = _positional_parameters(overridden)
different_positional = _has_different_parameters(
- original_parameters,
- overridden_parameters,
- dummy_parameter_regex)
+ original_parameters, overridden_parameters, dummy_parameter_regex
+ )
different_kwonly = _has_different_parameters(
- original.args.kwonlyargs,
- overridden.args.kwonlyargs,
- dummy_parameter_regex)
+ original.args.kwonlyargs, overridden.args.kwonlyargs, dummy_parameter_regex
+ )
if original.name in PYMETHODS:
# Ignore the difference for special methods. If the parameter
# numbers are different, then that is going to be caught by
@@ -277,19 +302,18 @@ def _different_parameters(original, overridden, dummy_parameter_regex):
# Both or none should have extra variadics, otherwise the method
# loses or gains capabilities that are not reflected into the parent method,
# leading to potential inconsistencies in the code.
- different_kwarg = sum(
- 1 for param in (original.args.kwarg, overridden.args.kwarg)
- if not param) == 1
- different_vararg = sum(
- 1 for param in (original.args.vararg, overridden.args.vararg)
- if not param) == 1
+ different_kwarg = (
+ sum(1 for param in (original.args.kwarg, overridden.args.kwarg) if not param)
+ == 1
+ )
+ different_vararg = (
+ sum(1 for param in (original.args.vararg, overridden.args.vararg) if not param)
+ == 1
+ )
- return any((
- different_positional,
- different_kwarg,
- different_vararg,
- different_kwonly
- ))
+ return any(
+ (different_positional, different_kwarg, different_vararg, different_kwonly)
+ )
def _is_invalid_base_class(cls):
@@ -303,8 +327,8 @@ def _has_data_descriptor(cls, attr):
for inferred in attribute.infer():
if isinstance(inferred, astroid.Instance):
try:
- inferred.getattr('__get__')
- inferred.getattr('__set__')
+ inferred.getattr("__get__")
+ inferred.getattr("__set__")
except astroid.NotFoundError:
continue
else:
@@ -365,8 +389,9 @@ def _is_attribute_property(name, klass):
infered = next(attr.infer())
except astroid.InferenceError:
continue
- if (isinstance(infered, astroid.FunctionDef) and
- decorated_with_property(infered)):
+ if isinstance(infered, astroid.FunctionDef) and decorated_with_property(
+ infered
+ ):
return True
if infered.pytype() == property_name:
return True
@@ -376,9 +401,7 @@ def _is_attribute_property(name, klass):
def _has_bare_super_call(fundef_node):
for call in fundef_node.nodes_of_class(astroid.Call):
func = call.func
- if (isinstance(func, astroid.Name) and
- func.name == 'super' and
- not call.args):
+ if isinstance(func, astroid.Name) and func.name == "super" and not call.args:
return True
return False
@@ -410,156 +433,198 @@ def _has_same_layout_slots(slots, assigned_value):
inferred = next(assigned_value.infer())
if isinstance(inferred, astroid.ClassDef):
other_slots = inferred.slots()
- if all(first_slot and second_slot and first_slot.value == second_slot.value
- for (first_slot, second_slot) in zip_longest(slots, other_slots)):
+ if all(
+ first_slot and second_slot and first_slot.value == second_slot.value
+ for (first_slot, second_slot) in zip_longest(slots, other_slots)
+ ):
return True
return False
MSGS = {
- 'F0202': ('Unable to check methods signature (%s / %s)',
- 'method-check-failed',
- 'Used when Pylint has been unable to check methods signature '
- 'compatibility for an unexpected reason. Please report this kind '
- 'if you don\'t make sense of it.'),
-
- 'E0202': ('An attribute defined in %s line %s hides this method',
- 'method-hidden',
- 'Used when a class defines a method which is hidden by an '
- 'instance attribute from an ancestor class or set by some '
- 'client code.'),
- 'E0203': ('Access to member %r before its definition line %s',
- 'access-member-before-definition',
- 'Used when an instance member is accessed before it\'s actually '
- 'assigned.'),
- 'W0201': ('Attribute %r defined outside __init__',
- 'attribute-defined-outside-init',
- 'Used when an instance attribute is defined outside the __init__ '
- 'method.'),
-
- 'W0212': ('Access to a protected member %s of a client class', # E0214
- 'protected-access',
- 'Used when a protected member (i.e. class member with a name '
- 'beginning with an underscore) is access outside the class or a '
- 'descendant of the class where it\'s defined.'),
-
- 'E0211': ('Method has no argument',
- 'no-method-argument',
- 'Used when a method which should have the bound instance as '
- 'first argument has no argument defined.'),
- 'E0213': ('Method should have "self" as first argument',
- 'no-self-argument',
- 'Used when a method has an attribute different the "self" as '
- 'first argument. This is considered as an error since this is '
- 'a so common convention that you shouldn\'t break it!'),
- 'C0202': ('Class method %s should have %s as first argument',
- 'bad-classmethod-argument',
- 'Used when a class method has a first argument named differently '
- 'than the value specified in valid-classmethod-first-arg option '
- '(default to "cls"), recommended to easily differentiate them '
- 'from regular instance methods.'),
- 'C0203': ('Metaclass method %s should have %s as first argument',
- 'bad-mcs-method-argument',
- 'Used when a metaclass method has a first argument named '
- 'differently than the value specified in valid-classmethod-first'
- '-arg option (default to "cls"), recommended to easily '
- 'differentiate them from regular instance methods.'),
- 'C0204': ('Metaclass class method %s should have %s as first argument',
- 'bad-mcs-classmethod-argument',
- 'Used when a metaclass class method has a first argument named '
- 'differently than the value specified in valid-metaclass-'
- 'classmethod-first-arg option (default to "mcs"), recommended to '
- 'easily differentiate them from regular instance methods.'),
-
- 'W0211': ('Static method with %r as first argument',
- 'bad-staticmethod-argument',
- 'Used when a static method has "self" or a value specified in '
- 'valid-classmethod-first-arg option or '
- 'valid-metaclass-classmethod-first-arg option as first argument.'
- ),
- 'R0201': ('Method could be a function',
- 'no-self-use',
- 'Used when a method doesn\'t use its bound instance, and so could '
- 'be written as a function.'
- ),
- 'W0221': ('Parameters differ from %s %r method',
- 'arguments-differ',
- 'Used when a method has a different number of arguments than in '
- 'the implemented interface or in an overridden method.'),
- 'W0222': ('Signature differs from %s %r method',
- 'signature-differs',
- 'Used when a method signature is different than in the '
- 'implemented interface or in an overridden method.'),
- 'W0223': ('Method %r is abstract in class %r but is not overridden',
- 'abstract-method',
- 'Used when an abstract method (i.e. raise NotImplementedError) is '
- 'not overridden in concrete class.'
- ),
- 'W0231': ('__init__ method from base class %r is not called',
- 'super-init-not-called',
- 'Used when an ancestor class method has an __init__ method '
- 'which is not called by a derived class.'),
- 'W0232': ('Class has no __init__ method',
- 'no-init',
- 'Used when a class has no __init__ method, neither its parent '
- 'classes.'),
- 'W0233': ('__init__ method from a non direct base class %r is called',
- 'non-parent-init-called',
- 'Used when an __init__ method is called on a class which is not '
- 'in the direct ancestors for the analysed class.'),
- 'W0235': ('Useless super delegation in method %r',
- 'useless-super-delegation',
- 'Used whenever we can detect that an overridden method is useless, '
- 'relying on super() delegation to do the same thing as another method '
- 'from the MRO.'),
- 'E0236': ('Invalid object %r in __slots__, must contain '
- 'only non empty strings',
- 'invalid-slots-object',
- 'Used when an invalid (non-string) object occurs in __slots__.'),
- 'E0237': ('Assigning to attribute %r not defined in class slots',
- 'assigning-non-slot',
- 'Used when assigning to an attribute not defined '
- 'in the class slots.'),
- 'E0238': ('Invalid __slots__ object',
- 'invalid-slots',
- 'Used when an invalid __slots__ is found in class. '
- 'Only a string, an iterable or a sequence is permitted.'),
- 'E0239': ('Inheriting %r, which is not a class.',
- 'inherit-non-class',
- 'Used when a class inherits from something which is not a '
- 'class.'),
- 'E0240': ('Inconsistent method resolution order for class %r',
- 'inconsistent-mro',
- 'Used when a class has an inconsistent method resolution order.'),
- 'E0241': ('Duplicate bases for class %r',
- 'duplicate-bases',
- 'Used when a class has duplicate bases.'),
- 'R0202': ('Consider using a decorator instead of calling classmethod',
- 'no-classmethod-decorator',
- 'Used when a class method is defined without using the decorator '
- 'syntax.'),
- 'R0203': ('Consider using a decorator instead of calling staticmethod',
- 'no-staticmethod-decorator',
- 'Used when a static method is defined without using the decorator '
- 'syntax.'),
- 'C0205': ('Class __slots__ should be a non-string iterable',
- 'single-string-used-for-slots',
- 'Used when a class __slots__ is a simple string, rather '
- 'than an iterable.'),
- 'R0205': ('Class %r inherits from object, can be safely removed from bases in python3',
- 'useless-object-inheritance',
- 'Used when a class inherit from object, which under python3 is implicit, '
- 'hence can be safely removed from bases.')
- }
+ "F0202": (
+ "Unable to check methods signature (%s / %s)",
+ "method-check-failed",
+        "Used when Pylint has been unable to check a method's signature "
+        "compatibility for an unexpected reason. Please report this kind "
+        "of issue if you cannot make sense of it.",
+ ),
+ "E0202": (
+ "An attribute defined in %s line %s hides this method",
+ "method-hidden",
+ "Used when a class defines a method which is hidden by an "
+ "instance attribute from an ancestor class or set by some "
+ "client code.",
+ ),
+ "E0203": (
+ "Access to member %r before its definition line %s",
+ "access-member-before-definition",
+ "Used when an instance member is accessed before it's actually " "assigned.",
+ ),
+ "W0201": (
+ "Attribute %r defined outside __init__",
+ "attribute-defined-outside-init",
+ "Used when an instance attribute is defined outside the __init__ " "method.",
+ ),
+ "W0212": (
+ "Access to a protected member %s of a client class", # E0214
+ "protected-access",
+ "Used when a protected member (i.e. class member with a name "
+        "beginning with an underscore) is accessed outside the class or a "
+ "descendant of the class where it's defined.",
+ ),
+ "E0211": (
+ "Method has no argument",
+ "no-method-argument",
+ "Used when a method which should have the bound instance as "
+ "first argument has no argument defined.",
+ ),
+ "E0213": (
+ 'Method should have "self" as first argument',
+ "no-self-argument",
+        'Used when a method has a first argument different than "self". '
+        "This is considered an error since it is such a common convention "
+        "that you shouldn't break it!",
+ ),
+ "C0202": (
+ "Class method %s should have %s as first argument",
+ "bad-classmethod-argument",
+ "Used when a class method has a first argument named differently "
+ "than the value specified in valid-classmethod-first-arg option "
+        '(defaults to "cls"), recommended to easily differentiate them '
+ "from regular instance methods.",
+ ),
+ "C0203": (
+ "Metaclass method %s should have %s as first argument",
+ "bad-mcs-method-argument",
+ "Used when a metaclass method has a first argument named "
+ "differently than the value specified in valid-classmethod-first"
+        '-arg option (defaults to "cls"), recommended to easily '
+ "differentiate them from regular instance methods.",
+ ),
+ "C0204": (
+ "Metaclass class method %s should have %s as first argument",
+ "bad-mcs-classmethod-argument",
+ "Used when a metaclass class method has a first argument named "
+ "differently than the value specified in valid-metaclass-"
+        'classmethod-first-arg option (defaults to "mcs"), recommended to '
+ "easily differentiate them from regular instance methods.",
+ ),
+ "W0211": (
+ "Static method with %r as first argument",
+ "bad-staticmethod-argument",
+ 'Used when a static method has "self" or a value specified in '
+ "valid-classmethod-first-arg option or "
+ "valid-metaclass-classmethod-first-arg option as first argument.",
+ ),
+ "R0201": (
+ "Method could be a function",
+ "no-self-use",
+ "Used when a method doesn't use its bound instance, and so could "
+ "be written as a function.",
+ ),
+ "W0221": (
+ "Parameters differ from %s %r method",
+ "arguments-differ",
+ "Used when a method has a different number of arguments than in "
+ "the implemented interface or in an overridden method.",
+ ),
+ "W0222": (
+ "Signature differs from %s %r method",
+ "signature-differs",
+ "Used when a method signature is different than in the "
+ "implemented interface or in an overridden method.",
+ ),
+ "W0223": (
+ "Method %r is abstract in class %r but is not overridden",
+ "abstract-method",
+ "Used when an abstract method (i.e. raise NotImplementedError) is "
+ "not overridden in concrete class.",
+ ),
+ "W0231": (
+ "__init__ method from base class %r is not called",
+ "super-init-not-called",
+        "Used when an ancestor class has an __init__ method "
+ "which is not called by a derived class.",
+ ),
+ "W0232": (
+ "Class has no __init__ method",
+ "no-init",
+        "Used when a class has no __init__ method, nor do its parent " "classes.",
+ ),
+ "W0233": (
+ "__init__ method from a non direct base class %r is called",
+ "non-parent-init-called",
+ "Used when an __init__ method is called on a class which is not "
+ "in the direct ancestors for the analysed class.",
+ ),
+ "W0235": (
+ "Useless super delegation in method %r",
+ "useless-super-delegation",
+ "Used whenever we can detect that an overridden method is useless, "
+ "relying on super() delegation to do the same thing as another method "
+ "from the MRO.",
+ ),
+ "E0236": (
+        "Invalid object %r in __slots__, must contain " "only non-empty strings",
+ "invalid-slots-object",
+ "Used when an invalid (non-string) object occurs in __slots__.",
+ ),
+ "E0237": (
+ "Assigning to attribute %r not defined in class slots",
+ "assigning-non-slot",
+ "Used when assigning to an attribute not defined " "in the class slots.",
+ ),
+ "E0238": (
+ "Invalid __slots__ object",
+ "invalid-slots",
+ "Used when an invalid __slots__ is found in class. "
+ "Only a string, an iterable or a sequence is permitted.",
+ ),
+ "E0239": (
+ "Inheriting %r, which is not a class.",
+ "inherit-non-class",
+ "Used when a class inherits from something which is not a " "class.",
+ ),
+ "E0240": (
+ "Inconsistent method resolution order for class %r",
+ "inconsistent-mro",
+ "Used when a class has an inconsistent method resolution order.",
+ ),
+ "E0241": (
+ "Duplicate bases for class %r",
+ "duplicate-bases",
+ "Used when a class has duplicate bases.",
+ ),
+ "R0202": (
+ "Consider using a decorator instead of calling classmethod",
+ "no-classmethod-decorator",
+ "Used when a class method is defined without using the decorator " "syntax.",
+ ),
+ "R0203": (
+ "Consider using a decorator instead of calling staticmethod",
+ "no-staticmethod-decorator",
+ "Used when a static method is defined without using the decorator " "syntax.",
+ ),
+ "C0205": (
+ "Class __slots__ should be a non-string iterable",
+ "single-string-used-for-slots",
+ "Used when a class __slots__ is a simple string, rather " "than an iterable.",
+ ),
+ "R0205": (
+ "Class %r inherits from object, can be safely removed from bases in python3",
+ "useless-object-inheritance",
+        "Used when a class inherits from object, which under python3 is implicit, "
+ "hence can be safely removed from bases.",
+ ),
+}
class ScopeAccessMap:
"""Store the accessed variables per scope."""
def __init__(self):
- self._scopes = collections.defaultdict(
- lambda: collections.defaultdict(list)
- )
+ self._scopes = collections.defaultdict(lambda: collections.defaultdict(list))
def set_accessed(self, node):
"""Set the given node as accessed."""
@@ -587,42 +652,62 @@ class ClassChecker(BaseChecker):
__implements__ = (IAstroidChecker,)
# configuration section name
- name = 'classes'
+ name = "classes"
# messages
msgs = MSGS
priority = -2
# configuration options
- options = (('defining-attr-methods',
- {'default' : ('__init__', '__new__', 'setUp'),
- 'type' : 'csv',
- 'metavar' : '<method names>',
- 'help' : 'List of method names used to declare (i.e. assign) \
-instance attributes.'}
- ),
- ('valid-classmethod-first-arg',
- {'default' : ('cls',),
- 'type' : 'csv',
- 'metavar' : '<argument names>',
- 'help' : 'List of valid names for the first argument in \
-a class method.'}
- ),
- ('valid-metaclass-classmethod-first-arg',
- {'default' : ('cls',),
- 'type' : 'csv',
- 'metavar' : '<argument names>',
- 'help' : 'List of valid names for the first argument in \
-a metaclass class method.'}
- ),
- ('exclude-protected',
- {
- 'default': (
- # namedtuple public API.
- '_asdict', '_fields', '_replace', '_source', '_make'),
- 'type': 'csv',
- 'metavar': '<protected access exclusions>',
- 'help': ('List of member names, which should be excluded '
- 'from the protected access warning.')}
- ))
+ options = (
+ (
+ "defining-attr-methods",
+ {
+ "default": ("__init__", "__new__", "setUp"),
+ "type": "csv",
+ "metavar": "<method names>",
+ "help": "List of method names used to declare (i.e. assign) \
+instance attributes.",
+ },
+ ),
+ (
+ "valid-classmethod-first-arg",
+ {
+ "default": ("cls",),
+ "type": "csv",
+ "metavar": "<argument names>",
+ "help": "List of valid names for the first argument in \
+a class method.",
+ },
+ ),
+ (
+ "valid-metaclass-classmethod-first-arg",
+ {
+ "default": ("cls",),
+ "type": "csv",
+ "metavar": "<argument names>",
+ "help": "List of valid names for the first argument in \
+a metaclass class method.",
+ },
+ ),
+ (
+ "exclude-protected",
+ {
+ "default": (
+ # namedtuple public API.
+ "_asdict",
+ "_fields",
+ "_replace",
+ "_source",
+ "_make",
+ ),
+ "type": "csv",
+ "metavar": "<protected access exclusions>",
+ "help": (
+ "List of member names, which should be excluded "
+ "from the protected access warning."
+ ),
+ },
+ ),
+ )
def __init__(self, linter=None):
BaseChecker.__init__(self, linter)
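A hypothetical snippet (illustrative only, not part of this patch) of the behaviour governed by the defining-attr-methods option reformatted above, assuming its default of ('__init__', '__new__', 'setUp'): attributes first assigned in one of those methods are accepted, while attributes first assigned elsewhere are normally reported as attribute-defined-outside-init (W0201).

class Configured:
    def setUp(self):
        # "setUp" is one of the defining-attr-methods defaults,
        # so creating the attribute here is accepted
        self.value = 1


class Adhoc:
    def configure(self):
        # not a defining method, and no ancestor __init__ defines "value":
        # attribute-defined-outside-init (W0201)
        self.value = 1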
@@ -632,24 +717,22 @@ a metaclass class method.'}
@decorators.cachedproperty
def _dummy_rgx(self):
- return get_global_option(
- self, 'dummy-variables-rgx', default=None)
+ return get_global_option(self, "dummy-variables-rgx", default=None)
@decorators.cachedproperty
def _ignore_mixin(self):
- return get_global_option(
- self, 'ignore-mixin-members', default=True)
+ return get_global_option(self, "ignore-mixin-members", default=True)
def visit_classdef(self, node):
"""init visit variable _accessed
"""
self._check_bases_classes(node)
# if not an exception or a metaclass
- if node.type == 'class' and has_known_bases(node):
+ if node.type == "class" and has_known_bases(node):
try:
- node.local_attr('__init__')
+ node.local_attr("__init__")
except astroid.NotFoundError:
- self.add_message('no-init', args=node, node=node)
+ self.add_message("no-init", args=node, node=node)
self._check_slots(node)
self._check_proper_bases(node)
self._check_consistent_mro(node)
@@ -659,9 +742,9 @@ a metaclass class method.'}
try:
node.mro()
except InconsistentMroError:
- self.add_message('inconsistent-mro', args=node.name, node=node)
+ self.add_message("inconsistent-mro", args=node.name, node=node)
except DuplicateBasesError:
- self.add_message('duplicate-bases', args=node.name, node=node)
+ self.add_message("duplicate-bases", args=node.name, node=node)
except NotImplementedError:
# Old style class, there's no mro so don't do anything.
pass
@@ -675,17 +758,20 @@ a metaclass class method.'}
ancestor = safe_infer(base)
if ancestor in (astroid.Uninferable, None):
continue
- if (isinstance(ancestor, astroid.Instance) and
- ancestor.is_subtype_of('%s.type' % (BUILTINS,))):
+ if isinstance(ancestor, astroid.Instance) and ancestor.is_subtype_of(
+ "%s.type" % (BUILTINS,)
+ ):
continue
- if (not isinstance(ancestor, astroid.ClassDef) or
- _is_invalid_base_class(ancestor)):
- self.add_message('inherit-non-class',
- args=base.as_string(), node=node)
+ if not isinstance(ancestor, astroid.ClassDef) or _is_invalid_base_class(
+ ancestor
+ ):
+ self.add_message("inherit-non-class", args=base.as_string(), node=node)
if ancestor.name == object.__name__:
- self.add_message('useless-object-inheritance', args=node.name, node=node)
+ self.add_message(
+ "useless-object-inheritance", args=node.name, node=node
+ )
def leave_classdef(self, cnode):
"""close a class node:
@@ -693,31 +779,33 @@ a metaclass class method.'}
access to existent members
"""
# check access to existent members on non metaclass classes
- if self._ignore_mixin and cnode.name[-5:].lower() == 'mixin':
+ if self._ignore_mixin and cnode.name[-5:].lower() == "mixin":
# We are in a mixin class. No need to try to figure out if
# something is missing, since it is most likely that it will
# miss.
return
accessed = self._accessed.accessed(cnode)
- if cnode.type != 'metaclass':
+ if cnode.type != "metaclass":
self._check_accessed_members(cnode, accessed)
# checks attributes are defined in an allowed method such as __init__
- if not self.linter.is_message_enabled('attribute-defined-outside-init'):
+ if not self.linter.is_message_enabled("attribute-defined-outside-init"):
return
defining_methods = self.config.defining_attr_methods
current_module = cnode.root()
for attr, nodes in cnode.instance_attrs.items():
# skip nodes which are not in the current module and it may screw up
# the output, while it's not worth it
- nodes = [n for n in nodes if not
- isinstance(n.statement(), (astroid.Delete, astroid.AugAssign))
- and n.root() is current_module]
+ nodes = [
+ n
+ for n in nodes
+ if not isinstance(n.statement(), (astroid.Delete, astroid.AugAssign))
+ and n.root() is current_module
+ ]
if not nodes:
- continue # error detected by typechecking
+ continue # error detected by typechecking
# check if any method attr is defined in is a defining method
- if any(node.frame().name in defining_methods
- for node in nodes):
+ if any(node.frame().name in defining_methods for node in nodes):
continue
# check attribute is defined in a parent's __init__
@@ -740,11 +828,13 @@ a metaclass class method.'}
# If the attribute was set by a call in any
# of the defining methods, then don't emit
# the warning.
- if _called_in_methods(node.frame(), cnode,
- defining_methods):
+ if _called_in_methods(
+ node.frame(), cnode, defining_methods
+ ):
continue
- self.add_message('attribute-defined-outside-init',
- args=attr, node=node)
+ self.add_message(
+ "attribute-defined-outside-init", args=attr, node=node
+ )
def visit_functiondef(self, node):
"""check method arguments, overriding"""
@@ -757,8 +847,8 @@ a metaclass class method.'}
klass = node.parent.frame()
self._meth_could_be_func = True
# check first argument is self if this is actually a method
- self._check_first_arg_for_type(node, klass.type == 'metaclass')
- if node.name == '__init__':
+ self._check_first_arg_for_type(node, klass.type == "metaclass")
+ if node.name == "__init__":
self._check_init(node)
return
# check signature if the method overloads inherited method
@@ -773,16 +863,19 @@ a metaclass class method.'}
continue
if not isinstance(meth_node, astroid.FunctionDef):
continue
- self._check_signature(node, meth_node, 'overridden', klass)
+ self._check_signature(node, meth_node, "overridden", klass)
break
if node.decorators:
for decorator in node.decorators.nodes:
- if isinstance(decorator, astroid.Attribute) and \
- decorator.attrname in ('getter', 'setter', 'deleter'):
+ if isinstance(decorator, astroid.Attribute) and decorator.attrname in (
+ "getter",
+ "setter",
+ "deleter",
+ ):
# attribute affectation will call this method, not hiding it
return
if isinstance(decorator, astroid.Name):
- if decorator.name == 'property':
+ if decorator.name == "property":
# attribute affectation will either call a setter or raise
# an attribute error, anyway not hiding the function
return
@@ -798,31 +891,36 @@ a metaclass class method.'}
except astroid.InferenceError:
return
try:
- if (isinstance(inferred, (astroid.Instance, astroid.ClassDef))
- and inferred.getattr('__get__')
- and inferred.getattr('__set__')):
+ if (
+ isinstance(inferred, (astroid.Instance, astroid.ClassDef))
+ and inferred.getattr("__get__")
+ and inferred.getattr("__set__")
+ ):
return
except astroid.AttributeInferenceError:
pass
# check if the method is hidden by an attribute
try:
- overridden = klass.instance_attr(node.name)[0] # XXX
+ overridden = klass.instance_attr(node.name)[0] # XXX
overridden_frame = overridden.frame()
- if (isinstance(overridden_frame, astroid.FunctionDef)
- and overridden_frame.type == 'method'):
+ if (
+ isinstance(overridden_frame, astroid.FunctionDef)
+ and overridden_frame.type == "method"
+ ):
overridden_frame = overridden_frame.parent.frame()
- if (isinstance(overridden_frame, astroid.ClassDef)
- and klass.is_subtype_of(overridden_frame.qname())):
+ if isinstance(overridden_frame, astroid.ClassDef) and klass.is_subtype_of(
+ overridden_frame.qname()
+ ):
args = (overridden.root().name, overridden.fromlineno)
- self.add_message('method-hidden', args=args, node=node)
+ self.add_message("method-hidden", args=args, node=node)
except astroid.NotFoundError:
pass
visit_asyncfunctiondef = visit_functiondef
def _check_useless_super_delegation(self, function):
- '''Check if the given function node is an useless method override
+ """Check if the given function node is an useless method override
We consider it *useless* if it uses the super() builtin, but having
nothing additional whatsoever than not implementing the method at all.
@@ -831,11 +929,13 @@ a metaclass class method.'}
passed to super() are the same as the parameters that were passed to
this method, then the method could be removed altogether, by letting
other implementation to take precedence.
- '''
+ """
- if (not function.is_method()
- # With decorators is a change of use
- or function.decorators):
+ if (
+ not function.is_method()
+ # With decorators is a change of use
+ or function.decorators
+ ):
return
body = function.body
@@ -850,9 +950,11 @@ a metaclass class method.'}
return
call = statement.value
- if (not isinstance(call, astroid.Call)
- # Not a super() attribute access.
- or not isinstance(call.func, astroid.Attribute)):
+ if (
+ not isinstance(call, astroid.Call)
+ # Not a super() attribute access.
+ or not isinstance(call.func, astroid.Attribute)
+ ):
return
# Should be a super call.
@@ -871,12 +973,14 @@ a metaclass class method.'}
# Should be a super call with the MRO pointer being the
# current class and the type being the current instance.
current_scope = function.parent.scope()
- if (super_call.mro_pointer != current_scope
- or not isinstance(super_call.type, astroid.Instance)
- or super_call.type.name != current_scope.name):
+ if (
+ super_call.mro_pointer != current_scope
+ or not isinstance(super_call.type, astroid.Instance)
+ or super_call.type.name != current_scope.name
+ ):
return
- # Check values of default args
+ #  Check values of default args
klass = function.parent.frame()
meth_node = None
for overridden in klass.local_attr_ancestors(function.name):
@@ -888,11 +992,14 @@ a metaclass class method.'}
# dictionary.
# This may happen with astroid build from living objects
continue
- if (not isinstance(meth_node, astroid.FunctionDef)
- # If the method have an ancestor which is not a
- # function then it is legitimate to redefine it
- or _has_different_parameters_default_value(
- meth_node.args, function.args)):
+ if (
+ not isinstance(meth_node, astroid.FunctionDef)
+ # If the method have an ancestor which is not a
+ # function then it is legitimate to redefine it
+ or _has_different_parameters_default_value(
+ meth_node.args, function.args
+ )
+ ):
return
break
@@ -901,8 +1008,12 @@ a metaclass class method.'}
args = _signature_from_call(call)
if meth_node is not None:
+
def form_annotations(annotations):
- return [annotation.as_string() for annotation in filter(None, annotations)]
+ return [
+ annotation.as_string() for annotation in filter(None, annotations)
+ ]
+
called_annotations = form_annotations(function.args.annotations)
overridden_annotations = form_annotations(meth_node.args.annotations)
if called_annotations and overridden_annotations:
@@ -910,25 +1021,26 @@ a metaclass class method.'}
return
if _definition_equivalent_to_call(params, args):
- self.add_message('useless-super-delegation', node=function,
- args=(function.name, ))
+ self.add_message(
+ "useless-super-delegation", node=function, args=(function.name,)
+ )
def _check_slots(self, node):
- if '__slots__' not in node.locals:
+ if "__slots__" not in node.locals:
return
- for slots in node.igetattr('__slots__'):
+ for slots in node.igetattr("__slots__"):
# check if __slots__ is a valid type
if slots is astroid.Uninferable:
continue
if not is_iterable(slots) and not is_comprehension(slots):
- self.add_message('invalid-slots', node=node)
+ self.add_message("invalid-slots", node=node)
continue
if isinstance(slots, astroid.Const):
# a string, ignore the following checks
- self.add_message('single-string-used-for-slots', node=node)
+ self.add_message("single-string-used-for-slots", node=node)
continue
- if not hasattr(slots, 'itered'):
+ if not hasattr(slots, "itered"):
# we can't obtain the values, maybe a .deque?
continue
@@ -949,16 +1061,17 @@ a metaclass class method.'}
for infered in elt.infer():
if infered is astroid.Uninferable:
continue
- if (not isinstance(infered, astroid.Const) or
- not isinstance(infered.value, str)):
- self.add_message('invalid-slots-object',
- args=infered.as_string(),
- node=elt)
+ if not isinstance(infered, astroid.Const) or not isinstance(
+ infered.value, str
+ ):
+ self.add_message(
+ "invalid-slots-object", args=infered.as_string(), node=elt
+ )
continue
if not infered.value:
- self.add_message('invalid-slots-object',
- args=infered.as_string(),
- node=elt)
+ self.add_message(
+ "invalid-slots-object", args=infered.as_string(), node=elt
+ )
def leave_functiondef(self, node):
"""on method node, check if this method couldn't be a function
@@ -969,16 +1082,21 @@ a metaclass class method.'}
if node.is_method():
if node.args.args is not None:
self._first_attrs.pop()
- if not self.linter.is_message_enabled('no-self-use'):
+ if not self.linter.is_message_enabled("no-self-use"):
return
class_node = node.parent.frame()
- if (self._meth_could_be_func and node.type == 'method'
- and node.name not in PYMETHODS
- and not (node.is_abstract() or
- overrides_a_method(class_node, node.name) or
- decorated_with_property(node) or
- _has_bare_super_call(node))):
- self.add_message('no-self-use', node=node)
+ if (
+ self._meth_could_be_func
+ and node.type == "method"
+ and node.name not in PYMETHODS
+ and not (
+ node.is_abstract()
+ or overrides_a_method(class_node, node.name)
+ or decorated_with_property(node)
+ or _has_bare_super_call(node)
+ )
+ ):
+ self.add_message("no-self-use", node=node)
def visit_attribute(self, node):
"""check if the getattr is an access to a class member
@@ -990,14 +1108,15 @@ a metaclass class method.'}
if self._uses_mandatory_method_param(node):
self._accessed.set_accessed(node)
return
- if not self.linter.is_message_enabled('protected-access'):
+ if not self.linter.is_message_enabled("protected-access"):
return
self._check_protected_attribute_access(node)
def visit_assignattr(self, node):
- if (isinstance(node.assign_type(), astroid.AugAssign) and
- self._uses_mandatory_method_param(node)):
+ if isinstance(
+ node.assign_type(), astroid.AugAssign
+ ) and self._uses_mandatory_method_param(node):
self._accessed.set_accessed(node)
self._check_in_slots(node)
@@ -1008,7 +1127,7 @@ a metaclass class method.'}
infered = safe_infer(node.expr)
if infered and isinstance(infered, astroid.Instance):
klass = infered._proxied
- if '__slots__' not in klass.locals or not klass.newstyle:
+ if "__slots__" not in klass.locals or not klass.newstyle:
return
slots = klass.slots()
@@ -1016,31 +1135,36 @@ a metaclass class method.'}
return
# If any ancestor doesn't use slots, the slots
# defined for this class are superfluous.
- if any('__slots__' not in ancestor.locals and
- ancestor.name != 'object'
- for ancestor in klass.ancestors()):
+ if any(
+ "__slots__" not in ancestor.locals and ancestor.name != "object"
+ for ancestor in klass.ancestors()
+ ):
return
if not any(slot.value == node.attrname for slot in slots):
# If we have a '__dict__' in slots, then
# assigning any name is valid.
- if not any(slot.value == '__dict__' for slot in slots):
+ if not any(slot.value == "__dict__" for slot in slots):
if _is_attribute_property(node.attrname, klass):
# Properties circumvent the slots mechanism,
# so we should not emit a warning for them.
return
- if (node.attrname in klass.locals
- and _has_data_descriptor(klass, node.attrname)):
+ if node.attrname in klass.locals and _has_data_descriptor(
+ klass, node.attrname
+ ):
# Descriptors circumvent the slots mechanism as well.
return
- if (node.attrname == '__class__'
- and _has_same_layout_slots(slots, node.parent.value)):
+ if node.attrname == "__class__" and _has_same_layout_slots(
+ slots, node.parent.value
+ ):
return
- self.add_message('assigning-non-slot',
- args=(node.attrname, ), node=node)
+ self.add_message(
+ "assigning-non-slot", args=(node.attrname,), node=node
+ )
- @check_messages('protected-access', 'no-classmethod-decorator',
- 'no-staticmethod-decorator')
+ @check_messages(
+ "protected-access", "no-classmethod-decorator", "no-staticmethod-decorator"
+ )
def visit_assign(self, assign_node):
self._check_classmethod_declaration(assign_node)
node = assign_node.targets[0]
@@ -1065,12 +1189,17 @@ a metaclass class method.'}
# check the function called is "classmethod" or "staticmethod"
func = node.value.func
- if (not isinstance(func, astroid.Name) or
- func.name not in ('classmethod', 'staticmethod')):
+ if not isinstance(func, astroid.Name) or func.name not in (
+ "classmethod",
+ "staticmethod",
+ ):
return
- msg = ('no-classmethod-decorator' if func.name == 'classmethod' else
- 'no-staticmethod-decorator')
+ msg = (
+ "no-classmethod-decorator"
+ if func.name == "classmethod"
+ else "no-staticmethod-decorator"
+ )
# assignment must be at a class scope
parent_class = node.scope()
if not isinstance(parent_class, astroid.ClassDef):
@@ -1082,12 +1211,11 @@ a metaclass class method.'}
return
method_name = classmeth_arg.name
- if any(method_name == member.name
- for member in parent_class.mymethods()):
+ if any(method_name == member.name for member in parent_class.mymethods()):
self.add_message(msg, node=node.targets[0])
def _check_protected_attribute_access(self, node):
- '''Given an attribute access node (set or get), check if attribute
+ """Given an attribute access node (set or get), check if attribute
access is legitimate. Call _check_first_attr with node before calling
this method. Valid cases are:
* self._attr in a method or cls._attr in a classmethod. Checked by
@@ -1095,11 +1223,13 @@ a metaclass class method.'}
* Klass._attr inside "Klass" class.
* Klass2._attr inside "Klass" class when Klass2 is a base class of
Klass.
- '''
+ """
attrname = node.attrname
- if (is_attr_protected(attrname) and
- attrname not in self.config.exclude_protected):
+ if (
+ is_attr_protected(attrname)
+ and attrname not in self.config.exclude_protected
+ ):
klass = node_frame_class(node)
@@ -1110,13 +1240,15 @@ a metaclass class method.'}
# We are not in a class, no remaining valid case
if klass is None:
- self.add_message('protected-access', node=node, args=attrname)
+ self.add_message("protected-access", node=node, args=attrname)
return
# If the expression begins with a call to super, that's ok.
- if isinstance(node.expr, astroid.Call) and \
- isinstance(node.expr.func, astroid.Name) and \
- node.expr.func.name == 'super':
+ if (
+ isinstance(node.expr, astroid.Call)
+ and isinstance(node.expr.func, astroid.Name)
+ and node.expr.func.name == "super"
+ ):
return
# If the expression begins with a call to type(self), that's ok.
@@ -1133,33 +1265,39 @@ a metaclass class method.'}
# b = property(lambda: self._b)
stmt = node.parent.statement()
- if (isinstance(stmt, astroid.Assign)
- and len(stmt.targets) == 1
- and isinstance(stmt.targets[0], astroid.AssignName)):
+ if (
+ isinstance(stmt, astroid.Assign)
+ and len(stmt.targets) == 1
+ and isinstance(stmt.targets[0], astroid.AssignName)
+ ):
name = stmt.targets[0].name
if _is_attribute_property(name, klass):
return
- self.add_message('protected-access', node=node, args=attrname)
+ self.add_message("protected-access", node=node, args=attrname)
def _is_type_self_call(self, expr):
- return (isinstance(expr, astroid.Call) and
- isinstance(expr.func, astroid.Name) and
- expr.func.name == 'type' and len(expr.args) == 1 and
- self._is_mandatory_method_param(expr.args[0]))
+ return (
+ isinstance(expr, astroid.Call)
+ and isinstance(expr.func, astroid.Name)
+ and expr.func.name == "type"
+ and len(expr.args) == 1
+ and self._is_mandatory_method_param(expr.args[0])
+ )
def visit_name(self, node):
"""check if the name handle an access to a class member
if so, register it
"""
- if self._first_attrs and (node.name == self._first_attrs[-1] or
- not self._first_attrs[-1]):
+ if self._first_attrs and (
+ node.name == self._first_attrs[-1] or not self._first_attrs[-1]
+ ):
self._meth_could_be_func = False
def _check_accessed_members(self, node, accessed):
"""check that accessed members are defined"""
# XXX refactor, probably much simpler now that E0201 is in type checker
- excs = ('AttributeError', 'Exception', 'BaseException')
+ excs = ("AttributeError", "Exception", "BaseException")
for attr, nodes in accessed.items():
try:
# is it a class attribute ?
@@ -1190,8 +1328,11 @@ a metaclass class method.'}
# filter defstmts to only pick the first one when there are
# several assignments in the same scope
scope = defstmts[0].scope()
- defstmts = [stmt for i, stmt in enumerate(defstmts)
- if i == 0 or stmt.scope() is not scope]
+ defstmts = [
+ stmt
+ for i, stmt in enumerate(defstmts)
+ if i == 0 or stmt.scope() is not scope
+ ]
# if there are still more than one, don't attempt to be smarter
# than we can be
if len(defstmts) == 1:
@@ -1201,10 +1342,18 @@ a metaclass class method.'}
frame = defstmt.frame()
lno = defstmt.fromlineno
for _node in nodes:
- if _node.frame() is frame and _node.fromlineno < lno \
- and not astroid.are_exclusive(_node.statement(), defstmt, excs):
- self.add_message('access-member-before-definition',
- node=_node, args=(attr, lno))
+ if (
+ _node.frame() is frame
+ and _node.fromlineno < lno
+ and not astroid.are_exclusive(
+ _node.statement(), defstmt, excs
+ )
+ ):
+ self.add_message(
+ "access-member-before-definition",
+ node=_node,
+ args=(attr, lno),
+ )
def _check_first_arg_for_type(self, node, metaclass=0):
"""check the name of first argument, expect:
@@ -1223,58 +1372,67 @@ a metaclass class method.'}
self._first_attrs.append(first_arg)
first = self._first_attrs[-1]
# static method
- if node.type == 'staticmethod':
- if (first_arg == 'self' or
- first_arg in self.config.valid_classmethod_first_arg or
- first_arg in self.config.valid_metaclass_classmethod_first_arg):
- self.add_message('bad-staticmethod-argument', args=first, node=node)
+ if node.type == "staticmethod":
+ if (
+ first_arg == "self"
+ or first_arg in self.config.valid_classmethod_first_arg
+ or first_arg in self.config.valid_metaclass_classmethod_first_arg
+ ):
+ self.add_message("bad-staticmethod-argument", args=first, node=node)
return
self._first_attrs[-1] = None
# class / regular method with no args
elif not node.args.args:
- self.add_message('no-method-argument', node=node)
+ self.add_message("no-method-argument", node=node)
# metaclass
elif metaclass:
# metaclass __new__ or classmethod
- if node.type == 'classmethod':
+ if node.type == "classmethod":
self._check_first_arg_config(
first,
- self.config.valid_metaclass_classmethod_first_arg, node,
- 'bad-mcs-classmethod-argument', node.name)
+ self.config.valid_metaclass_classmethod_first_arg,
+ node,
+ "bad-mcs-classmethod-argument",
+ node.name,
+ )
# metaclass regular method
else:
self._check_first_arg_config(
first,
- self.config.valid_classmethod_first_arg, node,
- 'bad-mcs-method-argument',
- node.name)
+ self.config.valid_classmethod_first_arg,
+ node,
+ "bad-mcs-method-argument",
+ node.name,
+ )
# regular class
else:
# class method
- if node.type == 'classmethod':
+ if node.type == "classmethod":
self._check_first_arg_config(
first,
- self.config.valid_classmethod_first_arg, node,
- 'bad-classmethod-argument',
- node.name)
+ self.config.valid_classmethod_first_arg,
+ node,
+ "bad-classmethod-argument",
+ node.name,
+ )
# regular method without self as argument
- elif first != 'self':
- self.add_message('no-self-argument', node=node)
+ elif first != "self":
+ self.add_message("no-self-argument", node=node)
- def _check_first_arg_config(self, first, config, node, message,
- method_name):
+ def _check_first_arg_config(self, first, config, node, message, method_name):
if first not in config:
if len(config) == 1:
valid = repr(config[0])
else:
- valid = ', '.join(repr(v) for v in config[:-1])
- valid = '%s or %r' % (valid, config[-1])
+ valid = ", ".join(repr(v) for v in config[:-1])
+ valid = "%s or %r" % (valid, config[-1])
self.add_message(message, args=(method_name, valid), node=node)
def _check_bases_classes(self, node):
"""check that the given class node implements abstract methods from
base classes
"""
+
def is_abstract(method):
return method.is_abstract(pass_is_abstract=False)
@@ -1295,28 +1453,29 @@ a metaclass class method.'}
if name in node.locals:
# it is redefined as an attribute or with a descriptor
continue
- self.add_message('abstract-method', node=node,
- args=(name, owner.name))
+ self.add_message("abstract-method", node=node, args=(name, owner.name))
def _check_init(self, node):
"""check that the __init__ method call super or ancestors'__init__
method
"""
- if (not self.linter.is_message_enabled('super-init-not-called') and
- not self.linter.is_message_enabled('non-parent-init-called')):
+ if not self.linter.is_message_enabled(
+ "super-init-not-called"
+ ) and not self.linter.is_message_enabled("non-parent-init-called"):
return
klass_node = node.parent.frame()
to_call = _ancestors_to_call(klass_node)
not_called_yet = dict(to_call)
for stmt in node.nodes_of_class(astroid.Call):
expr = stmt.func
- if not isinstance(expr, astroid.Attribute) \
- or expr.attrname != '__init__':
+ if not isinstance(expr, astroid.Attribute) or expr.attrname != "__init__":
continue
# skip the test if using super
- if isinstance(expr.expr, astroid.Call) and \
- isinstance(expr.expr.func, astroid.Name) and \
- expr.expr.func.name == 'super':
+ if (
+ isinstance(expr.expr, astroid.Call)
+ and isinstance(expr.expr.func, astroid.Name)
+ and expr.expr.func.name == "super"
+ ):
return
try:
for klass in expr.expr.infer():
@@ -1329,10 +1488,12 @@ a metaclass class method.'}
# base = super()
# base.__init__(...)
- if (isinstance(klass, astroid.Instance) and
- isinstance(klass._proxied, astroid.ClassDef) and
- is_builtin_object(klass._proxied) and
- klass._proxied.name == 'super'):
+ if (
+ isinstance(klass, astroid.Instance)
+ and isinstance(klass._proxied, astroid.ClassDef)
+ and is_builtin_object(klass._proxied)
+ and klass._proxied.name == "super"
+ ):
return
if isinstance(klass, objects.Super):
return
@@ -1340,23 +1501,27 @@ a metaclass class method.'}
del not_called_yet[klass]
except KeyError:
if klass not in to_call:
- self.add_message('non-parent-init-called',
- node=expr, args=klass.name)
+ self.add_message(
+ "non-parent-init-called", node=expr, args=klass.name
+ )
except astroid.InferenceError:
continue
for klass, method in not_called_yet.items():
cls = node_frame_class(method)
- if klass.name == 'object' or (cls and cls.name == 'object'):
+ if klass.name == "object" or (cls and cls.name == "object"):
continue
- self.add_message('super-init-not-called', args=klass.name, node=node)
+ self.add_message("super-init-not-called", args=klass.name, node=node)
def _check_signature(self, method1, refmethod, class_type, cls):
"""check that the signature of the two given methods match
"""
- if not (isinstance(method1, astroid.FunctionDef)
- and isinstance(refmethod, astroid.FunctionDef)):
- self.add_message('method-check-failed',
- args=(method1, refmethod), node=method1)
+ if not (
+ isinstance(method1, astroid.FunctionDef)
+ and isinstance(refmethod, astroid.FunctionDef)
+ ):
+ self.add_message(
+ "method-check-failed", args=(method1, refmethod), node=method1
+ )
return
instance = cls.instantiate_class()
@@ -1374,20 +1539,22 @@ a metaclass class method.'}
# which shouldn't be taken in consideration.
if method1.decorators:
for decorator in method1.decorators.nodes:
- if (isinstance(decorator, astroid.Attribute) and
- decorator.attrname == 'setter'):
+ if (
+ isinstance(decorator, astroid.Attribute)
+ and decorator.attrname == "setter"
+ ):
return
if _different_parameters(
- refmethod, method1,
- dummy_parameter_regex=self._dummy_rgx):
- self.add_message('arguments-differ',
- args=(class_type, method1.name),
- node=method1)
+ refmethod, method1, dummy_parameter_regex=self._dummy_rgx
+ ):
+ self.add_message(
+ "arguments-differ", args=(class_type, method1.name), node=method1
+ )
elif len(method1.args.defaults) < len(refmethod.args.defaults):
- self.add_message('signature-differs',
- args=(class_type, method1.name),
- node=method1)
+ self.add_message(
+ "signature-differs", args=(class_type, method1.name), node=method1
+ )
def _uses_mandatory_method_param(self, node):
"""Check that attribute lookup name use first attribute variable name
@@ -1401,44 +1568,62 @@ a metaclass class method.'}
Name is `self` for method, `cls` for classmethod and `mcs` for metaclass.
"""
- return (self._first_attrs and isinstance(node, astroid.Name)
- and node.name == self._first_attrs[-1])
+ return (
+ self._first_attrs
+ and isinstance(node, astroid.Name)
+ and node.name == self._first_attrs[-1]
+ )
class SpecialMethodsChecker(BaseChecker):
"""Checker which verifies that special methods
are implemented correctly.
"""
- __implements__ = (IAstroidChecker, )
- name = 'classes'
+
+ __implements__ = (IAstroidChecker,)
+ name = "classes"
msgs = {
- 'E0301': ('__iter__ returns non-iterator',
- 'non-iterator-returned',
- 'Used when an __iter__ method returns something which is not an '
- 'iterable (i.e. has no `%s` method)' % NEXT_METHOD,
- {'old_names': [('W0234', 'non-iterator-returned'),
- ('E0234', 'non-iterator-returned')]}),
- 'E0302': ('The special method %r expects %s param(s), %d %s given',
- 'unexpected-special-method-signature',
- 'Emitted when a special method was defined with an '
- 'invalid number of parameters. If it has too few or '
- 'too many, it might not work at all.',
- {'old_names': [('E0235', 'bad-context-manager')]}),
- 'E0303': ('__len__ does not return non-negative integer',
- 'invalid-length-returned',
- 'Used when a __len__ method returns something which is not a '
- 'non-negative integer', {}),
+ "E0301": (
+ "__iter__ returns non-iterator",
+ "non-iterator-returned",
+ "Used when an __iter__ method returns something which is not an "
+ "iterable (i.e. has no `%s` method)" % NEXT_METHOD,
+ {
+ "old_names": [
+ ("W0234", "non-iterator-returned"),
+ ("E0234", "non-iterator-returned"),
+ ]
+ },
+ ),
+ "E0302": (
+ "The special method %r expects %s param(s), %d %s given",
+ "unexpected-special-method-signature",
+ "Emitted when a special method was defined with an "
+ "invalid number of parameters. If it has too few or "
+ "too many, it might not work at all.",
+ {"old_names": [("E0235", "bad-context-manager")]},
+ ),
+ "E0303": (
+ "__len__ does not return non-negative integer",
+ "invalid-length-returned",
+ "Used when a __len__ method returns something which is not a "
+ "non-negative integer",
+ {},
+ ),
}
priority = -2
- @check_messages('unexpected-special-method-signature',
- 'non-iterator-returned', 'invalid-length-returned')
+ @check_messages(
+ "unexpected-special-method-signature",
+ "non-iterator-returned",
+ "invalid-length-returned",
+ )
def visit_functiondef(self, node):
if not node.is_method():
return
- if node.name == '__iter__':
+ if node.name == "__iter__":
self._check_iter(node)
- if node.name == '__len__':
+ if node.name == "__len__":
self._check_len(node)
if node.name in PYMETHODS:
self._check_unexpected_method_signature(node)
@@ -1486,9 +1671,11 @@ class SpecialMethodsChecker(BaseChecker):
if emit:
verb = "was" if current_params <= 1 else "were"
- self.add_message('unexpected-special-method-signature',
- args=(node.name, expected_params, current_params, verb),
- node=node)
+ self.add_message(
+ "unexpected-special-method-signature",
+ args=(node.name, expected_params, current_params, verb),
+ node=node,
+ )
@staticmethod
def _is_iterator(node):
@@ -1519,30 +1706,32 @@ class SpecialMethodsChecker(BaseChecker):
infered = _safe_infer_call_result(node, node)
if infered is not None:
if not self._is_iterator(infered):
- self.add_message('non-iterator-returned', node=node)
+ self.add_message("non-iterator-returned", node=node)
def _check_len(self, node):
inferred = _safe_infer_call_result(node, node)
if not inferred or inferred is astroid.Uninferable:
return
- if (isinstance(inferred, astroid.Instance)
- and inferred.name == 'int'
- and not isinstance(inferred, astroid.Const)):
+ if (
+ isinstance(inferred, astroid.Instance)
+ and inferred.name == "int"
+ and not isinstance(inferred, astroid.Const)
+ ):
# Assume it's good enough, since the int() call might wrap
# something that's uninferable for us
return
if not isinstance(inferred, astroid.Const):
- self.add_message('invalid-length-returned', node=node)
+ self.add_message("invalid-length-returned", node=node)
return
value = inferred.value
if not isinstance(value, int) or value < 0:
- self.add_message('invalid-length-returned', node=node)
+ self.add_message("invalid-length-returned", node=node)
-def _ancestors_to_call(klass_node, method='__init__'):
+def _ancestors_to_call(klass_node, method="__init__"):
"""return a dictionary where keys are the list of base classes providing
the queried method, and so that should/may be called from the method node
"""
diff --git a/pylint/checkers/design_analysis.py b/pylint/checkers/design_analysis.py
index 4855456b1..59d54ddee 100644
--- a/pylint/checkers/design_analysis.py
+++ b/pylint/checkers/design_analysis.py
@@ -31,47 +31,65 @@ from pylint import utils
MSGS = {
- 'R0901': ('Too many ancestors (%s/%s)',
- 'too-many-ancestors',
- 'Used when class has too many parent classes, try to reduce '
- 'this to get a simpler (and so easier to use) class.'),
- 'R0902': ('Too many instance attributes (%s/%s)',
- 'too-many-instance-attributes',
- 'Used when class has too many instance attributes, try to reduce '
- 'this to get a simpler (and so easier to use) class.'),
- 'R0903': ('Too few public methods (%s/%s)',
- 'too-few-public-methods',
- 'Used when class has too few public methods, so be sure it\'s '
- 'really worth it.'),
- 'R0904': ('Too many public methods (%s/%s)',
- 'too-many-public-methods',
- 'Used when class has too many public methods, try to reduce '
- 'this to get a simpler (and so easier to use) class.'),
-
- 'R0911': ('Too many return statements (%s/%s)',
- 'too-many-return-statements',
- 'Used when a function or method has too many return statement, '
- 'making it hard to follow.'),
- 'R0912': ('Too many branches (%s/%s)',
- 'too-many-branches',
- 'Used when a function or method has too many branches, '
- 'making it hard to follow.'),
- 'R0913': ('Too many arguments (%s/%s)',
- 'too-many-arguments',
- 'Used when a function or method takes too many arguments.'),
- 'R0914': ('Too many local variables (%s/%s)',
- 'too-many-locals',
- 'Used when a function or method has too many local variables.'),
- 'R0915': ('Too many statements (%s/%s)',
- 'too-many-statements',
- 'Used when a function or method has too many statements. You '
- 'should then split it in smaller functions / methods.'),
- 'R0916': ('Too many boolean expressions in if statement (%s/%s)',
- 'too-many-boolean-expressions',
- 'Used when an if statement contains too many boolean '
- 'expressions.'),
- }
-SPECIAL_OBJ = re.compile('^_{2}[a-z]+_{2}$')
+ "R0901": (
+ "Too many ancestors (%s/%s)",
+ "too-many-ancestors",
+        "Used when a class has too many parent classes; try to reduce "
+ "this to get a simpler (and so easier to use) class.",
+ ),
+ "R0902": (
+ "Too many instance attributes (%s/%s)",
+ "too-many-instance-attributes",
+        "Used when a class has too many instance attributes; try to reduce "
+ "this to get a simpler (and so easier to use) class.",
+ ),
+ "R0903": (
+ "Too few public methods (%s/%s)",
+ "too-few-public-methods",
+        "Used when a class has too few public methods, so be sure it's "
+ "really worth it.",
+ ),
+ "R0904": (
+ "Too many public methods (%s/%s)",
+ "too-many-public-methods",
+        "Used when a class has too many public methods; try to reduce "
+ "this to get a simpler (and so easier to use) class.",
+ ),
+ "R0911": (
+ "Too many return statements (%s/%s)",
+ "too-many-return-statements",
+        "Used when a function or method has too many return statements, "
+ "making it hard to follow.",
+ ),
+ "R0912": (
+ "Too many branches (%s/%s)",
+ "too-many-branches",
+ "Used when a function or method has too many branches, "
+ "making it hard to follow.",
+ ),
+ "R0913": (
+ "Too many arguments (%s/%s)",
+ "too-many-arguments",
+ "Used when a function or method takes too many arguments.",
+ ),
+ "R0914": (
+ "Too many local variables (%s/%s)",
+ "too-many-locals",
+ "Used when a function or method has too many local variables.",
+ ),
+ "R0915": (
+ "Too many statements (%s/%s)",
+ "too-many-statements",
+ "Used when a function or method has too many statements. You "
+        "should then split it into smaller functions / methods.",
+ ),
+ "R0916": (
+ "Too many boolean expressions in if statement (%s/%s)",
+ "too-many-boolean-expressions",
+ "Used when an if statement contains too many boolean " "expressions.",
+ ),
+}
+SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
def _count_boolean_expressions(bool_op):
@@ -89,12 +107,11 @@ def _count_boolean_expressions(bool_op):
def _count_methods_in_class(node):
- all_methods = sum(1 for method in node.methods()
- if not method.name.startswith('_'))
+ all_methods = sum(1 for method in node.methods() if not method.name.startswith("_"))
# Special methods count towards the number of public methods,
# but don't count towards there being too many methods.
for method in node.mymethods():
- if SPECIAL_OBJ.search(method.name) and method.name != '__init__':
+ if SPECIAL_OBJ.search(method.name) and method.name != "__init__":
all_methods += 1
return all_methods
@@ -108,68 +125,107 @@ class MisdesignChecker(BaseChecker):
__implements__ = (IAstroidChecker,)
# configuration section name
- name = 'design'
+ name = "design"
# messages
msgs = MSGS
priority = -2
# configuration options
- options = (('max-args',
- {'default' : 5, 'type' : 'int', 'metavar' : '<int>',
- 'help': 'Maximum number of arguments for function / method.'}
- ),
- ('max-locals',
- {'default' : 15, 'type' : 'int', 'metavar' : '<int>',
- 'help': 'Maximum number of locals for function / method body.'}
- ),
- ('max-returns',
- {'default' : 6, 'type' : 'int', 'metavar' : '<int>',
- 'help': 'Maximum number of return / yield for function / '
- 'method body.'}
- ),
- ('max-branches',
- {'default' : 12, 'type' : 'int', 'metavar' : '<int>',
- 'help': 'Maximum number of branch for function / method body.'}
- ),
- ('max-statements',
- {'default' : 50, 'type' : 'int', 'metavar' : '<int>',
- 'help': 'Maximum number of statements in function / method '
- 'body.'}
- ),
- ('max-parents',
- {'default' : 7,
- 'type' : 'int',
- 'metavar' : '<num>',
- 'help' : 'Maximum number of parents for a class (see R0901).'}
- ),
- ('max-attributes',
- {'default' : 7,
- 'type' : 'int',
- 'metavar' : '<num>',
- 'help' : 'Maximum number of attributes for a class \
-(see R0902).'}
- ),
- ('min-public-methods',
- {'default' : 2,
- 'type' : 'int',
- 'metavar' : '<num>',
- 'help' : 'Minimum number of public methods for a class \
-(see R0903).'}
- ),
- ('max-public-methods',
- {'default' : 20,
- 'type' : 'int',
- 'metavar' : '<num>',
- 'help' : 'Maximum number of public methods for a class \
-(see R0904).'}
- ),
- ('max-bool-expr',
- {'default': 5,
- 'type': 'int',
- 'metavar': '<num>',
- 'help': 'Maximum number of boolean expressions in an if '
- 'statement.'}
- ),
- )
+ options = (
+ (
+ "max-args",
+ {
+ "default": 5,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "Maximum number of arguments for function / method.",
+ },
+ ),
+ (
+ "max-locals",
+ {
+ "default": 15,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "Maximum number of locals for function / method body.",
+ },
+ ),
+ (
+ "max-returns",
+ {
+ "default": 6,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "Maximum number of return / yield for function / "
+ "method body.",
+ },
+ ),
+ (
+ "max-branches",
+ {
+ "default": 12,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "Maximum number of branch for function / method body.",
+ },
+ ),
+ (
+ "max-statements",
+ {
+ "default": 50,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "Maximum number of statements in function / method " "body.",
+ },
+ ),
+ (
+ "max-parents",
+ {
+ "default": 7,
+ "type": "int",
+ "metavar": "<num>",
+ "help": "Maximum number of parents for a class (see R0901).",
+ },
+ ),
+ (
+ "max-attributes",
+ {
+ "default": 7,
+ "type": "int",
+ "metavar": "<num>",
+ "help": "Maximum number of attributes for a class \
+(see R0902).",
+ },
+ ),
+ (
+ "min-public-methods",
+ {
+ "default": 2,
+ "type": "int",
+ "metavar": "<num>",
+ "help": "Minimum number of public methods for a class \
+(see R0903).",
+ },
+ ),
+ (
+ "max-public-methods",
+ {
+ "default": 20,
+ "type": "int",
+ "metavar": "<num>",
+ "help": "Maximum number of public methods for a class \
+(see R0904).",
+ },
+ ),
+ (
+ "max-bool-expr",
+ {
+ "default": 5,
+ "type": "int",
+ "metavar": "<num>",
+ "help": "Maximum number of boolean expressions in an if " "statement.",
+ },
+ ),
+ )
def __init__(self, linter=None):
BaseChecker.__init__(self, linter)
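As a rough, hypothetical illustration of the max-bool-expr option shown above (default 5), a condition chaining six boolean operands in a single if statement would be reported by this checker as too-many-boolean-expressions (R0916); the function name and arguments below are invented.

def ready(a, b, c, d, e, f):
    # six operands in one condition exceed the default max-bool-expr of 5
    if a and b and c and d and e and f:
        return True
    return False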
@@ -191,28 +247,38 @@ class MisdesignChecker(BaseChecker):
@decorators.cachedproperty
def _ignored_argument_names(self):
- return utils.get_global_option(self, 'ignored-argument-names', default=None)
-
- @check_messages('too-many-ancestors', 'too-many-instance-attributes',
- 'too-few-public-methods', 'too-many-public-methods')
+ return utils.get_global_option(self, "ignored-argument-names", default=None)
+
+ @check_messages(
+ "too-many-ancestors",
+ "too-many-instance-attributes",
+ "too-few-public-methods",
+ "too-many-public-methods",
+ )
def visit_classdef(self, node):
"""check size of inheritance hierarchy and number of instance attributes
"""
nb_parents = len(list(node.ancestors()))
if nb_parents > self.config.max_parents:
- self.add_message('too-many-ancestors', node=node,
- args=(nb_parents, self.config.max_parents))
+ self.add_message(
+ "too-many-ancestors",
+ node=node,
+ args=(nb_parents, self.config.max_parents),
+ )
if len(node.instance_attrs) > self.config.max_attributes:
- self.add_message('too-many-instance-attributes', node=node,
- args=(len(node.instance_attrs),
- self.config.max_attributes))
+ self.add_message(
+ "too-many-instance-attributes",
+ node=node,
+ args=(len(node.instance_attrs), self.config.max_attributes),
+ )
- @check_messages('too-few-public-methods', 'too-many-public-methods')
+ @check_messages("too-few-public-methods", "too-many-public-methods")
def leave_classdef(self, node):
"""check number of public methods"""
- my_methods = sum(1 for method in node.mymethods()
- if not method.name.startswith('_'))
+ my_methods = sum(
+ 1 for method in node.mymethods() if not method.name.startswith("_")
+ )
# Does the class contain less than n public methods ?
# This checks only the methods defined in the current class,
@@ -222,15 +288,19 @@ class MisdesignChecker(BaseChecker):
# a lot of assert methods. It doesn't make sense to warn
# when the user subclasses TestCase to add his own tests.
if my_methods > self.config.max_public_methods:
- self.add_message('too-many-public-methods', node=node,
- args=(my_methods,
- self.config.max_public_methods))
+ self.add_message(
+ "too-many-public-methods",
+ node=node,
+ args=(my_methods, self.config.max_public_methods),
+ )
# Stop here for exception, metaclass, interface classes and other
# classes for which we don't need to count the methods.
- if (node.type != 'class'
- or checker_utils.is_enum_class(node)
- or checker_utils.is_dataclass(node)):
+ if (
+ node.type != "class"
+ or checker_utils.is_enum_class(node)
+ or checker_utils.is_dataclass(node)
+ ):
return
# Does the class contain more than n public methods ?
@@ -238,13 +308,20 @@ class MisdesignChecker(BaseChecker):
# by the current class.
all_methods = _count_methods_in_class(node)
if all_methods < self.config.min_public_methods:
- self.add_message('too-few-public-methods', node=node,
- args=(all_methods,
- self.config.min_public_methods))
-
- @check_messages('too-many-return-statements', 'too-many-branches',
- 'too-many-arguments', 'too-many-locals',
- 'too-many-statements', 'keyword-arg-before-vararg')
+ self.add_message(
+ "too-few-public-methods",
+ node=node,
+ args=(all_methods, self.config.min_public_methods),
+ )
+
+ @check_messages(
+ "too-many-return-statements",
+ "too-many-branches",
+ "too-many-arguments",
+ "too-many-locals",
+ "too-many-statements",
+ "keyword-arg-before-vararg",
+ )
def visit_functiondef(self, node):
"""check function name, docstring, arguments, redefinition,
variable names, max locals
@@ -257,51 +334,70 @@ class MisdesignChecker(BaseChecker):
if args is not None:
ignored_args_num = 0
if ignored_argument_names:
- ignored_args_num = sum(1 for arg in args if ignored_argument_names.match(arg.name))
+ ignored_args_num = sum(
+ 1 for arg in args if ignored_argument_names.match(arg.name)
+ )
argnum = len(args) - ignored_args_num
if argnum > self.config.max_args:
- self.add_message('too-many-arguments', node=node,
- args=(len(args), self.config.max_args))
+ self.add_message(
+ "too-many-arguments",
+ node=node,
+ args=(len(args), self.config.max_args),
+ )
else:
ignored_args_num = 0
# check number of local variables
locnum = len(node.locals) - ignored_args_num
if locnum > self.config.max_locals:
- self.add_message('too-many-locals', node=node,
- args=(locnum, self.config.max_locals))
+ self.add_message(
+ "too-many-locals", node=node, args=(locnum, self.config.max_locals)
+ )
# init new statements counter
self._stmts.append(1)
visit_asyncfunctiondef = visit_functiondef
- @check_messages('too-many-return-statements', 'too-many-branches',
- 'too-many-arguments', 'too-many-locals',
- 'too-many-statements')
+ @check_messages(
+ "too-many-return-statements",
+ "too-many-branches",
+ "too-many-arguments",
+ "too-many-locals",
+ "too-many-statements",
+ )
def leave_functiondef(self, node):
"""most of the work is done here on close:
checks for max returns, branch, return in __init__
"""
returns = self._returns.pop()
if returns > self.config.max_returns:
- self.add_message('too-many-return-statements', node=node,
- args=(returns, self.config.max_returns))
+ self.add_message(
+ "too-many-return-statements",
+ node=node,
+ args=(returns, self.config.max_returns),
+ )
branches = self._branches[node]
if branches > self.config.max_branches:
- self.add_message('too-many-branches', node=node,
- args=(branches, self.config.max_branches))
+ self.add_message(
+ "too-many-branches",
+ node=node,
+ args=(branches, self.config.max_branches),
+ )
# check number of statements
stmts = self._stmts.pop()
if stmts > self.config.max_statements:
- self.add_message('too-many-statements', node=node,
- args=(stmts, self.config.max_statements))
+ self.add_message(
+ "too-many-statements",
+ node=node,
+ args=(stmts, self.config.max_statements),
+ )
leave_asyncfunctiondef = leave_functiondef
def visit_return(self, _):
"""count number of returns"""
if not self._returns:
- return # return outside function, reported by the base checker
+ return # return outside function, reported by the base checker
self._returns[-1] += 1
def visit_default(self, node):
@@ -324,14 +420,13 @@ class MisdesignChecker(BaseChecker):
self._inc_branch(node, 2)
self._inc_all_stmts(2)
- @check_messages('too-many-boolean-expressions')
+ @check_messages("too-many-boolean-expressions")
def visit_if(self, node):
"""increments the branches counter and checks boolean expressions"""
self._check_boolean_expressions(node)
branches = 1
# don't double count If nodes coming from some 'elif'
- if node.orelse and (len(node.orelse) > 1 or
- not isinstance(node.orelse[0], If)):
+ if node.orelse and (len(node.orelse) > 1 or not isinstance(node.orelse[0], If)):
branches += 1
self._inc_branch(node, branches)
self._inc_all_stmts(branches)
@@ -346,8 +441,11 @@ class MisdesignChecker(BaseChecker):
return
nb_bool_expr = _count_boolean_expressions(condition)
if nb_bool_expr > self.config.max_bool_expr:
- self.add_message('too-many-boolean-expressions', node=condition,
- args=(nb_bool_expr, self.config.max_bool_expr))
+ self.add_message(
+ "too-many-boolean-expressions",
+ node=condition,
+ args=(nb_bool_expr, self.config.max_bool_expr),
+ )
def visit_while(self, node):
"""increments the branches counter"""
diff --git a/pylint/checkers/exceptions.py b/pylint/checkers/exceptions.py
index 2a8e95e01..4eb65d923 100644
--- a/pylint/checkers/exceptions.py
+++ b/pylint/checkers/exceptions.py
@@ -70,74 +70,97 @@ def _is_raising(body: typing.List) -> bool:
PY3K = sys.version_info >= (3, 0)
-OVERGENERAL_EXCEPTIONS = ('Exception',)
+OVERGENERAL_EXCEPTIONS = ("Exception",)
BUILTINS_NAME = builtins.__name__
MSGS = {
- 'E0701': ('Bad except clauses order (%s)',
- 'bad-except-order',
- 'Used when except clauses are not in the correct order (from the '
- 'more specific to the more generic). If you don\'t fix the order, '
- 'some exceptions may not be caught by the most specific handler.'),
- 'E0702': ('Raising %s while only classes or instances are allowed',
- 'raising-bad-type',
- 'Used when something which is neither a class, an instance or a '
- 'string is raised (i.e. a `TypeError` will be raised).'),
- 'E0703': ('Exception context set to something which is not an '
- 'exception, nor None',
- 'bad-exception-context',
- 'Used when using the syntax "raise ... from ...", '
- 'where the exception context is not an exception, '
- 'nor None.'),
- 'E0704': ('The raise statement is not inside an except clause',
- 'misplaced-bare-raise',
- 'Used when a bare raise is not used inside an except clause. '
- 'This generates an error, since there are no active exceptions '
- 'to be reraised. An exception to this rule is represented by '
- 'a bare raise inside a finally clause, which might work, as long '
- 'as an exception is raised inside the try block, but it is '
- 'nevertheless a code smell that must not be relied upon.'),
- 'E0710': ('Raising a new style class which doesn\'t inherit from BaseException',
- 'raising-non-exception',
- 'Used when a new style class which doesn\'t inherit from '
- 'BaseException is raised.'),
- 'E0711': ('NotImplemented raised - should raise NotImplementedError',
- 'notimplemented-raised',
- 'Used when NotImplemented is raised instead of '
- 'NotImplementedError'),
- 'E0712': ('Catching an exception which doesn\'t inherit from Exception: %s',
- 'catching-non-exception',
- 'Used when a class which doesn\'t inherit from '
- 'Exception is used as an exception in an except clause.'),
- 'W0702': ('No exception type(s) specified',
- 'bare-except',
- 'Used when an except clause doesn\'t specify exceptions type to '
- 'catch.'),
- 'W0703': ('Catching too general exception %s',
- 'broad-except',
- 'Used when an except catches a too general exception, '
- 'possibly burying unrelated errors.'),
- 'W0705': ('Catching previously caught exception type %s',
- 'duplicate-except',
- 'Used when an except catches a type that was already caught by '
- 'a previous handler.'),
- 'W0706': ('The except handler raises immediately',
- 'try-except-raise',
- 'Used when an except handler uses raise as its first or only '
- 'operator. This is useless because it raises back the exception '
- 'immediately. Remove the raise operator or the entire '
- 'try-except-raise block!'),
- 'W0711': ('Exception to catch is the result of a binary "%s" operation',
- 'binary-op-exception',
- 'Used when the exception to catch is of the form '
- '"except A or B:". If intending to catch multiple, '
- 'rewrite as "except (A, B):"'),
- 'W0715': ('Exception arguments suggest string formatting might be intended',
- 'raising-format-tuple',
- 'Used when passing multiple arguments to an exception '
- 'constructor, the first of them a string literal containing what '
- 'appears to be placeholders intended for formatting'),
- }
+ "E0701": (
+ "Bad except clauses order (%s)",
+ "bad-except-order",
+ "Used when except clauses are not in the correct order (from the "
+ "more specific to the more generic). If you don't fix the order, "
+ "some exceptions may not be caught by the most specific handler.",
+ ),
+ "E0702": (
+ "Raising %s while only classes or instances are allowed",
+ "raising-bad-type",
+ "Used when something which is neither a class, an instance or a "
+ "string is raised (i.e. a `TypeError` will be raised).",
+ ),
+ "E0703": (
+ "Exception context set to something which is not an " "exception, nor None",
+ "bad-exception-context",
+ 'Used when using the syntax "raise ... from ...", '
+ "where the exception context is not an exception, "
+ "nor None.",
+ ),
+ "E0704": (
+ "The raise statement is not inside an except clause",
+ "misplaced-bare-raise",
+ "Used when a bare raise is not used inside an except clause. "
+ "This generates an error, since there are no active exceptions "
+ "to be reraised. An exception to this rule is represented by "
+ "a bare raise inside a finally clause, which might work, as long "
+ "as an exception is raised inside the try block, but it is "
+ "nevertheless a code smell that must not be relied upon.",
+ ),
+ "E0710": (
+ "Raising a new style class which doesn't inherit from BaseException",
+ "raising-non-exception",
+ "Used when a new style class which doesn't inherit from "
+ "BaseException is raised.",
+ ),
+ "E0711": (
+ "NotImplemented raised - should raise NotImplementedError",
+ "notimplemented-raised",
+ "Used when NotImplemented is raised instead of " "NotImplementedError",
+ ),
+ "E0712": (
+ "Catching an exception which doesn't inherit from Exception: %s",
+ "catching-non-exception",
+ "Used when a class which doesn't inherit from "
+ "Exception is used as an exception in an except clause.",
+ ),
+ "W0702": (
+ "No exception type(s) specified",
+ "bare-except",
+ "Used when an except clause doesn't specify exceptions type to " "catch.",
+ ),
+ "W0703": (
+ "Catching too general exception %s",
+ "broad-except",
+ "Used when an except catches a too general exception, "
+ "possibly burying unrelated errors.",
+ ),
+ "W0705": (
+ "Catching previously caught exception type %s",
+ "duplicate-except",
+ "Used when an except catches a type that was already caught by "
+ "a previous handler.",
+ ),
+ "W0706": (
+ "The except handler raises immediately",
+ "try-except-raise",
+ "Used when an except handler uses raise as its first or only "
+ "operator. This is useless because it raises back the exception "
+ "immediately. Remove the raise operator or the entire "
+ "try-except-raise block!",
+ ),
+ "W0711": (
+ 'Exception to catch is the result of a binary "%s" operation',
+ "binary-op-exception",
+ "Used when the exception to catch is of the form "
+ '"except A or B:". If intending to catch multiple, '
+ 'rewrite as "except (A, B):"',
+ ),
+ "W0715": (
+ "Exception arguments suggest string formatting might be intended",
+ "raising-format-tuple",
+ "Used when passing multiple arguments to an exception "
+ "constructor, the first of them a string literal containing what "
+ "appears to be placeholders intended for formatting",
+ ),
+}
class BaseVisitor:
@@ -149,13 +172,13 @@ class BaseVisitor:
def visit(self, node):
name = node.__class__.__name__.lower()
- dispatch_meth = getattr(self, 'visit_' + name, None)
+ dispatch_meth = getattr(self, "visit_" + name, None)
if dispatch_meth:
dispatch_meth(node)
else:
self.visit_default(node)
- def visit_default(self, node): # pylint: disable=unused-argument
+ def visit_default(self, node): # pylint: disable=unused-argument
"""Default implementation for all the nodes."""
@@ -163,23 +186,20 @@ class ExceptionRaiseRefVisitor(BaseVisitor):
"""Visit references (anything that is not an AST leaf)."""
def visit_name(self, name):
- if name.name == 'NotImplemented':
- self._checker.add_message(
- 'notimplemented-raised',
- node=self._node)
+ if name.name == "NotImplemented":
+ self._checker.add_message("notimplemented-raised", node=self._node)
def visit_call(self, call):
if isinstance(call.func, astroid.Name):
self.visit_name(call.func)
- if (len(call.args) > 1 and
- isinstance(call.args[0], astroid.Const) and
- isinstance(call.args[0].value, str)):
+ if (
+ len(call.args) > 1
+ and isinstance(call.args[0], astroid.Const)
+ and isinstance(call.args[0].value, str)
+ ):
msg = call.args[0].value
- if ('%' in msg or
- ('{' in msg and '}' in msg)):
- self._checker.add_message(
- 'raising-format-tuple',
- node=self._node)
+ if "%" in msg or ("{" in msg and "}" in msg):
+ self._checker.add_message("raising-format-tuple", node=self._node)
class ExceptionRaiseLeafVisitor(BaseVisitor):
@@ -188,8 +208,9 @@ class ExceptionRaiseLeafVisitor(BaseVisitor):
def visit_const(self, const):
if not isinstance(const.value, str):
# raising-string will be emitted from python3 porting checker.
- self._checker.add_message('raising-bad-type', node=self._node,
- args=const.value.__class__.__name__)
+ self._checker.add_message(
+ "raising-bad-type", node=self._node, args=const.value.__class__.__name__
+ )
def visit_instance(self, instance):
# pylint: disable=protected-access
@@ -200,18 +221,15 @@ class ExceptionRaiseLeafVisitor(BaseVisitor):
visit_exceptioninstance = visit_instance
def visit_classdef(self, cls):
- if (not utils.inherit_from_std_ex(cls) and
- utils.has_known_bases(cls)):
+ if not utils.inherit_from_std_ex(cls) and utils.has_known_bases(cls):
if cls.newstyle:
- self._checker.add_message('raising-non-exception', node=self._node)
+ self._checker.add_message("raising-non-exception", node=self._node)
else:
- self._checker.add_message('nonstandard-exception', node=self._node)
+ self._checker.add_message("nonstandard-exception", node=self._node)
def visit_tuple(self, tuple_node):
if PY3K or not tuple_node.elts:
- self._checker.add_message('raising-bad-type',
- node=self._node,
- args='tuple')
+ self._checker.add_message("raising-bad-type", node=self._node, args="tuple")
return
# On Python 2, using the following is not an error:
@@ -225,18 +243,18 @@ class ExceptionRaiseLeafVisitor(BaseVisitor):
if not inferred or inferred is astroid.Uninferable:
return
- if (isinstance(inferred, astroid.Instance)
- and inferred.__class__.__name__ != 'Instance'):
+ if (
+ isinstance(inferred, astroid.Instance)
+ and inferred.__class__.__name__ != "Instance"
+ ):
# TODO: explain why
self.visit_default(tuple_node)
else:
self.visit(inferred)
def visit_default(self, node):
- name = getattr(node, 'name', node.__class__.__name__)
- self._checker.add_message('raising-bad-type',
- node=self._node,
- args=name)
+ name = getattr(node, "name", node.__class__.__name__)
+ self._checker.add_message("raising-bad-type", node=self._node, args=name)
class ExceptionsChecker(checkers.BaseChecker):
@@ -244,26 +262,36 @@ class ExceptionsChecker(checkers.BaseChecker):
__implements__ = interfaces.IAstroidChecker
- name = 'exceptions'
+ name = "exceptions"
msgs = MSGS
priority = -4
- options = (('overgeneral-exceptions',
- {'default' : OVERGENERAL_EXCEPTIONS,
- 'type' : 'csv', 'metavar' : '<comma-separated class names>',
- 'help' : 'Exceptions that will emit a warning '
- 'when being caught. Defaults to "%s".' % (
- ', '.join(OVERGENERAL_EXCEPTIONS),)}
- ),
- )
+ options = (
+ (
+ "overgeneral-exceptions",
+ {
+ "default": OVERGENERAL_EXCEPTIONS,
+ "type": "csv",
+ "metavar": "<comma-separated class names>",
+ "help": "Exceptions that will emit a warning "
+ 'when being caught. Defaults to "%s".'
+ % (", ".join(OVERGENERAL_EXCEPTIONS),),
+ },
+ ),
+ )
def open(self):
self._builtin_exceptions = _builtin_exceptions()
super(ExceptionsChecker, self).open()
- @utils.check_messages('nonstandard-exception', 'misplaced-bare-raise',
- 'raising-bad-type', 'raising-non-exception',
- 'notimplemented-raised', 'bad-exception-context',
- 'raising-format-tuple')
+ @utils.check_messages(
+ "nonstandard-exception",
+ "misplaced-bare-raise",
+ "raising-bad-type",
+ "raising-non-exception",
+ "notimplemented-raised",
+ "bad-exception-context",
+ "raising-format-tuple",
+ )
def visit_raise(self, node):
if node.exc is None:
self._check_misplaced_bare_raise(node)
@@ -286,21 +314,23 @@ class ExceptionsChecker(checkers.BaseChecker):
def _check_misplaced_bare_raise(self, node):
# Filter out if it's present in __exit__.
scope = node.scope()
- if (isinstance(scope, astroid.FunctionDef)
- and scope.is_method()
- and scope.name == '__exit__'):
+ if (
+ isinstance(scope, astroid.FunctionDef)
+ and scope.is_method()
+ and scope.name == "__exit__"
+ ):
return
current = node
# Stop when a new scope is generated or when the raise
# statement is found inside a TryFinally.
- ignores = (astroid.ExceptHandler, astroid.FunctionDef,)
+ ignores = (astroid.ExceptHandler, astroid.FunctionDef)
while current and not isinstance(current.parent, ignores):
current = current.parent
expected = (astroid.ExceptHandler,)
if not current or not isinstance(current.parent, expected):
- self.add_message('misplaced-bare-raise', node=node)
+ self.add_message("misplaced-bare-raise", node=node)
def _check_bad_exception_context(self, node):
"""Verify that the exception context is properly set.
@@ -313,12 +343,11 @@ class ExceptionsChecker(checkers.BaseChecker):
if isinstance(cause, astroid.Const):
if cause.value is not None:
- self.add_message('bad-exception-context',
- node=node)
- elif (not isinstance(cause, astroid.ClassDef) and
- not utils.inherit_from_std_ex(cause)):
- self.add_message('bad-exception-context',
- node=node)
+ self.add_message("bad-exception-context", node=node)
+ elif not isinstance(cause, astroid.ClassDef) and not utils.inherit_from_std_ex(
+ cause
+ ):
+ self.add_message("bad-exception-context", node=node)
def _check_catching_non_exception(self, handler, exc, part):
if isinstance(exc, astroid.Tuple):
@@ -327,50 +356,59 @@ class ExceptionsChecker(checkers.BaseChecker):
if any(node is astroid.Uninferable for node in inferred):
# Don't emit if we don't know every component.
return
- if all(node and (utils.inherit_from_std_ex(node) or
- not utils.has_known_bases(node))
- for node in inferred):
+ if all(
+ node
+ and (utils.inherit_from_std_ex(node) or not utils.has_known_bases(node))
+ for node in inferred
+ ):
return
if not isinstance(exc, astroid.ClassDef):
# Don't emit the warning if the infered stmt
# is None, but the exception handler is something else,
# maybe it was redefined.
- if (isinstance(exc, astroid.Const) and
- exc.value is None):
- if ((isinstance(handler.type, astroid.Const) and
- handler.type.value is None) or
- handler.type.parent_of(exc)):
+ if isinstance(exc, astroid.Const) and exc.value is None:
+ if (
+ isinstance(handler.type, astroid.Const)
+ and handler.type.value is None
+ ) or handler.type.parent_of(exc):
# If the exception handler catches None or
# the exception component, which is None, is
# defined by the entire exception handler, then
# emit a warning.
- self.add_message('catching-non-exception',
- node=handler.type,
- args=(part.as_string(), ))
+ self.add_message(
+ "catching-non-exception",
+ node=handler.type,
+ args=(part.as_string(),),
+ )
else:
- self.add_message('catching-non-exception',
- node=handler.type,
- args=(part.as_string(), ))
+ self.add_message(
+ "catching-non-exception",
+ node=handler.type,
+ args=(part.as_string(),),
+ )
return
- if (not utils.inherit_from_std_ex(exc) and
- exc.name not in self._builtin_exceptions):
+ if (
+ not utils.inherit_from_std_ex(exc)
+ and exc.name not in self._builtin_exceptions
+ ):
if utils.has_known_bases(exc):
- self.add_message('catching-non-exception',
- node=handler.type,
- args=(exc.name, ))
+ self.add_message(
+ "catching-non-exception", node=handler.type, args=(exc.name,)
+ )
def _check_try_except_raise(self, node):
-
def gather_exceptions_from_handler(handler):
exceptions = []
if handler.type:
exceptions_in_handler = utils.safe_infer(handler.type)
if isinstance(exceptions_in_handler, astroid.Tuple):
- exceptions = {exception
- for exception in exceptions_in_handler.elts
- if isinstance(exception, astroid.Name)}
+ exceptions = {
+ exception
+ for exception in exceptions_in_handler.elts
+ if isinstance(exception, astroid.Name)
+ }
elif exceptions_in_handler:
exceptions = [exceptions_in_handler]
return exceptions
@@ -391,9 +429,12 @@ class ExceptionsChecker(checkers.BaseChecker):
for exc_in_current_handler in excs_in_current_handler:
inferred_current = utils.safe_infer(exc_in_current_handler)
- if any(utils.is_subclass_of(utils.safe_infer(exc_in_bare_handler),
- inferred_current)
- for exc_in_bare_handler in excs_in_bare_handler):
+ if any(
+ utils.is_subclass_of(
+ utils.safe_infer(exc_in_bare_handler), inferred_current
+ )
+ for exc_in_bare_handler in excs_in_bare_handler
+ ):
bare_raise = False
break
@@ -405,11 +446,17 @@ class ExceptionsChecker(checkers.BaseChecker):
handler_having_bare_raise = handler
excs_in_bare_handler = gather_exceptions_from_handler(handler)
if bare_raise:
- self.add_message('try-except-raise', node=handler_having_bare_raise)
-
- @utils.check_messages('bare-except', 'broad-except', 'try-except-raise',
- 'binary-op-exception', 'bad-except-order',
- 'catching-non-exception', 'duplicate-except')
+ self.add_message("try-except-raise", node=handler_having_bare_raise)
+
+ @utils.check_messages(
+ "bare-except",
+ "broad-except",
+ "try-except-raise",
+ "binary-op-exception",
+ "bad-except-order",
+ "catching-non-exception",
+ "duplicate-except",
+ )
def visit_tryexcept(self, node):
"""check for empty except"""
self._check_try_except_raise(node)
@@ -418,17 +465,18 @@ class ExceptionsChecker(checkers.BaseChecker):
for index, handler in enumerate(node.handlers):
if handler.type is None:
if not _is_raising(handler.body):
- self.add_message('bare-except', node=handler)
+ self.add_message("bare-except", node=handler)
# check if an "except:" is followed by some other
# except
if index < (nb_handlers - 1):
- msg = 'empty except clause should always appear last'
- self.add_message('bad-except-order', node=node, args=msg)
+ msg = "empty except clause should always appear last"
+ self.add_message("bad-except-order", node=node, args=msg)
elif isinstance(handler.type, astroid.BoolOp):
- self.add_message('binary-op-exception',
- node=handler, args=handler.type.op)
+ self.add_message(
+ "binary-op-exception", node=handler, args=handler.type.op
+ )
else:
try:
excs = list(_annotated_unpack_infer(handler.type))
@@ -438,8 +486,9 @@ class ExceptionsChecker(checkers.BaseChecker):
for part, exc in excs:
if exc is astroid.Uninferable:
continue
- if (isinstance(exc, astroid.Instance)
- and utils.inherit_from_std_ex(exc)):
+ if isinstance(exc, astroid.Instance) and utils.inherit_from_std_ex(
+ exc
+ ):
# pylint: disable=protected-access
exc = exc._proxied
@@ -448,24 +497,34 @@ class ExceptionsChecker(checkers.BaseChecker):
if not isinstance(exc, astroid.ClassDef):
continue
- exc_ancestors = [anc for anc in exc.ancestors()
- if isinstance(anc, astroid.ClassDef)]
+ exc_ancestors = [
+ anc
+ for anc in exc.ancestors()
+ if isinstance(anc, astroid.ClassDef)
+ ]
for previous_exc in exceptions_classes:
if previous_exc in exc_ancestors:
- msg = '%s is an ancestor class of %s' % (
- previous_exc.name, exc.name)
- self.add_message('bad-except-order',
- node=handler.type, args=msg)
- if (exc.name in self.config.overgeneral_exceptions
- and exc.root().name == utils.EXCEPTIONS_MODULE
- and not _is_raising(handler.body)):
- self.add_message('broad-except',
- args=exc.name, node=handler.type)
+ msg = "%s is an ancestor class of %s" % (
+ previous_exc.name,
+ exc.name,
+ )
+ self.add_message(
+ "bad-except-order", node=handler.type, args=msg
+ )
+ if (
+ exc.name in self.config.overgeneral_exceptions
+ and exc.root().name == utils.EXCEPTIONS_MODULE
+ and not _is_raising(handler.body)
+ ):
+ self.add_message(
+ "broad-except", args=exc.name, node=handler.type
+ )
if exc in exceptions_classes:
- self.add_message('duplicate-except',
- args=exc.name, node=handler.type)
+ self.add_message(
+ "duplicate-except", args=exc.name, node=handler.type
+ )
exceptions_classes += [exc for _, exc in excs]
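(Another illustrative aside, not part of the diff: the handler patterns that the ExceptionsChecker reports map onto code like the following. The function names are invented; the names in the comments are the symbolic message ids from the MSGS table in this file.)

def read_config(path):
    try:
        return open(path).read()
    except Exception:    # broad-except: catching the overgeneral Exception
        return None
    except OSError:      # bad-except-order: Exception is an ancestor class of OSError
        return ""


def to_int(text):
    try:
        return int(text)
    except ValueError or TypeError:    # binary-op-exception: only ValueError is caught;
        return None                    # write "except (ValueError, TypeError):" instead


def close_quietly(stream):
    try:
        stream.close()
    except:    # bare-except: no exception type specified
        pass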
diff --git a/pylint/checkers/format.py b/pylint/checkers/format.py
index 2a03c6176..4ed779054 100644
--- a/pylint/checkers/format.py
+++ b/pylint/checkers/format.py
@@ -55,18 +55,58 @@ from pylint.checkers import BaseTokenChecker
from pylint.checkers.utils import check_messages
from pylint.utils import WarningScope, OPTION_RGX
-_ASYNC_TOKEN = 'async'
-_CONTINUATION_BLOCK_OPENERS = ['elif', 'except', 'for', 'if', 'while', 'def', 'class', 'with']
-_KEYWORD_TOKENS = ['assert', 'del', 'elif', 'except', 'for', 'if', 'in', 'not',
- 'raise', 'return', 'while', 'yield', 'with']
+_ASYNC_TOKEN = "async"
+_CONTINUATION_BLOCK_OPENERS = [
+ "elif",
+ "except",
+ "for",
+ "if",
+ "while",
+ "def",
+ "class",
+ "with",
+]
+_KEYWORD_TOKENS = [
+ "assert",
+ "del",
+ "elif",
+ "except",
+ "for",
+ "if",
+ "in",
+ "not",
+ "raise",
+ "return",
+ "while",
+ "yield",
+ "with",
+]
if sys.version_info < (3, 0):
- _KEYWORD_TOKENS.append('print')
-
-_SPACED_OPERATORS = ['==', '<', '>', '!=', '<>', '<=', '>=',
- '+=', '-=', '*=', '**=', '/=', '//=', '&=', '|=', '^=',
- '%=', '>>=', '<<=']
-_OPENING_BRACKETS = ['(', '[', '{']
-_CLOSING_BRACKETS = [')', ']', '}']
+ _KEYWORD_TOKENS.append("print")
+
+_SPACED_OPERATORS = [
+ "==",
+ "<",
+ ">",
+ "!=",
+ "<>",
+ "<=",
+ ">=",
+ "+=",
+ "-=",
+ "*=",
+ "**=",
+ "/=",
+ "//=",
+ "&=",
+ "|=",
+ "^=",
+ "%=",
+ ">>=",
+ "<<=",
+]
+_OPENING_BRACKETS = ["(", "[", "{"]
+_CLOSING_BRACKETS = [")", "]", "}"]
_TAB_LENGTH = 8
_EOL = frozenset([tokenize.NEWLINE, tokenize.NL, tokenize.COMMENT])
@@ -78,66 +118,94 @@ _MUST_NOT = 1
_IGNORE = 2
# Whitespace checking config constants
-_DICT_SEPARATOR = 'dict-separator'
-_TRAILING_COMMA = 'trailing-comma'
-_EMPTY_LINE = 'empty-line'
+_DICT_SEPARATOR = "dict-separator"
+_TRAILING_COMMA = "trailing-comma"
+_EMPTY_LINE = "empty-line"
_NO_SPACE_CHECK_CHOICES = [_TRAILING_COMMA, _DICT_SEPARATOR, _EMPTY_LINE]
_DEFAULT_NO_SPACE_CHECK_CHOICES = [_TRAILING_COMMA, _DICT_SEPARATOR]
MSGS = {
- 'C0301': ('Line too long (%s/%s)',
- 'line-too-long',
- 'Used when a line is longer than a given number of characters.'),
- 'C0302': ('Too many lines in module (%s/%s)', # was W0302
- 'too-many-lines',
- 'Used when a module has too many lines, reducing its readability.'
- ),
- 'C0303': ('Trailing whitespace',
- 'trailing-whitespace',
- 'Used when there is whitespace between the end of a line and the '
- 'newline.'),
- 'C0304': ('Final newline missing',
- 'missing-final-newline',
- 'Used when the last line in a file is missing a newline.'),
- 'C0305': ('Trailing newlines',
- 'trailing-newlines',
- 'Used when there are trailing blank lines in a file.'),
- 'W0311': ('Bad indentation. Found %s %s, expected %s',
- 'bad-indentation',
- 'Used when an unexpected number of indentation\'s tabulations or '
- 'spaces has been found.'),
- 'C0330': ('Wrong %s indentation%s%s.\n%s%s',
- 'bad-continuation',
- 'TODO'),
- 'W0312': ('Found indentation with %ss instead of %ss',
- 'mixed-indentation',
- 'Used when there are some mixed tabs and spaces in a module.'),
- 'W0301': ('Unnecessary semicolon', # was W0106
- 'unnecessary-semicolon',
- 'Used when a statement is ended by a semi-colon (";"), which '
- 'isn\'t necessary (that\'s python, not C ;).'),
- 'C0321': ('More than one statement on a single line',
- 'multiple-statements',
- 'Used when more than on statement are found on the same line.',
- {'scope': WarningScope.NODE}),
- 'C0325': ('Unnecessary parens after %r keyword',
- 'superfluous-parens',
- 'Used when a single item in parentheses follows an if, for, or '
- 'other keyword.'),
- 'C0326': ('%s space %s %s %s\n%s',
- 'bad-whitespace',
- ('Used when a wrong number of spaces is used around an operator, '
- 'bracket or block opener.'),
- {'old_names': [('C0323', 'no-space-after-operator'),
- ('C0324', 'no-space-after-comma'),
- ('C0322', 'no-space-before-operator')]}),
- 'C0327': ('Mixed line endings LF and CRLF',
- 'mixed-line-endings',
- 'Used when there are mixed (LF and CRLF) newline signs in a file.'),
- 'C0328': ('Unexpected line ending format. There is \'%s\' while it should be \'%s\'.',
- 'unexpected-line-ending-format',
- 'Used when there is different newline than expected.'),
- }
+ "C0301": (
+ "Line too long (%s/%s)",
+ "line-too-long",
+ "Used when a line is longer than a given number of characters.",
+ ),
+ "C0302": (
+ "Too many lines in module (%s/%s)", # was W0302
+ "too-many-lines",
+ "Used when a module has too many lines, reducing its readability.",
+ ),
+ "C0303": (
+ "Trailing whitespace",
+ "trailing-whitespace",
+ "Used when there is whitespace between the end of a line and the " "newline.",
+ ),
+ "C0304": (
+ "Final newline missing",
+ "missing-final-newline",
+ "Used when the last line in a file is missing a newline.",
+ ),
+ "C0305": (
+ "Trailing newlines",
+ "trailing-newlines",
+ "Used when there are trailing blank lines in a file.",
+ ),
+ "W0311": (
+ "Bad indentation. Found %s %s, expected %s",
+ "bad-indentation",
+ "Used when an unexpected number of indentation's tabulations or "
+ "spaces has been found.",
+ ),
+ "C0330": ("Wrong %s indentation%s%s.\n%s%s", "bad-continuation", "TODO"),
+ "W0312": (
+ "Found indentation with %ss instead of %ss",
+ "mixed-indentation",
+ "Used when there are some mixed tabs and spaces in a module.",
+ ),
+ "W0301": (
+ "Unnecessary semicolon", # was W0106
+ "unnecessary-semicolon",
+ 'Used when a statement is ended by a semi-colon (";"), which '
+ "isn't necessary (that's python, not C ;).",
+ ),
+ "C0321": (
+ "More than one statement on a single line",
+ "multiple-statements",
+ "Used when more than on statement are found on the same line.",
+ {"scope": WarningScope.NODE},
+ ),
+ "C0325": (
+ "Unnecessary parens after %r keyword",
+ "superfluous-parens",
+ "Used when a single item in parentheses follows an if, for, or "
+ "other keyword.",
+ ),
+ "C0326": (
+ "%s space %s %s %s\n%s",
+ "bad-whitespace",
+ (
+ "Used when a wrong number of spaces is used around an operator, "
+ "bracket or block opener."
+ ),
+ {
+ "old_names": [
+ ("C0323", "no-space-after-operator"),
+ ("C0324", "no-space-after-comma"),
+ ("C0322", "no-space-before-operator"),
+ ]
+ },
+ ),
+ "C0327": (
+ "Mixed line endings LF and CRLF",
+ "mixed-line-endings",
+ "Used when there are mixed (LF and CRLF) newline signs in a file.",
+ ),
+ "C0328": (
+ "Unexpected line ending format. There is '%s' while it should be '%s'.",
+ "unexpected-line-ending-format",
+ "Used when there is different newline than expected.",
+ ),
+}
def _underline_token(token):
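(One more illustrative aside, not part of the diff: with the FormatChecker's default configuration, lines like the following trigger some of the messages defined in the MSGS table just reformatted; the variable names are made up for the example.)

x=1                        # bad-whitespace: exactly one space required around "="
y = 2;                     # unnecessary-semicolon
sorted([3, 1], key = len)  # bad-whitespace: no space allowed around keyword argument assignment
if (x): y = 3              # superfluous-parens after 'if' and multiple-statements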
@@ -145,9 +213,9 @@ def _underline_token(token):
offset = token[2][1]
referenced_line = token[4]
# If the referenced line does not end with a newline char, fix it
- if referenced_line[-1] != '\n':
- referenced_line += '\n'
- return referenced_line + (' ' * offset) + ('^' * length)
+ if referenced_line[-1] != "\n":
+ referenced_line += "\n"
+ return referenced_line + (" " * offset) + ("^" * length)
def _column_distance(token1, token2):
@@ -161,22 +229,28 @@ def _column_distance(token1, token2):
def _last_token_on_line_is(tokens, line_end, token):
- return (line_end > 0 and tokens.token(line_end-1) == token or
- line_end > 1 and tokens.token(line_end-2) == token
- and tokens.type(line_end-1) == tokenize.COMMENT)
+ return (
+ line_end > 0
+ and tokens.token(line_end - 1) == token
+ or line_end > 1
+ and tokens.token(line_end - 2) == token
+ and tokens.type(line_end - 1) == tokenize.COMMENT
+ )
def _token_followed_by_eol(tokens, position):
- return (tokens.type(position+1) == tokenize.NL or
- tokens.type(position+1) == tokenize.COMMENT and
- tokens.type(position+2) == tokenize.NL)
+ return (
+ tokens.type(position + 1) == tokenize.NL
+ or tokens.type(position + 1) == tokenize.COMMENT
+ and tokens.type(position + 2) == tokenize.NL
+ )
def _get_indent_string(line):
"""Return the indention string of the given line."""
- result = ''
+ result = ""
for char in line:
- if char in ' \t':
+ if char in " \t":
result += char
else:
break
@@ -187,9 +261,9 @@ def _get_indent_length(line):
"""Return the length of the indentation on the given token's line."""
result = 0
for char in line:
- if char == ' ':
+ if char == " ":
result += 1
- elif char == '\t':
+ elif char == "\t":
result += _TAB_LENGTH
else:
break
@@ -199,40 +273,47 @@ def _get_indent_length(line):
def _get_indent_hint_line(bar_positions, bad_position):
"""Return a line with |s for each of the positions in the given lists."""
if not bar_positions:
- return ('', '')
+ return ("", "")
# TODO tabs should not be replaced by some random (8) number of spaces
bar_positions = [_get_indent_length(indent) for indent in bar_positions]
bad_position = _get_indent_length(bad_position)
- delta_message = ''
- markers = [(pos, '|') for pos in bar_positions]
+ delta_message = ""
+ markers = [(pos, "|") for pos in bar_positions]
if len(markers) == 1:
# if we have only one marker we'll provide an extra hint on how to fix
expected_position = markers[0][0]
delta = abs(expected_position - bad_position)
- direction = 'add' if expected_position > bad_position else 'remove'
+ direction = "add" if expected_position > bad_position else "remove"
delta_message = _CONTINUATION_HINT_MESSAGE % (
- direction, delta, 's' if delta > 1 else '')
- markers.append((bad_position, '^'))
+ direction,
+ delta,
+ "s" if delta > 1 else "",
+ )
+ markers.append((bad_position, "^"))
markers.sort()
- line = [' '] * (markers[-1][0] + 1)
+ line = [" "] * (markers[-1][0] + 1)
for position, marker in markers:
line[position] = marker
- return (''.join(line), delta_message)
+ return ("".join(line), delta_message)
class _ContinuedIndent:
- __slots__ = ('valid_outdent_strings',
- 'valid_continuation_strings',
- 'context_type',
- 'token',
- 'position')
-
- def __init__(self,
- context_type,
- token,
- position,
- valid_outdent_strings,
- valid_continuation_strings):
+ __slots__ = (
+ "valid_outdent_strings",
+ "valid_continuation_strings",
+ "context_type",
+ "token",
+ "position",
+ )
+
+ def __init__(
+ self,
+ context_type,
+ token,
+ position,
+ valid_outdent_strings,
+ valid_continuation_strings,
+ ):
self.valid_outdent_strings = valid_outdent_strings
self.valid_continuation_strings = valid_continuation_strings
self.context_type = context_type
@@ -242,28 +323,28 @@ class _ContinuedIndent:
# The contexts for hanging indents.
# A hanging indented dictionary value after :
-HANGING_DICT_VALUE = 'dict-value'
+HANGING_DICT_VALUE = "dict-value"
# Hanging indentation in an expression.
-HANGING = 'hanging'
+HANGING = "hanging"
# Hanging indentation in a block header.
-HANGING_BLOCK = 'hanging-block'
+HANGING_BLOCK = "hanging-block"
# Continued indentation inside an expression.
-CONTINUED = 'continued'
+CONTINUED = "continued"
# Continued indentation in a block header.
-CONTINUED_BLOCK = 'continued-block'
+CONTINUED_BLOCK = "continued-block"
-SINGLE_LINE = 'single'
-WITH_BODY = 'multi'
+SINGLE_LINE = "single"
+WITH_BODY = "multi"
_CONTINUATION_MSG_PARTS = {
- HANGING_DICT_VALUE: ('hanging', ' in dict value'),
- HANGING: ('hanging', ''),
- HANGING_BLOCK: ('hanging', ' before block'),
- CONTINUED: ('continued', ''),
- CONTINUED_BLOCK: ('continued', ' before block'),
+ HANGING_DICT_VALUE: ("hanging", " in dict value"),
+ HANGING: ("hanging", ""),
+ HANGING_BLOCK: ("hanging", " before block"),
+ CONTINUED: ("continued", ""),
+ CONTINUED_BLOCK: ("continued", " before block"),
}
-_CONTINUATION_HINT_MESSAGE = ' (%s %d space%s)' # Ex: (remove 2 spaces)
+_CONTINUATION_HINT_MESSAGE = " (%s %d space%s)" # Ex: (remove 2 spaces)
def _Indentations(*args):
@@ -318,7 +399,7 @@ class TokenWrapper:
is "<TAB><TAB> "
"""
line_indent = self.line_indent(idx)
- return line_indent + ' ' * (self.start_col(idx) - len(line_indent))
+ return line_indent + " " * (self.start_col(idx) - len(line_indent))
class ContinuedLineState:
@@ -338,7 +419,7 @@ class ContinuedLineState:
@property
def _block_indent_string(self):
- return self._config.indent_string.replace('\\t', '\t')
+ return self._config.indent_string.replace("\\t", "\t")
@property
def _continuation_string(self):
@@ -356,9 +437,9 @@ class ContinuedLineState:
check_token_position = pos
if self._tokens.token(pos) == _ASYNC_TOKEN:
check_token_position += 1
- self._is_block_opener = self._tokens.token(
- check_token_position
- ) in _CONTINUATION_BLOCK_OPENERS
+ self._is_block_opener = (
+ self._tokens.token(check_token_position) in _CONTINUATION_BLOCK_OPENERS
+ )
self._line_start = pos
def next_physical_line(self):
@@ -383,7 +464,10 @@ class ContinuedLineState:
# The closing brace on a dict or the 'for' in a dict comprehension may
# reset two indent levels because the dict value is ended implicitly
stack_top = -1
- if self._tokens.token(idx) in ('}', 'for') and self._cont_stack[-1].token == ':':
+ if (
+ self._tokens.token(idx) in ("}", "for")
+ and self._cont_stack[-1].token == ":"
+ ):
stack_top = -2
indent = self._cont_stack[stack_top]
if self._tokens.token(idx) in _CLOSING_BRACKETS:
@@ -404,15 +488,21 @@ class ContinuedLineState:
:rtype: _ContinuedIndent
"""
indentation = self._tokens.line_indent(position)
- if self._is_block_opener and self._continuation_string == self._block_indent_string:
+ if (
+ self._is_block_opener
+ and self._continuation_string == self._block_indent_string
+ ):
return _ContinuedIndent(
HANGING_BLOCK,
bracket,
position,
_Indentations(indentation + self._continuation_string, indentation),
- _BeforeBlockIndentations(indentation + self._continuation_string,
- indentation + self._continuation_string * 2))
- if bracket == ':':
+ _BeforeBlockIndentations(
+ indentation + self._continuation_string,
+ indentation + self._continuation_string * 2,
+ ),
+ )
+ if bracket == ":":
# If the dict key was on the same line as the open brace, the new
# correct indent should be relative to the key instead of the
# current indent level
@@ -426,34 +516,42 @@ class ContinuedLineState:
# 'c'
# }
# is handled by the special-casing for hanging continued string indents.
- return _ContinuedIndent(HANGING_DICT_VALUE, bracket, position, paren_align, next_align)
+ return _ContinuedIndent(
+ HANGING_DICT_VALUE, bracket, position, paren_align, next_align
+ )
return _ContinuedIndent(
HANGING,
bracket,
position,
_Indentations(indentation, indentation + self._continuation_string),
- _Indentations(indentation + self._continuation_string))
+ _Indentations(indentation + self._continuation_string),
+ )
def _continuation_inside_bracket(self, bracket, position):
"""Extracts indentation information for a continued indent."""
indentation = self._tokens.line_indent(position)
token_indent = self._tokens.token_indent(position)
next_token_indent = self._tokens.token_indent(position + 1)
- if self._is_block_opener and next_token_indent == indentation + self._block_indent_string:
+ if (
+ self._is_block_opener
+ and next_token_indent == indentation + self._block_indent_string
+ ):
return _ContinuedIndent(
CONTINUED_BLOCK,
bracket,
position,
_Indentations(token_indent),
_BeforeBlockIndentations(
- next_token_indent,
- next_token_indent + self._continuation_string))
+ next_token_indent, next_token_indent + self._continuation_string
+ ),
+ )
return _ContinuedIndent(
CONTINUED,
bracket,
position,
_Indentations(token_indent, next_token_indent),
- _Indentations(next_token_indent))
+ _Indentations(next_token_indent),
+ )
def pop_token(self):
self._cont_stack.pop()
@@ -473,11 +571,9 @@ class ContinuedLineState:
:param int position: The position of the token in the stream.
"""
if _token_followed_by_eol(self._tokens, position):
- self._cont_stack.append(
- self._hanging_indent_after_bracket(token, position))
+ self._cont_stack.append(self._hanging_indent_after_bracket(token, position))
else:
- self._cont_stack.append(
- self._continuation_inside_bracket(token, position))
+ self._cont_stack.append(self._continuation_inside_bracket(token, position))
class FormatChecker(BaseTokenChecker):
@@ -490,58 +586,118 @@ class FormatChecker(BaseTokenChecker):
__implements__ = (ITokenChecker, IAstroidChecker, IRawChecker)
# configuration section name
- name = 'format'
+ name = "format"
# messages
msgs = MSGS
# configuration options
# for available dict keys/values see the optik parser 'add_option' method
- options = (('max-line-length',
- {'default': 100, 'type': "int", 'metavar': '<int>',
- 'help': 'Maximum number of characters on a single line.'}),
- ('ignore-long-lines',
- {'type': 'regexp', 'metavar': '<regexp>',
- 'default': r'^\s*(# )?<?https?://\S+>?$',
- 'help': ('Regexp for a line that is allowed to be longer than '
- 'the limit.')}),
- ('single-line-if-stmt',
- {'default': False, 'type': 'yn', 'metavar': '<y_or_n>',
- 'help': ('Allow the body of an if to be on the same '
- 'line as the test if there is no else.')}),
- ('single-line-class-stmt',
- {'default': False, 'type': 'yn', 'metavar': '<y_or_n>',
- 'help': ('Allow the body of a class to be on the same '
- 'line as the declaration if body contains '
- 'single statement.')}),
- ('no-space-check',
- {'default': ','.join(_DEFAULT_NO_SPACE_CHECK_CHOICES),
- 'metavar': ','.join(_NO_SPACE_CHECK_CHOICES),
- 'type': 'multiple_choice',
- 'choices': _NO_SPACE_CHECK_CHOICES,
- 'help': ('List of optional constructs for which whitespace '
- 'checking is disabled. '
- '`'+ _DICT_SEPARATOR + '` is used to allow tabulation '
- 'in dicts, etc.: {1 : 1,\\n222: 2}. '
- '`'+ _TRAILING_COMMA + '` allows a space between comma '
- 'and closing bracket: (a, ). '
- '`'+ _EMPTY_LINE + '` allows space-only lines.')}),
- ('max-module-lines',
- {'default': 1000, 'type': 'int', 'metavar': '<int>',
- 'help': 'Maximum number of lines in a module.'}
- ),
- ('indent-string',
- {'default': ' ', 'type': "non_empty_string", 'metavar': '<string>',
- 'help': 'String used as indentation unit. This is usually '
- '" " (4 spaces) or "\\t" (1 tab).'}),
- ('indent-after-paren',
- {'type': 'int', 'metavar': '<int>', 'default': 4,
- 'help': 'Number of spaces of indent required inside a hanging '
- 'or continued line.'}),
- ('expected-line-ending-format',
- {'type': 'choice', 'metavar': '<empty or LF or CRLF>', 'default': '',
- 'choices': ['', 'LF', 'CRLF'],
- 'help': ('Expected format of line ending, '
- 'e.g. empty (any line ending), LF or CRLF.')}),
- )
+ options = (
+ (
+ "max-line-length",
+ {
+ "default": 100,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "Maximum number of characters on a single line.",
+ },
+ ),
+ (
+ "ignore-long-lines",
+ {
+ "type": "regexp",
+ "metavar": "<regexp>",
+ "default": r"^\s*(# )?<?https?://\S+>?$",
+ "help": (
+ "Regexp for a line that is allowed to be longer than " "the limit."
+ ),
+ },
+ ),
+ (
+ "single-line-if-stmt",
+ {
+ "default": False,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": (
+ "Allow the body of an if to be on the same "
+ "line as the test if there is no else."
+ ),
+ },
+ ),
+ (
+ "single-line-class-stmt",
+ {
+ "default": False,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": (
+ "Allow the body of a class to be on the same "
+ "line as the declaration if body contains "
+ "single statement."
+ ),
+ },
+ ),
+ (
+ "no-space-check",
+ {
+ "default": ",".join(_DEFAULT_NO_SPACE_CHECK_CHOICES),
+ "metavar": ",".join(_NO_SPACE_CHECK_CHOICES),
+ "type": "multiple_choice",
+ "choices": _NO_SPACE_CHECK_CHOICES,
+ "help": (
+ "List of optional constructs for which whitespace "
+ "checking is disabled. "
+ "`" + _DICT_SEPARATOR + "` is used to allow tabulation "
+ "in dicts, etc.: {1 : 1,\\n222: 2}. "
+ "`" + _TRAILING_COMMA + "` allows a space between comma "
+ "and closing bracket: (a, ). "
+ "`" + _EMPTY_LINE + "` allows space-only lines."
+ ),
+ },
+ ),
+ (
+ "max-module-lines",
+ {
+ "default": 1000,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "Maximum number of lines in a module.",
+ },
+ ),
+ (
+ "indent-string",
+ {
+ "default": " ",
+ "type": "non_empty_string",
+ "metavar": "<string>",
+ "help": "String used as indentation unit. This is usually "
+ '" " (4 spaces) or "\\t" (1 tab).',
+ },
+ ),
+ (
+ "indent-after-paren",
+ {
+ "type": "int",
+ "metavar": "<int>",
+ "default": 4,
+ "help": "Number of spaces of indent required inside a hanging "
+ "or continued line.",
+ },
+ ),
+ (
+ "expected-line-ending-format",
+ {
+ "type": "choice",
+ "metavar": "<empty or LF or CRLF>",
+ "default": "",
+ "choices": ["", "LF", "CRLF"],
+ "help": (
+ "Expected format of line ending, "
+ "e.g. empty (any line ending), LF or CRLF."
+ ),
+ },
+ ),
+ )
def __init__(self, linter=None):
BaseTokenChecker.__init__(self, linter)
@@ -559,19 +715,19 @@ class FormatChecker(BaseTokenChecker):
def new_line(self, tokens, line_end, line_start):
"""a new line has been encountered, process it if necessary"""
- if _last_token_on_line_is(tokens, line_end, ';'):
- self.add_message('unnecessary-semicolon', line=tokens.start_line(line_end))
+ if _last_token_on_line_is(tokens, line_end, ";"):
+ self.add_message("unnecessary-semicolon", line=tokens.start_line(line_end))
line_num = tokens.start_line(line_start)
line = tokens.line(line_start)
if tokens.type(line_start) not in _JUNK_TOKENS:
- self._lines[line_num] = line.split('\n')[0]
+ self._lines[line_num] = line.split("\n")[0]
self.check_lines(line, line_num)
def process_module(self, module):
self._keywords_with_parens = set()
- if 'print_function' in module.future_imports:
- self._keywords_with_parens.add('print')
+ if "print_function" in module.future_imports:
+ self._keywords_with_parens.add("print")
def _check_keyword_parentheses(self, tokens, start):
"""Check that there are not unnecessary parens after a keyword.
@@ -585,9 +741,9 @@ class FormatChecker(BaseTokenChecker):
start: int; the position of the keyword in the token list.
"""
# If the next token is not a paren, we're fine.
- if self._inside_brackets(':') and tokens[start][1] == 'for':
+ if self._inside_brackets(":") and tokens[start][1] == "for":
self._pop_token()
- if tokens[start+1][1] != '(':
+ if tokens[start + 1][1] != "(":
return
found_and_or = False
@@ -602,73 +758,77 @@ class FormatChecker(BaseTokenChecker):
if token[0] == tokenize.NL:
return
- if token[1] == '(':
+ if token[1] == "(":
depth += 1
- elif token[1] == ')':
+ elif token[1] == ")":
depth -= 1
if depth:
continue
# ')' can't happen after if (foo), since it would be a syntax error.
- if (tokens[i+1][1] in (':', ')', ']', '}', 'in') or
- tokens[i+1][0] in (tokenize.NEWLINE,
- tokenize.ENDMARKER,
- tokenize.COMMENT)):
+ if tokens[i + 1][1] in (":", ")", "]", "}", "in") or tokens[i + 1][
+ 0
+ ] in (tokenize.NEWLINE, tokenize.ENDMARKER, tokenize.COMMENT):
# The empty tuple () is always accepted.
if i == start + 2:
return
- if keyword_token == 'not':
+ if keyword_token == "not":
if not found_and_or:
- self.add_message('superfluous-parens', line=line_num,
- args=keyword_token)
- elif keyword_token in ('return', 'yield'):
- self.add_message('superfluous-parens', line=line_num,
- args=keyword_token)
+ self.add_message(
+ "superfluous-parens", line=line_num, args=keyword_token
+ )
+ elif keyword_token in ("return", "yield"):
+ self.add_message(
+ "superfluous-parens", line=line_num, args=keyword_token
+ )
elif keyword_token not in self._keywords_with_parens:
if not found_and_or:
- self.add_message('superfluous-parens', line=line_num,
- args=keyword_token)
+ self.add_message(
+ "superfluous-parens", line=line_num, args=keyword_token
+ )
return
elif depth == 1:
# This is a tuple, which is always acceptable.
- if token[1] == ',':
+ if token[1] == ",":
return
# 'and' and 'or' are the only boolean operators with lower precedence
# than 'not', so parens are only required when they are found.
- if token[1] in ('and', 'or'):
+ if token[1] in ("and", "or"):
found_and_or = True
# A yield inside an expression must always be in parentheses,
# quit early without error.
- elif token[1] == 'yield':
+ elif token[1] == "yield":
return
# A generator expression always has a 'for' token in it, and
# the 'for' token is only legal inside parens when it is in a
# generator expression. The parens are necessary here, so bail
# without an error.
- elif token[1] == 'for':
+ elif token[1] == "for":
return
def _opening_bracket(self, tokens, i):
self._push_token(tokens[i][1], i)
# Special case: ignore slices
- if tokens[i][1] == '[' and tokens[i+1][1] == ':':
+ if tokens[i][1] == "[" and tokens[i + 1][1] == ":":
return
- if (i > 0 and (tokens[i-1][0] == tokenize.NAME and
- not (keyword.iskeyword(tokens[i-1][1]))
- or tokens[i-1][1] in _CLOSING_BRACKETS)):
+ if i > 0 and (
+ tokens[i - 1][0] == tokenize.NAME
+ and not (keyword.iskeyword(tokens[i - 1][1]))
+ or tokens[i - 1][1] in _CLOSING_BRACKETS
+ ):
self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT))
else:
self._check_space(tokens, i, (_IGNORE, _MUST_NOT))
def _closing_bracket(self, tokens, i):
- if self._inside_brackets(':'):
+ if self._inside_brackets(":"):
self._pop_token()
self._pop_token()
# Special case: ignore slices
- if tokens[i-1][1] == ':' and tokens[i][1] == ']':
+ if tokens[i - 1][1] == ":" and tokens[i][1] == "]":
return
policy_before = _MUST_NOT
- if tokens[i][1] in _CLOSING_BRACKETS and tokens[i-1][1] == ',':
+ if tokens[i][1] in _CLOSING_BRACKETS and tokens[i - 1][1] == ",":
if _TRAILING_COMMA in self.config.no_space_check:
policy_before = _IGNORE
@@ -676,25 +836,25 @@ class FormatChecker(BaseTokenChecker):
def _has_valid_type_annotation(self, tokens, i):
"""Extended check of PEP-484 type hint presence"""
- if not self._inside_brackets('('):
+ if not self._inside_brackets("("):
return False
# token_info
# type string start end line
# 0 1 2 3 4
bracket_level = 0
- for token in tokens[i-1::-1]:
- if token[1] == ':':
+ for token in tokens[i - 1 :: -1]:
+ if token[1] == ":":
return True
- if token[1] == '(':
+ if token[1] == "(":
return False
- if token[1] == ']':
+ if token[1] == "]":
bracket_level += 1
- elif token[1] == '[':
+ elif token[1] == "[":
bracket_level -= 1
- elif token[1] == ',':
+ elif token[1] == ",":
if not bracket_level:
return False
- elif token[1] in ('.', '...'):
+ elif token[1] in (".", "..."):
continue
elif token[0] not in (tokenize.NAME, tokenize.STRING, tokenize.NL):
return False
@@ -704,38 +864,37 @@ class FormatChecker(BaseTokenChecker):
"""Check the spacing of a single equals sign."""
if self._has_valid_type_annotation(tokens, i):
self._check_space(tokens, i, (_MUST, _MUST))
- elif self._inside_brackets('(') or self._inside_brackets('lambda'):
+ elif self._inside_brackets("(") or self._inside_brackets("lambda"):
self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT))
else:
self._check_space(tokens, i, (_MUST, _MUST))
- def _open_lambda(self, tokens, i): # pylint:disable=unused-argument
- self._push_token('lambda', i)
+ def _open_lambda(self, tokens, i): # pylint:disable=unused-argument
+ self._push_token("lambda", i)
def _handle_colon(self, tokens, i):
# Special case: ignore slices
- if self._inside_brackets('['):
+ if self._inside_brackets("["):
return
- if (self._inside_brackets('{') and
- _DICT_SEPARATOR in self.config.no_space_check):
+ if self._inside_brackets("{") and _DICT_SEPARATOR in self.config.no_space_check:
policy = (_IGNORE, _IGNORE)
else:
policy = (_MUST_NOT, _MUST)
self._check_space(tokens, i, policy)
- if self._inside_brackets('lambda'):
+ if self._inside_brackets("lambda"):
self._pop_token()
- elif self._inside_brackets('{'):
- self._push_token(':', i)
+ elif self._inside_brackets("{"):
+ self._push_token(":", i)
def _handle_comma(self, tokens, i):
# Only require a following whitespace if this is
# not a hanging comma before a closing bracket.
- if tokens[i+1][1] in _CLOSING_BRACKETS:
+ if tokens[i + 1][1] in _CLOSING_BRACKETS:
self._check_space(tokens, i, (_MUST_NOT, _IGNORE))
else:
self._check_space(tokens, i, (_MUST_NOT, _MUST))
- if self._inside_brackets(':'):
+ if self._inside_brackets(":"):
self._pop_token()
def _check_surrounded_by_space(self, tokens, i):
@@ -745,25 +904,25 @@ class FormatChecker(BaseTokenChecker):
def _check_space(self, tokens, i, policies):
def _policy_string(policy):
if policy == _MUST:
- return 'Exactly one', 'required'
- return 'No', 'allowed'
+ return "Exactly one", "required"
+ return "No", "allowed"
def _name_construct(token):
- if token[1] == ',':
- return 'comma'
- if token[1] == ':':
- return ':'
- if token[1] in '()[]{}':
- return 'bracket'
- if token[1] in ('<', '>', '<=', '>=', '!=', '=='):
- return 'comparison'
- if self._inside_brackets('('):
- return 'keyword argument assignment'
- return 'assignment'
+ if token[1] == ",":
+ return "comma"
+ if token[1] == ":":
+ return ":"
+ if token[1] in "()[]{}":
+ return "bracket"
+ if token[1] in ("<", ">", "<=", ">=", "!=", "=="):
+ return "comparison"
+ if self._inside_brackets("("):
+ return "keyword argument assignment"
+ return "assignment"
good_space = [True, True]
token = tokens[i]
- pairs = [(tokens[i-1], token), (token, tokens[i+1])]
+ pairs = [(tokens[i - 1], token), (token, tokens[i + 1])]
for other_idx, (policy, token_pair) in enumerate(zip(policies, pairs)):
if token_pair[other_idx][0] in _EOL or policy == _IGNORE:
@@ -772,47 +931,41 @@ class FormatChecker(BaseTokenChecker):
distance = _column_distance(*token_pair)
if distance is None:
continue
- good_space[other_idx] = (
- (policy == _MUST and distance == 1) or
- (policy == _MUST_NOT and distance == 0))
+ good_space[other_idx] = (policy == _MUST and distance == 1) or (
+ policy == _MUST_NOT and distance == 0
+ )
warnings = []
if not any(good_space) and policies[0] == policies[1]:
- warnings.append((policies[0], 'around'))
+ warnings.append((policies[0], "around"))
else:
- for ok, policy, position in zip(good_space, policies, ('before', 'after')):
+ for ok, policy, position in zip(good_space, policies, ("before", "after")):
if not ok:
warnings.append((policy, position))
for policy, position in warnings:
construct = _name_construct(token)
count, state = _policy_string(policy)
- self.add_message('bad-whitespace', line=token[2][0],
- args=(count, state, position, construct,
- _underline_token(token)), col_offset=token[2][1])
+ self.add_message(
+ "bad-whitespace",
+ line=token[2][0],
+ args=(count, state, position, construct, _underline_token(token)),
+ col_offset=token[2][1],
+ )
def _inside_brackets(self, left):
return self._bracket_stack[-1] == left
def _prepare_token_dispatcher(self):
raw = [
- (_KEYWORD_TOKENS,
- self._check_keyword_parentheses),
-
+ (_KEYWORD_TOKENS, self._check_keyword_parentheses),
(_OPENING_BRACKETS, self._opening_bracket),
-
(_CLOSING_BRACKETS, self._closing_bracket),
-
- (['='], self._check_equals_spacing),
-
+ (["="], self._check_equals_spacing),
(_SPACED_OPERATORS, self._check_surrounded_by_space),
-
- ([','], self._handle_comma),
-
- ([':'], self._handle_colon),
-
- (['lambda'], self._open_lambda),
-
- ]
+ ([","], self._handle_comma),
+ ([":"], self._handle_colon),
+ (["lambda"], self._open_lambda),
+ ]
dispatch = {}
for tokens, handler in raw:
@@ -847,9 +1000,9 @@ class FormatChecker(BaseTokenChecker):
# docstring, the line member of the INDENT token does not contain
# the full line; therefore we check the next token on the line.
if tok_type == tokenize.INDENT:
- self.new_line(TokenWrapper(tokens), idx-1, idx+1)
+ self.new_line(TokenWrapper(tokens), idx - 1, idx + 1)
else:
- self.new_line(TokenWrapper(tokens), idx-1, idx)
+ self.new_line(TokenWrapper(tokens), idx - 1, idx)
if tok_type == tokenize.NEWLINE:
# a program statement, or ENDMARKER, will eventually follow,
@@ -863,8 +1016,8 @@ class FormatChecker(BaseTokenChecker):
self._check_line_ending(token, line_num)
elif tok_type == tokenize.INDENT:
check_equal = False
- self.check_indent_level(token, indents[-1]+1, line_num)
- indents.append(indents[-1]+1)
+ self.check_indent_level(token, indents[-1] + 1, line_num)
+ indents.append(indents[-1] + 1)
elif tok_type == tokenize.DEDENT:
# there's nothing we need to check here! what's important is
# that when the run of DEDENTs ends, the indentation of the
@@ -874,9 +1027,9 @@ class FormatChecker(BaseTokenChecker):
if len(indents) > 1:
del indents[-1]
elif tok_type == tokenize.NL:
- if not line.strip('\r\n'):
+ if not line.strip("\r\n"):
last_blank_line_num = line_num
- self._check_continued_indentation(TokenWrapper(tokens), idx+1)
+ self._check_continued_indentation(TokenWrapper(tokens), idx + 1)
self._current_line.next_physical_line()
elif tok_type not in (tokenize.COMMENT, tokenize.ENCODING):
self._current_line.handle_line_start(idx)
@@ -890,8 +1043,8 @@ class FormatChecker(BaseTokenChecker):
check_equal = False
self.check_indent_level(line, indents[-1], line_num)
- if tok_type == tokenize.NUMBER and token.endswith('l'):
- self.add_message('lowercase-l-suffix', line=line_num)
+ if tok_type == tokenize.NUMBER and token.endswith("l"):
+ self.add_message("lowercase-l-suffix", line=line_num)
try:
handler = token_handlers[token]
@@ -904,24 +1057,25 @@ class FormatChecker(BaseTokenChecker):
if line_num > self.config.max_module_lines:
# Get the line where the too-many-lines (or its message id)
# was disabled or default to 1.
- symbol = self.linter.msgs_store.get_message_definition('too-many-lines')
- names = (symbol.msgid, 'too-many-lines')
- line = next(filter(None,
- map(self.linter._pragma_lineno.get, names)), 1)
- self.add_message('too-many-lines',
- args=(line_num, self.config.max_module_lines),
- line=line)
+ symbol = self.linter.msgs_store.get_message_definition("too-many-lines")
+ names = (symbol.msgid, "too-many-lines")
+ line = next(filter(None, map(self.linter._pragma_lineno.get, names)), 1)
+ self.add_message(
+ "too-many-lines",
+ args=(line_num, self.config.max_module_lines),
+ line=line,
+ )
# See if there are any trailing lines. Do not complain about empty
# files like __init__.py markers.
if line_num == last_blank_line_num and line_num > 0:
- self.add_message('trailing-newlines', line=line_num)
+ self.add_message("trailing-newlines", line=line_num)
def _check_line_ending(self, line_ending, line_num):
# check if line endings are mixed
if self._last_line_ending is not None:
if line_ending != self._last_line_ending:
- self.add_message('mixed-line-endings', line=line_num)
+ self.add_message("mixed-line-endings", line=line_num)
self._last_line_ending = line_ending
@@ -930,13 +1084,16 @@ class FormatChecker(BaseTokenChecker):
if expected:
# reduce multiple \n\n\n\n to one \n
line_ending = reduce(lambda x, y: x + y if x != y else x, line_ending, "")
- line_ending = 'LF' if line_ending == '\n' else 'CRLF'
+ line_ending = "LF" if line_ending == "\n" else "CRLF"
if line_ending != expected:
- self.add_message('unexpected-line-ending-format', args=(line_ending, expected),
- line=line_num)
+ self.add_message(
+ "unexpected-line-ending-format",
+ args=(line_ending, expected),
+ line=line_num,
+ )
def _process_retained_warnings(self, tokens, current_pos):
- single_line_block_stmt = not _last_token_on_line_is(tokens, current_pos, ':')
+ single_line_block_stmt = not _last_token_on_line_is(tokens, current_pos, ":")
for indent_pos, state, indentations in self._current_line.retained_warnings:
block_type = indentations[tokens.token_indent(indent_pos)]
@@ -948,8 +1105,10 @@ class FormatChecker(BaseTokenChecker):
def _check_continued_indentation(self, tokens, next_idx):
def same_token_around_nl(token_type):
- return (tokens.type(next_idx) == token_type and
- tokens.type(next_idx-2) == token_type)
+ return (
+ tokens.type(next_idx) == token_type
+ and tokens.type(next_idx - 2) == token_type
+ )
# Do not issue any warnings if the next line is empty.
if not self._current_line.has_content or tokens.type(next_idx) == tokenize.NL:
@@ -959,33 +1118,48 @@ class FormatChecker(BaseTokenChecker):
# Special handling for hanging comments and strings. If the last line ended
# with a comment (string) and the new line contains only a comment, the line
# may also be indented to the start of the previous token.
- if same_token_around_nl(tokenize.COMMENT) or same_token_around_nl(tokenize.STRING):
- valid_indentations[tokens.token_indent(next_idx-2)] = True
+ if same_token_around_nl(tokenize.COMMENT) or same_token_around_nl(
+ tokenize.STRING
+ ):
+ valid_indentations[tokens.token_indent(next_idx - 2)] = True
# We can only decide if the indentation of a continued line before opening
# a new block is valid once we know of the body of the block is on the
# same line as the block opener. Since the token processing is single-pass,
# emitting those warnings is delayed until the block opener is processed.
- if (state.context_type in (HANGING_BLOCK, CONTINUED_BLOCK)
- and tokens.token_indent(next_idx) in valid_indentations):
+ if (
+ state.context_type in (HANGING_BLOCK, CONTINUED_BLOCK)
+ and tokens.token_indent(next_idx) in valid_indentations
+ ):
self._current_line.add_block_warning(next_idx, state, valid_indentations)
elif tokens.token_indent(next_idx) not in valid_indentations:
length_indentation = len(tokens.token_indent(next_idx))
- if not any(length_indentation == 2 * len(indentation)
- for indentation in valid_indentations):
- self._add_continuation_message(state, valid_indentations, tokens, next_idx)
+ if not any(
+ length_indentation == 2 * len(indentation)
+ for indentation in valid_indentations
+ ):
+ self._add_continuation_message(
+ state, valid_indentations, tokens, next_idx
+ )
def _add_continuation_message(self, state, indentations, tokens, position):
readable_type, readable_position = _CONTINUATION_MSG_PARTS[state.context_type]
- hint_line, delta_message = _get_indent_hint_line(indentations,
- tokens.token_indent(position))
+ hint_line, delta_message = _get_indent_hint_line(
+ indentations, tokens.token_indent(position)
+ )
self.add_message(
- 'bad-continuation',
+ "bad-continuation",
line=tokens.start_line(position),
- args=(readable_type, readable_position, delta_message,
- tokens.line(position), hint_line))
-
- @check_messages('multiple-statements')
+ args=(
+ readable_type,
+ readable_position,
+ delta_message,
+ tokens.line(position),
+ hint_line,
+ ),
+ )
+
+ @check_messages("multiple-statements")
def visit_default(self, node):
"""check the node line number and check it if not yet done"""
if not node.is_statement:
@@ -1000,8 +1174,10 @@ class FormatChecker(BaseTokenChecker):
# is not directly represented in the AST. We infer it
# by taking the last line of the body and adding 1, which
# should be the line of finally:
- if (isinstance(node.parent, nodes.TryFinally)
- and node in node.parent.finalbody):
+ if (
+ isinstance(node.parent, nodes.TryFinally)
+ and node in node.parent.finalbody
+ ):
prev_line = node.parent.body[0].tolineno + 1
else:
prev_line = node.parent.statement().fromlineno
@@ -1023,7 +1199,7 @@ class FormatChecker(BaseTokenChecker):
try:
lines.append(self._lines[line].rstrip())
except KeyError:
- lines.append('')
+ lines.append("")
def _check_multi_statement_line(self, node, line):
"""Check for lines containing multiple statements."""
@@ -1033,16 +1209,23 @@ class FormatChecker(BaseTokenChecker):
return
# For try... except... finally..., the two nodes
# appear to be on the same line due to how the AST is built.
- if (isinstance(node, nodes.TryExcept) and
- isinstance(node.parent, nodes.TryFinally)):
+ if isinstance(node, nodes.TryExcept) and isinstance(
+ node.parent, nodes.TryFinally
+ ):
return
- if (isinstance(node.parent, nodes.If) and not node.parent.orelse
- and self.config.single_line_if_stmt):
+ if (
+ isinstance(node.parent, nodes.If)
+ and not node.parent.orelse
+ and self.config.single_line_if_stmt
+ ):
return
- if (isinstance(node.parent, nodes.ClassDef) and len(node.parent.body) == 1
- and self.config.single_line_class_stmt):
+ if (
+ isinstance(node.parent, nodes.ClassDef)
+ and len(node.parent.body) == 1
+ and self.config.single_line_class_stmt
+ ):
return
- self.add_message('multiple-statements', node=node)
+ self.add_message("multiple-statements", node=node)
self._visited_lines[line] = 2
def check_lines(self, lines, i):
@@ -1052,43 +1235,45 @@ class FormatChecker(BaseTokenChecker):
ignore_long_line = self.config.ignore_long_lines
def check_line(line, i):
- if not line.endswith('\n'):
- self.add_message('missing-final-newline', line=i)
+ if not line.endswith("\n"):
+ self.add_message("missing-final-newline", line=i)
else:
# exclude \f (formfeed) from the rstrip
- stripped_line = line.rstrip('\t\n\r\v ')
+ stripped_line = line.rstrip("\t\n\r\v ")
if not stripped_line and _EMPTY_LINE in self.config.no_space_check:
# allow empty lines
pass
- elif line[len(stripped_line):] not in ('\n', '\r\n'):
- self.add_message('trailing-whitespace', line=i,
- col_offset=len(stripped_line))
+ elif line[len(stripped_line) :] not in ("\n", "\r\n"):
+ self.add_message(
+ "trailing-whitespace", line=i, col_offset=len(stripped_line)
+ )
# Don't count excess whitespace in the line length.
line = stripped_line
mobj = OPTION_RGX.search(line)
- if mobj and '=' in line:
- front_of_equal, _, back_of_equal = mobj.group(1).partition('=')
- if front_of_equal.strip() == 'disable':
- if 'line-too-long' in {_msg_id.strip()
- for _msg_id in back_of_equal.split(',')}:
+ if mobj and "=" in line:
+ front_of_equal, _, back_of_equal = mobj.group(1).partition("=")
+ if front_of_equal.strip() == "disable":
+ if "line-too-long" in {
+ _msg_id.strip() for _msg_id in back_of_equal.split(",")
+ }:
return None
- line = line.rsplit('#', 1)[0].rstrip()
+ line = line.rsplit("#", 1)[0].rstrip()
if len(line) > max_chars and not ignore_long_line.search(line):
- self.add_message('line-too-long', line=i, args=(len(line), max_chars))
+ self.add_message("line-too-long", line=i, args=(len(line), max_chars))
return i + 1
unsplit_ends = {
- '\v',
- '\x0b',
- '\f',
- '\x0c',
- '\x1c',
- '\x1d',
- '\x1e',
- '\x85',
- '\u2028',
- '\u2029'
+ "\v",
+ "\x0b",
+ "\f",
+ "\x0c",
+ "\x1c",
+ "\x1d",
+ "\x1e",
+ "\x85",
+ "\u2028",
+ "\u2029",
}
unsplit = []
for line in lines.splitlines(True):
@@ -1098,7 +1283,7 @@ class FormatChecker(BaseTokenChecker):
if unsplit:
unsplit.append(line)
- line = ''.join(unsplit)
+ line = "".join(unsplit)
unsplit = []
i = check_line(line, i)
@@ -1106,37 +1291,39 @@ class FormatChecker(BaseTokenChecker):
break
if unsplit:
- check_line(''.join(unsplit), i)
+ check_line("".join(unsplit), i)
def check_indent_level(self, string, expected, line_num):
"""return the indent level of the string
"""
indent = self.config.indent_string
- if indent == '\\t': # \t is not interpreted in the configuration file
- indent = '\t'
+ if indent == "\\t": # \t is not interpreted in the configuration file
+ indent = "\t"
level = 0
unit_size = len(indent)
while string[:unit_size] == indent:
string = string[unit_size:]
level += 1
- suppl = ''
- while string and string[0] in ' \t':
+ suppl = ""
+ while string and string[0] in " \t":
if string[0] != indent[0]:
- if string[0] == '\t':
- args = ('tab', 'space')
+ if string[0] == "\t":
+ args = ("tab", "space")
else:
- args = ('space', 'tab')
- self.add_message('mixed-indentation', args=args, line=line_num)
+ args = ("space", "tab")
+ self.add_message("mixed-indentation", args=args, line=line_num)
return level
suppl += string[0]
string = string[1:]
if level != expected or suppl:
- i_type = 'spaces'
- if indent[0] == '\t':
- i_type = 'tabs'
- self.add_message('bad-indentation', line=line_num,
- args=(level * unit_size + len(suppl), i_type,
- expected * unit_size))
+ i_type = "spaces"
+ if indent[0] == "\t":
+ i_type = "tabs"
+ self.add_message(
+ "bad-indentation",
+ line=line_num,
+ args=(level * unit_size + len(suppl), i_type, expected * unit_size),
+ )
return None
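
Note: the format.py hunks above are formatting-only; check_indent_level keeps the same indent-measuring loop before and after the rewrite. A minimal standalone sketch of that loop, with names local to this example (the real method also compares the computed level against the expected one and reports bad-indentation):

def measure_indent(line, indent_unit="    "):
    # Count whole indent units at the start of the line.
    level = 0
    while line.startswith(indent_unit):
        line = line[len(indent_unit) :]
        level += 1
    # Leftover whitespace: a character that differs from the unit's first
    # character is what the checker reports as mixed-indentation.
    suppl = ""
    while line and line[0] in " \t":
        if line[0] != indent_unit[0]:
            return level, suppl, "mixed-indentation"
        suppl += line[0]
        line = line[1:]
    return level, suppl, None

print(measure_indent("        x = 1"))  # (2, '', None)
print(measure_indent("   \tx = 1"))     # (0, '   ', 'mixed-indentation')
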
diff --git a/pylint/checkers/imports.py b/pylint/checkers/imports.py
index 33710deaa..77ff79a60 100644
--- a/pylint/checkers/imports.py
+++ b/pylint/checkers/imports.py
@@ -37,7 +37,7 @@ import copy
import astroid
from astroid import are_exclusive, decorators
-from astroid.modutils import (get_module_part, is_standard_module)
+from astroid.modutils import get_module_part, is_standard_module
import isort
from pylint.interfaces import IAstroidChecker
@@ -47,7 +47,7 @@ from pylint.checkers import BaseChecker
from pylint.checkers.utils import (
check_messages,
node_ignores_exception,
- is_from_fallback_block
+ is_from_fallback_block,
)
from pylint.graph import get_cycles, DotBackend
from pylint.reporters.ureports.nodes import VerbatimText, Paragraph
@@ -61,8 +61,8 @@ def _qualified_names(modname):
returns
['pylint', 'pylint.checkers', 'pylint.checkers.ImportsChecker']
"""
- names = modname.split('.')
- return ['.'.join(names[0:i+1]) for i in range(len(names))]
+ names = modname.split(".")
+ return [".".join(names[0 : i + 1]) for i in range(len(names))]
def _get_import_name(importnode, modname):
@@ -78,14 +78,15 @@ def _get_import_name(importnode, modname):
root = importnode.root()
if isinstance(root, astroid.Module):
modname = root.relative_to_absolute_name(
- modname, level=importnode.level)
+ modname, level=importnode.level
+ )
return modname
def _get_first_import(node, context, name, base, level, alias):
"""return the node where [base.]<name> is imported or None if not found
"""
- fullname = '%s.%s' % (base, name) if base else name
+ fullname = "%s.%s" % (base, name) if base else name
first = None
found = False
@@ -101,10 +102,14 @@ def _get_first_import(node, context, name, base, level, alias):
elif isinstance(first, astroid.ImportFrom):
if level == first.level:
for imported_name, imported_alias in first.names:
- if fullname == '%s.%s' % (first.modname, imported_name):
+ if fullname == "%s.%s" % (first.modname, imported_name):
found = True
break
- if name != '*' and name == imported_name and not (alias or imported_alias):
+ if (
+ name != "*"
+ and name == imported_name
+ and not (alias or imported_alias)
+ ):
found = True
break
if found:
@@ -121,6 +126,7 @@ def _ignore_import_failure(node, modname, ignored_modules):
return node_ignores_exception(node, ImportError)
+
# utilities to represents import dependencies as tree and dot graph ###########
@@ -131,7 +137,7 @@ def _make_tree_defs(mod_files_list):
tree_defs = {}
for mod, files in mod_files_list:
node = (tree_defs, ())
- for prefix in mod.split('.'):
+ for prefix in mod.split("."):
node = node[0].setdefault(prefix, [{}, []])
node[1] += files
return tree_defs
@@ -143,28 +149,28 @@ def _repr_tree_defs(data, indent_str=None):
nodes = data.items()
for i, (mod, (sub, files)) in enumerate(sorted(nodes, key=lambda x: x[0])):
if not files:
- files = ''
+ files = ""
else:
- files = '(%s)' % ','.join(sorted(files))
+ files = "(%s)" % ",".join(sorted(files))
if indent_str is None:
- lines.append('%s %s' % (mod, files))
- sub_indent_str = ' '
+ lines.append("%s %s" % (mod, files))
+ sub_indent_str = " "
else:
- lines.append(r'%s\-%s %s' % (indent_str, mod, files))
- if i == len(nodes)-1:
- sub_indent_str = '%s ' % indent_str
+ lines.append(r"%s\-%s %s" % (indent_str, mod, files))
+ if i == len(nodes) - 1:
+ sub_indent_str = "%s " % indent_str
else:
- sub_indent_str = '%s| ' % indent_str
+ sub_indent_str = "%s| " % indent_str
if sub:
lines.append(_repr_tree_defs(sub, sub_indent_str))
- return '\n'.join(lines)
+ return "\n".join(lines)
def _dependencies_graph(filename, dep_info):
"""write dependencies as a dot (graphviz) file
"""
done = {}
- printer = DotBackend(filename[:-4], rankdir='LR')
+ printer = DotBackend(filename[:-4], rankdir="LR")
printer.emit('URL="." node[shape="box"]')
for modname, dependencies in sorted(dep_info.items()):
done[modname] = 1
@@ -184,73 +190,93 @@ def _make_graph(filename, dep_info, sect, gtype):
report's section
"""
_dependencies_graph(filename, dep_info)
- sect.append(Paragraph('%simports graph has been written to %s'
- % (gtype, filename)))
+ sect.append(Paragraph("%simports graph has been written to %s" % (gtype, filename)))
# the import checker itself ###################################################
MSGS = {
- 'E0401': ('Unable to import %s',
- 'import-error',
- 'Used when pylint has been unable to import a module.',
- {'old_names': [('F0401', 'import-error')]}),
- 'E0402': ('Attempted relative import beyond top-level package',
- 'relative-beyond-top-level',
- 'Used when a relative import tries to access too many levels '
- 'in the current package.'),
- 'R0401': ('Cyclic import (%s)',
- 'cyclic-import',
- 'Used when a cyclic import between two or more modules is '
- 'detected.'),
-
- 'W0401': ('Wildcard import %s',
- 'wildcard-import',
- 'Used when `from module import *` is detected.'),
- 'W0402': ('Uses of a deprecated module %r',
- 'deprecated-module',
- 'Used a module marked as deprecated is imported.'),
- 'W0403': ('Relative import %r, should be %r',
- 'relative-import',
- 'Used when an import relative to the package directory is '
- 'detected.',
- {'maxversion': (3, 0)}),
- 'W0404': ('Reimport %r (imported line %s)',
- 'reimported',
- 'Used when a module is reimported multiple times.'),
- 'W0406': ('Module import itself',
- 'import-self',
- 'Used when a module is importing itself.'),
-
- 'W0410': ('__future__ import is not the first non docstring statement',
- 'misplaced-future',
- 'Python 2.5 and greater require __future__ import to be the '
- 'first non docstring statement in the module.'),
-
- 'C0410': ('Multiple imports on one line (%s)',
- 'multiple-imports',
- 'Used when import statement importing multiple modules is '
- 'detected.'),
- 'C0411': ('%s should be placed before %s',
- 'wrong-import-order',
- 'Used when PEP8 import order is not respected (standard imports '
- 'first, then third-party libraries, then local imports)'),
- 'C0412': ('Imports from package %s are not grouped',
- 'ungrouped-imports',
- 'Used when imports are not grouped by packages'),
- 'C0413': ('Import "%s" should be placed at the top of the '
- 'module',
- 'wrong-import-position',
- 'Used when code and imports are mixed'),
- 'C0414': ('Import alias does not rename original package',
- 'useless-import-alias',
- 'Used when an import alias is same as original package.'
- 'e.g using import numpy as numpy instead of import numpy as np'),
- }
+ "E0401": (
+ "Unable to import %s",
+ "import-error",
+ "Used when pylint has been unable to import a module.",
+ {"old_names": [("F0401", "import-error")]},
+ ),
+ "E0402": (
+ "Attempted relative import beyond top-level package",
+ "relative-beyond-top-level",
+ "Used when a relative import tries to access too many levels "
+ "in the current package.",
+ ),
+ "R0401": (
+ "Cyclic import (%s)",
+ "cyclic-import",
+ "Used when a cyclic import between two or more modules is " "detected.",
+ ),
+ "W0401": (
+ "Wildcard import %s",
+ "wildcard-import",
+ "Used when `from module import *` is detected.",
+ ),
+ "W0402": (
+ "Uses of a deprecated module %r",
+ "deprecated-module",
+ "Used a module marked as deprecated is imported.",
+ ),
+ "W0403": (
+ "Relative import %r, should be %r",
+ "relative-import",
+ "Used when an import relative to the package directory is " "detected.",
+ {"maxversion": (3, 0)},
+ ),
+ "W0404": (
+ "Reimport %r (imported line %s)",
+ "reimported",
+ "Used when a module is reimported multiple times.",
+ ),
+ "W0406": (
+ "Module import itself",
+ "import-self",
+ "Used when a module is importing itself.",
+ ),
+ "W0410": (
+ "__future__ import is not the first non docstring statement",
+ "misplaced-future",
+ "Python 2.5 and greater require __future__ import to be the "
+ "first non docstring statement in the module.",
+ ),
+ "C0410": (
+ "Multiple imports on one line (%s)",
+ "multiple-imports",
+ "Used when import statement importing multiple modules is " "detected.",
+ ),
+ "C0411": (
+ "%s should be placed before %s",
+ "wrong-import-order",
+ "Used when PEP8 import order is not respected (standard imports "
+ "first, then third-party libraries, then local imports)",
+ ),
+ "C0412": (
+ "Imports from package %s are not grouped",
+ "ungrouped-imports",
+ "Used when imports are not grouped by packages",
+ ),
+ "C0413": (
+ 'Import "%s" should be placed at the top of the ' "module",
+ "wrong-import-position",
+ "Used when code and imports are mixed",
+ ),
+ "C0414": (
+ "Import alias does not rename original package",
+ "useless-import-alias",
+ "Used when an import alias is same as original package."
+ "e.g using import numpy as numpy instead of import numpy as np",
+ ),
+}
DEFAULT_STANDARD_LIBRARY = ()
-DEFAULT_KNOWN_THIRD_PARTY = ('enchant',)
+DEFAULT_KNOWN_THIRD_PARTY = ("enchant",)
class ImportsChecker(BaseChecker):
@@ -263,73 +289,99 @@ class ImportsChecker(BaseChecker):
__implements__ = IAstroidChecker
- name = 'imports'
+ name = "imports"
msgs = MSGS
priority = -2
if sys.version_info < (3, 5):
- deprecated_modules = ('optparse', )
+ deprecated_modules = ("optparse",)
else:
- deprecated_modules = ('optparse', 'tkinter.tix')
- options = (('deprecated-modules',
- {'default' : deprecated_modules,
- 'type' : 'csv',
- 'metavar' : '<modules>',
- 'help' : 'Deprecated modules which should not be used,'
- ' separated by a comma.'}
- ),
- ('import-graph',
- {'default' : '',
- 'type' : 'string',
- 'metavar' : '<file.dot>',
- 'help' : 'Create a graph of every (i.e. internal and'
- ' external) dependencies in the given file'
- ' (report RP0402 must not be disabled).'}
- ),
- ('ext-import-graph',
- {'default' : '',
- 'type' : 'string',
- 'metavar' : '<file.dot>',
- 'help' : 'Create a graph of external dependencies in the'
- ' given file (report RP0402 must not be disabled).'}
- ),
- ('int-import-graph',
- {'default' : '',
- 'type' : 'string',
- 'metavar' : '<file.dot>',
- 'help' : 'Create a graph of internal dependencies in the'
- ' given file (report RP0402 must not be disabled).'}
- ),
- ('known-standard-library',
- {'default': DEFAULT_STANDARD_LIBRARY,
- 'type': 'csv',
- 'metavar': '<modules>',
- 'help': 'Force import order to recognize a module as part of '
- 'the standard compatibility libraries.'}
- ),
- ('known-third-party',
- {'default': DEFAULT_KNOWN_THIRD_PARTY,
- 'type': 'csv',
- 'metavar': '<modules>',
- 'help': 'Force import order to recognize a module as part of '
- 'a third party library.'}
- ),
- ('analyse-fallback-blocks',
- {'default': False,
- 'type': 'yn',
- 'metavar': '<y_or_n>',
- 'help': 'Analyse import fallback blocks. This can be used to '
- 'support both Python 2 and 3 compatible code, which '
- 'means that the block might have code that exists '
- 'only in one or another interpreter, leading to false '
- 'positives when analysed.'},
- ),
- ('allow-wildcard-with-all',
- {'default': False,
- 'type': 'yn',
- 'metavar': '<y_or_n>',
- 'help': 'Allow wildcard imports from modules that define __all__.'}),
- )
+ deprecated_modules = ("optparse", "tkinter.tix")
+ options = (
+ (
+ "deprecated-modules",
+ {
+ "default": deprecated_modules,
+ "type": "csv",
+ "metavar": "<modules>",
+ "help": "Deprecated modules which should not be used,"
+ " separated by a comma.",
+ },
+ ),
+ (
+ "import-graph",
+ {
+ "default": "",
+ "type": "string",
+ "metavar": "<file.dot>",
+ "help": "Create a graph of every (i.e. internal and"
+ " external) dependencies in the given file"
+ " (report RP0402 must not be disabled).",
+ },
+ ),
+ (
+ "ext-import-graph",
+ {
+ "default": "",
+ "type": "string",
+ "metavar": "<file.dot>",
+ "help": "Create a graph of external dependencies in the"
+ " given file (report RP0402 must not be disabled).",
+ },
+ ),
+ (
+ "int-import-graph",
+ {
+ "default": "",
+ "type": "string",
+ "metavar": "<file.dot>",
+ "help": "Create a graph of internal dependencies in the"
+ " given file (report RP0402 must not be disabled).",
+ },
+ ),
+ (
+ "known-standard-library",
+ {
+ "default": DEFAULT_STANDARD_LIBRARY,
+ "type": "csv",
+ "metavar": "<modules>",
+ "help": "Force import order to recognize a module as part of "
+ "the standard compatibility libraries.",
+ },
+ ),
+ (
+ "known-third-party",
+ {
+ "default": DEFAULT_KNOWN_THIRD_PARTY,
+ "type": "csv",
+ "metavar": "<modules>",
+ "help": "Force import order to recognize a module as part of "
+ "a third party library.",
+ },
+ ),
+ (
+ "analyse-fallback-blocks",
+ {
+ "default": False,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": "Analyse import fallback blocks. This can be used to "
+ "support both Python 2 and 3 compatible code, which "
+ "means that the block might have code that exists "
+ "only in one or another interpreter, leading to false "
+ "positives when analysed.",
+ },
+ ),
+ (
+ "allow-wildcard-with-all",
+ {
+ "default": False,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": "Allow wildcard imports from modules that define __all__.",
+ },
+ ),
+ )
def __init__(self, linter=None):
BaseChecker.__init__(self, linter)
@@ -337,11 +389,10 @@ class ImportsChecker(BaseChecker):
self.import_graph = None
self._imports_stack = []
self._first_non_import_node = None
- self.reports = (('RP0401', 'External dependencies',
- self._report_external_dependencies),
- ('RP0402', 'Modules dependencies graph',
- self._report_dependencies_graph),
- )
+ self.reports = (
+ ("RP0401", "External dependencies", self._report_external_dependencies),
+ ("RP0402", "Modules dependencies graph", self._report_dependencies_graph),
+ )
self._site_packages = self._compute_site_packages()
@@ -351,7 +402,7 @@ class ImportsChecker(BaseChecker):
return os.path.normcase(os.path.abspath(path))
paths = set()
- real_prefix = getattr(sys, 'real_prefix', None)
+ real_prefix = getattr(sys, "real_prefix", None)
for prefix in filter(None, (real_prefix, sys.prefix)):
path = sysconfig.get_python_lib(prefix=prefix)
path = _normalized_path(path)
@@ -360,9 +411,13 @@ class ImportsChecker(BaseChecker):
# Handle Debian's derivatives /usr/local.
if os.path.isfile("/etc/debian_version"):
for prefix in filter(None, (real_prefix, sys.prefix)):
- libpython = os.path.join(prefix, "local", "lib",
- "python" + sysconfig.get_python_version(),
- "dist-packages")
+ libpython = os.path.join(
+ prefix,
+ "local",
+ "lib",
+ "python" + sysconfig.get_python_version(),
+ "dist-packages",
+ )
paths.add(libpython)
return paths
@@ -372,10 +427,9 @@ class ImportsChecker(BaseChecker):
self.linter.add_stats(cycles=[])
self.stats = self.linter.stats
self.import_graph = collections.defaultdict(set)
- self._module_pkg = {} # mapping of modules to the pkg they belong in
+ self._module_pkg = {} # mapping of modules to the pkg they belong in
self._excluded_edges = collections.defaultdict(set)
- self._ignored_modules = get_global_option(
- self, 'ignored-modules', default=[])
+ self._ignored_modules = get_global_option(self, "ignored-modules", default=[])
def _import_graph_without_ignored_edges(self):
filtered_graph = copy.deepcopy(self.import_graph)
@@ -385,11 +439,11 @@ class ImportsChecker(BaseChecker):
def close(self):
"""called before visiting project (i.e set of modules)"""
- if self.linter.is_message_enabled('cyclic-import'):
+ if self.linter.is_message_enabled("cyclic-import"):
graph = self._import_graph_without_ignored_edges()
vertices = list(graph)
for cycle in get_cycles(graph, vertices=vertices):
- self.add_message('cyclic-import', args=' -> '.join(cycle))
+ self.add_message("cyclic-import", args=" -> ".join(cycle))
@check_messages(*MSGS)
def visit_import(self, node):
@@ -400,7 +454,7 @@ class ImportsChecker(BaseChecker):
modnode = node.root()
names = [name for name, _ in node.names]
if len(names) >= 2:
- self.add_message('multiple-imports', args=', '.join(names), node=node)
+ self.add_message("multiple-imports", args=", ".join(names), node=node)
for name in names:
self._check_deprecated_module(node, name)
@@ -441,8 +495,8 @@ class ImportsChecker(BaseChecker):
self._check_relative_import(modnode, node, imported_module, basename)
for name, _ in node.names:
- if name != '*':
- self._add_imported_module(node, '%s.%s' % (imported_module.name, name))
+ if name != "*":
+ self._add_imported_module(node, "%s.%s" % (imported_module.name, name))
else:
self._add_imported_module(node, imported_module.name)
@@ -455,12 +509,13 @@ class ImportsChecker(BaseChecker):
met = set()
current_package = None
for import_node, import_name in std_imports + ext_imports + loc_imports:
- if not self.linter.is_message_enabled('ungrouped-imports', import_node.fromlineno):
+ if not self.linter.is_message_enabled(
+ "ungrouped-imports", import_node.fromlineno
+ ):
continue
- package, _, _ = import_name.partition('.')
+ package, _, _ = import_name.partition(".")
if current_package and current_package != package and package in met:
- self.add_message('ungrouped-imports', node=import_node,
- args=package)
+ self.add_message("ungrouped-imports", node=import_node, args=package)
current_package = package
met.add(package)
@@ -468,7 +523,7 @@ class ImportsChecker(BaseChecker):
self._first_non_import_node = None
def compute_first_non_import_node(self, node):
- if not self.linter.is_message_enabled('wrong-import-position', node.fromlineno):
+ if not self.linter.is_message_enabled("wrong-import-position", node.fromlineno):
return
# if the node does not contain an import instruction, and if it is the
# first node of the module, keep a track of it (all the import positions
@@ -480,27 +535,37 @@ class ImportsChecker(BaseChecker):
return
nested_allowed = [astroid.TryExcept, astroid.TryFinally]
is_nested_allowed = [
- allowed for allowed in nested_allowed if isinstance(node, allowed)]
- if is_nested_allowed and \
- any(node.nodes_of_class((astroid.Import, astroid.ImportFrom))):
+ allowed for allowed in nested_allowed if isinstance(node, allowed)
+ ]
+ if is_nested_allowed and any(
+ node.nodes_of_class((astroid.Import, astroid.ImportFrom))
+ ):
return
if isinstance(node, astroid.Assign):
# Add compatibility for module level dunder names
# https://www.python.org/dev/peps/pep-0008/#module-level-dunder-names
valid_targets = [
- isinstance(target, astroid.AssignName) and
- target.name.startswith('__') and target.name.endswith('__')
- for target in node.targets]
+ isinstance(target, astroid.AssignName)
+ and target.name.startswith("__")
+ and target.name.endswith("__")
+ for target in node.targets
+ ]
if all(valid_targets):
return
self._first_non_import_node = node
- visit_tryfinally = visit_tryexcept = visit_assignattr = visit_assign = \
- visit_ifexp = visit_comprehension = visit_expr = visit_if = \
- compute_first_non_import_node
+ visit_tryfinally = (
+ visit_tryexcept
+ ) = (
+ visit_assignattr
+ ) = (
+ visit_assign
+ ) = (
+ visit_ifexp
+ ) = visit_comprehension = visit_expr = visit_if = compute_first_non_import_node
def visit_functiondef(self, node):
- if not self.linter.is_message_enabled('wrong-import-position', node.fromlineno):
+ if not self.linter.is_message_enabled("wrong-import-position", node.fromlineno):
return
# If it is the first non import instruction of the module, record it.
if self._first_non_import_node:
@@ -525,14 +590,16 @@ class ImportsChecker(BaseChecker):
def _check_misplaced_future(self, node):
basename = node.modname
- if basename == '__future__':
+ if basename == "__future__":
# check if this is the first non-docstring statement in the module
prev = node.previous_sibling()
if prev:
# consecutive future statements are possible
- if not (isinstance(prev, astroid.ImportFrom)
- and prev.modname == '__future__'):
- self.add_message('misplaced-future', node=node)
+ if not (
+ isinstance(prev, astroid.ImportFrom)
+ and prev.modname == "__future__"
+ ):
+ self.add_message("misplaced-future", node=node)
return
def _check_same_line_imports(self, node):
@@ -541,8 +608,7 @@ class ImportsChecker(BaseChecker):
counter = collections.Counter(names)
for name, count in counter.items():
if count > 1:
- self.add_message('reimported', node=node,
- args=(name, node.fromlineno))
+ self.add_message("reimported", node=node, args=(name, node.fromlineno))
def _check_position(self, node):
"""Check `node` import or importfrom node position is correct
@@ -552,8 +618,7 @@ class ImportsChecker(BaseChecker):
# if a first non-import instruction has already been encountered,
# it means the import comes after it and therefore is not well placed
if self._first_non_import_node:
- self.add_message('wrong-import-position', node=node,
- args=node.as_string())
+ self.add_message("wrong-import-position", node=node, args=node.as_string())
def _record_import(self, node, importedmodnode):
"""Record the package `node` imports from"""
@@ -562,7 +627,7 @@ class ImportsChecker(BaseChecker):
else:
importedname = importedmodnode.name if importedmodnode else None
if not importedname:
- importedname = node.names[0][0].split('.')[0]
+ importedname = node.names[0][0].split(".")[0]
if isinstance(node, astroid.ImportFrom) and (node.level or 0) >= 1:
# We need the importedname with first point to detect local package
@@ -572,15 +637,14 @@ class ImportsChecker(BaseChecker):
# Example of node:
# 'from . import my_package2'
# the output should be '.my_package2' instead of '{pyfile}'
- importedname = '.' + importedname
+ importedname = "." + importedname
self._imports_stack.append((node, importedname))
@staticmethod
def _is_fallback_import(node, imports):
imports = [import_node for (import_node, _) in imports]
- return any(astroid.are_exclusive(import_node, node)
- for import_node in imports)
+ return any(astroid.are_exclusive(import_node, node) for import_node in imports)
def _check_imports_order(self, _module_node):
"""Checks imports of module `node` are grouped by category
@@ -597,50 +661,70 @@ class ImportsChecker(BaseChecker):
first_party_not_ignored = []
local_not_ignored = []
isort_obj = isort.SortImports(
- file_contents='', known_third_party=self.config.known_third_party,
+ file_contents="",
+ known_third_party=self.config.known_third_party,
known_standard_library=self.config.known_standard_library,
)
for node, modname in self._imports_stack:
- if modname.startswith('.'):
- package = '.' + modname.split('.')[1]
+ if modname.startswith("."):
+ package = "." + modname.split(".")[1]
else:
- package = modname.split('.')[0]
+ package = modname.split(".")[0]
nested = not isinstance(node.parent, astroid.Module)
- ignore_for_import_order = not self.linter.is_message_enabled('wrong-import-order',
- node.fromlineno)
+ ignore_for_import_order = not self.linter.is_message_enabled(
+ "wrong-import-order", node.fromlineno
+ )
import_category = isort_obj.place_module(package)
node_and_package_import = (node, package)
- if import_category in ('FUTURE', 'STDLIB'):
+ if import_category in ("FUTURE", "STDLIB"):
std_imports.append(node_and_package_import)
- wrong_import = (third_party_not_ignored or first_party_not_ignored
- or local_not_ignored)
+ wrong_import = (
+ third_party_not_ignored
+ or first_party_not_ignored
+ or local_not_ignored
+ )
if self._is_fallback_import(node, wrong_import):
continue
if wrong_import and not nested:
- self.add_message('wrong-import-order', node=node,
- args=('standard import "%s"' % node.as_string(),
- '"%s"' % wrong_import[0][0].as_string()))
- elif import_category == 'THIRDPARTY':
+ self.add_message(
+ "wrong-import-order",
+ node=node,
+ args=(
+ 'standard import "%s"' % node.as_string(),
+ '"%s"' % wrong_import[0][0].as_string(),
+ ),
+ )
+ elif import_category == "THIRDPARTY":
third_party_imports.append(node_and_package_import)
external_imports.append(node_and_package_import)
if not nested and not ignore_for_import_order:
third_party_not_ignored.append(node_and_package_import)
wrong_import = first_party_not_ignored or local_not_ignored
if wrong_import and not nested:
- self.add_message('wrong-import-order', node=node,
- args=('third party import "%s"' % node.as_string(),
- '"%s"' % wrong_import[0][0].as_string()))
- elif import_category == 'FIRSTPARTY':
+ self.add_message(
+ "wrong-import-order",
+ node=node,
+ args=(
+ 'third party import "%s"' % node.as_string(),
+ '"%s"' % wrong_import[0][0].as_string(),
+ ),
+ )
+ elif import_category == "FIRSTPARTY":
first_party_imports.append(node_and_package_import)
external_imports.append(node_and_package_import)
if not nested and not ignore_for_import_order:
first_party_not_ignored.append(node_and_package_import)
wrong_import = local_not_ignored
if wrong_import and not nested:
- self.add_message('wrong-import-order', node=node,
- args=('first party import "%s"' % node.as_string(),
- '"%s"' % wrong_import[0][0].as_string()))
- elif import_category == 'LOCALFOLDER':
+ self.add_message(
+ "wrong-import-order",
+ node=node,
+ args=(
+ 'first party import "%s"' % node.as_string(),
+ '"%s"' % wrong_import[0][0].as_string(),
+ ),
+ )
+ elif import_category == "LOCALFOLDER":
local_imports.append((node, package))
if not nested and not ignore_for_import_order:
local_not_ignored.append((node, package))
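
Note: the branches above are only re-wrapped; the ordering they enforce (standard library, then third-party, then first-party and local imports) is unchanged. A small illustration of what C0411 reports, assuming astroid is installed (it is already a pylint dependency):

import astroid  # third-party import placed first
import sys      # C0411: standard import "import sys" should be placed before "import astroid"

# Accepted ordering:
# import sys
# import astroid
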
@@ -653,43 +737,47 @@ class ImportsChecker(BaseChecker):
if _ignore_import_failure(importnode, modname, self._ignored_modules):
return None
- self.add_message('relative-beyond-top-level', node=importnode)
+ self.add_message("relative-beyond-top-level", node=importnode)
except astroid.AstroidSyntaxError as exc:
message = "Cannot import {!r} due to syntax error {!r}".format(
modname, str(exc.error) # pylint: disable=no-member; false positive
)
- self.add_message('syntax-error', line=importnode.lineno, args=message)
+ self.add_message("syntax-error", line=importnode.lineno, args=message)
except astroid.AstroidBuildingException:
- if not self.linter.is_message_enabled('import-error'):
+ if not self.linter.is_message_enabled("import-error"):
return None
if _ignore_import_failure(importnode, modname, self._ignored_modules):
return None
- if not self.config.analyse_fallback_blocks and is_from_fallback_block(importnode):
+ if not self.config.analyse_fallback_blocks and is_from_fallback_block(
+ importnode
+ ):
return None
dotted_modname = _get_import_name(importnode, modname)
- self.add_message('import-error', args=repr(dotted_modname),
- node=importnode)
+ self.add_message("import-error", args=repr(dotted_modname), node=importnode)
- def _check_relative_import(self, modnode, importnode, importedmodnode,
- importedasname):
+ def _check_relative_import(
+ self, modnode, importnode, importedmodnode, importedasname
+ ):
"""check relative import. node is either an Import or From node, modname
the imported module name.
"""
- if not self.linter.is_message_enabled('relative-import'):
+ if not self.linter.is_message_enabled("relative-import"):
return None
if importedmodnode.file is None:
return False # built-in module
if modnode is importedmodnode:
return False # module importing itself
- if modnode.absolute_import_activated() or getattr(importnode, 'level', None):
+ if modnode.absolute_import_activated() or getattr(importnode, "level", None):
return False
if importedmodnode.name != importedasname:
# this must be a relative import...
- self.add_message('relative-import',
- args=(importedasname, importedmodnode.name),
- node=importnode)
+ self.add_message(
+ "relative-import",
+ args=(importedasname, importedmodnode.name),
+ node=importnode,
+ )
return None
return None
@@ -700,37 +788,37 @@ class ImportsChecker(BaseChecker):
base = os.path.splitext(os.path.basename(module_file))[0]
try:
- importedmodname = get_module_part(importedmodname,
- module_file)
+ importedmodname = get_module_part(importedmodname, module_file)
except ImportError:
pass
if context_name == importedmodname:
- self.add_message('import-self', node=node)
+ self.add_message("import-self", node=node)
elif not is_standard_module(importedmodname):
# if this is not a package __init__ module
- if base != '__init__' and context_name not in self._module_pkg:
+ if base != "__init__" and context_name not in self._module_pkg:
# record the module's parent, or the module itself if this is
# a top level module, as the package it belongs to
- self._module_pkg[context_name] = context_name.rsplit('.', 1)[0]
+ self._module_pkg[context_name] = context_name.rsplit(".", 1)[0]
# handle dependencies
- importedmodnames = self.stats['dependencies'].setdefault(
- importedmodname, set())
+ importedmodnames = self.stats["dependencies"].setdefault(
+ importedmodname, set()
+ )
if context_name not in importedmodnames:
importedmodnames.add(context_name)
# update import graph
self.import_graph[context_name].add(importedmodname)
- if not self.linter.is_message_enabled('cyclic-import', line=node.lineno):
+ if not self.linter.is_message_enabled("cyclic-import", line=node.lineno):
self._excluded_edges[context_name].add(importedmodname)
def _check_deprecated_module(self, node, mod_path):
"""check if the module is deprecated"""
for mod_name in self.config.deprecated_modules:
- if mod_path == mod_name or mod_path.startswith(mod_name + '.'):
- self.add_message('deprecated-module', node=node, args=mod_path)
+ if mod_path == mod_name or mod_path.startswith(mod_name + "."):
+ self.add_message("deprecated-module", node=node, args=mod_path)
def _check_import_as_rename(self, node):
names = node.names
@@ -739,7 +827,7 @@ class ImportsChecker(BaseChecker):
return
real_name = name[0]
- splitted_packages = real_name.rsplit('.')
+ splitted_packages = real_name.rsplit(".")
real_name = splitted_packages[-1]
imported_name = name[1]
# consider only following cases
@@ -747,11 +835,11 @@ class ImportsChecker(BaseChecker):
# and ignore following
# import x.y.z as z
if real_name == imported_name and len(splitted_packages) == 1:
- self.add_message('useless-import-alias', node=node)
+ self.add_message("useless-import-alias", node=node)
def _check_reimport(self, node, basename=None, level=None):
"""check if the import is necessary (i.e. not already done)"""
- if not self.linter.is_message_enabled('reimported'):
+ if not self.linter.is_message_enabled("reimported"):
return
frame = node.frame()
@@ -763,12 +851,12 @@ class ImportsChecker(BaseChecker):
for known_context, known_level in contexts:
for name, alias in node.names:
first = _get_first_import(
- node, known_context,
- name, basename,
- known_level, alias)
+ node, known_context, name, basename, known_level, alias
+ )
if first is not None:
- self.add_message('reimported', node=node,
- args=(name, first.fromlineno))
+ self.add_message(
+ "reimported", node=node, args=(name, first.fromlineno)
+ )
def _report_external_dependencies(self, sect, _, _dummy):
"""return a verbatim layout for displaying dependencies"""
@@ -780,27 +868,27 @@ class ImportsChecker(BaseChecker):
def _report_dependencies_graph(self, sect, _, _dummy):
"""write dependencies as a dot (graphviz) file"""
- dep_info = self.stats['dependencies']
- if not dep_info or not (self.config.import_graph
- or self.config.ext_import_graph
- or self.config.int_import_graph):
+ dep_info = self.stats["dependencies"]
+ if not dep_info or not (
+ self.config.import_graph
+ or self.config.ext_import_graph
+ or self.config.int_import_graph
+ ):
raise EmptyReportError()
filename = self.config.import_graph
if filename:
- _make_graph(filename, dep_info, sect, '')
+ _make_graph(filename, dep_info, sect, "")
filename = self.config.ext_import_graph
if filename:
- _make_graph(filename, self._external_dependencies_info(),
- sect, 'external ')
+ _make_graph(filename, self._external_dependencies_info(), sect, "external ")
filename = self.config.int_import_graph
if filename:
- _make_graph(filename, self._internal_dependencies_info(),
- sect, 'internal ')
+ _make_graph(filename, self._internal_dependencies_info(), sect, "internal ")
def _filter_dependencies_graph(self, internal):
"""build the internal or the external depedency graph"""
graph = collections.defaultdict(set)
- for importee, importers in self.stats['dependencies'].items():
+ for importee, importers in self.stats["dependencies"].items():
for importer in importers:
package = self._module_pkg.get(importer, importer)
is_inside = importee.startswith(package)
@@ -827,17 +915,17 @@ class ImportsChecker(BaseChecker):
# Skip the check if in __init__.py issue #2026
return
- wildcard_import_is_allowed = (
- self._wildcard_import_is_allowed(imported_module)
- )
+ wildcard_import_is_allowed = self._wildcard_import_is_allowed(imported_module)
for name, _ in node.names:
- if name == '*' and not wildcard_import_is_allowed:
- self.add_message('wildcard-import', args=node.modname, node=node)
+ if name == "*" and not wildcard_import_is_allowed:
+ self.add_message("wildcard-import", args=node.modname, node=node)
def _wildcard_import_is_allowed(self, imported_module):
- return (self.config.allow_wildcard_with_all
- and imported_module is not None
- and '__all__' in imported_module.locals)
+ return (
+ self.config.allow_wildcard_with_all
+ and imported_module is not None
+ and "__all__" in imported_module.locals
+ )
def register(linter):
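
Note: closing out imports.py, a brief illustration of the C0414 (useless-import-alias) pattern whose message text appears in the table above, using stdlib modules instead of the numpy wording from the message:

import collections as collections  # C0414: the alias does not rename the package
import collections as coll         # fine: the alias actually renames it
import os.path as path             # ignored: the "import x.y.z as z" case is skipped
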
diff --git a/pylint/checkers/logging.py b/pylint/checkers/logging.py
index 3796c2268..92a4a0428 100644
--- a/pylint/checkers/logging.py
+++ b/pylint/checkers/logging.py
@@ -27,55 +27,74 @@ from pylint.checkers import utils
from pylint.checkers.utils import check_messages
-
MSGS = {
- 'W1201': ('Specify string format arguments as logging function parameters',
- 'logging-not-lazy',
- 'Used when a logging statement has a call form of '
- '"logging.<logging method>(format_string % (format_args...))". '
- 'Such calls should leave string interpolation to the logging '
- 'method itself and be written '
- '"logging.<logging method>(format_string, format_args...)" '
- 'so that the program may avoid incurring the cost of the '
- 'interpolation in those cases in which no message will be '
- 'logged. For more, see '
- 'http://www.python.org/dev/peps/pep-0282/.'),
- 'W1202': ('Use % formatting in logging functions and pass the % '
- 'parameters as arguments',
- 'logging-format-interpolation',
- 'Used when a logging statement has a call form of '
- '"logging.<logging method>(format_string.format(format_args...))"'
- '. Such calls should use % formatting instead, but leave '
- 'interpolation to the logging function by passing the parameters '
- 'as arguments.'),
- 'W1203': ('Use % formatting in logging functions and pass the % '
- 'parameters as arguments',
- 'logging-fstring-interpolation',
- 'Used when a logging statement has a call form of '
- '"logging.method(f\"...\"))"'
- '. Such calls should use % formatting instead, but leave '
- 'interpolation to the logging function by passing the parameters '
- 'as arguments.'
- ),
- 'E1200': ('Unsupported logging format character %r (%#02x) at index %d',
- 'logging-unsupported-format',
- 'Used when an unsupported format character is used in a logging '
- 'statement format string.'),
- 'E1201': ('Logging format string ends in middle of conversion specifier',
- 'logging-format-truncated',
- 'Used when a logging statement format string terminates before '
- 'the end of a conversion specifier.'),
- 'E1205': ('Too many arguments for logging format string',
- 'logging-too-many-args',
- 'Used when a logging format string is given too many arguments.'),
- 'E1206': ('Not enough arguments for logging format string',
- 'logging-too-few-args',
- 'Used when a logging format string is given too few arguments.'),
- }
+ "W1201": (
+ "Specify string format arguments as logging function parameters",
+ "logging-not-lazy",
+ "Used when a logging statement has a call form of "
+ '"logging.<logging method>(format_string % (format_args...))". '
+ "Such calls should leave string interpolation to the logging "
+ "method itself and be written "
+ '"logging.<logging method>(format_string, format_args...)" '
+ "so that the program may avoid incurring the cost of the "
+ "interpolation in those cases in which no message will be "
+ "logged. For more, see "
+ "http://www.python.org/dev/peps/pep-0282/.",
+ ),
+ "W1202": (
+ "Use % formatting in logging functions and pass the % "
+ "parameters as arguments",
+ "logging-format-interpolation",
+ "Used when a logging statement has a call form of "
+ '"logging.<logging method>(format_string.format(format_args...))"'
+ ". Such calls should use % formatting instead, but leave "
+ "interpolation to the logging function by passing the parameters "
+ "as arguments.",
+ ),
+ "W1203": (
+ "Use % formatting in logging functions and pass the % "
+ "parameters as arguments",
+ "logging-fstring-interpolation",
+ "Used when a logging statement has a call form of "
+ '"logging.method(f"..."))"'
+ ". Such calls should use % formatting instead, but leave "
+ "interpolation to the logging function by passing the parameters "
+ "as arguments.",
+ ),
+ "E1200": (
+ "Unsupported logging format character %r (%#02x) at index %d",
+ "logging-unsupported-format",
+ "Used when an unsupported format character is used in a logging "
+ "statement format string.",
+ ),
+ "E1201": (
+ "Logging format string ends in middle of conversion specifier",
+ "logging-format-truncated",
+ "Used when a logging statement format string terminates before "
+ "the end of a conversion specifier.",
+ ),
+ "E1205": (
+ "Too many arguments for logging format string",
+ "logging-too-many-args",
+ "Used when a logging format string is given too many arguments.",
+ ),
+ "E1206": (
+ "Not enough arguments for logging format string",
+ "logging-too-few-args",
+ "Used when a logging format string is given too few arguments.",
+ ),
+}
CHECKED_CONVENIENCE_FUNCTIONS = {
- 'critical', 'debug', 'error', 'exception', 'fatal', 'info', 'warn', 'warning'
+ "critical",
+ "debug",
+ "error",
+ "exception",
+ "fatal",
+ "info",
+ "warn",
+ "warning",
}
@@ -91,29 +110,35 @@ def is_method_call(func, types=(), methods=()):
bool: true if the node represents a method call for the given type and
method names, False otherwise.
"""
- return (isinstance(func, astroid.BoundMethod)
- and isinstance(func.bound, astroid.Instance)
- and (func.bound.name in types if types else True)
- and (func.name in methods if methods else True))
+ return (
+ isinstance(func, astroid.BoundMethod)
+ and isinstance(func.bound, astroid.Instance)
+ and (func.bound.name in types if types else True)
+ and (func.name in methods if methods else True)
+ )
class LoggingChecker(checkers.BaseChecker):
"""Checks use of the logging module."""
__implements__ = interfaces.IAstroidChecker
- name = 'logging'
+ name = "logging"
msgs = MSGS
- options = (('logging-modules',
- {'default': ('logging',),
- 'type': 'csv',
- 'metavar': '<comma separated list>',
- 'help': 'Logging modules to check that the string format '
- 'arguments are in logging function parameter format.'}
- ),
- )
-
- def visit_module(self, node): # pylint: disable=unused-argument
+ options = (
+ (
+ "logging-modules",
+ {
+ "default": ("logging",),
+ "type": "csv",
+ "metavar": "<comma separated list>",
+ "help": "Logging modules to check that the string format "
+ "arguments are in logging function parameter format.",
+ },
+ ),
+ )
+
+ def visit_module(self, node): # pylint: disable=unused-argument
"""Clears any state left in this checker from last module checked."""
# The code being checked can just as easily "import logging as foo",
# so it is necessary to process the imports and store in this field
@@ -124,7 +149,7 @@ class LoggingChecker(checkers.BaseChecker):
self._logging_modules = set(logging_mods)
self._from_imports = {}
for logging_mod in logging_mods:
- parts = logging_mod.rsplit('.', 1)
+ parts = logging_mod.rsplit(".", 1)
if len(parts) > 1:
self._from_imports[parts[0]] = parts[1]
@@ -147,20 +172,26 @@ class LoggingChecker(checkers.BaseChecker):
@check_messages(*MSGS)
def visit_call(self, node):
"""Checks calls to logging methods."""
+
def is_logging_name():
- return (isinstance(node.func, astroid.Attribute) and
- isinstance(node.func.expr, astroid.Name) and
- node.func.expr.name in self._logging_names)
+ return (
+ isinstance(node.func, astroid.Attribute)
+ and isinstance(node.func.expr, astroid.Name)
+ and node.func.expr.name in self._logging_names
+ )
def is_logger_class():
try:
for inferred in node.func.infer():
if isinstance(inferred, astroid.BoundMethod):
parent = inferred._proxied.parent
- if (isinstance(parent, astroid.ClassDef) and
- (parent.qname() == 'logging.Logger' or
- any(ancestor.qname() == 'logging.Logger'
- for ancestor in parent.ancestors()))):
+ if isinstance(parent, astroid.ClassDef) and (
+ parent.qname() == "logging.Logger"
+ or any(
+ ancestor.qname() == "logging.Logger"
+ for ancestor in parent.ancestors()
+ )
+ ):
return True, inferred._proxied.name
except astroid.exceptions.InferenceError:
pass
@@ -176,7 +207,7 @@ class LoggingChecker(checkers.BaseChecker):
def _check_log_method(self, node, name):
"""Checks calls to logging.log(level, format, *format_args)."""
- if name == 'log':
+ if name == "log":
if node.starargs or node.kwargs or len(node.args) < 2:
# Either a malformed call, star args, or double-star args. Beyond
# the scope of this checker.
@@ -193,28 +224,31 @@ class LoggingChecker(checkers.BaseChecker):
if isinstance(node.args[format_pos], astroid.BinOp):
binop = node.args[format_pos]
- emit = binop.op == '%'
- if binop.op == '+':
+ emit = binop.op == "%"
+ if binop.op == "+":
total_number_of_strings = sum(
- 1 for operand in (binop.left, binop.right)
+ 1
+ for operand in (binop.left, binop.right)
if self._is_operand_literal_str(utils.safe_infer(operand))
)
emit = total_number_of_strings > 0
if emit:
- self.add_message('logging-not-lazy', node=node)
+ self.add_message("logging-not-lazy", node=node)
elif isinstance(node.args[format_pos], astroid.Call):
self._check_call_func(node.args[format_pos])
elif isinstance(node.args[format_pos], astroid.Const):
self._check_format_string(node, format_pos)
- elif isinstance(node.args[format_pos], (astroid.FormattedValue, astroid.JoinedStr)):
- self.add_message('logging-fstring-interpolation', node=node)
+ elif isinstance(
+ node.args[format_pos], (astroid.FormattedValue, astroid.JoinedStr)
+ ):
+ self.add_message("logging-fstring-interpolation", node=node)
@staticmethod
def _is_operand_literal_str(operand):
"""
Return True if the operand in argument is a literal string
"""
- return isinstance(operand, astroid.Const) and operand.name == 'str'
+ return isinstance(operand, astroid.Const) and operand.name == "str"
def _check_call_func(self, node):
"""Checks that function call is not format_string.format().
@@ -224,10 +258,12 @@ class LoggingChecker(checkers.BaseChecker):
Call AST node to be checked.
"""
func = utils.safe_infer(node.func)
- types = ('str', 'unicode')
- methods = ('format',)
- if is_method_call(func, types, methods) and not is_complex_format_str(func.bound):
- self.add_message('logging-format-interpolation', node=node)
+ types = ("str", "unicode")
+ methods = ("format",)
+ if is_method_call(func, types, methods) and not is_complex_format_str(
+ func.bound
+ ):
+ self.add_message("logging-format-interpolation", node=node)
def _check_format_string(self, node, format_arg):
"""Checks that format string tokens match the supplied arguments.
@@ -236,7 +272,7 @@ class LoggingChecker(checkers.BaseChecker):
node (astroid.node_classes.NodeNG): AST node to be checked.
format_arg (int): Index of the format string in the node arguments.
"""
- num_args = _count_supplied_tokens(node.args[format_arg + 1:])
+ num_args = _count_supplied_tokens(node.args[format_arg + 1 :])
if not num_args:
# If no args were supplied, then all format strings are valid -
# don't check any further.
@@ -248,24 +284,28 @@ class LoggingChecker(checkers.BaseChecker):
required_num_args = 0
else:
try:
- keyword_args, required_num_args, _, _ = \
- utils.parse_format_string(format_string)
+ keyword_args, required_num_args, _, _ = utils.parse_format_string(
+ format_string
+ )
if keyword_args:
# Keyword checking on logging strings is complicated by
# special keywords - out of scope.
return
except utils.UnsupportedFormatCharacter as ex:
char = format_string[ex.index]
- self.add_message('logging-unsupported-format', node=node,
- args=(char, ord(char), ex.index))
+ self.add_message(
+ "logging-unsupported-format",
+ node=node,
+ args=(char, ord(char), ex.index),
+ )
return
except utils.IncompleteFormatString:
- self.add_message('logging-format-truncated', node=node)
+ self.add_message("logging-format-truncated", node=node)
return
if num_args > required_num_args:
- self.add_message('logging-too-many-args', node=node)
+ self.add_message("logging-too-many-args", node=node)
elif num_args < required_num_args:
- self.add_message('logging-too-few-args', node=node)
+ self.add_message("logging-too-few-args", node=node)
def is_complex_format_str(node):
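
Note: the logging.py hunks are likewise formatting-only. The call shapes that the W1201/W1202/W1203 messages above describe, in a minimal runnable form (the message text itself is illustrative):

import logging

logging.basicConfig(level=logging.INFO)
name = "world"

logging.info("hello %s" % name)        # W1201: interpolation happens before the call
logging.info("hello {}".format(name))  # W1202: str.format interpolation in the call
logging.info(f"hello {name}")          # W1203: f-string interpolation in the call
logging.info("hello %s", name)         # preferred: logging interpolates lazily
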
diff --git a/pylint/checkers/misc.py b/pylint/checkers/misc.py
index 4b2fe191f..767da45ab 100644
--- a/pylint/checkers/misc.py
+++ b/pylint/checkers/misc.py
@@ -34,10 +34,14 @@ class ByIdManagedMessagesChecker(BaseChecker):
__implements__ = IRawChecker
# configuration section name
- name = 'miscellaneous'
- msgs = {'I0023': ('%s',
- 'use-symbolic-message-instead',
- 'Used when a message is enabled or disabled by id.'),}
+ name = "miscellaneous"
+ msgs = {
+ "I0023": (
+ "%s",
+ "use-symbolic-message-instead",
+ "Used when a message is enabled or disabled by id.",
+ )
+ }
options = ()
@@ -47,12 +51,14 @@ class ByIdManagedMessagesChecker(BaseChecker):
for (mod_name, msg_id, msg_symbol, lineno, is_disabled) in managed_msgs:
if mod_name == module.name:
if is_disabled:
- txt = ("Id '{ident}' is used to disable '{symbol}' message emission"
- .format(ident=msg_id, symbol=msg_symbol))
+ txt = "Id '{ident}' is used to disable '{symbol}' message emission".format(
+ ident=msg_id, symbol=msg_symbol
+ )
else:
- txt = ("Id '{ident}' is used to enable '{symbol}' message emission"
- .format(ident=msg_id, symbol=msg_symbol))
- self.add_message('use-symbolic-message-instead', line=lineno, args=txt)
+ txt = "Id '{ident}' is used to enable '{symbol}' message emission".format(
+ ident=msg_id, symbol=msg_symbol
+ )
+ self.add_message("use-symbolic-message-instead", line=lineno, args=txt)
MessagesHandlerMixIn.clear_by_id_managed_msgs()
@@ -62,25 +68,40 @@ class EncodingChecker(BaseChecker):
* warning notes in the code like FIXME, XXX
* encoding issues.
"""
+
__implements__ = IRawChecker
# configuration section name
- name = 'miscellaneous'
- msgs = {'W0511': ('%s',
- 'fixme',
- 'Used when a warning note as FIXME or XXX is detected.'),
- 'W0512': ('Cannot decode using encoding "%s",'
- ' unexpected byte at position %d',
- 'invalid-encoded-data',
- 'Used when a source line cannot be decoded using the specified '
- 'source file encoding.',
- {'maxversion': (3, 0)}),}
-
- options = (('notes',
- {'type': 'csv', 'metavar': '<comma separated values>',
- 'default': ('FIXME', 'XXX', 'TODO'),
- 'help': ('List of note tags to take in consideration, '
- 'separated by a comma.')}),)
+ name = "miscellaneous"
+ msgs = {
+ "W0511": (
+ "%s",
+ "fixme",
+ "Used when a warning note as FIXME or XXX is detected.",
+ ),
+ "W0512": (
+ 'Cannot decode using encoding "%s",' " unexpected byte at position %d",
+ "invalid-encoded-data",
+ "Used when a source line cannot be decoded using the specified "
+ "source file encoding.",
+ {"maxversion": (3, 0)},
+ ),
+ }
+
+ options = (
+ (
+ "notes",
+ {
+ "type": "csv",
+ "metavar": "<comma separated values>",
+ "default": ("FIXME", "XXX", "TODO"),
+ "help": (
+ "List of note tags to take in consideration, "
+ "separated by a comma."
+ ),
+ },
+ ),
+ )
def _check_note(self, notes, lineno, line, module_last_lineno):
"""
@@ -111,30 +132,39 @@ class EncodingChecker(BaseChecker):
disable_option_match = OPTION_RGX.search(line)
if disable_option_match:
try:
- _, value = disable_option_match.group(1).split('=', 1)
- values = [_val.strip().upper() for _val in value.split(',')]
+ _, value = disable_option_match.group(1).split("=", 1)
+ values = [_val.strip().upper() for _val in value.split(",")]
if set(values) & set(self.config.notes):
return
except ValueError:
- self.add_message('bad-inline-option',
- args=disable_option_match.group(1).strip(), line=line)
+ self.add_message(
+ "bad-inline-option",
+ args=disable_option_match.group(1).strip(),
+ line=line,
+ )
return
- self.add_message('fixme', args=line[match.start(1):].rstrip(), line=lineno,
- col_offset=match.start(1))
+ self.add_message(
+ "fixme",
+ args=line[match.start(1) :].rstrip(),
+ line=lineno,
+ col_offset=match.start(1),
+ )
def _check_encoding(self, lineno, line, file_encoding):
try:
return line.decode(file_encoding)
except UnicodeDecodeError as ex:
- self.add_message('invalid-encoded-data', line=lineno,
- args=(file_encoding, ex.args[2]))
+ self.add_message(
+ "invalid-encoded-data", line=lineno, args=(file_encoding, ex.args[2])
+ )
except LookupError as ex:
- if (line.startswith('#') and
- "coding" in line and file_encoding in line):
- self.add_message('syntax-error',
- line=lineno,
- args='Cannot decode using encoding "{}",'
- ' bad encoding'.format(file_encoding))
+ if line.startswith("#") and "coding" in line and file_encoding in line:
+ self.add_message(
+ "syntax-error",
+ line=lineno,
+ args='Cannot decode using encoding "{}",'
+ " bad encoding".format(file_encoding),
+ )
def process_module(self, module):
"""inspect the source file to find encoding problem or fixmes like
@@ -142,13 +172,14 @@ class EncodingChecker(BaseChecker):
"""
if self.config.notes:
notes = re.compile(
- r'#\s*(%s)\b' % "|".join(map(re.escape, self.config.notes)), re.I)
+ r"#\s*(%s)\b" % "|".join(map(re.escape, self.config.notes)), re.I
+ )
else:
notes = None
if module.file_encoding:
encoding = module.file_encoding
else:
- encoding = 'ascii'
+ encoding = "ascii"
with module.stream() as stream:
for lineno, line in enumerate(stream):
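As a rough standalone sketch (default note tags assumed, sample lines invented), this is the note-tag regex that EncodingChecker.process_module builds and how a match turns into a fixme report:

import re

# The checker joins the configured tags (default FIXME, XXX, TODO) into one
# case-insensitive pattern; everything from the tag onwards becomes the
# argument of the 'fixme' message.
notes = ("FIXME", "XXX", "TODO")
pattern = re.compile(r"#\s*(%s)\b" % "|".join(map(re.escape, notes)), re.I)

sample = [
    "x = 1  # FIXME: handle negative values",
    "y = 2  # nothing to report here",
    "z = 3  # todo: lower-case tags match too, thanks to re.I",
]
for lineno, line in enumerate(sample, start=1):
    match = pattern.search(line)
    if match:
        print(lineno, line[match.start(1):].rstrip())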
diff --git a/pylint/checkers/newstyle.py b/pylint/checkers/newstyle.py
index cda600d31..0da6a5228 100644
--- a/pylint/checkers/newstyle.py
+++ b/pylint/checkers/newstyle.py
@@ -19,23 +19,22 @@ import astroid
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
-from pylint.checkers.utils import (
- check_messages,
- node_frame_class,
- has_known_bases
-)
+from pylint.checkers.utils import check_messages, node_frame_class, has_known_bases
MSGS = {
- 'E1003': ('Bad first argument %r given to super()',
- 'bad-super-call',
- 'Used when another argument than the current class is given as '
- 'first argument of the super builtin.'),
- 'E1004': ('Missing argument to super()',
- 'missing-super-argument',
- 'Used when the super builtin didn\'t receive an '
- 'argument.',
- {'maxversion': (3, 0)}),
- }
+ "E1003": (
+ "Bad first argument %r given to super()",
+ "bad-super-call",
+ "Used when another argument than the current class is given as "
+ "first argument of the super builtin.",
+ ),
+ "E1004": (
+ "Missing argument to super()",
+ "missing-super-argument",
+ "Used when the super builtin didn't receive an " "argument.",
+ {"maxversion": (3, 0)},
+ ),
+}
class NewStyleConflictChecker(BaseChecker):
@@ -48,14 +47,14 @@ class NewStyleConflictChecker(BaseChecker):
__implements__ = (IAstroidChecker,)
# configuration section name
- name = 'newstyle'
+ name = "newstyle"
# messages
msgs = MSGS
priority = -2
# configuration options
options = ()
- @check_messages('bad-super-call', 'missing-super-argument')
+ @check_messages("bad-super-call", "missing-super-argument")
def visit_functiondef(self, node):
"""check use of super"""
# ignore actual functions or method within a new style class
@@ -73,9 +72,11 @@ class NewStyleConflictChecker(BaseChecker):
call = expr.expr
# skip the test if using super
- if not (isinstance(call, astroid.Call) and
- isinstance(call.func, astroid.Name) and
- call.func.name == 'super'):
+ if not (
+ isinstance(call, astroid.Call)
+ and isinstance(call.func, astroid.Name)
+ and call.func.name == "super"
+ ):
continue
if not klass.newstyle and has_known_bases(klass):
@@ -88,26 +89,32 @@ class NewStyleConflictChecker(BaseChecker):
# unless Python 3
continue
else:
- self.add_message('missing-super-argument', node=call)
+ self.add_message("missing-super-argument", node=call)
continue
# calling super(type(self), self) can lead to recursion loop
# in derived classes
arg0 = call.args[0]
- if isinstance(arg0, astroid.Call) and \
- isinstance(arg0.func, astroid.Name) and \
- arg0.func.name == 'type':
- self.add_message('bad-super-call', node=call, args=('type', ))
+ if (
+ isinstance(arg0, astroid.Call)
+ and isinstance(arg0.func, astroid.Name)
+ and arg0.func.name == "type"
+ ):
+ self.add_message("bad-super-call", node=call, args=("type",))
continue
# calling super(self.__class__, self) can lead to recursion loop
# in derived classes
- if len(call.args) >= 2 and \
- isinstance(call.args[1], astroid.Name) and \
- call.args[1].name == 'self' and \
- isinstance(arg0, astroid.Attribute) and \
- arg0.attrname == '__class__':
- self.add_message('bad-super-call', node=call, args=('self.__class__', ))
+ if (
+ len(call.args) >= 2
+ and isinstance(call.args[1], astroid.Name)
+ and call.args[1].name == "self"
+ and isinstance(arg0, astroid.Attribute)
+ and arg0.attrname == "__class__"
+ ):
+ self.add_message(
+ "bad-super-call", node=call, args=("self.__class__",)
+ )
continue
try:
@@ -122,10 +129,10 @@ class NewStyleConflictChecker(BaseChecker):
# for call.args[0].name
if supcls:
name = supcls.name
- elif call.args and hasattr(call.args[0], 'name'):
+ elif call.args and hasattr(call.args[0], "name"):
name = call.args[0].name
if name:
- self.add_message('bad-super-call', node=call, args=(name, ))
+ self.add_message("bad-super-call", node=call, args=(name,))
visit_asyncfunctiondef = visit_functiondef
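For illustration only (classes invented, not from this commit), these are the super() call shapes NewStyleConflictChecker distinguishes; on Python 2 a bare super() with no arguments would additionally trigger missing-super-argument:

class Base(object):
    def greet(self):
        return "base"


class BadTypeChild(Base):
    def greet(self):
        # bad-super-call ('type'): super(type(self), self) recurses forever
        # once this class is subclassed.
        return super(type(self), self).greet()


class BadClassChild(Base):
    def greet(self):
        # bad-super-call ('self.__class__'): same recursion problem.
        return super(self.__class__, self).greet()


class GoodChild(Base):
    def greet(self):
        # Accepted: the current class is named explicitly (or use the bare
        # super() form on Python 3).
        return super(GoodChild, self).greet()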
diff --git a/pylint/checkers/python3.py b/pylint/checkers/python3.py
index 03c1741ed..29b5e722d 100644
--- a/pylint/checkers/python3.py
+++ b/pylint/checkers/python3.py
@@ -48,7 +48,7 @@ _ZERO = re.compile("^0+$")
def _is_old_octal(literal):
if _ZERO.match(literal):
return False
- if re.match(r'0\d+', literal):
+ if re.match(r"0\d+", literal):
try:
int(literal, 8)
except ValueError:
@@ -60,7 +60,7 @@ def _is_old_octal(literal):
def _inferred_value_is_dict(value):
if isinstance(value, astroid.Dict):
return True
- return isinstance(value, astroid.Instance) and 'dict' in value.basenames
+ return isinstance(value, astroid.Instance) and "dict" in value.basenames
def _check_dict_node(node):
@@ -79,19 +79,33 @@ def _check_dict_node(node):
def _is_builtin(node):
- return getattr(node, 'name', None) in ('__builtin__', 'builtins')
-
-
-_ACCEPTS_ITERATOR = {'iter', 'list', 'tuple', 'sorted', 'set', 'sum', 'any',
- 'all', 'enumerate', 'dict', 'filter', 'reversed',
- 'max', 'min', 'frozenset'}
-ATTRIBUTES_ACCEPTS_ITERATOR = {'join', 'from_iterable'}
+ return getattr(node, "name", None) in ("__builtin__", "builtins")
+
+
+_ACCEPTS_ITERATOR = {
+ "iter",
+ "list",
+ "tuple",
+ "sorted",
+ "set",
+ "sum",
+ "any",
+ "all",
+ "enumerate",
+ "dict",
+ "filter",
+ "reversed",
+ "max",
+ "min",
+ "frozenset",
+}
+ATTRIBUTES_ACCEPTS_ITERATOR = {"join", "from_iterable"}
_BUILTIN_METHOD_ACCEPTS_ITERATOR = {
- 'builtins.list.extend',
- 'builtins.dict.update',
- 'builtins.set.update',
+ "builtins.list.extend",
+ "builtins.dict.update",
+ "builtins.set.update",
}
-DICT_METHODS = {'items', 'keys', 'values'}
+DICT_METHODS = {"items", "keys", "values"}
def _in_iterating_context(node):
@@ -130,14 +144,18 @@ def _in_iterating_context(node):
return True
# If the call is in an unpacking, there's no need to warn,
# since it can be considered iterating.
- elif (isinstance(parent, astroid.Assign) and
- isinstance(parent.targets[0], (astroid.List, astroid.Tuple))):
+ elif isinstance(parent, astroid.Assign) and isinstance(
+ parent.targets[0], (astroid.List, astroid.Tuple)
+ ):
if len(parent.targets[0].elts) > 1:
return True
# If the call is in a containment check, we consider that to
# be an iterating context
- elif (isinstance(parent, astroid.Compare)
- and len(parent.ops) == 1 and parent.ops[0][0] == 'in'):
+ elif (
+ isinstance(parent, astroid.Compare)
+ and len(parent.ops) == 1
+ and parent.ops[0][0] == "in"
+ ):
return True
return False
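A hedged sketch of the contexts this helper distinguishes (sample values invented; assumes for-loops and the _ACCEPTS_ITERATOR callables above count as iterating):

values = [1, 2, 3]

# Iterating contexts: no warning from the python3 checker.
for doubled in map(lambda v: v * 2, values):
    print(doubled)
as_list = list(map(lambda v: v * 2, values))
found = 4 in map(lambda v: v * 2, values)

# Not an iterating context: on Python 3 this is a lazy iterator, not a list,
# so map-builtin-not-iterating would be reported here.
lazy = map(lambda v: v * 2, values)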
@@ -146,446 +164,747 @@ def _is_conditional_import(node):
"""Checks if an import node is in the context of a conditional.
"""
parent = node.parent
- return isinstance(parent, (astroid.TryExcept, astroid.ExceptHandler,
- astroid.If, astroid.IfExp))
+ return isinstance(
+ parent, (astroid.TryExcept, astroid.ExceptHandler, astroid.If, astroid.IfExp)
+ )
-Branch = namedtuple('Branch', ['node', 'is_py2_only'])
+Branch = namedtuple("Branch", ["node", "is_py2_only"])
class Python3Checker(checkers.BaseChecker):
__implements__ = interfaces.IAstroidChecker
enabled = False
- name = 'python3'
+ name = "python3"
msgs = {
# Errors for what will syntactically break in Python 3, warnings for
# everything else.
- 'E1601': ('print statement used',
- 'print-statement',
- 'Used when a print statement is used '
- '(`print` is a function in Python 3)'),
- 'E1602': ('Parameter unpacking specified',
- 'parameter-unpacking',
- 'Used when parameter unpacking is specified for a function'
- "(Python 3 doesn't allow it)"),
- 'E1603': ('Implicit unpacking of exceptions is not supported '
- 'in Python 3',
- 'unpacking-in-except',
- 'Python3 will not allow implicit unpacking of '
- 'exceptions in except clauses. '
- 'See http://www.python.org/dev/peps/pep-3110/',
- {'old_names': [('W0712', 'unpacking-in-except')]}),
- 'E1604': ('Use raise ErrorClass(args) instead of '
- 'raise ErrorClass, args.',
- 'old-raise-syntax',
- "Used when the alternate raise syntax "
- "'raise foo, bar' is used "
- "instead of 'raise foo(bar)'.",
- {'old_names': [('W0121', 'old-raise-syntax')]}),
- 'E1605': ('Use of the `` operator',
- 'backtick',
- 'Used when the deprecated "``" (backtick) operator is used '
- 'instead of the str() function.',
- {'scope': WarningScope.NODE,
- 'old_names': [('W0333', 'backtick')]}),
- 'E1609': ('Import * only allowed at module level',
- 'import-star-module-level',
- 'Used when the import star syntax is used somewhere '
- 'else than the module level.',
- {'maxversion': (3, 0)}),
- 'W1601': ('apply built-in referenced',
- 'apply-builtin',
- 'Used when the apply built-in function is referenced '
- '(missing from Python 3)'),
- 'W1602': ('basestring built-in referenced',
- 'basestring-builtin',
- 'Used when the basestring built-in function is referenced '
- '(missing from Python 3)'),
- 'W1603': ('buffer built-in referenced',
- 'buffer-builtin',
- 'Used when the buffer built-in function is referenced '
- '(missing from Python 3)'),
- 'W1604': ('cmp built-in referenced',
- 'cmp-builtin',
- 'Used when the cmp built-in function is referenced '
- '(missing from Python 3)'),
- 'W1605': ('coerce built-in referenced',
- 'coerce-builtin',
- 'Used when the coerce built-in function is referenced '
- '(missing from Python 3)'),
- 'W1606': ('execfile built-in referenced',
- 'execfile-builtin',
- 'Used when the execfile built-in function is referenced '
- '(missing from Python 3)'),
- 'W1607': ('file built-in referenced',
- 'file-builtin',
- 'Used when the file built-in function is referenced '
- '(missing from Python 3)'),
- 'W1608': ('long built-in referenced',
- 'long-builtin',
- 'Used when the long built-in function is referenced '
- '(missing from Python 3)'),
- 'W1609': ('raw_input built-in referenced',
- 'raw_input-builtin',
- 'Used when the raw_input built-in function is referenced '
- '(missing from Python 3)'),
- 'W1610': ('reduce built-in referenced',
- 'reduce-builtin',
- 'Used when the reduce built-in function is referenced '
- '(missing from Python 3)'),
- 'W1611': ('StandardError built-in referenced',
- 'standarderror-builtin',
- 'Used when the StandardError built-in function is referenced '
- '(missing from Python 3)'),
- 'W1612': ('unicode built-in referenced',
- 'unicode-builtin',
- 'Used when the unicode built-in function is referenced '
- '(missing from Python 3)'),
- 'W1613': ('xrange built-in referenced',
- 'xrange-builtin',
- 'Used when the xrange built-in function is referenced '
- '(missing from Python 3)'),
- 'W1614': ('__coerce__ method defined',
- 'coerce-method',
- 'Used when a __coerce__ method is defined '
- '(method is not used by Python 3)'),
- 'W1615': ('__delslice__ method defined',
- 'delslice-method',
- 'Used when a __delslice__ method is defined '
- '(method is not used by Python 3)'),
- 'W1616': ('__getslice__ method defined',
- 'getslice-method',
- 'Used when a __getslice__ method is defined '
- '(method is not used by Python 3)'),
- 'W1617': ('__setslice__ method defined',
- 'setslice-method',
- 'Used when a __setslice__ method is defined '
- '(method is not used by Python 3)'),
- 'W1618': ('import missing `from __future__ import absolute_import`',
- 'no-absolute-import',
- 'Used when an import is not accompanied by '
- '``from __future__ import absolute_import`` '
- '(default behaviour in Python 3)'),
- 'W1619': ('division w/o __future__ statement',
- 'old-division',
- 'Used for non-floor division w/o a float literal or '
- '``from __future__ import division`` '
- '(Python 3 returns a float for int division unconditionally)'),
- 'W1620': ('Calling a dict.iter*() method',
- 'dict-iter-method',
- 'Used for calls to dict.iterkeys(), itervalues() or iteritems() '
- '(Python 3 lacks these methods)'),
- 'W1621': ('Calling a dict.view*() method',
- 'dict-view-method',
- 'Used for calls to dict.viewkeys(), viewvalues() or viewitems() '
- '(Python 3 lacks these methods)'),
- 'W1622': ('Called a next() method on an object',
- 'next-method-called',
- "Used when an object's next() method is called "
- '(Python 3 uses the next() built-in function)'),
- 'W1623': ("Assigning to a class's __metaclass__ attribute",
- 'metaclass-assignment',
- "Used when a metaclass is specified by assigning to __metaclass__ "
- '(Python 3 specifies the metaclass as a class statement argument)'),
- 'W1624': ('Indexing exceptions will not work on Python 3',
- 'indexing-exception',
- 'Indexing exceptions will not work on Python 3. Use '
- '`exception.args[index]` instead.',
- {'old_names': [('W0713', 'indexing-exception')]}),
- 'W1625': ('Raising a string exception',
- 'raising-string',
- 'Used when a string exception is raised. This will not '
- 'work on Python 3.',
- {'old_names': [('W0701', 'raising-string')]}),
- 'W1626': ('reload built-in referenced',
- 'reload-builtin',
- 'Used when the reload built-in function is referenced '
- '(missing from Python 3). You can use instead imp.reload '
- 'or importlib.reload.'),
- 'W1627': ('__oct__ method defined',
- 'oct-method',
- 'Used when an __oct__ method is defined '
- '(method is not used by Python 3)'),
- 'W1628': ('__hex__ method defined',
- 'hex-method',
- 'Used when a __hex__ method is defined '
- '(method is not used by Python 3)'),
- 'W1629': ('__nonzero__ method defined',
- 'nonzero-method',
- 'Used when a __nonzero__ method is defined '
- '(method is not used by Python 3)'),
- 'W1630': ('__cmp__ method defined',
- 'cmp-method',
- 'Used when a __cmp__ method is defined '
- '(method is not used by Python 3)'),
+ "E1601": (
+ "print statement used",
+ "print-statement",
+ "Used when a print statement is used "
+ "(`print` is a function in Python 3)",
+ ),
+ "E1602": (
+ "Parameter unpacking specified",
+ "parameter-unpacking",
+ "Used when parameter unpacking is specified for a function"
+ "(Python 3 doesn't allow it)",
+ ),
+ "E1603": (
+ "Implicit unpacking of exceptions is not supported " "in Python 3",
+ "unpacking-in-except",
+ "Python3 will not allow implicit unpacking of "
+ "exceptions in except clauses. "
+ "See http://www.python.org/dev/peps/pep-3110/",
+ {"old_names": [("W0712", "unpacking-in-except")]},
+ ),
+ "E1604": (
+ "Use raise ErrorClass(args) instead of " "raise ErrorClass, args.",
+ "old-raise-syntax",
+ "Used when the alternate raise syntax "
+ "'raise foo, bar' is used "
+ "instead of 'raise foo(bar)'.",
+ {"old_names": [("W0121", "old-raise-syntax")]},
+ ),
+ "E1605": (
+ "Use of the `` operator",
+ "backtick",
+ 'Used when the deprecated "``" (backtick) operator is used '
+ "instead of the str() function.",
+ {"scope": WarningScope.NODE, "old_names": [("W0333", "backtick")]},
+ ),
+ "E1609": (
+ "Import * only allowed at module level",
+ "import-star-module-level",
+ "Used when the import star syntax is used somewhere "
+ "else than the module level.",
+ {"maxversion": (3, 0)},
+ ),
+ "W1601": (
+ "apply built-in referenced",
+ "apply-builtin",
+ "Used when the apply built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1602": (
+ "basestring built-in referenced",
+ "basestring-builtin",
+ "Used when the basestring built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1603": (
+ "buffer built-in referenced",
+ "buffer-builtin",
+ "Used when the buffer built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1604": (
+ "cmp built-in referenced",
+ "cmp-builtin",
+ "Used when the cmp built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1605": (
+ "coerce built-in referenced",
+ "coerce-builtin",
+ "Used when the coerce built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1606": (
+ "execfile built-in referenced",
+ "execfile-builtin",
+ "Used when the execfile built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1607": (
+ "file built-in referenced",
+ "file-builtin",
+ "Used when the file built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1608": (
+ "long built-in referenced",
+ "long-builtin",
+ "Used when the long built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1609": (
+ "raw_input built-in referenced",
+ "raw_input-builtin",
+ "Used when the raw_input built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1610": (
+ "reduce built-in referenced",
+ "reduce-builtin",
+ "Used when the reduce built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1611": (
+ "StandardError built-in referenced",
+ "standarderror-builtin",
+ "Used when the StandardError built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1612": (
+ "unicode built-in referenced",
+ "unicode-builtin",
+ "Used when the unicode built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1613": (
+ "xrange built-in referenced",
+ "xrange-builtin",
+ "Used when the xrange built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1614": (
+ "__coerce__ method defined",
+ "coerce-method",
+ "Used when a __coerce__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1615": (
+ "__delslice__ method defined",
+ "delslice-method",
+ "Used when a __delslice__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1616": (
+ "__getslice__ method defined",
+ "getslice-method",
+ "Used when a __getslice__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1617": (
+ "__setslice__ method defined",
+ "setslice-method",
+ "Used when a __setslice__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1618": (
+ "import missing `from __future__ import absolute_import`",
+ "no-absolute-import",
+ "Used when an import is not accompanied by "
+ "``from __future__ import absolute_import`` "
+ "(default behaviour in Python 3)",
+ ),
+ "W1619": (
+ "division w/o __future__ statement",
+ "old-division",
+ "Used for non-floor division w/o a float literal or "
+ "``from __future__ import division`` "
+ "(Python 3 returns a float for int division unconditionally)",
+ ),
+ "W1620": (
+ "Calling a dict.iter*() method",
+ "dict-iter-method",
+ "Used for calls to dict.iterkeys(), itervalues() or iteritems() "
+ "(Python 3 lacks these methods)",
+ ),
+ "W1621": (
+ "Calling a dict.view*() method",
+ "dict-view-method",
+ "Used for calls to dict.viewkeys(), viewvalues() or viewitems() "
+ "(Python 3 lacks these methods)",
+ ),
+ "W1622": (
+ "Called a next() method on an object",
+ "next-method-called",
+ "Used when an object's next() method is called "
+ "(Python 3 uses the next() built-in function)",
+ ),
+ "W1623": (
+ "Assigning to a class's __metaclass__ attribute",
+ "metaclass-assignment",
+ "Used when a metaclass is specified by assigning to __metaclass__ "
+ "(Python 3 specifies the metaclass as a class statement argument)",
+ ),
+ "W1624": (
+ "Indexing exceptions will not work on Python 3",
+ "indexing-exception",
+ "Indexing exceptions will not work on Python 3. Use "
+ "`exception.args[index]` instead.",
+ {"old_names": [("W0713", "indexing-exception")]},
+ ),
+ "W1625": (
+ "Raising a string exception",
+ "raising-string",
+ "Used when a string exception is raised. This will not "
+ "work on Python 3.",
+ {"old_names": [("W0701", "raising-string")]},
+ ),
+ "W1626": (
+ "reload built-in referenced",
+ "reload-builtin",
+ "Used when the reload built-in function is referenced "
+ "(missing from Python 3). You can use instead imp.reload "
+ "or importlib.reload.",
+ ),
+ "W1627": (
+ "__oct__ method defined",
+ "oct-method",
+ "Used when an __oct__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1628": (
+ "__hex__ method defined",
+ "hex-method",
+ "Used when a __hex__ method is defined " "(method is not used by Python 3)",
+ ),
+ "W1629": (
+ "__nonzero__ method defined",
+ "nonzero-method",
+ "Used when a __nonzero__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1630": (
+ "__cmp__ method defined",
+ "cmp-method",
+ "Used when a __cmp__ method is defined " "(method is not used by Python 3)",
+ ),
# 'W1631': replaced by W1636
- 'W1632': ('input built-in referenced',
- 'input-builtin',
- 'Used when the input built-in is referenced '
- '(backwards-incompatible semantics in Python 3)'),
- 'W1633': ('round built-in referenced',
- 'round-builtin',
- 'Used when the round built-in is referenced '
- '(backwards-incompatible semantics in Python 3)'),
- 'W1634': ('intern built-in referenced',
- 'intern-builtin',
- 'Used when the intern built-in is referenced '
- '(Moved to sys.intern in Python 3)'),
- 'W1635': ('unichr built-in referenced',
- 'unichr-builtin',
- 'Used when the unichr built-in is referenced '
- '(Use chr in Python 3)'),
- 'W1636': ('map built-in referenced when not iterating',
- 'map-builtin-not-iterating',
- 'Used when the map built-in is referenced in a non-iterating '
- 'context (returns an iterator in Python 3)',
- {'old_names': [('W1631', 'implicit-map-evaluation')]}),
- 'W1637': ('zip built-in referenced when not iterating',
- 'zip-builtin-not-iterating',
- 'Used when the zip built-in is referenced in a non-iterating '
- 'context (returns an iterator in Python 3)'),
- 'W1638': ('range built-in referenced when not iterating',
- 'range-builtin-not-iterating',
- 'Used when the range built-in is referenced in a non-iterating '
- 'context (returns an iterator in Python 3)'),
- 'W1639': ('filter built-in referenced when not iterating',
- 'filter-builtin-not-iterating',
- 'Used when the filter built-in is referenced in a non-iterating '
- 'context (returns an iterator in Python 3)'),
- 'W1640': ('Using the cmp argument for list.sort / sorted',
- 'using-cmp-argument',
- 'Using the cmp argument for list.sort or the sorted '
- 'builtin should be avoided, since it was removed in '
- 'Python 3. Using either `key` or `functools.cmp_to_key` '
- 'should be preferred.'),
- 'W1641': ('Implementing __eq__ without also implementing __hash__',
- 'eq-without-hash',
- 'Used when a class implements __eq__ but not __hash__. In Python 2, objects '
- 'get object.__hash__ as the default implementation, in Python 3 objects get '
- 'None as their default __hash__ implementation if they also implement __eq__.'),
- 'W1642': ('__div__ method defined',
- 'div-method',
- 'Used when a __div__ method is defined. Using `__truediv__` and setting'
- '__div__ = __truediv__ should be preferred.'
- '(method is not used by Python 3)'),
- 'W1643': ('__idiv__ method defined',
- 'idiv-method',
- 'Used when an __idiv__ method is defined. Using `__itruediv__` and setting'
- '__idiv__ = __itruediv__ should be preferred.'
- '(method is not used by Python 3)'),
- 'W1644': ('__rdiv__ method defined',
- 'rdiv-method',
- 'Used when a __rdiv__ method is defined. Using `__rtruediv__` and setting'
- '__rdiv__ = __rtruediv__ should be preferred.'
- '(method is not used by Python 3)'),
- 'W1645': ('Exception.message removed in Python 3',
- 'exception-message-attribute',
- 'Used when the message attribute is accessed on an Exception. Use '
- 'str(exception) instead.'),
- 'W1646': ('non-text encoding used in str.decode',
- 'invalid-str-codec',
- 'Used when using str.encode or str.decode with a non-text encoding. Use '
- 'codecs module to handle arbitrary codecs.'),
- 'W1647': ('sys.maxint removed in Python 3',
- 'sys-max-int',
- 'Used when accessing sys.maxint. Use sys.maxsize instead.'),
- 'W1648': ('Module moved in Python 3',
- 'bad-python3-import',
- 'Used when importing a module that no longer exists in Python 3.'),
- 'W1649': ('Accessing a deprecated function on the string module',
- 'deprecated-string-function',
- 'Used when accessing a string function that has been deprecated in Python 3.'),
- 'W1650': ('Using str.translate with deprecated deletechars parameters',
- 'deprecated-str-translate-call',
- 'Used when using the deprecated deletechars parameters from str.translate. Use '
- 're.sub to remove the desired characters '),
- 'W1651': ('Accessing a deprecated function on the itertools module',
- 'deprecated-itertools-function',
- 'Used when accessing a function on itertools that has been removed in Python 3.'),
- 'W1652': ('Accessing a deprecated fields on the types module',
- 'deprecated-types-field',
- 'Used when accessing a field on types that has been removed in Python 3.'),
- 'W1653': ('next method defined',
- 'next-method-defined',
- 'Used when a next method is defined that would be an iterator in Python 2 but '
- 'is treated as a normal function in Python 3.',),
- 'W1654': ('dict.items referenced when not iterating',
- 'dict-items-not-iterating',
- 'Used when dict.items is referenced in a non-iterating '
- 'context (returns an iterator in Python 3)',),
- 'W1655': ('dict.keys referenced when not iterating',
- 'dict-keys-not-iterating',
- 'Used when dict.keys is referenced in a non-iterating '
- 'context (returns an iterator in Python 3)',),
- 'W1656': ('dict.values referenced when not iterating',
- 'dict-values-not-iterating',
- 'Used when dict.values is referenced in a non-iterating '
- 'context (returns an iterator in Python 3)',),
- 'W1657': ('Accessing a removed attribute on the operator module',
- 'deprecated-operator-function',
- 'Used when accessing a field on operator module that has been '
- 'removed in Python 3.',),
- 'W1658': ('Accessing a removed attribute on the urllib module',
- 'deprecated-urllib-function',
- 'Used when accessing a field on urllib module that has been '
- 'removed or moved in Python 3.',),
- 'W1659': ('Accessing a removed xreadlines attribute',
- 'xreadlines-attribute',
- 'Used when accessing the xreadlines() function on a file stream, '
- 'removed in Python 3.',),
- 'W1660': ('Accessing a removed attribute on the sys module',
- 'deprecated-sys-function',
- 'Used when accessing a field on sys module that has been '
- 'removed in Python 3.',),
- 'W1661': ('Using an exception object that was bound by an except handler',
- 'exception-escape',
- 'Emitted when using an exception, that was bound in an except '
- 'handler, outside of the except handler. On Python 3 these '
- 'exceptions will be deleted once they get out '
- 'of the except handler.'),
- 'W1662': ('Using a variable that was bound inside a comprehension',
- 'comprehension-escape',
- 'Emitted when using a variable, that was bound in a comprehension '
- 'handler, outside of the comprehension itself. On Python 3 these '
- 'variables will be deleted outside of the '
- 'comprehension.'),
+ "W1632": (
+ "input built-in referenced",
+ "input-builtin",
+ "Used when the input built-in is referenced "
+ "(backwards-incompatible semantics in Python 3)",
+ ),
+ "W1633": (
+ "round built-in referenced",
+ "round-builtin",
+ "Used when the round built-in is referenced "
+ "(backwards-incompatible semantics in Python 3)",
+ ),
+ "W1634": (
+ "intern built-in referenced",
+ "intern-builtin",
+ "Used when the intern built-in is referenced "
+ "(Moved to sys.intern in Python 3)",
+ ),
+ "W1635": (
+ "unichr built-in referenced",
+ "unichr-builtin",
+ "Used when the unichr built-in is referenced " "(Use chr in Python 3)",
+ ),
+ "W1636": (
+ "map built-in referenced when not iterating",
+ "map-builtin-not-iterating",
+ "Used when the map built-in is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ {"old_names": [("W1631", "implicit-map-evaluation")]},
+ ),
+ "W1637": (
+ "zip built-in referenced when not iterating",
+ "zip-builtin-not-iterating",
+ "Used when the zip built-in is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1638": (
+ "range built-in referenced when not iterating",
+ "range-builtin-not-iterating",
+ "Used when the range built-in is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1639": (
+ "filter built-in referenced when not iterating",
+ "filter-builtin-not-iterating",
+ "Used when the filter built-in is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1640": (
+ "Using the cmp argument for list.sort / sorted",
+ "using-cmp-argument",
+ "Using the cmp argument for list.sort or the sorted "
+ "builtin should be avoided, since it was removed in "
+ "Python 3. Using either `key` or `functools.cmp_to_key` "
+ "should be preferred.",
+ ),
+ "W1641": (
+ "Implementing __eq__ without also implementing __hash__",
+ "eq-without-hash",
+ "Used when a class implements __eq__ but not __hash__. In Python 2, objects "
+ "get object.__hash__ as the default implementation, in Python 3 objects get "
+ "None as their default __hash__ implementation if they also implement __eq__.",
+ ),
+ "W1642": (
+ "__div__ method defined",
+ "div-method",
+ "Used when a __div__ method is defined. Using `__truediv__` and setting"
+ "__div__ = __truediv__ should be preferred."
+ "(method is not used by Python 3)",
+ ),
+ "W1643": (
+ "__idiv__ method defined",
+ "idiv-method",
+ "Used when an __idiv__ method is defined. Using `__itruediv__` and setting"
+ "__idiv__ = __itruediv__ should be preferred."
+ "(method is not used by Python 3)",
+ ),
+ "W1644": (
+ "__rdiv__ method defined",
+ "rdiv-method",
+ "Used when a __rdiv__ method is defined. Using `__rtruediv__` and setting"
+ "__rdiv__ = __rtruediv__ should be preferred."
+ "(method is not used by Python 3)",
+ ),
+ "W1645": (
+ "Exception.message removed in Python 3",
+ "exception-message-attribute",
+ "Used when the message attribute is accessed on an Exception. Use "
+ "str(exception) instead.",
+ ),
+ "W1646": (
+ "non-text encoding used in str.decode",
+ "invalid-str-codec",
+ "Used when using str.encode or str.decode with a non-text encoding. Use "
+ "codecs module to handle arbitrary codecs.",
+ ),
+ "W1647": (
+ "sys.maxint removed in Python 3",
+ "sys-max-int",
+ "Used when accessing sys.maxint. Use sys.maxsize instead.",
+ ),
+ "W1648": (
+ "Module moved in Python 3",
+ "bad-python3-import",
+ "Used when importing a module that no longer exists in Python 3.",
+ ),
+ "W1649": (
+ "Accessing a deprecated function on the string module",
+ "deprecated-string-function",
+ "Used when accessing a string function that has been deprecated in Python 3.",
+ ),
+ "W1650": (
+ "Using str.translate with deprecated deletechars parameters",
+ "deprecated-str-translate-call",
+ "Used when using the deprecated deletechars parameters from str.translate. Use "
+ "re.sub to remove the desired characters ",
+ ),
+ "W1651": (
+ "Accessing a deprecated function on the itertools module",
+ "deprecated-itertools-function",
+ "Used when accessing a function on itertools that has been removed in Python 3.",
+ ),
+ "W1652": (
+ "Accessing a deprecated fields on the types module",
+ "deprecated-types-field",
+ "Used when accessing a field on types that has been removed in Python 3.",
+ ),
+ "W1653": (
+ "next method defined",
+ "next-method-defined",
+ "Used when a next method is defined that would be an iterator in Python 2 but "
+ "is treated as a normal function in Python 3.",
+ ),
+ "W1654": (
+ "dict.items referenced when not iterating",
+ "dict-items-not-iterating",
+ "Used when dict.items is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1655": (
+ "dict.keys referenced when not iterating",
+ "dict-keys-not-iterating",
+ "Used when dict.keys is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1656": (
+ "dict.values referenced when not iterating",
+ "dict-values-not-iterating",
+ "Used when dict.values is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1657": (
+ "Accessing a removed attribute on the operator module",
+ "deprecated-operator-function",
+ "Used when accessing a field on operator module that has been "
+ "removed in Python 3.",
+ ),
+ "W1658": (
+ "Accessing a removed attribute on the urllib module",
+ "deprecated-urllib-function",
+ "Used when accessing a field on urllib module that has been "
+ "removed or moved in Python 3.",
+ ),
+ "W1659": (
+ "Accessing a removed xreadlines attribute",
+ "xreadlines-attribute",
+ "Used when accessing the xreadlines() function on a file stream, "
+ "removed in Python 3.",
+ ),
+ "W1660": (
+ "Accessing a removed attribute on the sys module",
+ "deprecated-sys-function",
+ "Used when accessing a field on sys module that has been "
+ "removed in Python 3.",
+ ),
+ "W1661": (
+ "Using an exception object that was bound by an except handler",
+ "exception-escape",
+ "Emitted when using an exception, that was bound in an except "
+ "handler, outside of the except handler. On Python 3 these "
+ "exceptions will be deleted once they get out "
+ "of the except handler.",
+ ),
+ "W1662": (
+ "Using a variable that was bound inside a comprehension",
+ "comprehension-escape",
+ "Emitted when using a variable, that was bound in a comprehension "
+ "handler, outside of the comprehension itself. On Python 3 these "
+ "variables will be deleted outside of the "
+ "comprehension.",
+ ),
}
- _bad_builtins = frozenset([
- 'apply',
- 'basestring',
- 'buffer',
- 'cmp',
- 'coerce',
- 'execfile',
- 'file',
- 'input', # Not missing, but incompatible semantics
- 'intern',
- 'long',
- 'raw_input',
- 'reduce',
- 'round', # Not missing, but incompatible semantics
- 'StandardError',
- 'unichr',
- 'unicode',
- 'xrange',
- 'reload',
- ])
-
- _unused_magic_methods = frozenset([
- '__coerce__',
- '__delslice__',
- '__getslice__',
- '__setslice__',
- '__oct__',
- '__hex__',
- '__nonzero__',
- '__cmp__',
- '__div__',
- '__idiv__',
- '__rdiv__',
- ])
-
- _invalid_encodings = frozenset([
- 'base64_codec',
- 'base64',
- 'base_64',
- 'bz2_codec',
- 'bz2',
- 'hex_codec',
- 'hex',
- 'quopri_codec',
- 'quopri',
- 'quotedprintable',
- 'quoted_printable',
- 'uu_codec',
- 'uu',
- 'zlib_codec',
- 'zlib',
- 'zip',
- 'rot13',
- 'rot_13',
- ])
+ _bad_builtins = frozenset(
+ [
+ "apply",
+ "basestring",
+ "buffer",
+ "cmp",
+ "coerce",
+ "execfile",
+ "file",
+ "input", # Not missing, but incompatible semantics
+ "intern",
+ "long",
+ "raw_input",
+ "reduce",
+ "round", # Not missing, but incompatible semantics
+ "StandardError",
+ "unichr",
+ "unicode",
+ "xrange",
+ "reload",
+ ]
+ )
+
+ _unused_magic_methods = frozenset(
+ [
+ "__coerce__",
+ "__delslice__",
+ "__getslice__",
+ "__setslice__",
+ "__oct__",
+ "__hex__",
+ "__nonzero__",
+ "__cmp__",
+ "__div__",
+ "__idiv__",
+ "__rdiv__",
+ ]
+ )
+
+ _invalid_encodings = frozenset(
+ [
+ "base64_codec",
+ "base64",
+ "base_64",
+ "bz2_codec",
+ "bz2",
+ "hex_codec",
+ "hex",
+ "quopri_codec",
+ "quopri",
+ "quotedprintable",
+ "quoted_printable",
+ "uu_codec",
+ "uu",
+ "zlib_codec",
+ "zlib",
+ "zip",
+ "rot13",
+ "rot_13",
+ ]
+ )
_bad_python3_module_map = {
- 'sys-max-int': {
- 'sys': frozenset(['maxint'])
- },
- 'deprecated-itertools-function': {
- 'itertools': frozenset(['izip', 'ifilter', 'imap', 'izip_longest', 'ifilterfalse'])
- },
- 'deprecated-types-field': {
- 'types': frozenset([
- 'EllipsisType', 'XRangeType', 'ComplexType', 'StringType',
- 'TypeType', 'LongType', 'UnicodeType', 'ClassType',
- 'BufferType', 'StringTypes', 'NotImplementedType', 'NoneType',
- 'InstanceType', 'FloatType', 'SliceType', 'UnboundMethodType',
- 'ObjectType', 'IntType', 'TupleType', 'ListType', 'DictType',
- 'FileType', 'DictionaryType', 'BooleanType', 'DictProxyType'
- ])
+ "sys-max-int": {"sys": frozenset(["maxint"])},
+ "deprecated-itertools-function": {
+ "itertools": frozenset(
+ ["izip", "ifilter", "imap", "izip_longest", "ifilterfalse"]
+ )
},
- 'bad-python3-import': frozenset([
- 'anydbm', 'BaseHTTPServer', '__builtin__', 'CGIHTTPServer', 'ConfigParser', 'copy_reg',
- 'cPickle', 'cStringIO', 'Cookie', 'cookielib', 'dbhash', 'dumbdbm',
- 'dumbdb', 'Dialog', 'DocXMLRPCServer', 'FileDialog', 'FixTk', 'gdbm', 'htmlentitydefs',
- 'HTMLParser', 'httplib', 'markupbase', 'Queue', 'repr', 'robotparser', 'ScrolledText',
- 'SimpleDialog', 'SimpleHTTPServer', 'SimpleXMLRPCServer', 'StringIO', 'dummy_thread',
- 'SocketServer', 'test.test_support', 'Tkinter', 'Tix', 'Tkconstants', 'tkColorChooser',
- 'tkCommonDialog', 'Tkdnd', 'tkFileDialog', 'tkFont', 'tkMessageBox', 'tkSimpleDialog',
- 'UserList', 'UserString', 'whichdb', '_winreg', 'xmlrpclib', 'audiodev',
- 'Bastion', 'bsddb185', 'bsddb3', 'Canvas', 'cfmfile', 'cl', 'commands', 'compiler',
- 'dircache', 'dl', 'exception', 'fpformat', 'htmllib', 'ihooks', 'imageop', 'imputil',
- 'linuxaudiodev', 'md5', 'mhlib', 'mimetools', 'MimeWriter', 'mimify', 'multifile',
- 'mutex', 'new', 'popen2', 'posixfile', 'pure', 'rexec', 'rfc822', 'sets', 'sha',
- 'sgmllib', 'sre', 'stringold', 'sunaudio', 'sv', 'test.testall', 'thread', 'timing',
- 'toaiff', 'user', 'urllib2', 'urlparse'
- ]),
- 'deprecated-string-function': {
- 'string': frozenset([
- 'maketrans', 'atof', 'atoi', 'atol', 'capitalize', 'expandtabs', 'find', 'rfind',
- 'index', 'rindex', 'count', 'lower', 'letters', 'split', 'rsplit', 'splitfields',
- 'join', 'joinfields', 'lstrip', 'rstrip', 'strip', 'swapcase', 'translate',
- 'upper', 'ljust', 'rjust', 'center', 'zfill', 'replace',
- 'lowercase', 'letters', 'uppercase', 'atol_error',
- 'atof_error', 'atoi_error', 'index_error'
- ])
+ "deprecated-types-field": {
+ "types": frozenset(
+ [
+ "EllipsisType",
+ "XRangeType",
+ "ComplexType",
+ "StringType",
+ "TypeType",
+ "LongType",
+ "UnicodeType",
+ "ClassType",
+ "BufferType",
+ "StringTypes",
+ "NotImplementedType",
+ "NoneType",
+ "InstanceType",
+ "FloatType",
+ "SliceType",
+ "UnboundMethodType",
+ "ObjectType",
+ "IntType",
+ "TupleType",
+ "ListType",
+ "DictType",
+ "FileType",
+ "DictionaryType",
+ "BooleanType",
+ "DictProxyType",
+ ]
+ )
},
- 'deprecated-operator-function': {
- 'operator': frozenset({'div'}),
+ "bad-python3-import": frozenset(
+ [
+ "anydbm",
+ "BaseHTTPServer",
+ "__builtin__",
+ "CGIHTTPServer",
+ "ConfigParser",
+ "copy_reg",
+ "cPickle",
+ "cStringIO",
+ "Cookie",
+ "cookielib",
+ "dbhash",
+ "dumbdbm",
+ "dumbdb",
+ "Dialog",
+ "DocXMLRPCServer",
+ "FileDialog",
+ "FixTk",
+ "gdbm",
+ "htmlentitydefs",
+ "HTMLParser",
+ "httplib",
+ "markupbase",
+ "Queue",
+ "repr",
+ "robotparser",
+ "ScrolledText",
+ "SimpleDialog",
+ "SimpleHTTPServer",
+ "SimpleXMLRPCServer",
+ "StringIO",
+ "dummy_thread",
+ "SocketServer",
+ "test.test_support",
+ "Tkinter",
+ "Tix",
+ "Tkconstants",
+ "tkColorChooser",
+ "tkCommonDialog",
+ "Tkdnd",
+ "tkFileDialog",
+ "tkFont",
+ "tkMessageBox",
+ "tkSimpleDialog",
+ "UserList",
+ "UserString",
+ "whichdb",
+ "_winreg",
+ "xmlrpclib",
+ "audiodev",
+ "Bastion",
+ "bsddb185",
+ "bsddb3",
+ "Canvas",
+ "cfmfile",
+ "cl",
+ "commands",
+ "compiler",
+ "dircache",
+ "dl",
+ "exception",
+ "fpformat",
+ "htmllib",
+ "ihooks",
+ "imageop",
+ "imputil",
+ "linuxaudiodev",
+ "md5",
+ "mhlib",
+ "mimetools",
+ "MimeWriter",
+ "mimify",
+ "multifile",
+ "mutex",
+ "new",
+ "popen2",
+ "posixfile",
+ "pure",
+ "rexec",
+ "rfc822",
+ "sets",
+ "sha",
+ "sgmllib",
+ "sre",
+ "stringold",
+ "sunaudio",
+ "sv",
+ "test.testall",
+ "thread",
+ "timing",
+ "toaiff",
+ "user",
+ "urllib2",
+ "urlparse",
+ ]
+ ),
+ "deprecated-string-function": {
+ "string": frozenset(
+ [
+ "maketrans",
+ "atof",
+ "atoi",
+ "atol",
+ "capitalize",
+ "expandtabs",
+ "find",
+ "rfind",
+ "index",
+ "rindex",
+ "count",
+ "lower",
+ "letters",
+ "split",
+ "rsplit",
+ "splitfields",
+ "join",
+ "joinfields",
+ "lstrip",
+ "rstrip",
+ "strip",
+ "swapcase",
+ "translate",
+ "upper",
+ "ljust",
+ "rjust",
+ "center",
+ "zfill",
+ "replace",
+ "lowercase",
+ "letters",
+ "uppercase",
+ "atol_error",
+ "atof_error",
+ "atoi_error",
+ "index_error",
+ ]
+ )
},
- 'deprecated-urllib-function': {
- 'urllib': frozenset({
- 'addbase', 'addclosehook', 'addinfo', 'addinfourl', 'always_safe',
- 'basejoin', 'ftpcache', 'ftperrors', 'ftpwrapper', 'getproxies',
- 'getproxies_environment', 'getproxies_macosx_sysconf', 'main', 'noheaders',
- 'pathname2url', 'proxy_bypass', 'proxy_bypass_environment',
- 'proxy_bypass_macosx_sysconf', 'quote', 'quote_plus', 'reporthook',
- 'splitattr', 'splithost', 'splitnport', 'splitpasswd', 'splitport',
- 'splitquery', 'splittag', 'splittype', 'splituser', 'splitvalue', 'unquote',
- 'unquote_plus', 'unwrap', 'url2pathname', 'urlcleanup', 'urlencode',
- 'urlopen', 'urlretrieve'
- }),
+ "deprecated-operator-function": {"operator": frozenset({"div"})},
+ "deprecated-urllib-function": {
+ "urllib": frozenset(
+ {
+ "addbase",
+ "addclosehook",
+ "addinfo",
+ "addinfourl",
+ "always_safe",
+ "basejoin",
+ "ftpcache",
+ "ftperrors",
+ "ftpwrapper",
+ "getproxies",
+ "getproxies_environment",
+ "getproxies_macosx_sysconf",
+ "main",
+ "noheaders",
+ "pathname2url",
+ "proxy_bypass",
+ "proxy_bypass_environment",
+ "proxy_bypass_macosx_sysconf",
+ "quote",
+ "quote_plus",
+ "reporthook",
+ "splitattr",
+ "splithost",
+ "splitnport",
+ "splitpasswd",
+ "splitport",
+ "splitquery",
+ "splittag",
+ "splittype",
+ "splituser",
+ "splitvalue",
+ "unquote",
+ "unquote_plus",
+ "unwrap",
+ "url2pathname",
+ "urlcleanup",
+ "urlencode",
+ "urlopen",
+ "urlretrieve",
+ }
+ )
},
- 'deprecated-sys-function': {
- 'sys': frozenset({'exc_clear'}),
- }
+ "deprecated-sys-function": {"sys": frozenset({"exc_clear"})},
}
if (3, 4) <= sys.version_info < (3, 4, 4):
# Python 3.4.0 -> 3.4.3 has a bug which breaks `repr_tree()`:
# https://bugs.python.org/issue23572
- _python_2_tests = frozenset() # type: FrozenSet[str]
+ _python_2_tests = frozenset() # type: FrozenSet[str]
else:
_python_2_tests = frozenset(
- [astroid.extract_node(x).repr_tree() for x in [
- 'sys.version_info[0] == 2',
- 'sys.version_info[0] < 3',
- 'sys.version_info == (2, 7)',
- 'sys.version_info <= (2, 7)',
- 'sys.version_info < (3, 0)',
- ]])
+ [
+ astroid.extract_node(x).repr_tree()
+ for x in [
+ "sys.version_info[0] == 2",
+ "sys.version_info[0] < 3",
+ "sys.version_info == (2, 7)",
+ "sys.version_info <= (2, 7)",
+ "sys.version_info < (3, 0)",
+ ]
+ ]
+ )
def __init__(self, *args, **kwargs):
self._future_division = False
@@ -594,18 +913,23 @@ class Python3Checker(checkers.BaseChecker):
self._branch_stack = []
super(Python3Checker, self).__init__(*args, **kwargs)
- # pylint: disable=keyword-arg-before-vararg
- def add_message(self, msg_id, always_warn=False, # pylint: disable=arguments-differ
- *args, **kwargs):
- if always_warn or not (self._branch_stack and self._branch_stack[-1].is_py2_only):
+ # pylint: disable=keyword-arg-before-vararg, arguments-differ
+ def add_message(self, msg_id, always_warn=False, *args, **kwargs):
+ if always_warn or not (
+ self._branch_stack and self._branch_stack[-1].is_py2_only
+ ):
super(Python3Checker, self).add_message(msg_id, *args, **kwargs)
def _is_py2_test(self, node):
- if isinstance(node.test, astroid.Attribute) and isinstance(node.test.expr, astroid.Name):
- if node.test.expr.name == 'six' and node.test.attrname == 'PY2':
+ if isinstance(node.test, astroid.Attribute) and isinstance(
+ node.test.expr, astroid.Name
+ ):
+ if node.test.expr.name == "six" and node.test.attrname == "PY2":
return True
- elif (isinstance(node.test, astroid.Compare) and
- node.test.repr_tree() in self._python_2_tests):
+ elif (
+ isinstance(node.test, astroid.Compare)
+ and node.test.repr_tree() in self._python_2_tests
+ ):
return True
return False
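A small assumed example of the guard shapes _is_py2_test recognises; code under a branch detected as Python-2-only is exempted from most porting warnings via the branch stack consulted in add_message above:

import sys

if sys.version_info[0] == 2:
    # Recognised as a Python-2-only branch: the `unicode` reference below is
    # assumed not to be reported as unicode-builtin, because add_message
    # checks the branch stack first.
    text_type = unicode
else:
    text_type = str

print(text_type("checked on both interpreters"))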
@@ -630,45 +954,40 @@ class Python3Checker(checkers.BaseChecker):
if node.is_method():
if node.name in self._unused_magic_methods:
method_name = node.name
- if node.name.startswith('__'):
+ if node.name.startswith("__"):
method_name = node.name[2:-2]
- self.add_message(method_name + '-method', node=node)
- elif node.name == 'next':
+ self.add_message(method_name + "-method", node=node)
+ elif node.name == "next":
# If there is a method named `next` declared, if it is invokable
# with zero arguments then it implements the Iterator protocol.
# This means if the method is an instance method or a
# classmethod 1 argument should cause a failure, if it is a
# staticmethod 0 arguments should cause a failure.
failing_arg_count = 1
- if utils.decorated_with(node,
- [bases.BUILTINS + ".staticmethod"]):
+ if utils.decorated_with(node, [bases.BUILTINS + ".staticmethod"]):
failing_arg_count = 0
if len(node.args.args) == failing_arg_count:
- self.add_message('next-method-defined', node=node)
+ self.add_message("next-method-defined", node=node)
- @utils.check_messages('parameter-unpacking')
+ @utils.check_messages("parameter-unpacking")
def visit_arguments(self, node):
for arg in node.args:
if isinstance(arg, astroid.Tuple):
- self.add_message('parameter-unpacking', node=arg)
+ self.add_message("parameter-unpacking", node=arg)
- @utils.check_messages('comprehension-escape')
+ @utils.check_messages("comprehension-escape")
def visit_listcomp(self, node):
names = {
- generator.target.name for generator in node.generators
+ generator.target.name
+ for generator in node.generators
if isinstance(generator.target, astroid.AssignName)
}
scope = node.parent.scope()
- scope_names = scope.nodes_of_class(
- astroid.Name,
- skip_klass=astroid.FunctionDef,
- )
+ scope_names = scope.nodes_of_class(astroid.Name, skip_klass=astroid.FunctionDef)
has_redefined_assign_name = any(
assign_name
- for assign_name in
- scope.nodes_of_class(
- astroid.AssignName,
- skip_klass=astroid.FunctionDef,
+ for assign_name in scope.nodes_of_class(
+ astroid.AssignName, skip_klass=astroid.FunctionDef
)
if assign_name.name in names and assign_name.lineno > node.lineno
)
@@ -678,14 +997,16 @@ class Python3Checker(checkers.BaseChecker):
emitted_for_names = set()
scope_names = list(scope_names)
for scope_name in scope_names:
- if (scope_name.name not in names
- or scope_name.lineno <= node.lineno
- or scope_name.name in emitted_for_names
- or scope_name.scope() == node):
+ if (
+ scope_name.name not in names
+ or scope_name.lineno <= node.lineno
+ or scope_name.name in emitted_for_names
+ or scope_name.scope() == node
+ ):
continue
emitted_for_names.add(scope_name.name)
- self.add_message('comprehension-escape', node=scope_name)
+ self.add_message("comprehension-escape", node=scope_name)
def visit_name(self, node):
"""Detect when a "bad" built-in is referenced."""
@@ -694,16 +1015,17 @@ class Python3Checker(checkers.BaseChecker):
return
if node.name not in self._bad_builtins:
return
- if (node_ignores_exception(node)
- or isinstance(find_try_except_wrapper_node(node), astroid.ExceptHandler)):
+ if node_ignores_exception(node) or isinstance(
+ find_try_except_wrapper_node(node), astroid.ExceptHandler
+ ):
return
- message = node.name.lower() + '-builtin'
+ message = node.name.lower() + "-builtin"
self.add_message(message, node=node)
- @utils.check_messages('print-statement')
+ @utils.check_messages("print-statement")
def visit_print(self, node):
- self.add_message('print-statement', node=node, always_warn=True)
+ self.add_message("print-statement", node=node, always_warn=True)
def _warn_if_deprecated(self, node, module, attributes, report_on_modules=True):
for message, module_map in self._bad_python3_module_map.items():
@@ -716,67 +1038,64 @@ class Python3Checker(checkers.BaseChecker):
self.add_message(message, node=node)
def visit_importfrom(self, node):
- if node.modname == '__future__':
+ if node.modname == "__future__":
for name, _ in node.names:
- if name == 'division':
+ if name == "division":
self._future_division = True
- elif name == 'absolute_import':
+ elif name == "absolute_import":
self._future_absolute_import = True
else:
if not self._future_absolute_import:
- if self.linter.is_message_enabled('no-absolute-import'):
- self.add_message('no-absolute-import', node=node)
+ if self.linter.is_message_enabled("no-absolute-import"):
+ self.add_message("no-absolute-import", node=node)
self._future_absolute_import = True
if not _is_conditional_import(node) and not node.level:
self._warn_if_deprecated(node, node.modname, {x[0] for x in node.names})
- if node.names[0][0] == '*':
- if self.linter.is_message_enabled('import-star-module-level'):
+ if node.names[0][0] == "*":
+ if self.linter.is_message_enabled("import-star-module-level"):
if not isinstance(node.scope(), astroid.Module):
- self.add_message('import-star-module-level', node=node)
+ self.add_message("import-star-module-level", node=node)
def visit_import(self, node):
if not self._future_absolute_import:
- if self.linter.is_message_enabled('no-absolute-import'):
- self.add_message('no-absolute-import', node=node)
+ if self.linter.is_message_enabled("no-absolute-import"):
+ self.add_message("no-absolute-import", node=node)
self._future_absolute_import = True
if not _is_conditional_import(node):
for name, _ in node.names:
self._warn_if_deprecated(node, name, None)
- @utils.check_messages('metaclass-assignment')
+ @utils.check_messages("metaclass-assignment")
def visit_classdef(self, node):
- if '__metaclass__' in node.locals:
- self.add_message('metaclass-assignment', node=node)
+ if "__metaclass__" in node.locals:
+ self.add_message("metaclass-assignment", node=node)
locals_and_methods = set(node.locals).union(x.name for x in node.mymethods())
- if '__eq__' in locals_and_methods and '__hash__' not in locals_and_methods:
- self.add_message('eq-without-hash', node=node)
+ if "__eq__" in locals_and_methods and "__hash__" not in locals_and_methods:
+ self.add_message("eq-without-hash", node=node)
- @utils.check_messages('old-division')
+ @utils.check_messages("old-division")
def visit_binop(self, node):
- if not self._future_division and node.op == '/':
+ if not self._future_division and node.op == "/":
for arg in (node.left, node.right):
if isinstance(arg, astroid.Const) and isinstance(arg.value, float):
break
else:
- self.add_message('old-division', node=node)
+ self.add_message("old-division", node=node)
def _check_cmp_argument(self, node):
# Check that the `cmp` argument is used
kwargs = []
- if (isinstance(node.func, astroid.Attribute)
- and node.func.attrname == 'sort'):
+ if isinstance(node.func, astroid.Attribute) and node.func.attrname == "sort":
inferred = utils.safe_infer(node.func.expr)
if not inferred:
return
builtins_list = "{}.list".format(bases.BUILTINS)
- if (isinstance(inferred, astroid.List)
- or inferred.qname() == builtins_list):
+ if isinstance(inferred, astroid.List) or inferred.qname() == builtins_list:
kwargs = node.keywords
- elif (isinstance(node.func, astroid.Name)
- and node.func.name == 'sorted'):
+ elif isinstance(node.func, astroid.Name) and node.func.name == "sorted":
inferred = utils.safe_infer(node.func)
if not inferred:
return
@@ -786,8 +1105,8 @@ class Python3Checker(checkers.BaseChecker):
kwargs = node.keywords
for kwarg in kwargs or []:
- if kwarg.arg == 'cmp':
- self.add_message('using-cmp-argument', node=node)
+ if kwarg.arg == "cmp":
+ self.add_message("using-cmp-argument", node=node)
return
@staticmethod
@@ -810,8 +1129,10 @@ class Python3Checker(checkers.BaseChecker):
for inferred_type in inferred_types:
if inferred_type is astroid.Uninferable:
confidence = INFERENCE_FAILURE
- elif not (isinstance(inferred_type, astroid.Const) and
- isinstance(inferred_type.value, str)):
+ elif not (
+ isinstance(inferred_type, astroid.Const)
+ and isinstance(inferred_type.value, str)
+ ):
return None
return confidence
@@ -826,27 +1147,37 @@ class Python3Checker(checkers.BaseChecker):
continue
inferred_types.add(inferred_receiver)
if isinstance(inferred_receiver, astroid.Module):
- self._warn_if_deprecated(node, inferred_receiver.name,
- {node.func.attrname},
- report_on_modules=False)
- if (_inferred_value_is_dict(inferred_receiver)
- and node.func.attrname in DICT_METHODS):
+ self._warn_if_deprecated(
+ node,
+ inferred_receiver.name,
+ {node.func.attrname},
+ report_on_modules=False,
+ )
+ if (
+ _inferred_value_is_dict(inferred_receiver)
+ and node.func.attrname in DICT_METHODS
+ ):
if not _in_iterating_context(node):
- checker = 'dict-{}-not-iterating'.format(node.func.attrname)
+ checker = "dict-{}-not-iterating".format(node.func.attrname)
self.add_message(checker, node=node)
except astroid.InferenceError:
pass
if node.args:
is_str_confidence = self._could_be_string(inferred_types)
if is_str_confidence:
- if (node.func.attrname in ('encode', 'decode') and
- len(node.args) >= 1 and node.args[0]):
+ if (
+ node.func.attrname in ("encode", "decode")
+ and len(node.args) >= 1
+ and node.args[0]
+ ):
first_arg = node.args[0]
self._validate_encoding(first_arg, node)
- if (node.func.attrname == 'translate' and
- self._has_only_n_positional_args(node, 2) and
- self._is_none(node.args[0]) and
- self._is_constant_string_or_name(node.args[1])):
+ if (
+ node.func.attrname == "translate"
+ and self._has_only_n_positional_args(node, 2)
+ and self._is_none(node.args[0])
+ and self._is_constant_string_or_name(node.args[1])
+ ):
# The above statement looking for calls of the form:
#
# foo.translate(None, 'abc123')
@@ -859,31 +1190,33 @@ class Python3Checker(checkers.BaseChecker):
# after checking several large codebases it did not have any false
# positives while finding several real issues. This call pattern seems
# rare enough that the trade off is worth it.
- self.add_message('deprecated-str-translate-call',
- node=node,
- confidence=is_str_confidence)
+ self.add_message(
+ "deprecated-str-translate-call",
+ node=node,
+ confidence=is_str_confidence,
+ )
return
if node.keywords:
return
- if node.func.attrname == 'next':
- self.add_message('next-method-called', node=node)
+ if node.func.attrname == "next":
+ self.add_message("next-method-called", node=node)
else:
if _check_dict_node(node.func.expr):
- if node.func.attrname in ('iterkeys', 'itervalues', 'iteritems'):
- self.add_message('dict-iter-method', node=node)
- elif node.func.attrname in ('viewkeys', 'viewvalues', 'viewitems'):
- self.add_message('dict-view-method', node=node)
+ if node.func.attrname in ("iterkeys", "itervalues", "iteritems"):
+ self.add_message("dict-iter-method", node=node)
+ elif node.func.attrname in ("viewkeys", "viewvalues", "viewitems"):
+ self.add_message("dict-view-method", node=node)
elif isinstance(node.func, astroid.Name):
found_node = node.func.lookup(node.func.name)[0]
if _is_builtin(found_node):
- if node.func.name in ('filter', 'map', 'range', 'zip'):
+ if node.func.name in ("filter", "map", "range", "zip"):
if not _in_iterating_context(node):
- checker = '{}-builtin-not-iterating'.format(node.func.name)
+ checker = "{}-builtin-not-iterating".format(node.func.name)
self.add_message(checker, node=node)
- if node.func.name == 'open' and node.keywords:
+ if node.func.name == "open" and node.keywords:
kwargs = node.keywords
for kwarg in kwargs or []:
- if kwarg.arg == 'encoding':
+ if kwarg.arg == "encoding":
self._validate_encoding(kwarg.value, node)
break
@@ -891,10 +1224,9 @@ class Python3Checker(checkers.BaseChecker):
if isinstance(encoding, astroid.Const):
value = encoding.value
if value in self._invalid_encodings:
- self.add_message('invalid-str-codec',
- node=node)
+ self.add_message("invalid-str-codec", node=node)
- @utils.check_messages('indexing-exception')
+ @utils.check_messages("indexing-exception")
def visit_subscript(self, node):
""" Look for indexing exceptions. """
try:
@@ -902,7 +1234,7 @@ class Python3Checker(checkers.BaseChecker):
if not isinstance(inferred, astroid.Instance):
continue
if utils.inherit_from_std_ex(inferred):
- self.add_message('indexing-exception', node=node)
+ self.add_message("indexing-exception", node=node)
except astroid.InferenceError:
return
@@ -913,42 +1245,49 @@ class Python3Checker(checkers.BaseChecker):
def visit_delattr(self, node):
self.visit_attribute(node)
- @utils.check_messages('exception-message-attribute', 'xreadlines-attribute')
+ @utils.check_messages("exception-message-attribute", "xreadlines-attribute")
def visit_attribute(self, node):
"""Look for removed attributes"""
- if node.attrname == 'xreadlines':
- self.add_message('xreadlines-attribute', node=node)
+ if node.attrname == "xreadlines":
+ self.add_message("xreadlines-attribute", node=node)
return
- exception_message = 'message'
+ exception_message = "message"
try:
for inferred in node.expr.infer():
- if (isinstance(inferred, astroid.Instance) and
- utils.inherit_from_std_ex(inferred)):
+ if isinstance(inferred, astroid.Instance) and utils.inherit_from_std_ex(
+ inferred
+ ):
if node.attrname == exception_message:
# Exceptions with .message clearly defined are an exception
if exception_message in inferred.instance_attrs:
continue
- self.add_message('exception-message-attribute', node=node)
+ self.add_message("exception-message-attribute", node=node)
if isinstance(inferred, astroid.Module):
- self._warn_if_deprecated(node, inferred.name, {node.attrname},
- report_on_modules=False)
+ self._warn_if_deprecated(
+ node, inferred.name, {node.attrname}, report_on_modules=False
+ )
except astroid.InferenceError:
return
- @utils.check_messages('unpacking-in-except', 'comprehension-escape')
+ @utils.check_messages("unpacking-in-except", "comprehension-escape")
def visit_excepthandler(self, node):
"""Visit an except handler block and check for exception unpacking."""
+
def _is_used_in_except_block(node):
scope = node.scope()
current = node
- while current and current != scope and not isinstance(current, astroid.ExceptHandler):
+ while (
+ current
+ and current != scope
+ and not isinstance(current, astroid.ExceptHandler)
+ ):
current = current.parent
return isinstance(current, astroid.ExceptHandler) and current.type != node
if isinstance(node.name, (astroid.Tuple, astroid.List)):
- self.add_message('unpacking-in-except', node=node)
+ self.add_message("unpacking-in-except", node=node)
return
if not node.name:
@@ -956,36 +1295,35 @@ class Python3Checker(checkers.BaseChecker):
# Find any names
scope = node.parent.scope()
- scope_names = scope.nodes_of_class(
- astroid.Name,
- skip_klass=astroid.FunctionDef,
- )
+ scope_names = scope.nodes_of_class(astroid.Name, skip_klass=astroid.FunctionDef)
scope_names = list(scope_names)
potential_leaked_names = [
scope_name
for scope_name in scope_names
- if scope_name.name == node.name.name and scope_name.lineno > node.lineno
+ if scope_name.name == node.name.name
+ and scope_name.lineno > node.lineno
and not _is_used_in_except_block(scope_name)
]
reassignments_for_same_name = {
assign_name.lineno
- for assign_name in
- scope.nodes_of_class(
- astroid.AssignName,
- skip_klass=astroid.FunctionDef,
+ for assign_name in scope.nodes_of_class(
+ astroid.AssignName, skip_klass=astroid.FunctionDef
)
if assign_name.name == node.name.name
}
for leaked_name in potential_leaked_names:
- if any(node.lineno < elem < leaked_name.lineno for elem in reassignments_for_same_name):
+ if any(
+ node.lineno < elem < leaked_name.lineno
+ for elem in reassignments_for_same_name
+ ):
continue
- self.add_message('exception-escape', node=leaked_name)
+ self.add_message("exception-escape", node=leaked_name)
- @utils.check_messages('backtick')
+ @utils.check_messages("backtick")
def visit_repr(self, node):
- self.add_message('backtick', node=node)
+ self.add_message("backtick", node=node)
- @utils.check_messages('raising-string', 'old-raise-syntax')
+ @utils.check_messages("raising-string", "old-raise-syntax")
def visit_raise(self, node):
"""Visit a raise statement and check for raising
strings or old-raise-syntax.
@@ -1007,55 +1345,62 @@ class Python3Checker(checkers.BaseChecker):
if isinstance(expr, astroid.Const):
value = expr.value
if isinstance(value, str):
- self.add_message('raising-string', node=node)
+ self.add_message("raising-string", node=node)
return True
return None
class Python3TokenChecker(checkers.BaseTokenChecker):
__implements__ = interfaces.ITokenChecker
- name = 'python3'
+ name = "python3"
enabled = False
msgs = {
- 'E1606': ('Use of long suffix',
- 'long-suffix',
- 'Used when "l" or "L" is used to mark a long integer. '
- 'This will not work in Python 3, since `int` and `long` '
- 'types have merged.',
- {'maxversion': (3, 0)}),
- 'E1607': ('Use of the <> operator',
- 'old-ne-operator',
- 'Used when the deprecated "<>" operator is used instead '
- 'of "!=". This is removed in Python 3.',
- {'maxversion': (3, 0),
- 'old_names': [('W0331', 'old-ne-operator')]}),
- 'E1608': ('Use of old octal literal',
- 'old-octal-literal',
- 'Used when encountering the old octal syntax, '
- 'removed in Python 3. To use the new syntax, '
- 'prepend 0o on the number.',
- {'maxversion': (3, 0)}),
- 'E1610': ('Non-ascii bytes literals not supported in 3.x',
- 'non-ascii-bytes-literal',
- 'Used when non-ascii bytes literals are found in a program. '
- 'They are no longer supported in Python 3.',
- {'maxversion': (3, 0)}),
+ "E1606": (
+ "Use of long suffix",
+ "long-suffix",
+ 'Used when "l" or "L" is used to mark a long integer. '
+ "This will not work in Python 3, since `int` and `long` "
+ "types have merged.",
+ {"maxversion": (3, 0)},
+ ),
+ "E1607": (
+ "Use of the <> operator",
+ "old-ne-operator",
+ 'Used when the deprecated "<>" operator is used instead '
+ 'of "!=". This is removed in Python 3.',
+ {"maxversion": (3, 0), "old_names": [("W0331", "old-ne-operator")]},
+ ),
+ "E1608": (
+ "Use of old octal literal",
+ "old-octal-literal",
+ "Used when encountering the old octal syntax, "
+ "removed in Python 3. To use the new syntax, "
+ "prepend 0o on the number.",
+ {"maxversion": (3, 0)},
+ ),
+ "E1610": (
+ "Non-ascii bytes literals not supported in 3.x",
+ "non-ascii-bytes-literal",
+ "Used when non-ascii bytes literals are found in a program. "
+ "They are no longer supported in Python 3.",
+ {"maxversion": (3, 0)},
+ ),
}
def process_tokens(self, tokens):
for idx, (tok_type, token, start, _, _) in enumerate(tokens):
if tok_type == tokenize.NUMBER:
- if token.lower().endswith('l'):
+ if token.lower().endswith("l"):
# This has a different semantic than lowercase-l-suffix.
- self.add_message('long-suffix', line=start[0])
+ self.add_message("long-suffix", line=start[0])
elif _is_old_octal(token):
- self.add_message('old-octal-literal', line=start[0])
- if tokens[idx][1] == '<>':
- self.add_message('old-ne-operator', line=tokens[idx][2][0])
- if tok_type == tokenize.STRING and token.startswith('b'):
+ self.add_message("old-octal-literal", line=start[0])
+ if tokens[idx][1] == "<>":
+ self.add_message("old-ne-operator", line=tokens[idx][2][0])
+ if tok_type == tokenize.STRING and token.startswith("b"):
if any(elem for elem in token if ord(elem) > 127):
- self.add_message('non-ascii-bytes-literal', line=start[0])
+ self.add_message("non-ascii-bytes-literal", line=start[0])
def register(linter):
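The token-level checks in Python3TokenChecker operate on raw source text, so they can flag literal syntax that no longer even parses under Python 3. An illustrative Python 2 fragment (not part of this diff) that exercises each message:

    size = 10L          # long-suffix
    mode = 0755         # old-octal-literal: write 0o755 instead
    if size <> mode:    # old-ne-operator: use !=
        pass
    data = b"café"      # non-ascii-bytes-literal: a byte above 127 inside a bytes literal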
diff --git a/pylint/checkers/raw_metrics.py b/pylint/checkers/raw_metrics.py
index 08b76db38..7ab165675 100644
--- a/pylint/checkers/raw_metrics.py
+++ b/pylint/checkers/raw_metrics.py
@@ -28,22 +28,21 @@ from pylint.reporters.ureports.nodes import Table
def report_raw_stats(sect, stats, old_stats):
"""calculate percentage of code / doc / comment / empty
"""
- total_lines = stats['total_lines']
+ total_lines = stats["total_lines"]
if not total_lines:
raise EmptyReportError()
- sect.description = '%s lines have been analyzed' % total_lines
- lines = ('type', 'number', '%', 'previous', 'difference')
- for node_type in ('code', 'docstring', 'comment', 'empty'):
- key = node_type + '_lines'
+ sect.description = "%s lines have been analyzed" % total_lines
+ lines = ("type", "number", "%", "previous", "difference")
+ for node_type in ("code", "docstring", "comment", "empty"):
+ key = node_type + "_lines"
total = stats[key]
percent = float(total * 100) / total_lines
old = old_stats.get(key, None)
if old is not None:
diff_str = diff_string(old, total)
else:
- old, diff_str = 'NC', 'NC'
- lines += (node_type, str(total), '%.2f' % percent,
- str(old), diff_str)
+ old, diff_str = "NC", "NC"
+ lines += (node_type, str(total), "%.2f" % percent, str(old), diff_str)
sect.append(Table(children=lines, cols=5, rheaders=1))
@@ -59,13 +58,13 @@ class RawMetricsChecker(BaseTokenChecker):
__implements__ = (ITokenChecker,)
# configuration section name
- name = 'metrics'
+ name = "metrics"
# configuration options
options = ()
# messages
- msgs = {} # type: Any
+ msgs = {} # type: Any
# reports
- reports = (('RP0701', 'Raw metrics', report_raw_stats),)
+ reports = (("RP0701", "Raw metrics", report_raw_stats),)
def __init__(self, linter):
BaseTokenChecker.__init__(self, linter)
@@ -73,9 +72,13 @@ class RawMetricsChecker(BaseTokenChecker):
def open(self):
"""init statistics"""
- self.stats = self.linter.add_stats(total_lines=0, code_lines=0,
- empty_lines=0, docstring_lines=0,
- comment_lines=0)
+ self.stats = self.linter.add_stats(
+ total_lines=0,
+ code_lines=0,
+ empty_lines=0,
+ docstring_lines=0,
+ comment_lines=0,
+ )
def process_tokens(self, tokens):
"""update stats"""
@@ -83,12 +86,13 @@ class RawMetricsChecker(BaseTokenChecker):
tokens = list(tokens)
while i < len(tokens):
i, lines_number, line_type = get_type(tokens, i)
- self.stats['total_lines'] += lines_number
+ self.stats["total_lines"] += lines_number
self.stats[line_type] += lines_number
JUNK = (tokenize.NL, tokenize.INDENT, tokenize.NEWLINE, tokenize.ENDMARKER)
+
def get_type(tokens, start_index):
"""return the line type : docstring, comment, code, empty"""
i = start_index
@@ -101,16 +105,16 @@ def get_type(tokens, start_index):
pos = tokens[i][3]
if line_type is None:
if tok_type == tokenize.STRING:
- line_type = 'docstring_lines'
+ line_type = "docstring_lines"
elif tok_type == tokenize.COMMENT:
- line_type = 'comment_lines'
+ line_type = "comment_lines"
elif tok_type in JUNK:
pass
else:
- line_type = 'code_lines'
+ line_type = "code_lines"
i += 1
if line_type is None:
- line_type = 'empty_lines'
+ line_type = "empty_lines"
elif i < len(tokens) and tokens[i][0] == tokenize.NEWLINE:
i += 1
return i, pos[0] - start[0] + 1, line_type
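To make the report shape concrete, here is a small self-contained sketch of the percentage arithmetic that report_raw_stats performs, using invented counts; in the checker itself the totals are accumulated by process_tokens through get_type:

    stats = {
        "total_lines": 200,
        "code_lines": 120,
        "docstring_lines": 40,
        "comment_lines": 25,
        "empty_lines": 15,
    }
    for node_type in ("code", "docstring", "comment", "empty"):
        key = node_type + "_lines"
        percent = float(stats[key] * 100) / stats["total_lines"]
        print("%-10s %5d %6.2f%%" % (node_type, stats[key], percent))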
diff --git a/pylint/checkers/refactoring.py b/pylint/checkers/refactoring.py
index 392b648d8..caadd791e 100644
--- a/pylint/checkers/refactoring.py
+++ b/pylint/checkers/refactoring.py
@@ -37,15 +37,14 @@ from pylint import utils as lint_utils
from pylint.checkers import utils
-KNOWN_INFINITE_ITERATORS = {
- 'itertools.count',
-}
+KNOWN_INFINITE_ITERATORS = {"itertools.count"}
def _all_elements_are_true(gen):
values = list(gen)
return values and all(values)
+
def _if_statement_is_always_returning(if_node):
def _has_return_node(elems, scope):
for node in elems:
@@ -55,9 +54,7 @@ def _if_statement_is_always_returning(if_node):
yield node.scope() is scope
scope = if_node.scope()
- return _all_elements_are_true(
- _has_return_node(if_node.body, scope=scope)
- )
+ return _all_elements_are_true(_has_return_node(if_node.body, scope=scope))
class RefactoringChecker(checkers.BaseTokenChecker):
@@ -70,129 +67,162 @@ class RefactoringChecker(checkers.BaseTokenChecker):
__implements__ = (interfaces.ITokenChecker, interfaces.IAstroidChecker)
- name = 'refactoring'
+ name = "refactoring"
msgs = {
- 'R1701': ("Consider merging these isinstance calls to isinstance(%s, (%s))",
- "consider-merging-isinstance",
- "Used when multiple consecutive isinstance calls can be merged into one."),
- 'R1706': ("Consider using ternary (%s)",
- "consider-using-ternary",
- "Used when one of known pre-python 2.5 ternary syntax is used.",),
- 'R1709': ("Boolean expression may be simplified to %s",
- "simplify-boolean-expression",
- "Emitted when redundant pre-python 2.5 ternary syntax is used.",),
- 'R1702': ('Too many nested blocks (%s/%s)',
- 'too-many-nested-blocks',
- 'Used when a function or a method has too many nested '
- 'blocks. This makes the code less understandable and '
- 'maintainable.',
- {'old_names': [('R0101', 'too-many-nested-blocks')]}),
- 'R1703': ('The if statement can be replaced with %s',
- 'simplifiable-if-statement',
- 'Used when an if statement can be replaced with '
- '\'bool(test)\'. ',
- {'old_names': [('R0102', 'simplifiable-if-statement')]}),
- 'R1704': ('Redefining argument with the local name %r',
- 'redefined-argument-from-local',
- 'Used when a local name is redefining an argument, which might '
- 'suggest a potential error. This is taken in account only for '
- 'a handful of name binding operations, such as for iteration, '
- 'with statement assignment and exception handler assignment.'
- ),
- 'R1705': ('Unnecessary "%s" after "return"',
- 'no-else-return',
- 'Used in order to highlight an unnecessary block of '
- 'code following an if containing a return statement. '
- 'As such, it will warn when it encounters an else '
- 'following a chain of ifs, all of them containing a '
- 'return statement.'
- ),
- 'R1707': ('Disallow trailing comma tuple',
- 'trailing-comma-tuple',
- 'In Python, a tuple is actually created by the comma symbol, '
- 'not by the parentheses. Unfortunately, one can actually create a '
- 'tuple by misplacing a trailing comma, which can lead to potential '
- 'weird bugs in your code. You should always use parentheses '
- 'explicitly for creating a tuple.'),
- 'R1708': ('Do not raise StopIteration in generator, use return statement instead',
- 'stop-iteration-return',
- 'According to PEP479, the raise of StopIteration to end the loop of '
- 'a generator may lead to hard to find bugs. This PEP specify that '
- 'raise StopIteration has to be replaced by a simple return statement'),
- 'R1710': ('Either all return statements in a function should return an expression, '
- 'or none of them should.',
- 'inconsistent-return-statements',
- 'According to PEP8, if any return statement returns an expression, '
- 'any return statements where no value is returned should explicitly '
- 'state this as return None, and an explicit return statement '
- 'should be present at the end of the function (if reachable)'
- ),
- 'R1711': ("Useless return at end of function or method",
- 'useless-return',
- 'Emitted when a single "return" or "return None" statement is found '
- 'at the end of function or method definition. This statement can safely be '
- 'removed because Python will implicitly return None'
- ),
- 'R1712': ('Consider using tuple unpacking for swapping variables',
- 'consider-swap-variables',
- 'You do not have to use a temporary variable in order to '
- 'swap variables. Using "tuple unpacking" to directly swap '
- 'variables makes the intention more clear.'
- ),
- 'R1713': ('Consider using str.join(sequence) for concatenating '
- 'strings from an iterable',
- 'consider-using-join',
- 'Using str.join(sequence) is faster, uses less memory '
- 'and increases readability compared to for-loop iteration.'
- ),
- 'R1714': ('Consider merging these comparisons with "in" to %r',
- 'consider-using-in',
- 'To check if a variable is equal to one of many values,'
- 'combine the values into a tuple and check if the variable is contained "in" it '
- 'instead of checking for equality against each of the values.'
- 'This is faster and less verbose.'
- ),
- 'R1715': ('Consider using dict.get for getting values from a dict '
- 'if a key is present or a default if not',
- 'consider-using-get',
- 'Using the builtin dict.get for getting a value from a dictionary '
- 'if a key is present or a default if not, is simpler and considered '
- 'more idiomatic, although sometimes a bit slower'
- ),
- 'R1716': ('Simplify chained comparison between the operands',
- 'chained-comparison',
- 'This message is emitted when pylint encounters boolean operation like'
- '"a < b and b < c", suggesting instead to refactor it to "a < b < c"',
- ),
- 'R1717': ('Consider using a dictionary comprehension',
- 'consider-using-dict-comprehension',
- 'Although there is nothing syntactically wrong with this code, '
- 'it is hard to read and can be simplified to a dict comprehension.'
- 'Also it is faster since you don\'t need to create another '
- 'transient list',
- ),
- 'R1718': ('Consider using a set comprehension',
- 'consider-using-set-comprehension',
- 'Although there is nothing syntactically wrong with this code, '
- 'it is hard to read and can be simplified to a set comprehension.'
- 'Also it is faster since you don\'t need to create another '
- 'transient list',
- ),
+ "R1701": (
+ "Consider merging these isinstance calls to isinstance(%s, (%s))",
+ "consider-merging-isinstance",
+ "Used when multiple consecutive isinstance calls can be merged into one.",
+ ),
+ "R1706": (
+ "Consider using ternary (%s)",
+ "consider-using-ternary",
+ "Used when one of known pre-python 2.5 ternary syntax is used.",
+ ),
+ "R1709": (
+ "Boolean expression may be simplified to %s",
+ "simplify-boolean-expression",
+ "Emitted when redundant pre-python 2.5 ternary syntax is used.",
+ ),
+ "R1702": (
+ "Too many nested blocks (%s/%s)",
+ "too-many-nested-blocks",
+ "Used when a function or a method has too many nested "
+ "blocks. This makes the code less understandable and "
+ "maintainable.",
+ {"old_names": [("R0101", "too-many-nested-blocks")]},
+ ),
+ "R1703": (
+ "The if statement can be replaced with %s",
+ "simplifiable-if-statement",
+ "Used when an if statement can be replaced with " "'bool(test)'. ",
+ {"old_names": [("R0102", "simplifiable-if-statement")]},
+ ),
+ "R1704": (
+ "Redefining argument with the local name %r",
+ "redefined-argument-from-local",
+ "Used when a local name is redefining an argument, which might "
+ "suggest a potential error. This is taken in account only for "
+ "a handful of name binding operations, such as for iteration, "
+ "with statement assignment and exception handler assignment.",
+ ),
+ "R1705": (
+ 'Unnecessary "%s" after "return"',
+ "no-else-return",
+ "Used in order to highlight an unnecessary block of "
+ "code following an if containing a return statement. "
+ "As such, it will warn when it encounters an else "
+ "following a chain of ifs, all of them containing a "
+ "return statement.",
+ ),
+ "R1707": (
+ "Disallow trailing comma tuple",
+ "trailing-comma-tuple",
+ "In Python, a tuple is actually created by the comma symbol, "
+ "not by the parentheses. Unfortunately, one can actually create a "
+ "tuple by misplacing a trailing comma, which can lead to potential "
+ "weird bugs in your code. You should always use parentheses "
+ "explicitly for creating a tuple.",
+ ),
+ "R1708": (
+ "Do not raise StopIteration in generator, use return statement instead",
+ "stop-iteration-return",
+ "According to PEP479, the raise of StopIteration to end the loop of "
+ "a generator may lead to hard to find bugs. This PEP specify that "
+ "raise StopIteration has to be replaced by a simple return statement",
+ ),
+ "R1710": (
+ "Either all return statements in a function should return an expression, "
+ "or none of them should.",
+ "inconsistent-return-statements",
+ "According to PEP8, if any return statement returns an expression, "
+ "any return statements where no value is returned should explicitly "
+ "state this as return None, and an explicit return statement "
+ "should be present at the end of the function (if reachable)",
+ ),
+ "R1711": (
+ "Useless return at end of function or method",
+ "useless-return",
+ 'Emitted when a single "return" or "return None" statement is found '
+ "at the end of function or method definition. This statement can safely be "
+ "removed because Python will implicitly return None",
+ ),
+ "R1712": (
+ "Consider using tuple unpacking for swapping variables",
+ "consider-swap-variables",
+ "You do not have to use a temporary variable in order to "
+ 'swap variables. Using "tuple unpacking" to directly swap '
+ "variables makes the intention more clear.",
+ ),
+ "R1713": (
+ "Consider using str.join(sequence) for concatenating "
+ "strings from an iterable",
+ "consider-using-join",
+ "Using str.join(sequence) is faster, uses less memory "
+ "and increases readability compared to for-loop iteration.",
+ ),
+ "R1714": (
+ 'Consider merging these comparisons with "in" to %r',
+ "consider-using-in",
+ "To check if a variable is equal to one of many values,"
+ 'combine the values into a tuple and check if the variable is contained "in" it '
+ "instead of checking for equality against each of the values."
+ "This is faster and less verbose.",
+ ),
+ "R1715": (
+ "Consider using dict.get for getting values from a dict "
+ "if a key is present or a default if not",
+ "consider-using-get",
+ "Using the builtin dict.get for getting a value from a dictionary "
+ "if a key is present or a default if not, is simpler and considered "
+ "more idiomatic, although sometimes a bit slower",
+ ),
+ "R1716": (
+ "Simplify chained comparison between the operands",
+ "chained-comparison",
+ "This message is emitted when pylint encounters boolean operation like"
+ '"a < b and b < c", suggesting instead to refactor it to "a < b < c"',
+ ),
+ "R1717": (
+ "Consider using a dictionary comprehension",
+ "consider-using-dict-comprehension",
+ "Although there is nothing syntactically wrong with this code, "
+ "it is hard to read and can be simplified to a dict comprehension."
+ "Also it is faster since you don't need to create another "
+ "transient list",
+ ),
+ "R1718": (
+ "Consider using a set comprehension",
+ "consider-using-set-comprehension",
+ "Although there is nothing syntactically wrong with this code, "
+ "it is hard to read and can be simplified to a set comprehension."
+ "Also it is faster since you don't need to create another "
+ "transient list",
+ ),
}
- options = (('max-nested-blocks',
- {'default': 5, 'type': 'int', 'metavar': '<int>',
- 'help': 'Maximum number of nested blocks for function / '
- 'method body'}
- ),
- ('never-returning-functions',
- {'default': ('sys.exit',),
- 'type': 'csv',
- 'help': 'Complete name of functions that never returns. When checking '
- 'for inconsistent-return-statements if a never returning function is '
- 'called then it will be considered as an explicit return statement '
- 'and no message will be printed.'}
- ),)
+ options = (
+ (
+ "max-nested-blocks",
+ {
+ "default": 5,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "Maximum number of nested blocks for function / " "method body",
+ },
+ ),
+ (
+ "never-returning-functions",
+ {
+ "default": ("sys.exit",),
+ "type": "csv",
+ "help": "Complete name of functions that never returns. When checking "
+ "for inconsistent-return-statements if a never returning function is "
+ "called then it will be considered as an explicit return statement "
+ "and no message will be printed.",
+ },
+ ),
+ )
priority = 0
@@ -214,13 +244,13 @@ class RefactoringChecker(checkers.BaseTokenChecker):
@decorators.cachedproperty
def _dummy_rgx(self):
- return lint_utils.get_global_option(
- self, 'dummy-variables-rgx', default=None)
+ return lint_utils.get_global_option(self, "dummy-variables-rgx", default=None)
@staticmethod
def _is_bool_const(node):
- return (isinstance(node.value, astroid.Const)
- and isinstance(node.value.value, bool))
+ return isinstance(node.value, astroid.Const) and isinstance(
+ node.value.value, bool
+ )
def _is_actual_elif(self, node):
"""Check if the given node is an actual elif
@@ -269,11 +299,13 @@ class RefactoringChecker(checkers.BaseTokenChecker):
# Check if we assign to the same value
first_branch_targets = [
- target.name for target in first_branch.targets
+ target.name
+ for target in first_branch.targets
if isinstance(target, astroid.AssignName)
]
else_branch_targets = [
- target.name for target in else_branch.targets
+ target.name
+ for target in else_branch.targets
if isinstance(target, astroid.AssignName)
]
if not first_branch_targets or not else_branch_targets:
@@ -305,30 +337,28 @@ class RefactoringChecker(checkers.BaseTokenChecker):
# The original form is easier to grasp.
return
- self.add_message('simplifiable-if-statement', node=node,
- args=(reduced_to,))
+ self.add_message("simplifiable-if-statement", node=node, args=(reduced_to,))
def process_tokens(self, tokens):
# Process tokens and look for 'if' or 'elif'
for index, token in enumerate(tokens):
token_string = token[1]
- if token_string == 'elif':
+ if token_string == "elif":
# AST exists by the time process_tokens is called, so
# it's safe to assume tokens[index+1]
# exists. tokens[index+1][2] is the elif's position as
# reported by CPython and PyPy,
# tokens[index][2] is the actual position and also is
# reported by IronPython.
- self._elifs.extend([tokens[index][2], tokens[index+1][2]])
+ self._elifs.extend([tokens[index][2], tokens[index + 1][2]])
elif is_trailing_comma(tokens, index):
- if self.linter.is_message_enabled('trailing-comma-tuple'):
- self.add_message('trailing-comma-tuple',
- line=token.start[0])
+ if self.linter.is_message_enabled("trailing-comma-tuple"):
+ self.add_message("trailing-comma-tuple", line=token.start[0])
def leave_module(self, _):
self._init()
- @utils.check_messages('too-many-nested-blocks')
+ @utils.check_messages("too-many-nested-blocks")
def visit_tryexcept(self, node):
self._check_nested_blocks(node)
@@ -346,27 +376,29 @@ class RefactoringChecker(checkers.BaseTokenChecker):
if not isinstance(scope, astroid.FunctionDef):
return
- for defined_argument in scope.args.nodes_of_class(astroid.AssignName,
- skip_klass=(astroid.Lambda, )):
+ for defined_argument in scope.args.nodes_of_class(
+ astroid.AssignName, skip_klass=(astroid.Lambda,)
+ ):
if defined_argument.name == name_node.name:
- self.add_message('redefined-argument-from-local',
- node=name_node,
- args=(name_node.name, ))
+ self.add_message(
+ "redefined-argument-from-local",
+ node=name_node,
+ args=(name_node.name,),
+ )
- @utils.check_messages('redefined-argument-from-local',
- 'too-many-nested-blocks')
+ @utils.check_messages("redefined-argument-from-local", "too-many-nested-blocks")
def visit_for(self, node):
self._check_nested_blocks(node)
for name in node.target.nodes_of_class(astroid.AssignName):
self._check_redefined_argument_from_local(name)
- @utils.check_messages('redefined-argument-from-local')
+ @utils.check_messages("redefined-argument-from-local")
def visit_excepthandler(self, node):
if node.name and isinstance(node.name, astroid.AssignName):
self._check_redefined_argument_from_local(node.name)
- @utils.check_messages('redefined-argument-from-local')
+ @utils.check_messages("redefined-argument-from-local")
def visit_with(self, node):
for _, names in node.items:
if not names:
@@ -383,8 +415,7 @@ class RefactoringChecker(checkers.BaseTokenChecker):
orelse = node.orelse and node.orelse[0]
followed_by_elif = (orelse.lineno, orelse.col_offset) in self._elifs
self.add_message(
- 'no-else-return', node=node,
- args='elif' if followed_by_elif else 'else',
+ "no-else-return", node=node, args="elif" if followed_by_elif else "else"
)
def _check_consider_get(self, node):
@@ -406,38 +437,49 @@ class RefactoringChecker(checkers.BaseTokenChecker):
and type_and_name_are_equal(node.body[0].value.slice.value, node.test.left)
and len(node.body[0].targets) == 1
and isinstance(node.body[0].targets[0], astroid.AssignName)
- and isinstance(utils.safe_infer(node.test.ops[0][1]), astroid.Dict))
+ and isinstance(utils.safe_infer(node.test.ops[0][1]), astroid.Dict)
+ )
if if_block_ok and not node.orelse:
- self.add_message('consider-using-get', node=node)
- elif (if_block_ok and len(node.orelse) == 1
- and isinstance(node.orelse[0], astroid.Assign)
- and type_and_name_are_equal(node.orelse[0].targets[0], node.body[0].targets[0])
- and len(node.orelse[0].targets) == 1):
- self.add_message('consider-using-get', node=node)
-
- @utils.check_messages('too-many-nested-blocks', 'simplifiable-if-statement',
- 'no-else-return', 'consider-using-get')
+ self.add_message("consider-using-get", node=node)
+ elif (
+ if_block_ok
+ and len(node.orelse) == 1
+ and isinstance(node.orelse[0], astroid.Assign)
+ and type_and_name_are_equal(
+ node.orelse[0].targets[0], node.body[0].targets[0]
+ )
+ and len(node.orelse[0].targets) == 1
+ ):
+ self.add_message("consider-using-get", node=node)
+
+ @utils.check_messages(
+ "too-many-nested-blocks",
+ "simplifiable-if-statement",
+ "no-else-return",
+ "consider-using-get",
+ )
def visit_if(self, node):
self._check_simplifiable_if(node)
self._check_nested_blocks(node)
self._check_superfluous_else_return(node)
self._check_consider_get(node)
- @utils.check_messages('too-many-nested-blocks', 'inconsistent-return-statements',
- 'useless-return')
+ @utils.check_messages(
+ "too-many-nested-blocks", "inconsistent-return-statements", "useless-return"
+ )
def leave_functiondef(self, node):
# check left-over nested blocks stack
self._emit_nested_blocks_message_if_needed(self._nested_blocks)
# new scope = reinitialize the stack of nested blocks
self._nested_blocks = []
- # check consistent return statements
+ #  check consistent return statements
self._check_consistent_returns(node)
# check for single return or return None at the end
self._check_return_at_the_end(node)
self._return_nodes[node.name] = []
- @utils.check_messages('stop-iteration-return')
+ @utils.check_messages("stop-iteration-return")
def visit_raise(self, node):
self._check_stop_iteration_inside_generator(node)
@@ -454,25 +496,29 @@ class RefactoringChecker(checkers.BaseTokenChecker):
if exc is None or exc is astroid.Uninferable:
return
if self._check_exception_inherit_from_stopiteration(exc):
- self.add_message('stop-iteration-return', node=node)
+ self.add_message("stop-iteration-return", node=node)
@staticmethod
def _check_exception_inherit_from_stopiteration(exc):
"""Return True if the exception node in argument inherit from StopIteration"""
- stopiteration_qname = '{}.StopIteration'.format(utils.EXCEPTIONS_MODULE)
+ stopiteration_qname = "{}.StopIteration".format(utils.EXCEPTIONS_MODULE)
return any(_class.qname() == stopiteration_qname for _class in exc.mro())
def _check_consider_using_comprehension_constructor(self, node):
- if (isinstance(node.func, astroid.Name) and
- node.args
- and node.func.name in {'dict', 'set'}
- and isinstance(node.args[0], astroid.ListComp)):
- message_name = 'consider-using-{}-comprehension'.format(node.func.name)
+ if (
+ isinstance(node.func, astroid.Name)
+ and node.args
+ and node.func.name in {"dict", "set"}
+ and isinstance(node.args[0], astroid.ListComp)
+ ):
+ message_name = "consider-using-{}-comprehension".format(node.func.name)
self.add_message(message_name, node=node)
- @utils.check_messages('stop-iteration-return',
- 'consider-using-dict-comprehension',
- 'consider-using-set-comprehension')
+ @utils.check_messages(
+ "stop-iteration-return",
+ "consider-using-dict-comprehension",
+ "consider-using-set-comprehension",
+ )
def visit_call(self, node):
self._check_raising_stopiteration_in_generator_next_call(node)
self._check_consider_using_comprehension_constructor(node)
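For reference, the two helpers dispatched from visit_call above react to patterns like the following illustrative snippet (names invented, not code from this commit):

    squares = dict([(i, i * i) for i in range(5)])   # consider-using-dict-comprehension
    letters = set([c for c in "pylint"])             # consider-using-set-comprehension

    def pairwise(iterable):
        it = iter(iterable)
        while True:
            first = next(it)    # stop-iteration-return: StopIteration escapes the generator (PEP 479)
            yield first, next(it)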
@@ -497,17 +543,19 @@ class RefactoringChecker(checkers.BaseTokenChecker):
return
inferred = utils.safe_infer(node.func)
- if getattr(inferred, 'name', '') == 'next':
+ if getattr(inferred, "name", "") == "next":
frame = node.frame()
# The next builtin can only have up to two
# positional arguments and no keyword arguments
has_sentinel_value = len(node.args) > 1
- if (isinstance(frame, astroid.FunctionDef)
- and frame.is_generator()
- and not has_sentinel_value
- and not utils.node_ignores_exception(node, StopIteration)
- and not _looks_like_infinite_iterator(node.args[0])):
- self.add_message('stop-iteration-return', node=node)
+ if (
+ isinstance(frame, astroid.FunctionDef)
+ and frame.is_generator()
+ and not has_sentinel_value
+ and not utils.node_ignores_exception(node, StopIteration)
+ and not _looks_like_infinite_iterator(node.args[0])
+ ):
+ self.add_message("stop-iteration-return", node=node)
def _check_nested_blocks(self, node):
"""Update and check the number of nested blocks
@@ -538,8 +586,11 @@ class RefactoringChecker(checkers.BaseTokenChecker):
def _emit_nested_blocks_message_if_needed(self, nested_blocks):
if len(nested_blocks) > self.config.max_nested_blocks:
- self.add_message('too-many-nested-blocks', node=nested_blocks[0],
- args=(len(nested_blocks), self.config.max_nested_blocks))
+ self.add_message(
+ "too-many-nested-blocks",
+ node=nested_blocks[0],
+ args=(len(nested_blocks), self.config.max_nested_blocks),
+ )
@staticmethod
def _duplicated_isinstance_types(node):
@@ -561,7 +612,7 @@ class RefactoringChecker(checkers.BaseTokenChecker):
if not inferred or not utils.is_builtin_object(inferred):
continue
- if inferred.name != 'isinstance':
+ if inferred.name != "isinstance":
continue
isinstance_object = call.args[0].as_string()
@@ -571,38 +622,44 @@ class RefactoringChecker(checkers.BaseTokenChecker):
duplicated_objects.add(isinstance_object)
if isinstance(isinstance_types, astroid.Tuple):
- elems = [class_type.as_string() for class_type in isinstance_types.itered()]
+ elems = [
+ class_type.as_string() for class_type in isinstance_types.itered()
+ ]
else:
elems = [isinstance_types.as_string()]
all_types[isinstance_object].update(elems)
# Remove all keys which not duplicated
- return {key: value for key, value in all_types.items()
- if key in duplicated_objects}
+ return {
+ key: value for key, value in all_types.items() if key in duplicated_objects
+ }
def _check_consider_merging_isinstance(self, node):
"""Check isinstance calls which can be merged together."""
- if node.op != 'or':
+ if node.op != "or":
return
first_args = self._duplicated_isinstance_types(node)
for duplicated_name, class_names in first_args.items():
names = sorted(name for name in class_names)
- self.add_message('consider-merging-isinstance',
- node=node,
- args=(duplicated_name, ', '.join(names)))
+ self.add_message(
+ "consider-merging-isinstance",
+ node=node,
+ args=(duplicated_name, ", ".join(names)),
+ )
def _check_consider_using_in(self, node):
- allowed_ops = {'or': '==',
- 'and': '!='}
+ allowed_ops = {"or": "==", "and": "!="}
if node.op not in allowed_ops or len(node.values) < 2:
return
for value in node.values:
- if (not isinstance(value, astroid.Compare)
- or len(value.ops) != 1
- or value.ops[0][0] not in allowed_ops[node.op]):
+ if (
+ not isinstance(value, astroid.Compare)
+ or len(value.ops) != 1
+ or value.ops[0][0] not in allowed_ops[node.op]
+ ):
return
for comparable in value.left, value.ops[0][1]:
if isinstance(comparable, astroid.Call):
@@ -626,13 +683,13 @@ class RefactoringChecker(checkers.BaseTokenChecker):
# Gather information for the suggestion
common_variable = sorted(list(common_variables))[0]
- comprehension = 'in' if node.op == 'or' else 'not in'
+ comprehension = "in" if node.op == "or" else "not in"
values = list(collections.OrderedDict.fromkeys(values))
values.remove(common_variable)
- values_string = ', '.join(values) if len(values) != 1 else values[0] + ','
+ values_string = ", ".join(values) if len(values) != 1 else values[0] + ","
suggestion = "%s %s (%s)" % (common_variable, comprehension, values_string)
- self.add_message('consider-using-in', node=node, args=(suggestion,))
+ self.add_message("consider-using-in", node=node, args=(suggestion,))
def _check_chained_comparison(self, node):
"""Check if there is any chained comparison in the expression.
@@ -640,13 +697,19 @@ class RefactoringChecker(checkers.BaseTokenChecker):
Add a refactoring message if a boolOp contains comparison like a < b and b < c,
which can be chained as a < b < c.
"""
- if (node.op != 'and' or len(node.values) < 2
- or not all(isinstance(value, astroid.Compare) for value in node.values)):
+ if (
+ node.op != "and"
+ or len(node.values) < 2
+ or not all(isinstance(value, astroid.Compare) for value in node.values)
+ ):
return
def _find_lower_upper_bounds(comparison_node, lower_bounds, upper_bounds):
operator = comparison_node.ops[0][0]
- left_operand, right_operand = comparison_node.left, comparison_node.ops[0][1]
+ left_operand, right_operand = (
+ comparison_node.left,
+ comparison_node.ops[0][1],
+ )
for operand in (left_operand, right_operand):
value = None
if isinstance(operand, astroid.Name):
@@ -657,12 +720,12 @@ class RefactoringChecker(checkers.BaseTokenChecker):
if value is None:
continue
- if operator in ('<', '<='):
+ if operator in ("<", "<="):
if operand is left_operand:
lower_bounds.append(value)
else:
upper_bounds.append(value)
- elif operator in ('>', '>='):
+ elif operator in (">", ">="):
if operand is left_operand:
upper_bounds.append(value)
else:
@@ -674,11 +737,11 @@ class RefactoringChecker(checkers.BaseTokenChecker):
_find_lower_upper_bounds(comparison_node, lower_bounds, upper_bounds)
if set(lower_bounds).intersection(upper_bounds):
- self.add_message('chained-comparison',
- node=node)
+ self.add_message("chained-comparison", node=node)
- @utils.check_messages('consider-merging-isinstance', 'consider-using-in',
- 'chained-comparison')
+ @utils.check_messages(
+ "consider-merging-isinstance", "consider-using-in", "chained-comparison"
+ )
def visit_boolop(self, node):
self._check_consider_merging_isinstance(node)
self._check_consider_using_in(node)
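The boolop checks listed in the check_messages decorator above target expressions like these (an illustrative sketch with invented names):

    def classify(value, colour, low, mid, high):
        if isinstance(value, int) or isinstance(value, float):   # consider-merging-isinstance
            pass
        if colour == "red" or colour == "blue":                  # consider-using-in: colour in ("red", "blue")
            pass
        if low < mid and mid < high:                             # chained-comparison: low < mid < high
            pass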
@@ -686,17 +749,17 @@ class RefactoringChecker(checkers.BaseTokenChecker):
@staticmethod
def _is_simple_assignment(node):
- return (isinstance(node, astroid.Assign)
- and len(node.targets) == 1
- and isinstance(node.targets[0], astroid.node_classes.AssignName)
- and isinstance(node.value, astroid.node_classes.Name))
+ return (
+ isinstance(node, astroid.Assign)
+ and len(node.targets) == 1
+ and isinstance(node.targets[0], astroid.node_classes.AssignName)
+ and isinstance(node.value, astroid.node_classes.Name)
+ )
def _check_swap_variables(self, node):
if not node.next_sibling() or not node.next_sibling().next_sibling():
return
- assignments = [
- node, node.next_sibling(), node.next_sibling().next_sibling()
- ]
+ assignments = [node, node.next_sibling(), node.next_sibling().next_sibling()]
if not all(self._is_simple_assignment(node) for node in assignments):
return
if any(node in self._reported_swap_nodes for node in assignments):
@@ -705,12 +768,14 @@ class RefactoringChecker(checkers.BaseTokenChecker):
right = [node.value.name for node in assignments]
if left[0] == right[-1] and left[1:] == right[:-1]:
self._reported_swap_nodes.update(assignments)
- message = 'consider-swap-variables'
+ message = "consider-swap-variables"
self.add_message(message, node=node)
- @utils.check_messages('simplify-boolean-expression',
- 'consider-using-ternary',
- 'consider-swap-variables')
+ @utils.check_messages(
+ "simplify-boolean-expression",
+ "consider-using-ternary",
+ "consider-swap-variables",
+ )
def visit_assign(self, node):
self._check_swap_variables(node)
if self._is_and_or_ternary(node.value):
@@ -718,7 +783,9 @@ class RefactoringChecker(checkers.BaseTokenChecker):
else:
return
- if all(isinstance(value, astroid.Compare) for value in (truth_value, false_value)):
+ if all(
+ isinstance(value, astroid.Compare) for value in (truth_value, false_value)
+ ):
return
inferred_truth_value = utils.safe_infer(truth_value)
@@ -728,14 +795,14 @@ class RefactoringChecker(checkers.BaseTokenChecker):
truth_boolean_value = truth_value.bool_value()
if truth_boolean_value is False:
- message = 'simplify-boolean-expression'
+ message = "simplify-boolean-expression"
suggestion = false_value.as_string()
else:
- message = 'consider-using-ternary'
- suggestion = '{truth} if {cond} else {false}'.format(
+ message = "consider-using-ternary"
+ suggestion = "{truth} if {cond} else {false}".format(
truth=truth_value.as_string(),
cond=cond.as_string(),
- false=false_value.as_string()
+ false=false_value.as_string(),
)
self.add_message(message, node=node, args=(suggestion,))
@@ -756,21 +823,25 @@ class RefactoringChecker(checkers.BaseTokenChecker):
if not isinstance(assign, astroid.Assign):
return
result_assign_names = {
- target.name for target in assign.targets if isinstance(target, astroid.AssignName)
+ target.name
+ for target in assign.targets
+ if isinstance(target, astroid.AssignName)
}
- is_concat_loop = (aug_assign.op == '+='
- and isinstance(aug_assign.target, astroid.AssignName)
- and len(for_loop.body) == 1
- and aug_assign.target.name in result_assign_names
- and isinstance(assign.value, astroid.Const)
- and isinstance(assign.value.value, str)
- and isinstance(aug_assign.value, astroid.Name)
- and aug_assign.value.name == for_loop.target.name)
+ is_concat_loop = (
+ aug_assign.op == "+="
+ and isinstance(aug_assign.target, astroid.AssignName)
+ and len(for_loop.body) == 1
+ and aug_assign.target.name in result_assign_names
+ and isinstance(assign.value, astroid.Const)
+ and isinstance(assign.value.value, str)
+ and isinstance(aug_assign.value, astroid.Name)
+ and aug_assign.value.name == for_loop.target.name
+ )
if is_concat_loop:
- self.add_message('consider-using-join', node=aug_assign)
+ self.add_message("consider-using-join", node=aug_assign)
- @utils.check_messages('consider-using-join')
+ @utils.check_messages("consider-using-join")
def visit_augassign(self, node):
self._check_consider_using_join(node)
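The join check wired into visit_augassign above looks for exactly the accumulation pattern below (an illustrative rewrite target, not code from this commit):

    def concat(words):
        result = ""
        for word in words:
            result += word      # consider-using-join: prefer "".join(words)
        return result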
@@ -781,13 +852,16 @@ class RefactoringChecker(checkers.BaseTokenChecker):
All of: condition, true_value and false_value should not be a complex boolean expression
"""
- return (isinstance(node, astroid.BoolOp)
- and node.op == 'or' and len(node.values) == 2
- and isinstance(node.values[0], astroid.BoolOp)
- and not isinstance(node.values[1], astroid.BoolOp)
- and node.values[0].op == 'and'
- and not isinstance(node.values[0].values[1], astroid.BoolOp)
- and len(node.values[0].values) == 2)
+ return (
+ isinstance(node, astroid.BoolOp)
+ and node.op == "or"
+ and len(node.values) == 2
+ and isinstance(node.values[0], astroid.BoolOp)
+ and not isinstance(node.values[1], astroid.BoolOp)
+ and node.values[0].op == "and"
+ and not isinstance(node.values[0].values[1], astroid.BoolOp)
+ and len(node.values[0].values) == 2
+ )
@staticmethod
def _and_or_ternary_arguments(node):
@@ -797,7 +871,8 @@ class RefactoringChecker(checkers.BaseTokenChecker):
def visit_functiondef(self, node):
self._return_nodes[node.name] = list(
- node.nodes_of_class(astroid.Return, skip_klass=astroid.FunctionDef))
+ node.nodes_of_class(astroid.Return, skip_klass=astroid.FunctionDef)
+ )
def _check_consistent_returns(self, node):
"""Check that all return statements inside a function are consistent.
@@ -811,14 +886,16 @@ class RefactoringChecker(checkers.BaseTokenChecker):
"""
# explicit return statements are those with a not None value
- explicit_returns = [_node for _node in self._return_nodes[node.name]
- if _node.value is not None]
+ explicit_returns = [
+ _node for _node in self._return_nodes[node.name] if _node.value is not None
+ ]
if not explicit_returns:
return
- if (len(explicit_returns) == len(self._return_nodes[node.name])
- and self._is_node_return_ended(node)):
+ if len(explicit_returns) == len(
+ self._return_nodes[node.name]
+ ) and self._is_node_return_ended(node):
return
- self.add_message('inconsistent-return-statements', node=node)
+ self.add_message("inconsistent-return-statements", node=node)
def _is_node_return_ended(self, node):
"""Check if the node ends with an explicit return statement.
@@ -830,7 +907,7 @@ class RefactoringChecker(checkers.BaseTokenChecker):
bool: True if the node ends with an explicit statement, False otherwise.
"""
- # Recursion base case
+ #  Recursion base case
if isinstance(node, astroid.Return):
return True
if isinstance(node, astroid.Call):
@@ -841,7 +918,7 @@ class RefactoringChecker(checkers.BaseTokenChecker):
except astroid.InferenceError:
pass
# Avoid the check inside while loop as we don't know
- # if they will be completed
+ #  if they will be completed
if isinstance(node, astroid.While):
return True
if isinstance(node, astroid.Raise):
@@ -853,34 +930,45 @@ class RefactoringChecker(checkers.BaseTokenChecker):
return True
if not utils.is_node_inside_try_except(node):
# If the raise statement is not inside a try/except statement
- # then the exception is raised and cannot be caught. No need
- # to infer it.
+ #  then the exception is raised and cannot be caught. No need
+ #  to infer it.
return True
exc = utils.safe_infer(node.exc)
if exc is None or exc is astroid.Uninferable:
return False
- exc_name = exc.pytype().split('.')[-1]
+ exc_name = exc.pytype().split(".")[-1]
handlers = utils.get_exception_handlers(node, exc_name)
handlers = list(handlers) if handlers is not None else []
if handlers:
# among all the handlers handling the exception at least one
# must end with a return statement
- return any(self._is_node_return_ended(_handler) for _handler in handlers)
+ return any(
+ self._is_node_return_ended(_handler) for _handler in handlers
+ )
# if no handlers handle the exception then it's ok
return True
if isinstance(node, astroid.If):
# if statement is returning if there are exactly two return statements in its
- # children : one for the body part, the other for the orelse part
+ #  children : one for the body part, the other for the orelse part
# Do not check if inner function definition are return ended.
- is_orelse_returning = any(self._is_node_return_ended(_ore) for _ore in node.orelse
- if not isinstance(_ore, astroid.FunctionDef))
- is_if_returning = any(self._is_node_return_ended(_ifn) for _ifn in node.body
- if not isinstance(_ifn, astroid.FunctionDef))
+ is_orelse_returning = any(
+ self._is_node_return_ended(_ore)
+ for _ore in node.orelse
+ if not isinstance(_ore, astroid.FunctionDef)
+ )
+ is_if_returning = any(
+ self._is_node_return_ended(_ifn)
+ for _ifn in node.body
+ if not isinstance(_ifn, astroid.FunctionDef)
+ )
return is_if_returning and is_orelse_returning
- # recurses on the children of the node except for those which are except handler
+ #  recurses on the children of the node except for those which are except handler
# because one cannot be sure that the handler will really be used
- return any(self._is_node_return_ended(_child) for _child in node.get_children()
- if not isinstance(_child, astroid.ExceptHandler))
+ return any(
+ self._is_node_return_ended(_child)
+ for _child in node.get_children()
+ if not isinstance(_child, astroid.ExceptHandler)
+ )
def _is_function_def_never_returning(self, node):
"""Return True if the function never returns. False otherwise.
@@ -915,26 +1003,31 @@ class RefactoringChecker(checkers.BaseTokenChecker):
if isinstance(last, astroid.Return):
# e.g. "return"
if last.value is None:
- self.add_message('useless-return', node=node)
+ self.add_message("useless-return", node=node)
# return None"
elif isinstance(last.value, astroid.Const) and (last.value.value is None):
- self.add_message('useless-return', node=node)
+ self.add_message("useless-return", node=node)
class RecommandationChecker(checkers.BaseChecker):
__implements__ = (interfaces.IAstroidChecker,)
- name = 'refactoring'
- msgs = {'C0200': ('Consider using enumerate instead of iterating with range and len',
- 'consider-using-enumerate',
- 'Emitted when code that iterates with range and len is '
- 'encountered. Such code can be simplified by using the '
- 'enumerate builtin.'),
- 'C0201': ('Consider iterating the dictionary directly instead of calling .keys()',
- 'consider-iterating-dictionary',
- 'Emitted when the keys of a dictionary are iterated through the .keys() '
- 'method. It is enough to just iterate through the dictionary itself, as '
- 'in "for key in dictionary".'),
- }
+ name = "refactoring"
+ msgs = {
+ "C0200": (
+ "Consider using enumerate instead of iterating with range and len",
+ "consider-using-enumerate",
+ "Emitted when code that iterates with range and len is "
+ "encountered. Such code can be simplified by using the "
+ "enumerate builtin.",
+ ),
+ "C0201": (
+ "Consider iterating the dictionary directly instead of calling .keys()",
+ "consider-iterating-dictionary",
+ "Emitted when the keys of a dictionary are iterated through the .keys() "
+ "method. It is enough to just iterate through the dictionary itself, as "
+ 'in "for key in dictionary".',
+ ),
+ }
@staticmethod
def _is_builtin(node, function):
@@ -943,20 +1036,20 @@ class RecommandationChecker(checkers.BaseChecker):
return False
return utils.is_builtin_object(inferred) and inferred.name == function
- @utils.check_messages('consider-iterating-dictionary')
+ @utils.check_messages("consider-iterating-dictionary")
def visit_call(self, node):
inferred = utils.safe_infer(node.func)
if not inferred:
return
if not isinstance(inferred, astroid.BoundMethod):
return
- if not isinstance(inferred.bound, astroid.Dict) or inferred.name != 'keys':
+ if not isinstance(inferred.bound, astroid.Dict) or inferred.name != "keys":
return
if isinstance(node.parent, (astroid.For, astroid.Comprehension)):
- self.add_message('consider-iterating-dictionary', node=node)
+ self.add_message("consider-iterating-dictionary", node=node)
- @utils.check_messages('consider-using-enumerate')
+ @utils.check_messages("consider-using-enumerate")
def visit_for(self, node):
"""Emit a convention whenever range and len are used for indexing."""
# Verify that we have a `range([start], len(...), [stop])` call and
@@ -966,7 +1059,7 @@ class RecommandationChecker(checkers.BaseChecker):
# Is it a proper range call?
if not isinstance(node.iter, astroid.Call):
return
- if not self._is_builtin(node.iter.func, 'range'):
+ if not self._is_builtin(node.iter.func, "range"):
return
if len(node.iter.args) == 2 and not _is_constant_zero(node.iter.args[0]):
return
@@ -977,7 +1070,7 @@ class RecommandationChecker(checkers.BaseChecker):
if not isinstance(node.iter.args[-1], astroid.Call):
return
second_func = node.iter.args[-1].func
- if not self._is_builtin(second_func, 'len'):
+ if not self._is_builtin(second_func, "len"):
return
len_args = node.iter.args[-1].args
if not len_args or len(len_args) != 1:
@@ -1008,7 +1101,7 @@ class RecommandationChecker(checkers.BaseChecker):
# loop, another scope was created, where the same
# name for the iterating object was used.
continue
- self.add_message('consider-using-enumerate', node=node)
+ self.add_message("consider-using-enumerate", node=node)
return
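Both recommendations are easiest to see on a concrete loop; an illustrative snippet with invented names:

    def report(colours):
        counts = {"red": 1, "blue": 2}
        for i in range(len(colours)):    # consider-using-enumerate
            print(i, colours[i])
        for key in counts.keys():        # consider-iterating-dictionary
            print(key, counts[key])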
@@ -1018,32 +1111,46 @@ class NotChecker(checkers.BaseChecker):
- "not not" should trigger a warning
- "not" followed by a comparison should trigger a warning
"""
+
__implements__ = (interfaces.IAstroidChecker,)
- msgs = {'C0113': ('Consider changing "%s" to "%s"',
- 'unneeded-not',
- 'Used when a boolean expression contains an unneeded '
- 'negation.'),
- }
- name = 'basic'
- reverse_op = {'<': '>=', '<=': '>', '>': '<=', '>=': '<', '==': '!=',
- '!=': '==', 'in': 'not in', 'is': 'is not'}
+ msgs = {
+ "C0113": (
+ 'Consider changing "%s" to "%s"',
+ "unneeded-not",
+ "Used when a boolean expression contains an unneeded " "negation.",
+ )
+ }
+ name = "basic"
+ reverse_op = {
+ "<": ">=",
+ "<=": ">",
+ ">": "<=",
+ ">=": "<",
+ "==": "!=",
+ "!=": "==",
+ "in": "not in",
+ "is": "is not",
+ }
# sets are not ordered, so for example "not set(LEFT_VALS) <= set(RIGHT_VALS)" is
# not equivalent to "set(LEFT_VALS) > set(RIGHT_VALS)"
skipped_nodes = (astroid.Set,)
# 'builtins' py3, '__builtin__' py2
- skipped_classnames = ['%s.%s' % (builtins.__name__, qname)
- for qname in ('set', 'frozenset')]
+ skipped_classnames = [
+ "%s.%s" % (builtins.__name__, qname) for qname in ("set", "frozenset")
+ ]
- @utils.check_messages('unneeded-not')
+ @utils.check_messages("unneeded-not")
def visit_unaryop(self, node):
- if node.op != 'not':
+ if node.op != "not":
return
operand = node.operand
- if isinstance(operand, astroid.UnaryOp) and operand.op == 'not':
- self.add_message('unneeded-not', node=node,
- args=(node.as_string(),
- operand.operand.as_string()))
+ if isinstance(operand, astroid.UnaryOp) and operand.op == "not":
+ self.add_message(
+ "unneeded-not",
+ node=node,
+ args=(node.as_string(), operand.operand.as_string()),
+ )
elif isinstance(operand, astroid.Compare):
left = operand.left
# ignore multiple comparisons
@@ -1054,34 +1161,45 @@ class NotChecker(checkers.BaseChecker):
return
# Ignore __ne__ as function of __eq__
frame = node.frame()
- if frame.name == '__ne__' and operator == '==':
+ if frame.name == "__ne__" and operator == "==":
return
for _type in (utils.node_type(left), utils.node_type(right)):
if not _type:
return
if isinstance(_type, self.skipped_nodes):
return
- if (isinstance(_type, astroid.Instance) and
- _type.qname() in self.skipped_classnames):
+ if (
+ isinstance(_type, astroid.Instance)
+ and _type.qname() in self.skipped_classnames
+ ):
return
- suggestion = '%s %s %s' % (left.as_string(),
- self.reverse_op[operator],
- right.as_string())
- self.add_message('unneeded-not', node=node,
- args=(node.as_string(), suggestion))
+ suggestion = "%s %s %s" % (
+ left.as_string(),
+ self.reverse_op[operator],
+ right.as_string(),
+ )
+ self.add_message(
+ "unneeded-not", node=node, args=(node.as_string(), suggestion)
+ )
def _is_len_call(node):
"""Checks if node is len(SOMETHING)."""
- return (isinstance(node, astroid.Call) and isinstance(node.func, astroid.Name) and
- node.func.name == 'len')
+ return (
+ isinstance(node, astroid.Call)
+ and isinstance(node.func, astroid.Name)
+ and node.func.name == "len"
+ )
+
def _is_constant_zero(node):
return isinstance(node, astroid.Const) and node.value == 0
+
def _has_constant_value(node, value):
return isinstance(node, astroid.Const) and node.value == value
+
def _node_is_test_condition(node):
""" Checks if node is an if, while, assert or if expression statement."""
return isinstance(node, (astroid.If, astroid.While, astroid.Assert, astroid.IfExp))
@@ -1111,19 +1229,22 @@ class LenChecker(checkers.BaseChecker):
__implements__ = (interfaces.IAstroidChecker,)
# configuration section name
- name = 'len'
- msgs = {'C1801': ('Do not use `len(SEQUENCE)` to determine if a sequence is empty',
- 'len-as-condition',
- 'Used when Pylint detects that len(sequence) is being used inside '
- 'a condition to determine if a sequence is empty. Instead of '
- 'comparing the length to 0, rely on the fact that empty sequences '
- 'are false.'),
- }
+ name = "len"
+ msgs = {
+ "C1801": (
+ "Do not use `len(SEQUENCE)` to determine if a sequence is empty",
+ "len-as-condition",
+ "Used when Pylint detects that len(sequence) is being used inside "
+ "a condition to determine if a sequence is empty. Instead of "
+ "comparing the length to 0, rely on the fact that empty sequences "
+ "are false.",
+ )
+ }
priority = -2
options = ()
- @utils.check_messages('len-as-condition')
+ @utils.check_messages("len-as-condition")
def visit_call(self, node):
# a len(S) call is used inside a test condition
# could be if, while, assert or if expression statement
@@ -1141,17 +1262,21 @@ class LenChecker(checkers.BaseChecker):
return
if not (node is parent.test or parent.test.parent_of(node)):
return
- self.add_message('len-as-condition', node=node)
+ self.add_message("len-as-condition", node=node)
- @utils.check_messages('len-as-condition')
+ @utils.check_messages("len-as-condition")
def visit_unaryop(self, node):
"""`not len(S)` must become `not S` regardless if the parent block
is a test condition or something else (boolean expression)
e.g. `if not len(S):`"""
- if isinstance(node, astroid.UnaryOp) and node.op == 'not' and _is_len_call(node.operand):
- self.add_message('len-as-condition', node=node)
-
- @utils.check_messages('len-as-condition')
+ if (
+ isinstance(node, astroid.UnaryOp)
+ and node.op == "not"
+ and _is_len_call(node.operand)
+ ):
+ self.add_message("len-as-condition", node=node)
+
+ @utils.check_messages("len-as-condition")
def visit_compare(self, node):
# compare nodes are trickier because the len(S) expression
# may be somewhere in the middle of the node
@@ -1160,7 +1285,7 @@ class LenChecker(checkers.BaseChecker):
# while the rest are a list of tuples in node.ops
# the format of the tuple is ('compare operator sign', node)
# here we squash everything into `ops` to make it easier for processing later
- ops = [('', node.left)]
+ ops = [("", node.left)]
ops.extend(node.ops)
ops = list(itertools.chain(*ops))
@@ -1171,14 +1296,30 @@ class LenChecker(checkers.BaseChecker):
error_detected = False
# 0 ?? len()
- if _is_constant_zero(op_1) and op_2 in ['==', '!=', '<', '>='] and _is_len_call(op_3):
+ if (
+ _is_constant_zero(op_1)
+ and op_2 in ["==", "!=", "<", ">="]
+ and _is_len_call(op_3)
+ ):
error_detected = True
# len() ?? 0
- elif _is_len_call(op_1) and op_2 in ['==', '!=', '>', '<='] and _is_constant_zero(op_3):
+ elif (
+ _is_len_call(op_1)
+ and op_2 in ["==", "!=", ">", "<="]
+ and _is_constant_zero(op_3)
+ ):
error_detected = True
- elif _has_constant_value(op_1, value=1) and op_2 == '>' and _is_len_call(op_3):
+ elif (
+ _has_constant_value(op_1, value=1)
+ and op_2 == ">"
+ and _is_len_call(op_3)
+ ):
error_detected = True
- elif _is_len_call(op_1) and op_2 == '<' and _has_constant_value(op_3, value=1):
+ elif (
+ _is_len_call(op_1)
+ and op_2 == "<"
+ and _has_constant_value(op_3, value=1)
+ ):
error_detected = True
if error_detected:
@@ -1191,7 +1332,7 @@ class LenChecker(checkers.BaseChecker):
# report only if this len() comparison is part of a test condition
# for example: return len() > 0 should not report anything
if _node_is_test_condition(parent):
- self.add_message('len-as-condition', node=node)
+ self.add_message("len-as-condition", node=node)
def is_trailing_comma(tokens, index):
@@ -1208,10 +1349,12 @@ def is_trailing_comma(tokens, index):
return False
# Must have remaining tokens on the same line such as NEWLINE
left_tokens = itertools.islice(tokens, index + 1, None)
- same_line_remaining_tokens = list(itertools.takewhile(
- lambda other_token, _token=token: other_token.start[0] == _token.start[0],
- left_tokens
- ))
+ same_line_remaining_tokens = list(
+ itertools.takewhile(
+ lambda other_token, _token=token: other_token.start[0] == _token.start[0],
+ left_tokens,
+ )
+ )
# Note: If the newline is tokenize.NEWLINE and not tokenize.NL
# then the newline denotes the end of expression
is_last_element = all(
@@ -1220,6 +1363,7 @@ def is_trailing_comma(tokens, index):
)
if not same_line_remaining_tokens or not is_last_element:
return False
+
def get_curline_index_start():
"""Get the index denoting the start of the current line"""
for subindex, token in enumerate(reversed(tokens[:index])):
@@ -1229,9 +1373,9 @@ def is_trailing_comma(tokens, index):
return 0
curline_start = get_curline_index_start()
- expected_tokens = {'return', 'yield'}
+ expected_tokens = {"return", "yield"}
for prevtoken in tokens[curline_start:index]:
- if '=' in prevtoken.string or prevtoken.string in expected_tokens:
+ if "=" in prevtoken.string or prevtoken.string in expected_tokens:
return True
return False
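
The `LenChecker` and `is_trailing_comma` hunks above are formatting-only, so the behaviour stays as described in the message text: flag `len(SEQUENCE)` used as an emptiness test, but not ordinary length comparisons. A minimal, hypothetical illustration (names below are not from this diff):

    items = []

    # flagged (len-as-condition): comparing len() with 0 inside a test condition
    # if len(items) == 0:
    #     print("empty")

    # preferred: empty sequences are already false in a boolean context
    if not items:
        print("empty")

    # not flagged: the comparison is not itself used as a test condition
    has_items = len(items) > 0

`is_trailing_comma` is a helper that spots lines such as `return 1,` or `x = 1,`, where the trailing comma silently builds a tuple.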
diff --git a/pylint/checkers/similar.py b/pylint/checkers/similar.py
index a53058fd7..369d0ce37 100644
--- a/pylint/checkers/similar.py
+++ b/pylint/checkers/similar.py
@@ -30,8 +30,13 @@ from pylint.reporters.ureports.nodes import Table
class Similar:
"""finds copy-pasted lines of code in a project"""
- def __init__(self, min_lines=4, ignore_comments=False,
- ignore_docstrings=False, ignore_imports=False):
+ def __init__(
+ self,
+ min_lines=4,
+ ignore_comments=False,
+ ignore_docstrings=False,
+ ignore_imports=False,
+ ):
self.min_lines = min_lines
self.ignore_comments = ignore_comments
self.ignore_docstrings = ignore_docstrings
@@ -45,11 +50,15 @@ class Similar:
else:
readlines = decoding_stream(stream, encoding).readlines
try:
- self.linesets.append(LineSet(streamid,
- readlines(),
- self.ignore_comments,
- self.ignore_docstrings,
- self.ignore_imports))
+ self.linesets.append(
+ LineSet(
+ streamid,
+ readlines(),
+ self.ignore_comments,
+ self.ignore_docstrings,
+ self.ignore_imports,
+ )
+ )
except UnicodeDecodeError:
pass
@@ -87,13 +96,18 @@ class Similar:
for lineset, idx in couples:
print("==%s:%s" % (lineset.name, idx))
# pylint: disable=W0631
- for line in lineset._real_lines[idx:idx+num]:
+ for line in lineset._real_lines[idx : idx + num]:
print(" ", line.rstrip())
- nb_lignes_dupliquees += num * (len(couples)-1)
+ nb_lignes_dupliquees += num * (len(couples) - 1)
nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
- print("TOTAL lines=%s duplicates=%s percent=%.2f" \
- % (nb_total_lignes, nb_lignes_dupliquees,
- nb_lignes_dupliquees*100. / nb_total_lignes))
+ print(
+ "TOTAL lines=%s duplicates=%s percent=%.2f"
+ % (
+ nb_total_lignes,
+ nb_lignes_dupliquees,
+ nb_lignes_dupliquees * 100. / nb_total_lignes,
+ )
+ )
def _find_common(self, lineset1, lineset2):
"""find similarities in the two given linesets"""
@@ -108,7 +122,8 @@ class Similar:
for index2 in find(lineset1[index1]):
non_blank = 0
for num, ((_, line1), (_, line2)) in enumerate(
- zip(lines1(index1), lines2(index2))):
+ zip(lines1(index1), lines2(index2))
+ ):
if line1 != line2:
if non_blank > min_lines:
yield num, lineset1, index1, lineset2, index2
@@ -129,10 +144,11 @@ class Similar:
product
"""
for idx, lineset in enumerate(self.linesets[:-1]):
- for lineset2 in self.linesets[idx+1:]:
+ for lineset2 in self.linesets[idx + 1 :]:
for sim in self._find_common(lineset, lineset2):
yield sim
+
def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
"""return lines with leading/trailing whitespace and any ignored code
features removed
@@ -143,38 +159,44 @@ def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
for line in lines:
line = line.strip()
if ignore_docstrings:
- if not docstring and \
- (line.startswith('"""') or line.startswith("'''")):
+ if not docstring and (line.startswith('"""') or line.startswith("'''")):
docstring = line[:3]
line = line[3:]
if docstring:
if line.endswith(docstring):
docstring = None
- line = ''
+ line = ""
if ignore_imports:
if line.startswith("import ") or line.startswith("from "):
- line = ''
+ line = ""
if ignore_comments:
# XXX should use regex in checkers/format to avoid cutting
# at a "#" in a string
- line = line.split('#', 1)[0].strip()
+ line = line.split("#", 1)[0].strip()
strippedlines.append(line)
return strippedlines
class LineSet:
"""Holds and indexes all the lines of a single source file"""
- def __init__(self, name, lines, ignore_comments=False,
- ignore_docstrings=False, ignore_imports=False):
+
+ def __init__(
+ self,
+ name,
+ lines,
+ ignore_comments=False,
+ ignore_docstrings=False,
+ ignore_imports=False,
+ ):
self.name = name
self._real_lines = lines
- self._stripped_lines = stripped_lines(lines, ignore_comments,
- ignore_docstrings,
- ignore_imports)
+ self._stripped_lines = stripped_lines(
+ lines, ignore_comments, ignore_docstrings, ignore_imports
+ )
self._index = self._mk_index()
def __str__(self):
- return '<Lineset for %s>' % self.name
+ return "<Lineset for %s>" % self.name
def __len__(self):
return len(self._real_lines)
@@ -198,7 +220,7 @@ class LineSet:
else:
lines = self._stripped_lines
for line in lines:
- #if line:
+ # if line:
yield idx, line
idx += 1
@@ -215,18 +237,23 @@ class LineSet:
return index
-MSGS = {'R0801': ('Similar lines in %s files\n%s',
- 'duplicate-code',
- 'Indicates that a set of similar lines has been detected '
- 'among multiple file. This usually means that the code should '
- 'be refactored to avoid this duplication.')}
+MSGS = {
+ "R0801": (
+ "Similar lines in %s files\n%s",
+ "duplicate-code",
+ "Indicates that a set of similar lines has been detected "
+ "among multiple file. This usually means that the code should "
+ "be refactored to avoid this duplication.",
+ )
+}
+
def report_similarities(sect, stats, old_stats):
"""make a layout with some stats about duplication"""
- lines = ['', 'now', 'previous', 'difference']
- lines += table_lines_from_stats(stats, old_stats,
- ('nb_duplicated_lines',
- 'percent_duplicated_lines'))
+ lines = ["", "now", "previous", "difference"]
+ lines += table_lines_from_stats(
+ stats, old_stats, ("nb_duplicated_lines", "percent_duplicated_lines")
+ )
sect.append(Table(children=lines, cols=4, rheaders=1, cheaders=1))
@@ -239,34 +266,57 @@ class SimilarChecker(BaseChecker, Similar):
__implements__ = (IRawChecker,)
# configuration section name
- name = 'similarities'
+ name = "similarities"
# messages
msgs = MSGS
# configuration options
# for available dict keys/values see the optik parser 'add_option' method
- options = (('min-similarity-lines', # type: ignore
- {'default' : 4, 'type' : "int", 'metavar' : '<int>',
- 'help' : 'Minimum lines number of a similarity.'}),
- ('ignore-comments',
- {'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
- 'help': 'Ignore comments when computing similarities.'}
- ),
- ('ignore-docstrings',
- {'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
- 'help': 'Ignore docstrings when computing similarities.'}
- ),
- ('ignore-imports',
- {'default' : False, 'type' : 'yn', 'metavar' : '<y or n>',
- 'help': 'Ignore imports when computing similarities.'}
- ),
- )
+ options = (
+ (
+ "min-similarity-lines", # type: ignore
+ {
+ "default": 4,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "Minimum lines number of a similarity.",
+ },
+ ),
+ (
+ "ignore-comments",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y or n>",
+ "help": "Ignore comments when computing similarities.",
+ },
+ ),
+ (
+ "ignore-docstrings",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y or n>",
+ "help": "Ignore docstrings when computing similarities.",
+ },
+ ),
+ (
+ "ignore-imports",
+ {
+ "default": False,
+ "type": "yn",
+ "metavar": "<y or n>",
+ "help": "Ignore imports when computing similarities.",
+ },
+ ),
+ )
# reports
- reports = (('RP0801', 'Duplication', report_similarities),) # type: ignore
+ reports = (("RP0801", "Duplication", report_similarities),) # type: ignore
def __init__(self, linter=None):
BaseChecker.__init__(self, linter)
- Similar.__init__(self, min_lines=4,
- ignore_comments=True, ignore_docstrings=True)
+ Similar.__init__(
+ self, min_lines=4, ignore_comments=True, ignore_docstrings=True
+ )
self.stats = None
def set_option(self, optname, value, action=None, optdict=None):
@@ -275,20 +325,21 @@ class SimilarChecker(BaseChecker, Similar):
overridden to report options setting to Similar
"""
BaseChecker.set_option(self, optname, value, action, optdict)
- if optname == 'min-similarity-lines':
+ if optname == "min-similarity-lines":
self.min_lines = self.config.min_similarity_lines
- elif optname == 'ignore-comments':
+ elif optname == "ignore-comments":
self.ignore_comments = self.config.ignore_comments
- elif optname == 'ignore-docstrings':
+ elif optname == "ignore-docstrings":
self.ignore_docstrings = self.config.ignore_docstrings
- elif optname == 'ignore-imports':
+ elif optname == "ignore-imports":
self.ignore_imports = self.config.ignore_imports
def open(self):
"""init the checkers: reset linesets and statistics information"""
self.linesets = []
- self.stats = self.linter.add_stats(nb_duplicated_lines=0,
- percent_duplicated_lines=0)
+ self.stats = self.linter.add_stats(
+ nb_duplicated_lines=0, percent_duplicated_lines=0
+ )
def process_module(self, node):
"""process a module
@@ -298,9 +349,7 @@ class SimilarChecker(BaseChecker, Similar):
stream must implement the readlines method
"""
with node.stream() as stream:
- self.append_stream(self.linter.current_name,
- stream,
- node.file_encoding)
+ self.append_stream(self.linter.current_name, stream, node.file_encoding)
def close(self):
"""compute and display similarities on closing (i.e. end of parsing)"""
@@ -313,49 +362,59 @@ class SimilarChecker(BaseChecker, Similar):
msg.append("==%s:%s" % (lineset.name, idx))
msg.sort()
# pylint: disable=W0631
- for line in lineset._real_lines[idx:idx+num]:
+ for line in lineset._real_lines[idx : idx + num]:
msg.append(line.rstrip())
- self.add_message('R0801', args=(len(couples), '\n'.join(msg)))
+ self.add_message("R0801", args=(len(couples), "\n".join(msg)))
duplicated += num * (len(couples) - 1)
- stats['nb_duplicated_lines'] = duplicated
- stats['percent_duplicated_lines'] = total and duplicated * 100. / total
+ stats["nb_duplicated_lines"] = duplicated
+ stats["percent_duplicated_lines"] = total and duplicated * 100. / total
def register(linter):
"""required method to auto register this checker """
linter.register_checker(SimilarChecker(linter))
+
def usage(status=0):
"""display command line usage information"""
print("finds copy pasted blocks in a set of files")
print()
- print('Usage: symilar [-d|--duplicates min_duplicated_lines] \
-[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1...')
+ print(
+ "Usage: symilar [-d|--duplicates min_duplicated_lines] \
+[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1..."
+ )
sys.exit(status)
+
def Run(argv=None):
"""standalone command line access point"""
if argv is None:
argv = sys.argv[1:]
from getopt import getopt
- s_opts = 'hdi'
- l_opts = ('help', 'duplicates=', 'ignore-comments', 'ignore-imports',
- 'ignore-docstrings')
+
+ s_opts = "hdi"
+ l_opts = (
+ "help",
+ "duplicates=",
+ "ignore-comments",
+ "ignore-imports",
+ "ignore-docstrings",
+ )
min_lines = 4
ignore_comments = False
ignore_docstrings = False
ignore_imports = False
opts, args = getopt(argv, s_opts, l_opts)
for opt, val in opts:
- if opt in ('-d', '--duplicates'):
+ if opt in ("-d", "--duplicates"):
min_lines = int(val)
- elif opt in ('-h', '--help'):
+ elif opt in ("-h", "--help"):
usage()
- elif opt in ('-i', '--ignore-comments'):
+ elif opt in ("-i", "--ignore-comments"):
ignore_comments = True
- elif opt in ('--ignore-docstrings',):
+ elif opt in ("--ignore-docstrings",):
ignore_docstrings = True
- elif opt in ('--ignore-imports',):
+ elif opt in ("--ignore-imports",):
ignore_imports = True
if not args:
usage(1)
@@ -366,5 +425,6 @@ def Run(argv=None):
sim.run()
sys.exit(0)
-if __name__ == '__main__':
+
+if __name__ == "__main__":
Run()
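
Besides running as a pylint checker, `Similar` can be driven directly, which is what the `Run` entry point above does for the `symilar` command. A minimal sketch, assuming the listed files exist and that `append_stream` falls back to its default encoding:

    from pylint.checkers.similar import Similar

    sim = Similar(min_lines=4, ignore_comments=True, ignore_docstrings=True)
    for path in ("module_a.py", "module_b.py"):   # hypothetical file names
        with open(path) as stream:
            sim.append_stream(path, stream)
    # prints each duplicated block plus the "TOTAL lines=... duplicates=... percent=..." summary
    sim.run()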
diff --git a/pylint/checkers/spelling.py b/pylint/checkers/spelling.py
index 6985aa7bd..b67dedfd8 100644
--- a/pylint/checkers/spelling.py
+++ b/pylint/checkers/spelling.py
@@ -24,12 +24,14 @@ import re
try:
import enchant
- from enchant.tokenize import (get_tokenizer, # type: ignore
- Chunker,
- Filter,
- EmailFilter,
- URLFilter,
- WikiWordFilter)
+ from enchant.tokenize import (
+ get_tokenizer, # type: ignore
+ Chunker,
+ Filter,
+ EmailFilter,
+ URLFilter,
+ WikiWordFilter,
+ )
except ImportError:
enchant = None
# pylint: disable=no-init
@@ -48,13 +50,13 @@ from pylint.checkers.utils import check_messages
if enchant is not None:
br = enchant.Broker()
dicts = br.list_dicts()
- dict_choices = [''] + [d[0] for d in dicts]
+ dict_choices = [""] + [d[0] for d in dicts]
dicts = ["%s (%s)" % (d[0], d[1].name) for d in dicts]
dicts = ", ".join(dicts)
instr = ""
else:
dicts = "none"
- dict_choices = ['']
+ dict_choices = [""]
instr = " To make it working install python-enchant package."
@@ -74,8 +76,9 @@ class WordsWithUnderscores(Filter):
They are probably function parameter names.
"""
+
def _skip(self, word):
- return '_' in word
+ return "_" in word
class CamelCasedWord(Filter):
@@ -108,86 +111,123 @@ class SphinxDirectives(Filter):
class ForwardSlashChunkder(Chunker):
- '''
+ """
This chunker allows splitting words like 'before/after' into 'before' and 'after'
- '''
+ """
+
def next(self):
while True:
if not self._text:
raise StopIteration()
- if '/' not in self._text:
+ if "/" not in self._text:
text = self._text
self._offset = 0
- self._text = ''
+ self._text = ""
return (text, 0)
- pre_text, post_text = self._text.split('/', 1)
+ pre_text, post_text = self._text.split("/", 1)
self._text = post_text
self._offset = 0
- if not pre_text or not post_text or \
- not pre_text[-1].isalpha() or not post_text[0].isalpha():
- self._text = ''
+ if (
+ not pre_text
+ or not post_text
+ or not pre_text[-1].isalpha()
+ or not post_text[0].isalpha()
+ ):
+ self._text = ""
self._offset = 0
- return (pre_text + '/' + post_text, 0)
+ return (pre_text + "/" + post_text, 0)
return (pre_text, 0)
def _next(self):
while True:
- if '/' not in self._text:
+ if "/" not in self._text:
return (self._text, 0)
- pre_text, post_text = self._text.split('/', 1)
+ pre_text, post_text = self._text.split("/", 1)
if not pre_text or not post_text:
break
if not pre_text[-1].isalpha() or not post_text[0].isalpha():
raise StopIteration()
- self._text = pre_text + ' ' + post_text
+ self._text = pre_text + " " + post_text
raise StopIteration()
class SpellingChecker(BaseTokenChecker):
"""Check spelling in comments and docstrings"""
+
__implements__ = (ITokenChecker, IAstroidChecker)
- name = 'spelling'
+ name = "spelling"
msgs = {
- 'C0401': ('Wrong spelling of a word \'%s\' in a comment:\n%s\n'
- '%s\nDid you mean: \'%s\'?',
- 'wrong-spelling-in-comment',
- 'Used when a word in comment is not spelled correctly.'),
- 'C0402': ('Wrong spelling of a word \'%s\' in a docstring:\n%s\n'
- '%s\nDid you mean: \'%s\'?',
- 'wrong-spelling-in-docstring',
- 'Used when a word in docstring is not spelled correctly.'),
- 'C0403': ('Invalid characters %r in a docstring',
- 'invalid-characters-in-docstring',
- 'Used when a word in docstring cannot be checked by enchant.'),
- }
- options = (('spelling-dict',
- {'default' : '', 'type' : 'choice', 'metavar' : '<dict name>',
- 'choices': dict_choices,
- 'help' : 'Spelling dictionary name. '
- 'Available dictionaries: %s.%s.' % (dicts, instr)}),
- ('spelling-ignore-words',
- {'default' : '',
- 'type' : 'string',
- 'metavar' : '<comma separated words>',
- 'help' : 'List of comma separated words that '
- 'should not be checked.'}),
- ('spelling-private-dict-file',
- {'default' : '',
- 'type' : 'string',
- 'metavar' : '<path to file>',
- 'help' : 'A path to a file that contains private '
- 'dictionary; one word per line.'}),
- ('spelling-store-unknown-words',
- {'default' : 'n', 'type' : 'yn', 'metavar' : '<y_or_n>',
- 'help' : 'Tells whether to store unknown words to '
- 'indicated private dictionary in '
- '--spelling-private-dict-file option instead of '
- 'raising a message.'}),
- ('max-spelling-suggestions',
- {'default': 4, 'type': 'int', 'metavar': 'N',
- 'help': 'Limits count of emitted suggestions for '
- 'spelling mistakes.'}),
- )
+ "C0401": (
+ "Wrong spelling of a word '%s' in a comment:\n%s\n"
+ "%s\nDid you mean: '%s'?",
+ "wrong-spelling-in-comment",
+ "Used when a word in comment is not spelled correctly.",
+ ),
+ "C0402": (
+ "Wrong spelling of a word '%s' in a docstring:\n%s\n"
+ "%s\nDid you mean: '%s'?",
+ "wrong-spelling-in-docstring",
+ "Used when a word in docstring is not spelled correctly.",
+ ),
+ "C0403": (
+ "Invalid characters %r in a docstring",
+ "invalid-characters-in-docstring",
+ "Used when a word in docstring cannot be checked by enchant.",
+ ),
+ }
+ options = (
+ (
+ "spelling-dict",
+ {
+ "default": "",
+ "type": "choice",
+ "metavar": "<dict name>",
+ "choices": dict_choices,
+ "help": "Spelling dictionary name. "
+ "Available dictionaries: %s.%s." % (dicts, instr),
+ },
+ ),
+ (
+ "spelling-ignore-words",
+ {
+ "default": "",
+ "type": "string",
+ "metavar": "<comma separated words>",
+ "help": "List of comma separated words that " "should not be checked.",
+ },
+ ),
+ (
+ "spelling-private-dict-file",
+ {
+ "default": "",
+ "type": "string",
+ "metavar": "<path to file>",
+ "help": "A path to a file that contains private "
+ "dictionary; one word per line.",
+ },
+ ),
+ (
+ "spelling-store-unknown-words",
+ {
+ "default": "n",
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": "Tells whether to store unknown words to "
+ "indicated private dictionary in "
+ "--spelling-private-dict-file option instead of "
+ "raising a message.",
+ },
+ ),
+ (
+ "max-spelling-suggestions",
+ {
+ "default": 4,
+ "type": "int",
+ "metavar": "N",
+ "help": "Limits count of emitted suggestions for " "spelling mistakes.",
+ },
+ ),
+ )
def open(self):
self.initialized = False
@@ -199,7 +239,9 @@ class SpellingChecker(BaseTokenChecker):
if not dict_name:
return
- self.ignore_list = [w.strip() for w in self.config.spelling_ignore_words.split(",")]
+ self.ignore_list = [
+ w.strip() for w in self.config.spelling_ignore_words.split(",")
+ ]
# "param" appears in docstring in param description and
# "pylint" appears in comments in pylint pragmas.
self.ignore_list.extend(["param", "pylint"])
@@ -207,28 +249,33 @@ class SpellingChecker(BaseTokenChecker):
# Expand tilde to allow e.g. spelling-private-dict-file = ~/.pylintdict
if self.config.spelling_private_dict_file:
self.config.spelling_private_dict_file = os.path.expanduser(
- self.config.spelling_private_dict_file)
+ self.config.spelling_private_dict_file
+ )
if self.config.spelling_private_dict_file:
self.spelling_dict = enchant.DictWithPWL(
- dict_name, self.config.spelling_private_dict_file)
- self.private_dict_file = open(
- self.config.spelling_private_dict_file, "a")
+ dict_name, self.config.spelling_private_dict_file
+ )
+ self.private_dict_file = open(self.config.spelling_private_dict_file, "a")
else:
self.spelling_dict = enchant.Dict(dict_name)
if self.config.spelling_store_unknown_words:
self.unknown_words = set()
- self.tokenizer = get_tokenizer(dict_name,
- chunkers=[ForwardSlashChunkder],
- filters=[EmailFilter,
- URLFilter,
- WikiWordFilter,
- WordsWithDigigtsFilter,
- WordsWithUnderscores,
- CamelCasedWord,
- SphinxDirectives])
+ self.tokenizer = get_tokenizer(
+ dict_name,
+ chunkers=[ForwardSlashChunkder],
+ filters=[
+ EmailFilter,
+ URLFilter,
+ WikiWordFilter,
+ WordsWithDigigtsFilter,
+ WordsWithUnderscores,
+ CamelCasedWord,
+ SphinxDirectives,
+ ],
+ )
self.initialized = True
def close(self):
@@ -237,7 +284,7 @@ class SpellingChecker(BaseTokenChecker):
def _check_spelling(self, msgid, line, line_num):
original_line = line
- if line.strip().startswith('#'):
+ if line.strip().startswith("#"):
line = line.strip()[1:]
starts_with_comment = True
else:
@@ -266,8 +313,9 @@ class SpellingChecker(BaseTokenChecker):
if self.spelling_dict.check(word):
continue
except enchant.errors.Error:
- self.add_message('invalid-characters-in-docstring',
- line=line_num, args=(word,))
+ self.add_message(
+ "invalid-characters-in-docstring", line=line_num, args=(word,)
+ )
continue
# Store word to private dict or raise a message.
@@ -278,7 +326,7 @@ class SpellingChecker(BaseTokenChecker):
else:
# Present up to N suggestions.
suggestions = self.spelling_dict.suggest(word)
- del suggestions[self.config.max_spelling_suggestions:]
+ del suggestions[self.config.max_spelling_suggestions :]
m = re.search(r"(\W|^)(%s)(\W|$)" % word, line)
if m:
@@ -291,10 +339,16 @@ class SpellingChecker(BaseTokenChecker):
col += 1
indicator = (" " * col) + ("^" * len(word))
- self.add_message(msgid, line=line_num,
- args=(word, original_line,
- indicator,
- "'{}'".format("' or '".join(suggestions))))
+ self.add_message(
+ msgid,
+ line=line_num,
+ args=(
+ word,
+ original_line,
+ indicator,
+ "'{}'".format("' or '".join(suggestions)),
+ ),
+ )
def process_tokens(self, tokens):
if not self.initialized:
@@ -303,28 +357,27 @@ class SpellingChecker(BaseTokenChecker):
# Process tokens and look for comments.
for (tok_type, token, (start_row, _), _, _) in tokens:
if tok_type == tokenize.COMMENT:
- if start_row == 1 and token.startswith('#!/'):
+ if start_row == 1 and token.startswith("#!/"):
# Skip shebang lines
continue
- if token.startswith('# pylint:'):
+ if token.startswith("# pylint:"):
# Skip pylint enable/disable comments
continue
- self._check_spelling('wrong-spelling-in-comment',
- token, start_row)
+ self._check_spelling("wrong-spelling-in-comment", token, start_row)
- @check_messages('wrong-spelling-in-docstring')
+ @check_messages("wrong-spelling-in-docstring")
def visit_module(self, node):
if not self.initialized:
return
self._check_docstring(node)
- @check_messages('wrong-spelling-in-docstring')
+ @check_messages("wrong-spelling-in-docstring")
def visit_classdef(self, node):
if not self.initialized:
return
self._check_docstring(node)
- @check_messages('wrong-spelling-in-docstring')
+ @check_messages("wrong-spelling-in-docstring")
def visit_functiondef(self, node):
if not self.initialized:
return
@@ -342,8 +395,7 @@ class SpellingChecker(BaseTokenChecker):
# Go through lines of docstring
for idx, line in enumerate(docstring.splitlines()):
- self._check_spelling('wrong-spelling-in-docstring',
- line, start_line + idx)
+ self._check_spelling("wrong-spelling-in-docstring", line, start_line + idx)
def register(linter):
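
The option tuples above correspond to the usual rc-file settings. A hedged configuration sketch (section name and values are assumptions, not part of this diff; the checker stays inactive unless python-enchant and the chosen dictionary are installed):

    [SPELLING]
    # illustrative values
    spelling-dict=en_US
    spelling-ignore-words=enchant,docstring
    spelling-private-dict-file=~/.pylintdict
    max-spelling-suggestions=4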
diff --git a/pylint/checkers/stdlib.py b/pylint/checkers/stdlib.py
index 464f9e8a6..872d91330 100644
--- a/pylint/checkers/stdlib.py
+++ b/pylint/checkers/stdlib.py
@@ -31,18 +31,18 @@ from pylint.checkers import BaseChecker
from pylint.checkers import utils
-OPEN_FILES = {'open', 'file'}
-UNITTEST_CASE = 'unittest.case'
-THREADING_THREAD = 'threading.Thread'
-COPY_COPY = 'copy.copy'
-OS_ENVIRON = 'os._Environ'
-ENV_GETTERS = {'os.getenv'}
-SUBPROCESS_POPEN = 'subprocess.Popen'
+OPEN_FILES = {"open", "file"}
+UNITTEST_CASE = "unittest.case"
+THREADING_THREAD = "threading.Thread"
+COPY_COPY = "copy.copy"
+OS_ENVIRON = "os._Environ"
+ENV_GETTERS = {"os.getenv"}
+SUBPROCESS_POPEN = "subprocess.Popen"
if sys.version_info >= (3, 0):
- OPEN_MODULE = '_io'
+ OPEN_MODULE = "_io"
else:
- OPEN_MODULE = '__builtin__'
+ OPEN_MODULE = "__builtin__"
def _check_mode_str(mode):
@@ -77,164 +77,183 @@ def _check_mode_str(mode):
class StdlibChecker(BaseChecker):
__implements__ = (IAstroidChecker,)
- name = 'stdlib'
+ name = "stdlib"
msgs = {
- 'W1501': ('"%s" is not a valid mode for open.',
- 'bad-open-mode',
- 'Python supports: r, w, a[, x] modes with b, +, '
- 'and U (only with r) options. '
- 'See http://docs.python.org/2/library/functions.html#open'),
- 'W1502': ('Using datetime.time in a boolean context.',
- 'boolean-datetime',
- 'Using datetime.time in a boolean context can hide '
- 'subtle bugs when the time they represent matches '
- 'midnight UTC. This behaviour was fixed in Python 3.5. '
- 'See http://bugs.python.org/issue13936 for reference.',
- {'maxversion': (3, 5)}),
- 'W1503': ('Redundant use of %s with constant '
- 'value %r',
- 'redundant-unittest-assert',
- 'The first argument of assertTrue and assertFalse is '
- 'a condition. If a constant is passed as parameter, that '
- 'condition will be always true. In this case a warning '
- 'should be emitted.'),
- 'W1505': ('Using deprecated method %s()',
- 'deprecated-method',
- 'The method is marked as deprecated and will be removed in '
- 'a future version of Python. Consider looking for an '
- 'alternative in the documentation.'),
- 'W1506': ('threading.Thread needs the target function',
- 'bad-thread-instantiation',
- 'The warning is emitted when a threading.Thread class '
- 'is instantiated without the target function being passed. '
- 'By default, the first parameter is the group param, not the target param. '),
- 'W1507': ('Using copy.copy(os.environ). Use os.environ.copy() '
- 'instead. ',
- 'shallow-copy-environ',
- 'os.environ is not a dict object but proxy object, so '
- 'shallow copy has still effects on original object. '
- 'See https://bugs.python.org/issue15373 for reference. '),
- 'E1507': ('%s does not support %s type argument',
- 'invalid-envvar-value',
- 'Env manipulation functions support only string type arguments. '
- 'See https://docs.python.org/3/library/os.html#os.getenv. '),
- 'W1508': ('%s default type is %s. Expected str or None.',
- 'invalid-envvar-default',
- 'Env manipulation functions return None or str values. '
- 'Supplying anything different as a default may cause bugs. '
- 'See https://docs.python.org/3/library/os.html#os.getenv. '),
- 'W1509': ('Using preexec_fn keyword which may be unsafe in the presence '
- 'of threads',
- 'subprocess-popen-preexec-fn',
- 'The preexec_fn parameter is not safe to use in the presence '
- 'of threads in your application. The child process could '
- 'deadlock before exec is called. If you must use it, keep it '
- 'trivial! Minimize the number of libraries you call into.'
- 'https://docs.python.org/3/library/subprocess.html#popen-constructor'),
-
+ "W1501": (
+ '"%s" is not a valid mode for open.',
+ "bad-open-mode",
+ "Python supports: r, w, a[, x] modes with b, +, "
+ "and U (only with r) options. "
+ "See http://docs.python.org/2/library/functions.html#open",
+ ),
+ "W1502": (
+ "Using datetime.time in a boolean context.",
+ "boolean-datetime",
+ "Using datetime.time in a boolean context can hide "
+ "subtle bugs when the time they represent matches "
+ "midnight UTC. This behaviour was fixed in Python 3.5. "
+ "See http://bugs.python.org/issue13936 for reference.",
+ {"maxversion": (3, 5)},
+ ),
+ "W1503": (
+ "Redundant use of %s with constant " "value %r",
+ "redundant-unittest-assert",
+ "The first argument of assertTrue and assertFalse is "
+ "a condition. If a constant is passed as parameter, that "
+ "condition will be always true. In this case a warning "
+ "should be emitted.",
+ ),
+ "W1505": (
+ "Using deprecated method %s()",
+ "deprecated-method",
+ "The method is marked as deprecated and will be removed in "
+ "a future version of Python. Consider looking for an "
+ "alternative in the documentation.",
+ ),
+ "W1506": (
+ "threading.Thread needs the target function",
+ "bad-thread-instantiation",
+ "The warning is emitted when a threading.Thread class "
+ "is instantiated without the target function being passed. "
+ "By default, the first parameter is the group param, not the target param. ",
+ ),
+ "W1507": (
+ "Using copy.copy(os.environ). Use os.environ.copy() " "instead. ",
+ "shallow-copy-environ",
+ "os.environ is not a dict object but proxy object, so "
+ "shallow copy has still effects on original object. "
+ "See https://bugs.python.org/issue15373 for reference. ",
+ ),
+ "E1507": (
+ "%s does not support %s type argument",
+ "invalid-envvar-value",
+ "Env manipulation functions support only string type arguments. "
+ "See https://docs.python.org/3/library/os.html#os.getenv. ",
+ ),
+ "W1508": (
+ "%s default type is %s. Expected str or None.",
+ "invalid-envvar-default",
+ "Env manipulation functions return None or str values. "
+ "Supplying anything different as a default may cause bugs. "
+ "See https://docs.python.org/3/library/os.html#os.getenv. ",
+ ),
+ "W1509": (
+ "Using preexec_fn keyword which may be unsafe in the presence "
+ "of threads",
+ "subprocess-popen-preexec-fn",
+ "The preexec_fn parameter is not safe to use in the presence "
+ "of threads in your application. The child process could "
+ "deadlock before exec is called. If you must use it, keep it "
+ "trivial! Minimize the number of libraries you call into."
+ "https://docs.python.org/3/library/subprocess.html#popen-constructor",
+ ),
}
deprecated = {
0: [
- 'cgi.parse_qs', 'cgi.parse_qsl',
- 'ctypes.c_buffer',
- 'distutils.command.register.register.check_metadata',
- 'distutils.command.sdist.sdist.check_metadata',
- 'tkinter.Misc.tk_menuBar',
- 'tkinter.Menu.tk_bindForTraversal',
+ "cgi.parse_qs",
+ "cgi.parse_qsl",
+ "ctypes.c_buffer",
+ "distutils.command.register.register.check_metadata",
+ "distutils.command.sdist.sdist.check_metadata",
+ "tkinter.Misc.tk_menuBar",
+ "tkinter.Menu.tk_bindForTraversal",
],
2: {
(2, 6, 0): [
- 'commands.getstatus',
- 'os.popen2',
- 'os.popen3',
- 'os.popen4',
- 'macostools.touched',
+ "commands.getstatus",
+ "os.popen2",
+ "os.popen3",
+ "os.popen4",
+ "macostools.touched",
],
(2, 7, 0): [
- 'unittest.case.TestCase.assertEquals',
- 'unittest.case.TestCase.assertNotEquals',
- 'unittest.case.TestCase.assertAlmostEquals',
- 'unittest.case.TestCase.assertNotAlmostEquals',
- 'unittest.case.TestCase.assert_',
- 'xml.etree.ElementTree.Element.getchildren',
- 'xml.etree.ElementTree.Element.getiterator',
- 'xml.etree.ElementTree.XMLParser.getiterator',
- 'xml.etree.ElementTree.XMLParser.doctype',
+ "unittest.case.TestCase.assertEquals",
+ "unittest.case.TestCase.assertNotEquals",
+ "unittest.case.TestCase.assertAlmostEquals",
+ "unittest.case.TestCase.assertNotAlmostEquals",
+ "unittest.case.TestCase.assert_",
+ "xml.etree.ElementTree.Element.getchildren",
+ "xml.etree.ElementTree.Element.getiterator",
+ "xml.etree.ElementTree.XMLParser.getiterator",
+ "xml.etree.ElementTree.XMLParser.doctype",
],
},
3: {
(3, 0, 0): [
- 'inspect.getargspec',
- 'unittest.case.TestCase._deprecate.deprecated_func',
+ "inspect.getargspec",
+ "unittest.case.TestCase._deprecate.deprecated_func",
],
(3, 1, 0): [
- 'base64.encodestring', 'base64.decodestring',
- 'ntpath.splitunc',
+ "base64.encodestring",
+ "base64.decodestring",
+ "ntpath.splitunc",
],
(3, 2, 0): [
- 'cgi.escape',
- 'configparser.RawConfigParser.readfp',
- 'xml.etree.ElementTree.Element.getchildren',
- 'xml.etree.ElementTree.Element.getiterator',
- 'xml.etree.ElementTree.XMLParser.getiterator',
- 'xml.etree.ElementTree.XMLParser.doctype',
+ "cgi.escape",
+ "configparser.RawConfigParser.readfp",
+ "xml.etree.ElementTree.Element.getchildren",
+ "xml.etree.ElementTree.Element.getiterator",
+ "xml.etree.ElementTree.XMLParser.getiterator",
+ "xml.etree.ElementTree.XMLParser.doctype",
],
(3, 3, 0): [
- 'inspect.getmoduleinfo',
- 'logging.warn', 'logging.Logger.warn',
- 'logging.LoggerAdapter.warn',
- 'nntplib._NNTPBase.xpath',
- 'platform.popen',
+ "inspect.getmoduleinfo",
+ "logging.warn",
+ "logging.Logger.warn",
+ "logging.LoggerAdapter.warn",
+ "nntplib._NNTPBase.xpath",
+ "platform.popen",
],
(3, 4, 0): [
- 'importlib.find_loader',
- 'plistlib.readPlist', 'plistlib.writePlist',
- 'plistlib.readPlistFromBytes',
- 'plistlib.writePlistToBytes',
- ],
- (3, 4, 4): [
- 'asyncio.tasks.async',
+ "importlib.find_loader",
+ "plistlib.readPlist",
+ "plistlib.writePlist",
+ "plistlib.readPlistFromBytes",
+ "plistlib.writePlistToBytes",
],
+ (3, 4, 4): ["asyncio.tasks.async"],
(3, 5, 0): [
- 'fractions.gcd',
- 'inspect.getargvalues',
- 'inspect.formatargspec', 'inspect.formatargvalues',
- 'inspect.getcallargs',
- 'platform.linux_distribution', 'platform.dist',
- ],
- (3, 6, 0): [
- 'importlib._bootstrap_external.FileLoader.load_module',
+ "fractions.gcd",
+ "inspect.getargvalues",
+ "inspect.formatargspec",
+ "inspect.formatargvalues",
+ "inspect.getcallargs",
+ "platform.linux_distribution",
+ "platform.dist",
],
+ (3, 6, 0): ["importlib._bootstrap_external.FileLoader.load_module"],
},
}
def _check_bad_thread_instantiation(self, node):
if not node.kwargs and not node.keywords and len(node.args) <= 1:
- self.add_message('bad-thread-instantiation', node=node)
+ self.add_message("bad-thread-instantiation", node=node)
def _check_for_preexec_fn_in_Popen(self, node):
if node.keywords:
for keyword in node.keywords:
- if keyword.arg == 'preexec_fn':
- self.add_message('subprocess-popen-preexec-fn', node=node)
+ if keyword.arg == "preexec_fn":
+ self.add_message("subprocess-popen-preexec-fn", node=node)
def _check_shallow_copy_environ(self, node):
arg = utils.get_argument_from_call(node, position=0)
for inferred in arg.inferred():
if inferred.qname() == OS_ENVIRON:
- self.add_message('shallow-copy-environ', node=node)
+ self.add_message("shallow-copy-environ", node=node)
break
- @utils.check_messages('bad-open-mode',
- 'redundant-unittest-assert',
- 'deprecated-method',
- 'bad-thread-instantiation',
- 'shallow-copy-environ',
- 'invalid-envvar-value',
- 'invalid-envvar-default',
- 'subprocess-popen-preexec-fn')
+ @utils.check_messages(
+ "bad-open-mode",
+ "redundant-unittest-assert",
+ "deprecated-method",
+ "bad-thread-instantiation",
+ "shallow-copy-environ",
+ "invalid-envvar-value",
+ "invalid-envvar-default",
+ "subprocess-popen-preexec-fn",
+ )
def visit_call(self, node):
"""Visit a Call node."""
try:
@@ -242,7 +261,7 @@ class StdlibChecker(BaseChecker):
if inferred is astroid.Uninferable:
continue
elif inferred.root().name == OPEN_MODULE:
- if getattr(node.func, 'name', None) in OPEN_FILES:
+ if getattr(node.func, "name", None) in OPEN_FILES:
self._check_open_mode(node)
elif inferred.root().name == UNITTEST_CASE:
self._check_redundant_assert(node, inferred)
@@ -261,20 +280,20 @@ class StdlibChecker(BaseChecker):
except astroid.InferenceError:
return
- @utils.check_messages('boolean-datetime')
+ @utils.check_messages("boolean-datetime")
def visit_unaryop(self, node):
- if node.op == 'not':
+ if node.op == "not":
self._check_datetime(node.operand)
- @utils.check_messages('boolean-datetime')
+ @utils.check_messages("boolean-datetime")
def visit_if(self, node):
self._check_datetime(node.test)
- @utils.check_messages('boolean-datetime')
+ @utils.check_messages("boolean-datetime")
def visit_ifexp(self, node):
self._check_datetime(node.test)
- @utils.check_messages('boolean-datetime')
+ @utils.check_messages("boolean-datetime")
def visit_boolop(self, node):
for value in node.values:
self._check_datetime(value)
@@ -291,30 +310,35 @@ class StdlibChecker(BaseChecker):
return
# Reject nodes which aren't of interest to us.
- acceptable_nodes = (astroid.BoundMethod,
- astroid.UnboundMethod,
- astroid.FunctionDef)
+ acceptable_nodes = (
+ astroid.BoundMethod,
+ astroid.UnboundMethod,
+ astroid.FunctionDef,
+ )
if not isinstance(inferred, acceptable_nodes):
return
qname = inferred.qname()
if qname in self.deprecated[0]:
- self.add_message('deprecated-method', node=node,
- args=(func_name, ))
+ self.add_message("deprecated-method", node=node, args=(func_name,))
else:
for since_vers, func_list in self.deprecated[py_vers].items():
if since_vers <= sys.version_info and qname in func_list:
- self.add_message('deprecated-method', node=node,
- args=(func_name, ))
+ self.add_message("deprecated-method", node=node, args=(func_name,))
break
def _check_redundant_assert(self, node, infer):
- if (isinstance(infer, astroid.BoundMethod) and
- node.args and isinstance(node.args[0], astroid.Const) and
- infer.name in ['assertTrue', 'assertFalse']):
- self.add_message('redundant-unittest-assert',
- args=(infer.name, node.args[0].value, ),
- node=node)
+ if (
+ isinstance(infer, astroid.BoundMethod)
+ and node.args
+ and isinstance(node.args[0], astroid.Const)
+ and infer.name in ["assertTrue", "assertFalse"]
+ ):
+ self.add_message(
+ "redundant-unittest-assert",
+ args=(infer.name, node.args[0].value),
+ node=node,
+ )
def _check_datetime(self, node):
""" Check that a datetime was infered.
@@ -324,27 +348,25 @@ class StdlibChecker(BaseChecker):
infered = next(node.infer())
except astroid.InferenceError:
return
- if (isinstance(infered, Instance) and
- infered.qname() == 'datetime.time'):
- self.add_message('boolean-datetime', node=node)
+ if isinstance(infered, Instance) and infered.qname() == "datetime.time":
+ self.add_message("boolean-datetime", node=node)
def _check_open_mode(self, node):
"""Check that the mode argument of an open or file call is valid."""
try:
- mode_arg = utils.get_argument_from_call(node, position=1,
- keyword='mode')
+ mode_arg = utils.get_argument_from_call(node, position=1, keyword="mode")
except utils.NoSuchArgumentError:
return
if mode_arg:
mode_arg = utils.safe_infer(mode_arg)
- if (isinstance(mode_arg, astroid.Const)
- and not _check_mode_str(mode_arg.value)):
- self.add_message('bad-open-mode', node=node,
- args=mode_arg.value)
+ if isinstance(mode_arg, astroid.Const) and not _check_mode_str(
+ mode_arg.value
+ ):
+ self.add_message("bad-open-mode", node=node, args=mode_arg.value)
def _check_env_function(self, node, infer):
- env_name_kwarg = 'key'
- env_value_kwarg = 'default'
+ env_name_kwarg = "key"
+ env_value_kwarg = "default"
if node.keywords:
kwargs = {keyword.arg: keyword.value for keyword in node.keywords}
else:
@@ -359,10 +381,10 @@ class StdlibChecker(BaseChecker):
if env_name_arg:
self._check_invalid_envvar_value(
node=node,
- message='invalid-envvar-value',
+ message="invalid-envvar-value",
call_arg=utils.safe_infer(env_name_arg),
infer=infer,
- allow_none=False
+ allow_none=False,
)
if len(node.args) == 2:
@@ -376,7 +398,7 @@ class StdlibChecker(BaseChecker):
self._check_invalid_envvar_value(
node=node,
infer=infer,
- message='invalid-envvar-default',
+ message="invalid-envvar-default",
call_arg=utils.safe_infer(env_value_arg),
allow_none=True,
)
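
The reformatted `StdlibChecker` messages above are easiest to read next to code they would fire on. Hypothetical snippets (the flagged forms are left commented out so the example itself runs cleanly):

    import os
    import threading

    # flagged (bad-thread-instantiation): the first positional parameter is `group`, not `target`
    # worker = threading.Thread()
    worker = threading.Thread(target=print, args=("hello",))

    # flagged (shallow-copy-environ): copy.copy(os.environ) copies only the proxy object
    # env = copy.copy(os.environ)
    env = os.environ.copy()

    # flagged (bad-open-mode): "rwx" is not a valid mode string
    # fh = open("notes.txt", "rwx")
    with open("notes.txt", "w") as fh:
        fh.write("ok\n")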
diff --git a/pylint/checkers/strings.py b/pylint/checkers/strings.py
index 10af9a7ac..c287298c7 100644
--- a/pylint/checkers/strings.py
+++ b/pylint/checkers/strings.py
@@ -41,98 +41,138 @@ _PY3K = sys.version_info[:2] >= (3, 0)
_PY27 = sys.version_info[:2] == (2, 7)
MSGS = {
- 'E1300': ("Unsupported format character %r (%#02x) at index %d",
- "bad-format-character",
- "Used when an unsupported format character is used in a format"
- "string."),
- 'E1301': ("Format string ends in middle of conversion specifier",
- "truncated-format-string",
- "Used when a format string terminates before the end of a "
- "conversion specifier."),
- 'E1302': ("Mixing named and unnamed conversion specifiers in format string",
- "mixed-format-string",
- "Used when a format string contains both named (e.g. '%(foo)d') "
- "and unnamed (e.g. '%d') conversion specifiers. This is also "
- "used when a named conversion specifier contains * for the "
- "minimum field width and/or precision."),
- 'E1303': ("Expected mapping for format string, not %s",
- "format-needs-mapping",
- "Used when a format string that uses named conversion specifiers "
- "is used with an argument that is not a mapping."),
- 'W1300': ("Format string dictionary key should be a string, not %s",
- "bad-format-string-key",
- "Used when a format string that uses named conversion specifiers "
- "is used with a dictionary whose keys are not all strings."),
- 'W1301': ("Unused key %r in format string dictionary",
- "unused-format-string-key",
- "Used when a format string that uses named conversion specifiers "
- "is used with a dictionary that contains keys not required by the "
- "format string."),
- 'E1304': ("Missing key %r in format string dictionary",
- "missing-format-string-key",
- "Used when a format string that uses named conversion specifiers "
- "is used with a dictionary that doesn't contain all the keys "
- "required by the format string."),
- 'E1305': ("Too many arguments for format string",
- "too-many-format-args",
- "Used when a format string that uses unnamed conversion "
- "specifiers is given too many arguments."),
- 'E1306': ("Not enough arguments for format string",
- "too-few-format-args",
- "Used when a format string that uses unnamed conversion "
- "specifiers is given too few arguments"),
- 'E1307': ("Argument %r does not match format type %r",
- "bad-string-format-type",
- "Used when a type required by format string "
- "is not suitable for actual argument type"),
- 'E1310': ("Suspicious argument in %s.%s call",
- "bad-str-strip-call",
- "The argument to a str.{l,r,}strip call contains a"
- " duplicate character, "),
- 'W1302': ("Invalid format string",
- "bad-format-string",
- "Used when a PEP 3101 format string is invalid."),
- 'W1303': ("Missing keyword argument %r for format string",
- "missing-format-argument-key",
- "Used when a PEP 3101 format string that uses named fields "
- "doesn't receive one or more required keywords."),
- 'W1304': ("Unused format argument %r",
- "unused-format-string-argument",
- "Used when a PEP 3101 format string that uses named "
- "fields is used with an argument that "
- "is not required by the format string."),
- 'W1305': ("Format string contains both automatic field numbering "
- "and manual field specification",
- "format-combined-specification",
- "Used when a PEP 3101 format string contains both automatic "
- "field numbering (e.g. '{}') and manual field "
- "specification (e.g. '{0}')."),
- 'W1306': ("Missing format attribute %r in format specifier %r",
- "missing-format-attribute",
- "Used when a PEP 3101 format string uses an "
- "attribute specifier ({0.length}), but the argument "
- "passed for formatting doesn't have that attribute."),
- 'W1307': ("Using invalid lookup key %r in format specifier %r",
- "invalid-format-index",
- "Used when a PEP 3101 format string uses a lookup specifier "
- "({a[1]}), but the argument passed for formatting "
- "doesn't contain or doesn't have that key as an attribute."),
- 'W1308': ("Duplicate string formatting argument %r, consider passing as named argument",
- "duplicate-string-formatting-argument",
- "Used when we detect that a string formatting is "
- "repeating an argument instead of using named string arguments"),
- }
-
-OTHER_NODES = (astroid.Const, astroid.List,
- astroid.Lambda, astroid.FunctionDef,
- astroid.ListComp, astroid.SetComp, astroid.GeneratorExp)
+ "E1300": (
+ "Unsupported format character %r (%#02x) at index %d",
+ "bad-format-character",
+ "Used when an unsupported format character is used in a format" "string.",
+ ),
+ "E1301": (
+ "Format string ends in middle of conversion specifier",
+ "truncated-format-string",
+ "Used when a format string terminates before the end of a "
+ "conversion specifier.",
+ ),
+ "E1302": (
+ "Mixing named and unnamed conversion specifiers in format string",
+ "mixed-format-string",
+ "Used when a format string contains both named (e.g. '%(foo)d') "
+ "and unnamed (e.g. '%d') conversion specifiers. This is also "
+ "used when a named conversion specifier contains * for the "
+ "minimum field width and/or precision.",
+ ),
+ "E1303": (
+ "Expected mapping for format string, not %s",
+ "format-needs-mapping",
+ "Used when a format string that uses named conversion specifiers "
+ "is used with an argument that is not a mapping.",
+ ),
+ "W1300": (
+ "Format string dictionary key should be a string, not %s",
+ "bad-format-string-key",
+ "Used when a format string that uses named conversion specifiers "
+ "is used with a dictionary whose keys are not all strings.",
+ ),
+ "W1301": (
+ "Unused key %r in format string dictionary",
+ "unused-format-string-key",
+ "Used when a format string that uses named conversion specifiers "
+ "is used with a dictionary that contains keys not required by the "
+ "format string.",
+ ),
+ "E1304": (
+ "Missing key %r in format string dictionary",
+ "missing-format-string-key",
+ "Used when a format string that uses named conversion specifiers "
+ "is used with a dictionary that doesn't contain all the keys "
+ "required by the format string.",
+ ),
+ "E1305": (
+ "Too many arguments for format string",
+ "too-many-format-args",
+ "Used when a format string that uses unnamed conversion "
+ "specifiers is given too many arguments.",
+ ),
+ "E1306": (
+ "Not enough arguments for format string",
+ "too-few-format-args",
+ "Used when a format string that uses unnamed conversion "
+ "specifiers is given too few arguments",
+ ),
+ "E1307": (
+ "Argument %r does not match format type %r",
+ "bad-string-format-type",
+ "Used when a type required by format string "
+ "is not suitable for actual argument type",
+ ),
+ "E1310": (
+ "Suspicious argument in %s.%s call",
+ "bad-str-strip-call",
+ "The argument to a str.{l,r,}strip call contains a" " duplicate character, ",
+ ),
+ "W1302": (
+ "Invalid format string",
+ "bad-format-string",
+ "Used when a PEP 3101 format string is invalid.",
+ ),
+ "W1303": (
+ "Missing keyword argument %r for format string",
+ "missing-format-argument-key",
+ "Used when a PEP 3101 format string that uses named fields "
+ "doesn't receive one or more required keywords.",
+ ),
+ "W1304": (
+ "Unused format argument %r",
+ "unused-format-string-argument",
+ "Used when a PEP 3101 format string that uses named "
+ "fields is used with an argument that "
+ "is not required by the format string.",
+ ),
+ "W1305": (
+ "Format string contains both automatic field numbering "
+ "and manual field specification",
+ "format-combined-specification",
+ "Used when a PEP 3101 format string contains both automatic "
+ "field numbering (e.g. '{}') and manual field "
+ "specification (e.g. '{0}').",
+ ),
+ "W1306": (
+ "Missing format attribute %r in format specifier %r",
+ "missing-format-attribute",
+ "Used when a PEP 3101 format string uses an "
+ "attribute specifier ({0.length}), but the argument "
+ "passed for formatting doesn't have that attribute.",
+ ),
+ "W1307": (
+ "Using invalid lookup key %r in format specifier %r",
+ "invalid-format-index",
+ "Used when a PEP 3101 format string uses a lookup specifier "
+ "({a[1]}), but the argument passed for formatting "
+ "doesn't contain or doesn't have that key as an attribute.",
+ ),
+ "W1308": (
+ "Duplicate string formatting argument %r, consider passing as named argument",
+ "duplicate-string-formatting-argument",
+ "Used when we detect that a string formatting is "
+ "repeating an argument instead of using named string arguments",
+ ),
+}
+
+OTHER_NODES = (
+ astroid.Const,
+ astroid.List,
+ astroid.Lambda,
+ astroid.FunctionDef,
+ astroid.ListComp,
+ astroid.SetComp,
+ astroid.GeneratorExp,
+)
BUILTINS_STR = builtins.__name__ + ".str"
BUILTINS_FLOAT = builtins.__name__ + ".float"
BUILTINS_INT = builtins.__name__ + ".int"
if _PY3K:
- import _string # pylint: disable=wrong-import-position, wrong-import-order
+ import _string # pylint: disable=wrong-import-position, wrong-import-order
def split_format_field_names(format_string):
try:
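
The reformatted MSGS table above keeps the same semantics; a few hypothetical one-liners showing what the main `%`-format and `.format()` messages describe (the broken forms are commented out because they also fail at runtime):

    # "%s %s" % ("only-one",)    -> too-few-format-args
    # "%d" % "text"              -> bad-string-format-type: str given where %d expects a number
    # "{} {0}".format(1, 2)      -> format-combined-specification: automatic and manual numbering mixed
    ok = "%s and %s" % ("spam", "eggs")
    also_ok = "{0} and {1}".format("spam", "eggs")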
@@ -140,7 +180,9 @@ if _PY3K:
except ValueError:
raise utils.IncompleteFormatString()
+
else:
+
def _field_iterator_convertor(iterator):
for is_attr, key in iterator:
if isinstance(key, numbers.Number):
@@ -191,6 +233,7 @@ def collect_string_fields(format_string):
return
raise utils.IncompleteFormatString(format_string)
+
def parse_format_method_string(format_string):
"""
Parses a PEP 3101 format string, returning a tuple of
@@ -233,6 +276,7 @@ def get_access_path(key, parts):
path.append("[{!r}]".format(specifier))
return str(key) + "".join(path)
+
def arg_matches_format_type(arg_type, format_type):
if format_type in "sr":
# All types can be printed with %s and %r
@@ -249,42 +293,44 @@ def arg_matches_format_type(arg_type, format_type):
return False
return True
+
class StringFormatChecker(BaseChecker):
"""Checks string formatting operations to ensure that the format string
is valid and the arguments match the format string.
"""
__implements__ = (IAstroidChecker,)
- name = 'string'
+ name = "string"
msgs = MSGS
# pylint: disable=too-many-branches
@check_messages(*MSGS)
def visit_binop(self, node):
- if node.op != '%':
+ if node.op != "%":
return
left = node.left
args = node.right
- if not (isinstance(left, astroid.Const)
- and isinstance(left.value, str)):
+ if not (isinstance(left, astroid.Const) and isinstance(left.value, str)):
return
format_string = left.value
try:
- required_keys, required_num_args, required_key_types, \
- required_arg_types = utils.parse_format_string(format_string)
+ required_keys, required_num_args, required_key_types, required_arg_types = utils.parse_format_string(
+ format_string
+ )
except utils.UnsupportedFormatCharacter as e:
c = format_string[e.index]
- self.add_message('bad-format-character',
- node=node, args=(c, ord(c), e.index))
+ self.add_message(
+ "bad-format-character", node=node, args=(c, ord(c), e.index)
+ )
return
except utils.IncompleteFormatString:
- self.add_message('truncated-format-string', node=node)
+ self.add_message("truncated-format-string", node=node)
return
if required_keys and required_num_args:
# The format string uses both named and unnamed format
# specifiers.
- self.add_message('mixed-format-string', node=node)
+ self.add_message("mixed-format-string", node=node)
elif required_keys:
# The format string uses only named format specifiers.
# Check that the RHS of the % operator is a mapping object
@@ -299,8 +345,9 @@ class StringFormatChecker(BaseChecker):
if isinstance(key, str):
keys.add(key)
else:
- self.add_message('bad-format-string-key',
- node=node, args=key)
+ self.add_message(
+ "bad-format-string-key", node=node, args=key
+ )
else:
# One of the keys was something other than a
# constant. Since we can't tell what it is,
@@ -310,32 +357,36 @@ class StringFormatChecker(BaseChecker):
if not unknown_keys:
for key in required_keys:
if key not in keys:
- self.add_message('missing-format-string-key',
- node=node, args=key)
+ self.add_message(
+ "missing-format-string-key", node=node, args=key
+ )
for key in keys:
if key not in required_keys:
- self.add_message('unused-format-string-key',
- node=node, args=key)
+ self.add_message(
+ "unused-format-string-key", node=node, args=key
+ )
for key, arg in args.items:
if not isinstance(key, astroid.Const):
continue
format_type = required_key_types.get(key.value, None)
arg_type = utils.safe_infer(arg)
- if (format_type is not None and
- arg_type not in (None, astroid.Uninferable) and
- not arg_matches_format_type(arg_type,
- format_type)):
- self.add_message('bad-string-format-type',
- node=node,
- args=(arg_type.pytype(), format_type))
+ if (
+ format_type is not None
+ and arg_type not in (None, astroid.Uninferable)
+ and not arg_matches_format_type(arg_type, format_type)
+ ):
+ self.add_message(
+ "bad-string-format-type",
+ node=node,
+ args=(arg_type.pytype(), format_type),
+ )
elif isinstance(args, (OTHER_NODES, astroid.Tuple)):
type_name = type(args).__name__
- self.add_message('format-needs-mapping',
- node=node, args=type_name)
+ self.add_message("format-needs-mapping", node=node, args=type_name)
# else:
- # The RHS of the format specifier is a name or
- # expression. It may be a mapping object, so
- # there's nothing we can check.
+ # The RHS of the format specifier is a name or
+ # expression. It may be a mapping object, so
+ # there's nothing we can check.
else:
# The format string uses only unnamed format specifiers.
# Check that the number of arguments passed to the RHS of
@@ -345,7 +396,7 @@ class StringFormatChecker(BaseChecker):
if isinstance(args, astroid.Tuple):
rhs_tuple = utils.safe_infer(args)
num_args = None
- if hasattr(rhs_tuple, 'elts'):
+ if hasattr(rhs_tuple, "elts"):
args_elts = rhs_tuple.elts
num_args = len(args_elts)
elif isinstance(args, (OTHER_NODES, (astroid.Dict, astroid.DictComp))):
@@ -358,38 +409,52 @@ class StringFormatChecker(BaseChecker):
num_args = None
if num_args is not None:
if num_args > required_num_args:
- self.add_message('too-many-format-args', node=node)
+ self.add_message("too-many-format-args", node=node)
elif num_args < required_num_args:
- self.add_message('too-few-format-args', node=node)
+ self.add_message("too-few-format-args", node=node)
for arg, format_type in zip(args_elts, required_arg_types):
arg_type = utils.safe_infer(arg)
- if (arg_type not in (None, astroid.Uninferable) and
- not arg_matches_format_type(arg_type, format_type)):
- self.add_message('bad-string-format-type',
- node=node, args=(arg_type.pytype(), format_type))
+ if arg_type not in (
+ None,
+ astroid.Uninferable,
+ ) and not arg_matches_format_type(arg_type, format_type):
+ self.add_message(
+ "bad-string-format-type",
+ node=node,
+ args=(arg_type.pytype(), format_type),
+ )
@check_messages(*MSGS)
def visit_call(self, node):
func = utils.safe_infer(node.func)
- if (isinstance(func, astroid.BoundMethod)
- and isinstance(func.bound, astroid.Instance)
- and func.bound.name in ('str', 'unicode', 'bytes')):
- if func.name in ('strip', 'lstrip', 'rstrip') and node.args:
+ if (
+ isinstance(func, astroid.BoundMethod)
+ and isinstance(func.bound, astroid.Instance)
+ and func.bound.name in ("str", "unicode", "bytes")
+ ):
+ if func.name in ("strip", "lstrip", "rstrip") and node.args:
arg = utils.safe_infer(node.args[0])
if not isinstance(arg, astroid.Const):
return
if len(arg.value) != len(set(arg.value)):
- self.add_message('bad-str-strip-call', node=node,
- args=(func.bound.name, func.name))
- elif func.name == 'format':
+ self.add_message(
+ "bad-str-strip-call",
+ node=node,
+ args=(func.bound.name, func.name),
+ )
+ elif func.name == "format":
self._check_new_format(node, func)
def _detect_vacuous_formatting(self, node, positional_arguments):
- counter = Counter(arg.name for arg in positional_arguments if isinstance(arg, astroid.Name))
+ counter = Counter(
+ arg.name for arg in positional_arguments if isinstance(arg, astroid.Name)
+ )
for name, count in counter.items():
if count == 1:
continue
- self.add_message('duplicate-string-formatting-argument', node=node, args=(name, ))
+ self.add_message(
+ "duplicate-string-formatting-argument", node=node, args=(name,)
+ )
def _check_new_format(self, node, func):
""" Check the new string formatting. """
@@ -404,8 +469,9 @@ class StringFormatChecker(BaseChecker):
#
# fmt = 'some string {}'.format
# fmt('arg')
- if (isinstance(node.func, astroid.Attribute)
- and not isinstance(node.func.expr, astroid.Const)):
+ if isinstance(node.func, astroid.Attribute) and not isinstance(
+ node.func.expr, astroid.Const
+ ):
return
if node.starargs or node.kwargs:
return
@@ -423,35 +489,34 @@ class StringFormatChecker(BaseChecker):
try:
fields, num_args, manual_pos = parse_format_method_string(strnode.value)
except utils.IncompleteFormatString:
- self.add_message('bad-format-string', node=node)
+ self.add_message("bad-format-string", node=node)
return
positional_arguments = call_site.positional_arguments
named_arguments = call_site.keyword_arguments
named_fields = {field[0] for field in fields if isinstance(field[0], str)}
if num_args and manual_pos:
- self.add_message('format-combined-specification',
- node=node)
+ self.add_message("format-combined-specification", node=node)
return
check_args = False
# Consider "{[0]} {[1]}" as num_args.
- num_args += sum(1 for field in named_fields if field == '')
+ num_args += sum(1 for field in named_fields if field == "")
if named_fields:
for field in named_fields:
if field and field not in named_arguments:
- self.add_message('missing-format-argument-key',
- node=node,
- args=(field, ))
+ self.add_message(
+ "missing-format-argument-key", node=node, args=(field,)
+ )
for field in named_arguments:
if field not in named_fields:
- self.add_message('unused-format-string-argument',
- node=node,
- args=(field, ))
+ self.add_message(
+ "unused-format-string-argument", node=node, args=(field,)
+ )
# num_args can be 0 if manual_pos is not.
num_args = num_args or manual_pos
if positional_arguments or num_args:
- empty = any(True for field in named_fields if field == '')
+ empty = any(True for field in named_fields if field == "")
if named_arguments or empty:
# Verify the required number of positional arguments
# only if the .format got at least one keyword argument.
@@ -465,9 +530,9 @@ class StringFormatChecker(BaseChecker):
# num_args can be 0 if manual_pos is not.
num_args = num_args or manual_pos
if len(positional_arguments) > num_args:
- self.add_message('too-many-format-args', node=node)
+ self.add_message("too-many-format-args", node=node)
elif len(positional_arguments) < num_args:
- self.add_message('too-few-format-args', node=node)
+ self.add_message("too-few-format-args", node=node)
self._detect_vacuous_formatting(node, positional_arguments)
self._check_new_format_specifiers(node, fields, named_arguments)
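A hedged illustration (not part of the diff) of the argument-count bookkeeping above; each call maps to one of the messages emitted in this hunk:

    "{} {}".format("a")           # too-few-format-args
    "{}".format("a", "b")         # too-many-format-args
    "{0} {}".format("a", "b")     # format-combined-specification: manual and automatic numbering mixed
    "{name}".format(nam="x")      # missing-format-argument-key ('name') and unused-format-string-argument ('nam')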
@@ -480,7 +545,7 @@ class StringFormatChecker(BaseChecker):
for key, specifiers in fields:
# Obtain the argument. If it can't be obtained
# or inferred, skip this check.
- if key == '':
+ if key == "":
# {[0]} will have an unnamed argument, defaulting
# to 0. It will not be present in `named`, so use the value
# 0 for it.
@@ -518,23 +583,29 @@ class StringFormatChecker(BaseChecker):
try:
previous = previous.getattr(specifier)[0]
except astroid.NotFoundError:
- if (hasattr(previous, 'has_dynamic_getattr') and
- previous.has_dynamic_getattr()):
+ if (
+ hasattr(previous, "has_dynamic_getattr")
+ and previous.has_dynamic_getattr()
+ ):
# Don't warn if the object has a custom __getattr__
break
path = get_access_path(key, parsed)
- self.add_message('missing-format-attribute',
- args=(specifier, path),
- node=node)
+ self.add_message(
+ "missing-format-attribute",
+ args=(specifier, path),
+ node=node,
+ )
break
else:
warn_error = False
- if hasattr(previous, 'getitem'):
+ if hasattr(previous, "getitem"):
try:
previous = previous.getitem(astroid.Const(specifier))
- except (astroid.AstroidIndexError,
- astroid.AstroidTypeError,
- astroid.AttributeInferenceError):
+ except (
+ astroid.AstroidIndexError,
+ astroid.AstroidTypeError,
+ astroid.AttributeInferenceError,
+ ):
warn_error = True
except astroid.InferenceError:
break
@@ -545,15 +616,15 @@ class StringFormatChecker(BaseChecker):
# Lookup __getitem__ in the current node,
# but skip further checks, because we can't
# retrieve the looked object
- previous.getattr('__getitem__')
+ previous.getattr("__getitem__")
break
except astroid.NotFoundError:
warn_error = True
if warn_error:
path = get_access_path(key, parsed)
- self.add_message('invalid-format-index',
- args=(specifier, path),
- node=node)
+ self.add_message(
+ "invalid-format-index", args=(specifier, path), node=node
+ )
break
try:
@@ -565,34 +636,38 @@ class StringFormatChecker(BaseChecker):
class StringConstantChecker(BaseTokenChecker):
"""Check string literals"""
+
__implements__ = (ITokenChecker, IRawChecker)
- name = 'string_constant'
+ name = "string_constant"
msgs = {
- 'W1401': ('Anomalous backslash in string: \'%s\'. '
- 'String constant might be missing an r prefix.',
- 'anomalous-backslash-in-string',
- 'Used when a backslash is in a literal string but not as an '
- 'escape.'),
- 'W1402': ('Anomalous Unicode escape in byte string: \'%s\'. '
- 'String constant might be missing an r or u prefix.',
- 'anomalous-unicode-escape-in-string',
- 'Used when an escape like \\u is encountered in a byte '
- 'string where it has no effect.'),
- }
+ "W1401": (
+ "Anomalous backslash in string: '%s'. "
+ "String constant might be missing an r prefix.",
+ "anomalous-backslash-in-string",
+ "Used when a backslash is in a literal string but not as an " "escape.",
+ ),
+ "W1402": (
+ "Anomalous Unicode escape in byte string: '%s'. "
+ "String constant might be missing an r or u prefix.",
+ "anomalous-unicode-escape-in-string",
+ "Used when an escape like \\u is encountered in a byte "
+ "string where it has no effect.",
+ ),
+ }
# Characters that have a special meaning after a backslash in either
# Unicode or byte strings.
- ESCAPE_CHARACTERS = 'abfnrtvx\n\r\t\\\'\"01234567'
+ ESCAPE_CHARACTERS = "abfnrtvx\n\r\t\\'\"01234567"
# TODO(mbp): Octal characters are quite an edge case today; people may
# prefer a separate warning where they occur. \0 should be allowed.
# Characters that have a special meaning after a backslash but only in
# Unicode strings.
- UNICODE_ESCAPE_CHARACTERS = 'uUN'
+ UNICODE_ESCAPE_CHARACTERS = "uUN"
def process_module(self, module):
- self._unicode_literals = 'unicode_literals' in module.future_imports
+ self._unicode_literals = "unicode_literals" in module.future_imports
def process_tokens(self, tokens):
for (tok_type, token, (start_row, _), _, _) in tokens:
@@ -603,18 +678,18 @@ class StringConstantChecker(BaseTokenChecker):
def process_string_token(self, token, start_row):
for i, c in enumerate(token):
- if c in '\'\"':
+ if c in "'\"":
quote_char = c
break
# pylint: disable=undefined-loop-variable
- prefix = token[:i].lower() # markers like u, b, r.
+ prefix = token[:i].lower() # markers like u, b, r.
after_prefix = token[i:]
if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char:
string_body = after_prefix[3:-3]
else:
string_body = after_prefix[1:-1] # Chop off quotes
# No special checks on raw strings at the moment.
- if 'r' not in prefix:
+ if "r" not in prefix:
self.process_non_raw_string_token(prefix, string_body, start_row)
def process_non_raw_string_token(self, prefix, string_body, start_row):
@@ -640,31 +715,34 @@ class StringConstantChecker(BaseTokenChecker):
# add_message doesn't seem to have a way to pass it through at present.
i = 0
while True:
- i = string_body.find('\\', i)
+ i = string_body.find("\\", i)
if i == -1:
break
# There must be a next character; having a backslash at the end
# of the string would be a SyntaxError.
- next_char = string_body[i+1]
- match = string_body[i:i+2]
+ next_char = string_body[i + 1]
+ match = string_body[i : i + 2]
if next_char in self.UNICODE_ESCAPE_CHARACTERS:
- if 'u' in prefix:
+ if "u" in prefix:
pass
- elif (_PY3K or self._unicode_literals) and 'b' not in prefix:
+ elif (_PY3K or self._unicode_literals) and "b" not in prefix:
pass # unicode by default
else:
- self.add_message('anomalous-unicode-escape-in-string',
- line=start_row, args=(match, ))
+ self.add_message(
+ "anomalous-unicode-escape-in-string",
+ line=start_row,
+ args=(match,),
+ )
elif next_char not in self.ESCAPE_CHARACTERS:
- self.add_message('anomalous-backslash-in-string',
- line=start_row, args=(match, ))
+ self.add_message(
+ "anomalous-backslash-in-string", line=start_row, args=(match,)
+ )
# Whether it was a valid escape or not, backslash followed by
# another character can always be consumed whole: the second
# character can never be the start of a new backslash escape.
i += 2
-
def register(linter):
"""required method to auto register this checker """
linter.register_checker(StringFormatChecker(linter))
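To make the StringConstantChecker changes above concrete, a small illustrative snippet (not from the diff) with literals it would warn about:

    import re

    PATTERN = re.compile('\d+')    # anomalous-backslash-in-string (W1401): \d is not a recognised escape
    PATTERN = re.compile(r'\d+')   # raw string: no warning
    PAYLOAD = b'caf\u00e9'         # anomalous-unicode-escape-in-string (W1402): \u has no effect in a byte string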
diff --git a/pylint/checkers/typecheck.py b/pylint/checkers/typecheck.py
index fff87753b..5df5c9504 100644
--- a/pylint/checkers/typecheck.py
+++ b/pylint/checkers/typecheck.py
@@ -58,34 +58,41 @@ from astroid import modutils
from pylint.interfaces import IAstroidChecker, INFERENCE
from pylint.checkers import BaseChecker
from pylint.checkers.utils import (
- is_super, check_messages, decorated_with_property,
- decorated_with, node_ignores_exception,
- is_iterable, is_mapping, supports_membership_test,
- is_comprehension, is_inside_abstract_class,
+ is_super,
+ check_messages,
+ decorated_with_property,
+ decorated_with,
+ node_ignores_exception,
+ is_iterable,
+ is_mapping,
+ supports_membership_test,
+ is_comprehension,
+ is_inside_abstract_class,
supports_getitem,
supports_setitem,
supports_delitem,
safe_infer,
has_known_bases,
is_builtin_object,
- singledispatch)
+ singledispatch,
+)
from pylint.utils import get_global_option
BUILTINS = builtins.__name__
STR_FORMAT = {"%s.str.format" % BUILTINS}
-ASYNCIO_COROUTINE = 'asyncio.coroutines.coroutine'
+ASYNCIO_COROUTINE = "asyncio.coroutines.coroutine"
def _unflatten(iterable):
for index, elem in enumerate(iterable):
- if (isinstance(elem, collections.Sequence) and
- not isinstance(elem, str)):
+ if isinstance(elem, collections.Sequence) and not isinstance(elem, str):
for single_elem in _unflatten(elem):
yield single_elem
elif elem and not index:
# We're interested only in the first element.
yield elem
+
def _flatten_container(iterable):
# Flatten nested containers into a single iterable
for item in iterable:
@@ -110,16 +117,19 @@ def _is_owner_ignored(owner, name, ignored_classes, ignored_modules):
ignored_modules = set(ignored_modules)
module_name = owner.root().name
module_qname = owner.root().qname()
- if any(module_name in ignored_modules or
- module_qname in ignored_modules or
- fnmatch.fnmatch(module_qname, ignore) for ignore in ignored_modules):
+ if any(
+ module_name in ignored_modules
+ or module_qname in ignored_modules
+ or fnmatch.fnmatch(module_qname, ignore)
+ for ignore in ignored_modules
+ ):
return True
ignored_classes = set(ignored_classes)
- if hasattr(owner, 'qname'):
+ if hasattr(owner, "qname"):
qname = owner.qname()
else:
- qname = ''
+ qname = ""
return any(ignore in (name, qname) for ignore in ignored_classes)
@@ -127,7 +137,7 @@ def _is_owner_ignored(owner, name, ignored_classes, ignored_modules):
def _node_names(node):
# TODO: maybe we need an ABC for checking if an object is a scoped node
# or not?
- if not hasattr(node, 'locals'):
+ if not hasattr(node, "locals"):
return []
return node.locals.keys()
@@ -158,8 +168,7 @@ def _string_distance(seq1, seq2):
row[seq2_index] = min(
last_row[seq2_index] + 1,
row[seq2_index - 1] + 1,
- last_row[seq2_index - 1] + (seq1_char != seq2_char)
-
+ last_row[seq2_index - 1] + (seq1_char != seq2_char),
)
return row[seq2_length - 1]
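_string_distance above keeps the standard Levenshtein recurrence one row at a time; a minimal standalone sketch of the same idea (assumed equivalent, not the checker's exact code):

    def edit_distance(a, b):
        prev = list(range(len(b) + 1))
        for i, ca in enumerate(a, 1):
            row = [i] + [0] * len(b)
            for j, cb in enumerate(b, 1):
                row[j] = min(prev[j] + 1,                  # deletion
                             row[j - 1] + 1,               # insertion
                             prev[j - 1] + (ca != cb))     # substitution
            prev = row
        return prev[len(b)]

    edit_distance("attrib", "attribs")  # 1, close enough to be offered as a member hint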
@@ -184,9 +193,12 @@ def _similar_names(owner, attrname, distance_threshold, max_choices):
# Now get back the values with a minimum, up to the given
# limit or choices.
- picked = [name for (name, _) in
- heapq.nsmallest(max_choices, possible_names,
- key=operator.itemgetter(1))]
+ picked = [
+ name
+ for (name, _) in heapq.nsmallest(
+ max_choices, possible_names, key=operator.itemgetter(1)
+ )
+ ]
return sorted(picked)
@@ -206,111 +218,165 @@ def _missing_member_hint(owner, attrname, distance_threshold, max_choices):
MSGS = {
- 'E1101': ('%s %r has no %r member%s',
- 'no-member',
- 'Used when a variable is accessed for an unexistent member.',
- {'old_names': [('E1103', 'maybe-no-member')]}),
- 'I1101': ('%s %r has no %r member%s, but source is unavailable. Consider '
- 'adding this module to extension-pkg-whitelist if you want '
- 'to perform analysis based on run-time introspection of living objects.',
- 'c-extension-no-member',
- 'Used when a variable is accessed for non-existent member of C '
- 'extension. Due to unavailability of source static analysis is impossible, '
- 'but it may be performed by introspecting living objects in run-time.'),
- 'E1102': ('%s is not callable',
- 'not-callable',
- 'Used when an object being called has been inferred to a non '
- 'callable object.'),
- 'E1111': ('Assigning result of a function call, where the function has no return',
- 'assignment-from-no-return',
- 'Used when an assignment is done on a function call but the '
- 'inferred function doesn\'t return anything.'),
- 'E1120': ('No value for argument %s in %s call',
- 'no-value-for-parameter',
- 'Used when a function call passes too few arguments.'),
- 'E1121': ('Too many positional arguments for %s call',
- 'too-many-function-args',
- 'Used when a function call passes too many positional '
- 'arguments.'),
- 'E1123': ('Unexpected keyword argument %r in %s call',
- 'unexpected-keyword-arg',
- 'Used when a function call passes a keyword argument that '
- 'doesn\'t correspond to one of the function\'s parameter names.'),
- 'E1124': ('Argument %r passed by position and keyword in %s call',
- 'redundant-keyword-arg',
- 'Used when a function call would result in assigning multiple '
- 'values to a function parameter, one value from a positional '
- 'argument and one from a keyword argument.'),
- 'E1125': ('Missing mandatory keyword argument %r in %s call',
- 'missing-kwoa',
- ('Used when a function call does not pass a mandatory'
- ' keyword-only argument.')),
- 'E1126': ('Sequence index is not an int, slice, or instance with __index__',
- 'invalid-sequence-index',
- 'Used when a sequence type is indexed with an invalid type. '
- 'Valid types are ints, slices, and objects with an __index__ '
- 'method.'),
- 'E1127': ('Slice index is not an int, None, or instance with __index__',
- 'invalid-slice-index',
- 'Used when a slice index is not an integer, None, or an object '
- 'with an __index__ method.'),
- 'E1128': ('Assigning result of a function call, where the function returns None',
- 'assignment-from-none',
- 'Used when an assignment is done on a function call but the '
- 'inferred function returns nothing but None.',
- {'old_names': [('W1111', 'assignment-from-none')]}),
- 'E1129': ("Context manager '%s' doesn't implement __enter__ and __exit__.",
- 'not-context-manager',
- 'Used when an instance in a with statement doesn\'t implement '
- 'the context manager protocol(__enter__/__exit__).'),
- 'E1130': ('%s',
- 'invalid-unary-operand-type',
- 'Emitted when a unary operand is used on an object which does not '
- 'support this type of operation.'),
- 'E1131': ('%s',
- 'unsupported-binary-operation',
- 'Emitted when a binary arithmetic operation between two '
- 'operands is not supported.'),
- 'E1132': ('Got multiple values for keyword argument %r in function call',
- 'repeated-keyword',
- 'Emitted when a function call got multiple values for a keyword.'),
- 'E1135': ("Value '%s' doesn't support membership test",
- 'unsupported-membership-test',
- 'Emitted when an instance in membership test expression doesn\'t '
- 'implement membership protocol (__contains__/__iter__/__getitem__).'),
- 'E1136': ("Value '%s' is unsubscriptable",
- 'unsubscriptable-object',
- "Emitted when a subscripted value doesn't support subscription "
- "(i.e. doesn't define __getitem__ method)."),
- 'E1137': ("%r does not support item assignment",
- 'unsupported-assignment-operation',
- "Emitted when an object does not support item assignment "
- "(i.e. doesn't define __setitem__ method)."),
- 'E1138': ("%r does not support item deletion",
- 'unsupported-delete-operation',
- "Emitted when an object does not support item deletion "
- "(i.e. doesn't define __delitem__ method)."),
- 'E1139': ('Invalid metaclass %r used',
- 'invalid-metaclass',
- 'Emitted whenever we can detect that a class is using, '
- 'as a metaclass, something which might be invalid for using as '
- 'a metaclass.'),
- 'E1140': ("Dict key is unhashable",
- 'unhashable-dict-key',
- 'Emitted when a dict key is not hashable '
- "(i.e. doesn't define __hash__ method)."),
- 'W1113': ('Keyword argument before variable positional arguments list '
- 'in the definition of %s function',
- 'keyword-arg-before-vararg',
- 'When defining a keyword argument before variable positional arguments, one can '
- 'end up in having multiple values passed for the aforementioned parameter in '
- 'case the method is called with keyword arguments.'),
- }
+ "E1101": (
+ "%s %r has no %r member%s",
+ "no-member",
+ "Used when a variable is accessed for an unexistent member.",
+ {"old_names": [("E1103", "maybe-no-member")]},
+ ),
+ "I1101": (
+ "%s %r has no %r member%s, but source is unavailable. Consider "
+ "adding this module to extension-pkg-whitelist if you want "
+ "to perform analysis based on run-time introspection of living objects.",
+ "c-extension-no-member",
+ "Used when a variable is accessed for non-existent member of C "
+ "extension. Due to unavailability of source static analysis is impossible, "
+ "but it may be performed by introspecting living objects in run-time.",
+ ),
+ "E1102": (
+ "%s is not callable",
+ "not-callable",
+ "Used when an object being called has been inferred to a non "
+ "callable object.",
+ ),
+ "E1111": (
+ "Assigning result of a function call, where the function has no return",
+ "assignment-from-no-return",
+ "Used when an assignment is done on a function call but the "
+ "inferred function doesn't return anything.",
+ ),
+ "E1120": (
+ "No value for argument %s in %s call",
+ "no-value-for-parameter",
+ "Used when a function call passes too few arguments.",
+ ),
+ "E1121": (
+ "Too many positional arguments for %s call",
+ "too-many-function-args",
+ "Used when a function call passes too many positional " "arguments.",
+ ),
+ "E1123": (
+ "Unexpected keyword argument %r in %s call",
+ "unexpected-keyword-arg",
+ "Used when a function call passes a keyword argument that "
+ "doesn't correspond to one of the function's parameter names.",
+ ),
+ "E1124": (
+ "Argument %r passed by position and keyword in %s call",
+ "redundant-keyword-arg",
+ "Used when a function call would result in assigning multiple "
+ "values to a function parameter, one value from a positional "
+ "argument and one from a keyword argument.",
+ ),
+ "E1125": (
+ "Missing mandatory keyword argument %r in %s call",
+ "missing-kwoa",
+ (
+ "Used when a function call does not pass a mandatory"
+ " keyword-only argument."
+ ),
+ ),
+ "E1126": (
+ "Sequence index is not an int, slice, or instance with __index__",
+ "invalid-sequence-index",
+ "Used when a sequence type is indexed with an invalid type. "
+ "Valid types are ints, slices, and objects with an __index__ "
+ "method.",
+ ),
+ "E1127": (
+ "Slice index is not an int, None, or instance with __index__",
+ "invalid-slice-index",
+ "Used when a slice index is not an integer, None, or an object "
+ "with an __index__ method.",
+ ),
+ "E1128": (
+ "Assigning result of a function call, where the function returns None",
+ "assignment-from-none",
+ "Used when an assignment is done on a function call but the "
+ "inferred function returns nothing but None.",
+ {"old_names": [("W1111", "assignment-from-none")]},
+ ),
+ "E1129": (
+ "Context manager '%s' doesn't implement __enter__ and __exit__.",
+ "not-context-manager",
+ "Used when an instance in a with statement doesn't implement "
+ "the context manager protocol(__enter__/__exit__).",
+ ),
+ "E1130": (
+ "%s",
+ "invalid-unary-operand-type",
+ "Emitted when a unary operand is used on an object which does not "
+ "support this type of operation.",
+ ),
+ "E1131": (
+ "%s",
+ "unsupported-binary-operation",
+ "Emitted when a binary arithmetic operation between two "
+ "operands is not supported.",
+ ),
+ "E1132": (
+ "Got multiple values for keyword argument %r in function call",
+ "repeated-keyword",
+ "Emitted when a function call got multiple values for a keyword.",
+ ),
+ "E1135": (
+ "Value '%s' doesn't support membership test",
+ "unsupported-membership-test",
+ "Emitted when an instance in membership test expression doesn't "
+ "implement membership protocol (__contains__/__iter__/__getitem__).",
+ ),
+ "E1136": (
+ "Value '%s' is unsubscriptable",
+ "unsubscriptable-object",
+ "Emitted when a subscripted value doesn't support subscription "
+ "(i.e. doesn't define __getitem__ method).",
+ ),
+ "E1137": (
+ "%r does not support item assignment",
+ "unsupported-assignment-operation",
+ "Emitted when an object does not support item assignment "
+ "(i.e. doesn't define __setitem__ method).",
+ ),
+ "E1138": (
+ "%r does not support item deletion",
+ "unsupported-delete-operation",
+ "Emitted when an object does not support item deletion "
+ "(i.e. doesn't define __delitem__ method).",
+ ),
+ "E1139": (
+ "Invalid metaclass %r used",
+ "invalid-metaclass",
+ "Emitted whenever we can detect that a class is using, "
+ "as a metaclass, something which might be invalid for using as "
+ "a metaclass.",
+ ),
+ "E1140": (
+ "Dict key is unhashable",
+ "unhashable-dict-key",
+ "Emitted when a dict key is not hashable "
+ "(i.e. doesn't define __hash__ method).",
+ ),
+ "W1113": (
+ "Keyword argument before variable positional arguments list "
+ "in the definition of %s function",
+ "keyword-arg-before-vararg",
+ "When defining a keyword argument before variable positional arguments, one can "
+ "end up in having multiple values passed for the aforementioned parameter in "
+ "case the method is called with keyword arguments.",
+ ),
+}
# builtin sequence types in Python 2 and 3.
SEQUENCE_TYPES = {
- 'str', 'unicode', 'list', 'tuple', 'bytearray',
- 'xrange', 'range', 'bytes', 'memoryview'
+ "str",
+ "unicode",
+ "list",
+ "tuple",
+ "bytearray",
+ "xrange",
+ "range",
+ "bytes",
+ "memoryview",
}
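By way of illustration (code not in the diff), constructs that map to two of the message ids defined above:

    import math

    print(math.pii)   # no-member (E1101): module 'math' has no 'pii' member
    x = 1
    x()               # not-callable (E1102): an int instance is being called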
@@ -331,9 +397,9 @@ def _emit_no_member(node, owner, owner_name, ignored_mixins=True, ignored_none=T
return False
if ignored_none and isinstance(owner, astroid.Const) and owner.value is None:
return False
- if is_super(owner) or getattr(owner, 'type', None) == 'metaclass':
+ if is_super(owner) or getattr(owner, "type", None) == "metaclass":
return False
- if ignored_mixins and owner_name[-5:].lower() == 'mixin':
+ if ignored_mixins and owner_name[-5:].lower() == "mixin":
return False
if isinstance(owner, astroid.FunctionDef) and owner.decorators:
return False
@@ -353,13 +419,13 @@ def _emit_no_member(node, owner, owner_name, ignored_mixins=True, ignored_none=T
return False
if isinstance(owner, astroid.Module):
try:
- owner.getattr('__getattr__')
+ owner.getattr("__getattr__")
return False
except astroid.NotFoundError:
pass
- if node.attrname.startswith('_' + owner_name):
+ if node.attrname.startswith("_" + owner_name):
# Test if an attribute has been mangled ('private' attribute)
- unmangled_name = node.attrname.split('_' + owner_name)[-1]
+ unmangled_name = node.attrname.split("_" + owner_name)[-1]
try:
if owner.getattr(unmangled_name, context=None) is not None:
return False
@@ -372,18 +438,18 @@ def _determine_callable(callable_obj):
# Ordering is important, since BoundMethod is a subclass of UnboundMethod,
# and Function inherits Lambda.
parameters = 0
- if hasattr(callable_obj, 'implicit_parameters'):
+ if hasattr(callable_obj, "implicit_parameters"):
# TODO: replace with a Callable check
parameters = callable_obj.implicit_parameters()
if isinstance(callable_obj, astroid.BoundMethod):
# Bound methods have an extra implicit 'self' argument.
return callable_obj, parameters, callable_obj.type
if isinstance(callable_obj, astroid.UnboundMethod):
- return callable_obj, parameters, 'unbound method'
+ return callable_obj, parameters, "unbound method"
if isinstance(callable_obj, astroid.FunctionDef):
return callable_obj, parameters, callable_obj.type
if isinstance(callable_obj, astroid.Lambda):
- return callable_obj, parameters, 'lambda'
+ return callable_obj, parameters, "lambda"
if isinstance(callable_obj, astroid.ClassDef):
# Class instantiation, lookup __new__ instead.
# If we only find object.__new__, we can safely check __init__
@@ -392,17 +458,17 @@ def _determine_callable(callable_obj):
# argument information for the builtin __new__ function.
try:
# Use the last definition of __new__.
- new = callable_obj.local_attr('__new__')[-1]
+ new = callable_obj.local_attr("__new__")[-1]
except exceptions.NotFoundError:
new = None
- from_object = new and new.parent.scope().name == 'object'
+ from_object = new and new.parent.scope().name == "object"
from_builtins = new and new.root().name in sys.builtin_module_names
if not new or from_object or from_builtins:
try:
# Use the last definition of __init__.
- callable_obj = callable_obj.local_attr('__init__')[-1]
+ callable_obj = callable_obj.local_attr("__init__")[-1]
except exceptions.NotFoundError:
# do nothing, covered by no-init.
raise ValueError
@@ -412,7 +478,7 @@ def _determine_callable(callable_obj):
if not isinstance(callable_obj, astroid.FunctionDef):
raise ValueError
# both have an extra implicit 'cls'/'self' argument.
- return callable_obj, parameters, 'constructor'
+ return callable_obj, parameters, "constructor"
else:
raise ValueError
@@ -427,8 +493,10 @@ def _has_parent_of_type(node, node_type, statement):
def _is_name_used_as_variadic(name, variadics):
"""Check if the given name is used as a variadic argument."""
- return any(variadic.value == name or variadic.value.parent_of(name)
- for variadic in variadics)
+ return any(
+ variadic.value == name or variadic.value.parent_of(name)
+ for variadic in variadics
+ )
def _no_context_variadic_keywords(node):
@@ -439,8 +507,9 @@ def _no_context_variadic_keywords(node):
if not isinstance(scope, astroid.FunctionDef):
return False
- if (isinstance(statement, (astroid.Return, astroid.Expr))
- and isinstance(statement.value, astroid.Call)):
+ if isinstance(statement, (astroid.Return, astroid.Expr)) and isinstance(
+ statement.value, astroid.Call
+ ):
call = statement.value
variadics = list(call.keywords or []) + call.kwargs
@@ -455,8 +524,9 @@ def _no_context_variadic_positional(node):
if not isinstance(scope, astroid.FunctionDef):
return False
- if (isinstance(statement, (astroid.Expr, astroid.Return))
- and isinstance(statement.value, astroid.Call)):
+ if isinstance(statement, (astroid.Expr, astroid.Return)) and isinstance(
+ statement.value, astroid.Call
+ ):
call = statement.value
variadics = call.starargs + call.kwargs
@@ -503,8 +573,7 @@ def _is_invalid_metaclass(metaclass):
# Cannot have a metaclass which is not a newstyle class.
return True
else:
- if not any(is_builtin_object(cls) and cls.name == 'type'
- for cls in mro):
+ if not any(is_builtin_object(cls) and cls.name == "type" for cls in mro):
return True
return False
@@ -547,7 +616,10 @@ def _infer_from_metaclass_constructor(cls, func):
def _is_c_extension(module_node):
- return not modutils.is_standard_module(module_node.name) and not module_node.fully_defined()
+ return (
+ not modutils.is_standard_module(module_node.name)
+ and not module_node.fully_defined()
+ )
class TypeChecker(BaseChecker):
@@ -557,103 +629,135 @@ class TypeChecker(BaseChecker):
__implements__ = (IAstroidChecker,)
# configuration section name
- name = 'typecheck'
+ name = "typecheck"
# messages
msgs = MSGS
priority = -1
# configuration options
- options = (('ignore-on-opaque-inference',
- {'default': True, 'type': 'yn', 'metavar': '<y_or_n>',
- 'help': 'This flag controls whether pylint should warn about '
- 'no-member and similar checks whenever an opaque object '
- 'is returned when inferring. The inference can return '
- 'multiple potential results while evaluating a Python object, '
- 'but some branches might not be evaluated, which results in '
- 'partial inference. In that case, it might be useful to still emit '
- 'no-member and other checks for the rest of the inferred objects.'}
- ),
- ('ignore-mixin-members',
- {'default' : True, 'type' : 'yn', 'metavar': '<y_or_n>',
- 'help' : 'Tells whether missing members accessed in mixin \
+ options = (
+ (
+ "ignore-on-opaque-inference",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": "This flag controls whether pylint should warn about "
+ "no-member and similar checks whenever an opaque object "
+ "is returned when inferring. The inference can return "
+ "multiple potential results while evaluating a Python object, "
+ "but some branches might not be evaluated, which results in "
+ "partial inference. In that case, it might be useful to still emit "
+ "no-member and other checks for the rest of the inferred objects.",
+ },
+ ),
+ (
+ "ignore-mixin-members",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": 'Tells whether missing members accessed in mixin \
class should be ignored. A mixin class is detected if its name ends with \
-"mixin" (case insensitive).'}
- ),
- ('ignore-none',
- {'default': True, 'type': 'yn', 'metavar': '<y_or_n>',
- 'help': 'Tells whether to warn about missing members when the owner '
- 'of the attribute is inferred to be None.'
- }
- ),
- ('ignored-modules',
- {'default': (),
- 'type': 'csv',
- 'metavar': '<module names>',
- 'help': 'List of module names for which member attributes '
- 'should not be checked (useful for modules/projects '
- 'where namespaces are manipulated during runtime and '
- 'thus existing member attributes cannot be '
- 'deduced by static analysis. It supports qualified '
- 'module names, as well as Unix pattern matching.'}
- ),
- # the defaults here are *stdlib* names that (almost) always
- # lead to false positives, since their idiomatic use is
- # 'too dynamic' for pylint to grok.
- ('ignored-classes',
- {'default' : ('optparse.Values', 'thread._local', '_thread._local'),
- 'type' : 'csv',
- 'metavar' : '<members names>',
- 'help' : 'List of class names for which member attributes '
- 'should not be checked (useful for classes with '
- 'dynamically set attributes). This supports '
- 'the use of qualified names.'}
- ),
-
- ('generated-members',
- {'default' : (),
- 'type' : 'string',
- 'metavar' : '<members names>',
- 'help' : 'List of members which are set dynamically and \
-missed by pylint inference system, and so shouldn\'t trigger E1101 when \
-accessed. Python regular expressions are accepted.'}
- ),
- ('contextmanager-decorators',
- {'default': ['contextlib.contextmanager'],
- 'type': 'csv',
- 'metavar': '<decorator names>',
- 'help': 'List of decorators that produce context managers, '
- 'such as contextlib.contextmanager. Add to this list '
- 'to register other decorators that produce valid '
- 'context managers.'}
- ),
- ('missing-member-hint-distance',
- {'default': 1,
- 'type': 'int',
- 'metavar': '<member hint edit distance>',
- 'help': 'The minimum edit distance a name should have in order '
- 'to be considered a similar match for a missing member name.'
- }
- ),
- ('missing-member-max-choices',
- {'default': 1,
- 'type': "int",
- 'metavar': '<member hint max choices>',
- 'help': 'The total number of similar names that should be taken in '
- 'consideration when showing a hint for a missing member.'
- }
- ),
- ('missing-member-hint',
- {'default': True,
- 'type': "yn",
- 'metavar': '<missing member hint>',
- 'help': 'Show a hint with possible names when a member name was not '
- 'found. The aspect of finding the hint is based on edit distance.'
- }
- ),
- )
+"mixin" (case insensitive).',
+ },
+ ),
+ (
+ "ignore-none",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": "Tells whether to warn about missing members when the owner "
+ "of the attribute is inferred to be None.",
+ },
+ ),
+ (
+ "ignored-modules",
+ {
+ "default": (),
+ "type": "csv",
+ "metavar": "<module names>",
+ "help": "List of module names for which member attributes "
+ "should not be checked (useful for modules/projects "
+ "where namespaces are manipulated during runtime and "
+ "thus existing member attributes cannot be "
+ "deduced by static analysis. It supports qualified "
+ "module names, as well as Unix pattern matching.",
+ },
+ ),
+ # the defaults here are *stdlib* names that (almost) always
+ # lead to false positives, since their idiomatic use is
+ # 'too dynamic' for pylint to grok.
+ (
+ "ignored-classes",
+ {
+ "default": ("optparse.Values", "thread._local", "_thread._local"),
+ "type": "csv",
+ "metavar": "<members names>",
+ "help": "List of class names for which member attributes "
+ "should not be checked (useful for classes with "
+ "dynamically set attributes). This supports "
+ "the use of qualified names.",
+ },
+ ),
+ (
+ "generated-members",
+ {
+ "default": (),
+ "type": "string",
+ "metavar": "<members names>",
+ "help": "List of members which are set dynamically and \
+missed by pylint inference system, and so shouldn't trigger E1101 when \
+accessed. Python regular expressions are accepted.",
+ },
+ ),
+ (
+ "contextmanager-decorators",
+ {
+ "default": ["contextlib.contextmanager"],
+ "type": "csv",
+ "metavar": "<decorator names>",
+ "help": "List of decorators that produce context managers, "
+ "such as contextlib.contextmanager. Add to this list "
+ "to register other decorators that produce valid "
+ "context managers.",
+ },
+ ),
+ (
+ "missing-member-hint-distance",
+ {
+ "default": 1,
+ "type": "int",
+ "metavar": "<member hint edit distance>",
+ "help": "The minimum edit distance a name should have in order "
+ "to be considered a similar match for a missing member name.",
+ },
+ ),
+ (
+ "missing-member-max-choices",
+ {
+ "default": 1,
+ "type": "int",
+ "metavar": "<member hint max choices>",
+ "help": "The total number of similar names that should be taken in "
+ "consideration when showing a hint for a missing member.",
+ },
+ ),
+ (
+ "missing-member-hint",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<missing member hint>",
+ "help": "Show a hint with possible names when a member name was not "
+ "found. The aspect of finding the hint is based on edit distance.",
+ },
+ ),
+ )
@decorators.cachedproperty
def _suggestion_mode(self):
- return get_global_option(self, 'suggestion-mode', default=True)
+ return get_global_option(self, "suggestion-mode", default=True)
def open(self):
# do this in open since config not fully initialized in __init__
@@ -663,22 +767,20 @@ accessed. Python regular expressions are accepted.'}
# ('REQUEST', 'aq_parent', '[a-zA-Z]+_set{1,2}')
if isinstance(self.config.generated_members, str):
gen = shlex.shlex(self.config.generated_members)
- gen.whitespace += ','
- gen.wordchars += r'[]-+\.*?()|'
+ gen.whitespace += ","
+ gen.wordchars += r"[]-+\.*?()|"
self.config.generated_members = tuple(tok.strip('"') for tok in gen)
- @check_messages('keyword-arg-before-vararg')
+ @check_messages("keyword-arg-before-vararg")
def visit_functiondef(self, node):
# check for keyword arg before varargs
if node.args.vararg and node.args.defaults:
- self.add_message('keyword-arg-before-vararg', node=node,
- args=(node.name))
+ self.add_message("keyword-arg-before-vararg", node=node, args=(node.name))
visit_asyncfunctiondef = visit_functiondef
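A minimal example (not from the diff) of the pattern visit_functiondef flags here, a parameter with a default declared before *args:

    def process(first, mode="strict", *rest):   # keyword-arg-before-vararg (W1113)
        return first, mode, rest

    process("a", "b")   # "b" silently binds to mode rather than rest, which is the ambiguity warned about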
- @check_messages('invalid-metaclass')
+ @check_messages("invalid-metaclass")
def visit_classdef(self, node):
-
def _metaclass_name(metaclass):
if isinstance(metaclass, (astroid.ClassDef, astroid.FunctionDef)):
return metaclass.name
@@ -697,11 +799,13 @@ accessed. Python regular expressions are accepted.'}
if isinstance(metaclass, astroid.ClassDef):
if _is_invalid_metaclass(metaclass):
- self.add_message('invalid-metaclass', node=node,
- args=(_metaclass_name(metaclass), ))
+ self.add_message(
+ "invalid-metaclass", node=node, args=(_metaclass_name(metaclass),)
+ )
else:
- self.add_message('invalid-metaclass', node=node,
- args=(_metaclass_name(metaclass), ))
+ self.add_message(
+ "invalid-metaclass", node=node, args=(_metaclass_name(metaclass),)
+ )
def visit_assignattr(self, node):
if isinstance(node.assign_type(), astroid.AugAssign):
@@ -710,7 +814,7 @@ accessed. Python regular expressions are accepted.'}
def visit_delattr(self, node):
self.visit_attribute(node)
- @check_messages('no-member', 'c-extension-no-member')
+ @check_messages("no-member", "c-extension-no-member")
def visit_attribute(self, node):
"""check that the accessed attribute exists
@@ -735,26 +839,33 @@ accessed. Python regular expressions are accepted.'}
missingattr = set()
non_opaque_inference_results = [
- owner for owner in inferred
+ owner
+ for owner in inferred
if owner is not astroid.Uninferable
and not isinstance(owner, astroid.nodes.Unknown)
]
- if (len(non_opaque_inference_results) != len(inferred)
- and self.config.ignore_on_opaque_inference):
+ if (
+ len(non_opaque_inference_results) != len(inferred)
+ and self.config.ignore_on_opaque_inference
+ ):
# There is an ambiguity in the inference. Since we can't
# make sure that we won't emit a false positive, we just stop
# whenever the inference returns an opaque inference object.
return
for owner in non_opaque_inference_results:
- name = getattr(owner, 'name', None)
- if _is_owner_ignored(owner, name, self.config.ignored_classes,
- self.config.ignored_modules):
+ name = getattr(owner, "name", None)
+ if _is_owner_ignored(
+ owner, name, self.config.ignored_classes, self.config.ignored_modules
+ ):
continue
try:
- if not [n for n in owner.getattr(node.attrname)
- if not isinstance(n.statement(), astroid.AugAssign)]:
+ if not [
+ n
+ for n in owner.getattr(node.attrname)
+ if not isinstance(n.statement(), astroid.AugAssign)
+ ]:
missingattr.add((owner, name))
continue
except AttributeError:
@@ -768,9 +879,13 @@ accessed. Python regular expressions are accepted.'}
# but we continue to the next values which don't have the
# attribute, then we'll have a false positive.
# So call this only after the call has been made.
- if not _emit_no_member(node, owner, name,
- ignored_mixins=self.config.ignore_mixin_members,
- ignored_none=self.config.ignore_none):
+ if not _emit_no_member(
+ node,
+ owner,
+ name,
+ ignored_mixins=self.config.ignore_mixin_members,
+ ignored_none=self.config.ignore_none,
+ ):
continue
missingattr.add((owner, name))
@@ -791,27 +906,34 @@ accessed. Python regular expressions are accepted.'}
done.add(actual)
msg, hint = self._get_nomember_msgid_hint(node, owner)
- self.add_message(msg, node=node,
- args=(owner.display_type(), name,
- node.attrname, hint),
- confidence=INFERENCE)
+ self.add_message(
+ msg,
+ node=node,
+ args=(owner.display_type(), name, node.attrname, hint),
+ confidence=INFERENCE,
+ )
def _get_nomember_msgid_hint(self, node, owner):
- suggestions_are_possible = self._suggestion_mode and isinstance(owner, astroid.Module)
+ suggestions_are_possible = self._suggestion_mode and isinstance(
+ owner, astroid.Module
+ )
if suggestions_are_possible and _is_c_extension(owner):
- msg = 'c-extension-no-member'
+ msg = "c-extension-no-member"
hint = ""
else:
- msg = 'no-member'
+ msg = "no-member"
if self.config.missing_member_hint:
- hint = _missing_member_hint(owner, node.attrname,
- self.config.missing_member_hint_distance,
- self.config.missing_member_max_choices)
+ hint = _missing_member_hint(
+ owner,
+ node.attrname,
+ self.config.missing_member_hint_distance,
+ self.config.missing_member_max_choices,
+ )
else:
hint = ""
return msg, hint
- @check_messages('assignment-from-no-return', 'assignment-from-none')
+ @check_messages("assignment-from-no-return", "assignment-from-none")
def visit_assign(self, node):
"""check that if assigning to a function call, the function is
possibly returning something valuable
@@ -820,31 +942,34 @@ accessed. Python regular expressions are accepted.'}
return
function_node = safe_infer(node.value.func)
# skip class, generator and incomplete function definition
- funcs = (
- astroid.FunctionDef,
- astroid.UnboundMethod,
- astroid.BoundMethod,
- )
- if not (isinstance(function_node, funcs)
- and function_node.root().fully_defined()
- and not function_node.decorators):
+ funcs = (astroid.FunctionDef, astroid.UnboundMethod, astroid.BoundMethod)
+ if not (
+ isinstance(function_node, funcs)
+ and function_node.root().fully_defined()
+ and not function_node.decorators
+ ):
return
- if (function_node.is_generator()
- or function_node.is_abstract(pass_is_abstract=False)
- or isinstance(function_node, astroid.AsyncFunctionDef)):
+ if (
+ function_node.is_generator()
+ or function_node.is_abstract(pass_is_abstract=False)
+ or isinstance(function_node, astroid.AsyncFunctionDef)
+ ):
return
- returns = list(function_node.nodes_of_class(astroid.Return,
- skip_klass=astroid.FunctionDef))
+ returns = list(
+ function_node.nodes_of_class(astroid.Return, skip_klass=astroid.FunctionDef)
+ )
if not returns:
- self.add_message('assignment-from-no-return', node=node)
+ self.add_message("assignment-from-no-return", node=node)
else:
for rnode in returns:
- if not (isinstance(rnode.value, astroid.Const)
- and rnode.value.value is None
- or rnode.value is None):
+ if not (
+ isinstance(rnode.value, astroid.Const)
+ and rnode.value.value is None
+ or rnode.value is None
+ ):
break
else:
- self.add_message('assignment-from-none', node=node)
+ self.add_message("assignment-from-none", node=node)
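Illustrative code (not part of the diff) for the two assignment messages handled above:

    def log(msg):
        print(msg)            # no return statement at all

    def validate(value):
        if value is None:
            return
        return None           # every return is bare or None

    result = log("hi")        # assignment-from-no-return
    ok = validate(42)         # assignment-from-none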
def _check_uninferable_call(self, node):
"""
@@ -864,8 +989,11 @@ accessed. Python regular expressions are accepted.'}
expr = node.func.expr
klass = safe_infer(expr)
- if (klass is None or klass is astroid.Uninferable or
- not isinstance(klass, astroid.Instance)):
+ if (
+ klass is None
+ or klass is astroid.Uninferable
+ or not isinstance(klass, astroid.Instance)
+ ):
return
try:
@@ -892,8 +1020,9 @@ accessed. Python regular expressions are accepted.'}
continue
if not all_returns_are_callable:
- self.add_message('not-callable', node=node,
- args=node.func.as_string())
+ self.add_message(
+ "not-callable", node=node, args=node.func.as_string()
+ )
break
# pylint: disable=too-many-branches
@@ -909,14 +1038,16 @@ accessed. Python regular expressions are accepted.'}
# yet
if called and not called.callable():
if isinstance(called, astroid.Instance) and (
- not has_known_bases(called)
- or (isinstance(called.scope(), astroid.ClassDef)
- and '__get__' in called.locals)):
+ not has_known_bases(called)
+ or (
+ isinstance(called.scope(), astroid.ClassDef)
+ and "__get__" in called.locals
+ )
+ ):
# Don't emit if we can't make sure this object is callable.
pass
else:
- self.add_message('not-callable', node=node,
- args=node.func.as_string())
+ self.add_message("not-callable", node=node, args=node.func.as_string())
self._check_uninferable_call(node)
@@ -942,8 +1073,7 @@ accessed. Python regular expressions are accepted.'}
# Warn about duplicated keyword arguments, such as `f=24, **{'f': 24}`
for keyword in call_site.duplicated_keywords:
- self.add_message('repeated-keyword',
- node=node, args=(keyword, ))
+ self.add_message("repeated-keyword", node=node, args=(keyword,))
if call_site.has_invalid_arguments() or call_site.has_invalid_keywords():
# Can't make sense of this.
@@ -957,11 +1087,13 @@ accessed. Python regular expressions are accepted.'}
has_no_context_positional_variadic = _no_context_variadic_positional(node)
has_no_context_keywords_variadic = _no_context_variadic_keywords(node)
else:
- has_no_context_positional_variadic = has_no_context_keywords_variadic = False
+ has_no_context_positional_variadic = (
+ has_no_context_keywords_variadic
+ ) = False
# These are coming from the functools.partial implementation in astroid
- already_filled_positionals = getattr(called, 'filled_positionals', 0)
- already_filled_keywords = getattr(called, 'filled_keywords', {})
+ already_filled_positionals = getattr(called, "filled_positionals", 0)
+ already_filled_keywords = getattr(called, "filled_keywords", {})
keyword_args += list(already_filled_keywords)
num_positional_args += implicit_args + already_filled_positionals
@@ -1009,8 +1141,9 @@ accessed. Python regular expressions are accepted.'}
break
else:
# Too many positional arguments.
- self.add_message('too-many-function-args',
- node=node, args=(callable_name,))
+ self.add_message(
+ "too-many-function-args", node=node, args=(callable_name,)
+ )
break
# 2. Match the keyword arguments.
@@ -1025,16 +1158,22 @@ accessed. Python regular expressions are accepted.'}
# by keyword argument, as in `.format(self=self)`.
# It's perfectly valid to do so, so we're just skipping
# it if that's the case.
- if not (keyword == 'self' and called.qname() in STR_FORMAT):
- self.add_message('redundant-keyword-arg',
- node=node, args=(keyword, callable_name))
+ if not (keyword == "self" and called.qname() in STR_FORMAT):
+ self.add_message(
+ "redundant-keyword-arg",
+ node=node,
+ args=(keyword, callable_name),
+ )
else:
parameters[i][1] = True
elif keyword in kwparams:
if kwparams[keyword][1]: # XXX is that even possible?
# Duplicate definition of function parameter.
- self.add_message('redundant-keyword-arg', node=node,
- args=(keyword, callable_name))
+ self.add_message(
+ "redundant-keyword-arg",
+ node=node,
+ args=(keyword, callable_name),
+ )
else:
kwparams[keyword][1] = True
elif called.args.kwarg is not None:
@@ -1042,8 +1181,9 @@ accessed. Python regular expressions are accepted.'}
pass
else:
# Unexpected keyword argument.
- self.add_message('unexpected-keyword-arg', node=node,
- args=(keyword, callable_name))
+ self.add_message(
+ "unexpected-keyword-arg", node=node, args=(keyword, callable_name)
+ )
# 3. Match the **kwargs, if any.
if node.kwargs:
@@ -1061,27 +1201,29 @@ accessed. Python regular expressions are accepted.'}
for [(name, defval), assigned] in parameters:
if (defval is None) and not assigned:
if name is None:
- display_name = '<tuple>'
+ display_name = "<tuple>"
else:
display_name = repr(name)
# TODO(cpopa): this should be removed after PyCQA/astroid/issues/177
if not has_no_context_positional_variadic:
- self.add_message('no-value-for-parameter', node=node,
- args=(display_name, callable_name))
+ self.add_message(
+ "no-value-for-parameter",
+ node=node,
+ args=(display_name, callable_name),
+ )
for name in kwparams:
defval, assigned = kwparams[name]
if defval is None and not assigned and not has_no_context_keywords_variadic:
- self.add_message('missing-kwoa', node=node,
- args=(name, callable_name))
+ self.add_message("missing-kwoa", node=node, args=(name, callable_name))
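For reference, illustrative calls (not in the diff) for the parameter-matching messages emitted in this block; the signature is made up:

    def connect(host, port, *, timeout):
        ...

    connect("db")                                   # no-value-for-parameter ('port') and missing-kwoa ('timeout')
    connect("db", 5432, 99, timeout=1)              # too-many-function-args
    connect("db", port=5432, retries=3, timeout=1)  # unexpected-keyword-arg ('retries')
    connect("db", 5432, host="db", timeout=1)       # redundant-keyword-arg ('host')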
- @check_messages('invalid-sequence-index')
+ @check_messages("invalid-sequence-index")
def visit_extslice(self, node):
# Check extended slice objects as if they were used as a sequence
# index to check if the object being sliced can support them
return self.visit_index(node)
- @check_messages('invalid-sequence-index')
+ @check_messages("invalid-sequence-index")
def visit_index(self, node):
if not node.parent or not hasattr(node.parent, "value"):
return None
@@ -1089,19 +1231,20 @@ accessed. Python regular expressions are accepted.'}
# If the types can be determined, only allow indices to be int,
# slice or instances with __index__.
parent_type = safe_infer(node.parent.value)
- if (not isinstance(parent_type, (astroid.ClassDef, astroid.Instance))
- or not has_known_bases(parent_type)):
+ if not isinstance(
+ parent_type, (astroid.ClassDef, astroid.Instance)
+ ) or not has_known_bases(parent_type):
return None
# Determine what method on the parent this index will use
# The parent of this node will be a Subscript, and the parent of that
# node determines if the Subscript is a get, set, or delete operation.
if node.parent.ctx is astroid.Store:
- methodname = '__setitem__'
+ methodname = "__setitem__"
elif node.parent.ctx is astroid.Del:
- methodname = '__delitem__'
+ methodname = "__delitem__"
else:
- methodname = '__getitem__'
+ methodname = "__getitem__"
# Check if this instance's __getitem__, __setitem__, or __delitem__, as
# appropriate to the statement, is implemented in a builtin sequence
@@ -1112,15 +1255,19 @@ accessed. Python regular expressions are accepted.'}
if methods is astroid.Uninferable:
return None
itemmethod = methods[0]
- except (exceptions.NotFoundError,
- exceptions.AttributeInferenceError,
- IndexError):
+ except (
+ exceptions.NotFoundError,
+ exceptions.AttributeInferenceError,
+ IndexError,
+ ):
return None
- if (not isinstance(itemmethod, astroid.FunctionDef)
- or itemmethod.root().name != BUILTINS
- or not itemmethod.parent
- or itemmethod.parent.name not in SEQUENCE_TYPES):
+ if (
+ not isinstance(itemmethod, astroid.FunctionDef)
+ or itemmethod.root().name != BUILTINS
+ or not itemmethod.parent
+ or itemmethod.parent.name not in SEQUENCE_TYPES
+ ):
return None
# For ExtSlice objects coming from visit_extslice, no further
@@ -1138,10 +1285,10 @@ accessed. Python regular expressions are accepted.'}
return None
# Instance values must be int, slice, or have an __index__ method
elif isinstance(index_type, astroid.Instance):
- if index_type.pytype() in (BUILTINS + '.int', BUILTINS + '.slice'):
+ if index_type.pytype() in (BUILTINS + ".int", BUILTINS + ".slice"):
return None
try:
- index_type.getattr('__index__')
+ index_type.getattr("__index__")
return None
except exceptions.NotFoundError:
pass
@@ -1152,10 +1299,10 @@ accessed. Python regular expressions are accepted.'}
return self.visit_slice(index_type)
# Anything else is an error
- self.add_message('invalid-sequence-index', node=node)
+ self.add_message("invalid-sequence-index", node=node)
return None
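A short illustration (not from the diff) of what visit_index and visit_slice reject, indexing a builtin sequence with something that is not an int, a slice, None, or an object defining __index__:

    data = [1, 2, 3]
    data["1"]       # invalid-sequence-index: a str is not a valid list index
    data[1.5:2]     # invalid-slice-index: a float slice bound has no __index__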
- @check_messages('invalid-slice-index')
+ @check_messages("invalid-slice-index")
def visit_slice(self, node):
# Check the type of each part of the slice
invalid_slices = 0
@@ -1174,12 +1321,11 @@ accessed. Python regular expressions are accepted.'}
# Instance values must be of type int, None or an object
# with __index__
elif isinstance(index_type, astroid.Instance):
- if index_type.pytype() in (BUILTINS + '.int',
- BUILTINS + '.NoneType'):
+ if index_type.pytype() in (BUILTINS + ".int", BUILTINS + ".NoneType"):
continue
try:
- index_type.getattr('__index__')
+ index_type.getattr("__index__")
return
except exceptions.NotFoundError:
pass
@@ -1209,9 +1355,9 @@ accessed. Python regular expressions are accepted.'}
# Might be an instance that knows how to handle this slice object
return
for _ in range(invalid_slices):
- self.add_message('invalid-slice-index', node=node)
+ self.add_message("invalid-slice-index", node=node)
- @check_messages('not-context-manager')
+ @check_messages("not-context-manager")
def visit_with(self, node):
for ctx_mgr, _ in node.items:
context = astroid.context.InferenceContext()
@@ -1222,8 +1368,9 @@ accessed. Python regular expressions are accepted.'}
if isinstance(infered, bases.Generator):
# Check if we are dealing with a function decorated
# with contextlib.contextmanager.
- if decorated_with(infered.parent,
- self.config.contextmanager_decorators):
+ if decorated_with(
+ infered.parent, self.config.contextmanager_decorators
+ ):
continue
# If the parent of the generator is not the context manager itself,
# that means that it could have been returned from another
@@ -1238,21 +1385,23 @@ accessed. Python regular expressions are accepted.'}
# Retrieve node from all previously visited nodes in the inference history
context_path_names = filter(None, _unflatten(context.path))
- inferred_paths = _flatten_container(path.infer() for path in context_path_names)
+ inferred_paths = _flatten_container(
+ path.infer() for path in context_path_names
+ )
for inf_path in inferred_paths:
scope = inf_path.scope()
if not isinstance(scope, astroid.FunctionDef):
continue
- if decorated_with(scope,
- self.config.contextmanager_decorators):
+ if decorated_with(scope, self.config.contextmanager_decorators):
break
else:
- self.add_message('not-context-manager',
- node=node, args=(infered.name, ))
+ self.add_message(
+ "not-context-manager", node=node, args=(infered.name,)
+ )
else:
try:
- infered.getattr('__enter__')
- infered.getattr('__exit__')
+ infered.getattr("__enter__")
+ infered.getattr("__exit__")
except exceptions.NotFoundError:
if isinstance(infered, astroid.Instance):
# If we do not know the bases of this class,
@@ -1261,27 +1410,27 @@ accessed. Python regular expressions are accepted.'}
continue
# Just ignore mixin classes.
if self.config.ignore_mixin_members:
- if infered.name[-5:].lower() == 'mixin':
+ if infered.name[-5:].lower() == "mixin":
continue
- self.add_message('not-context-manager',
- node=node, args=(infered.name, ))
+ self.add_message(
+ "not-context-manager", node=node, args=(infered.name,)
+ )
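An illustrative with statement (not part of the diff) that ends up in the not-context-manager branch above, since the instance defines neither __enter__ nor __exit__:

    class Resource:
        def close(self):
            pass

    with Resource():    # not-context-manager
        pass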
- @check_messages('invalid-unary-operand-type')
+ @check_messages("invalid-unary-operand-type")
def visit_unaryop(self, node):
"""Detect TypeErrors for unary operands."""
for error in node.type_errors():
# Let the error customize its output.
- self.add_message('invalid-unary-operand-type',
- args=str(error), node=node)
+ self.add_message("invalid-unary-operand-type", args=str(error), node=node)
- @check_messages('unsupported-binary-operation')
+ @check_messages("unsupported-binary-operation")
def _visit_binop(self, node):
"""Detect TypeErrors for binary arithmetic operands."""
self._check_binop_errors(node)
- @check_messages('unsupported-binary-operation')
+ @check_messages("unsupported-binary-operation")
def _visit_augassign(self, node):
"""Detect TypeErrors for augmented binary arithmetic operands."""
self._check_binop_errors(node)
@@ -1289,11 +1438,12 @@ accessed. Python regular expressions are accepted.'}
def _check_binop_errors(self, node):
for error in node.type_errors():
# Let the error customize its output.
- if any(isinstance(obj, astroid.ClassDef) and not has_known_bases(obj)
- for obj in (error.left_type, error.right_type)):
+ if any(
+ isinstance(obj, astroid.ClassDef) and not has_known_bases(obj)
+ for obj in (error.left_type, error.right_type)
+ ):
continue
- self.add_message('unsupported-binary-operation',
- args=str(error), node=node)
+ self.add_message("unsupported-binary-operation", args=str(error), node=node)
def _check_membership_test(self, node):
if is_inside_abstract_class(node):
@@ -1304,23 +1454,25 @@ accessed. Python regular expressions are accepted.'}
if infered is None or infered is astroid.Uninferable:
return
if not supports_membership_test(infered):
- self.add_message('unsupported-membership-test',
- args=node.as_string(),
- node=node)
+ self.add_message(
+ "unsupported-membership-test", args=node.as_string(), node=node
+ )
- @check_messages('unsupported-membership-test')
+ @check_messages("unsupported-membership-test")
def visit_compare(self, node):
if len(node.ops) != 1:
return
op, right = node.ops[0]
- if op in ['in', 'not in']:
+ if op in ["in", "not in"]:
self._check_membership_test(right)
- @check_messages('unsubscriptable-object',
- 'unsupported-assignment-operation',
- 'unsupported-delete-operation',
- 'unhashable-dict-key')
+ @check_messages(
+ "unsubscriptable-object",
+ "unsupported-assignment-operation",
+ "unsupported-delete-operation",
+ "unhashable-dict-key",
+ )
def visit_subscript(self, node):
supported_protocol = None
if isinstance(node.value, (astroid.ListComp, astroid.DictComp)):
@@ -1331,26 +1483,25 @@ accessed. Python regular expressions are accepted.'}
inferred = safe_infer(node.slice.value)
if inferred not in (None, astroid.Uninferable):
try:
- hash_fn = next(inferred.igetattr('__hash__'))
+ hash_fn = next(inferred.igetattr("__hash__"))
except astroid.InferenceError:
pass
else:
- if getattr(hash_fn, 'value', True) is None:
- self.add_message('unhashable-dict-key', node=node.value)
+ if getattr(hash_fn, "value", True) is None:
+ self.add_message("unhashable-dict-key", node=node.value)
if node.ctx == astroid.Load:
supported_protocol = supports_getitem
- msg = 'unsubscriptable-object'
+ msg = "unsubscriptable-object"
elif node.ctx == astroid.Store:
supported_protocol = supports_setitem
- msg = 'unsupported-assignment-operation'
+ msg = "unsupported-assignment-operation"
elif node.ctx == astroid.Del:
supported_protocol = supports_delitem
- msg = 'unsupported-delete-operation'
+ msg = "unsupported-delete-operation"
if isinstance(node.value, astroid.SetComp):
- self.add_message(msg, args=node.value.as_string(),
- node=node.value)
+ self.add_message(msg, args=node.value.as_string(), node=node.value)
return
if is_inside_abstract_class(node):
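Hedged examples (not from the diff) for the subscript messages wired up above; the unhashable-dict-key case assumes the subscripted value is a dict literal, as in the surrounding check:

    {"a": 1}[["a"]]     # unhashable-dict-key: a list key defines __hash__ = None
    number = 5
    number[0]           # unsubscriptable-object
    del number[0]       # unsupported-delete-operation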
@@ -1377,17 +1528,22 @@ class IterableChecker(BaseChecker):
"""
__implements__ = (IAstroidChecker,)
- name = 'iterable_check'
-
- msgs = {'E1133': ('Non-iterable value %s is used in an iterating context',
- 'not-an-iterable',
- 'Used when a non-iterable value is used in place where '
- 'iterable is expected'),
- 'E1134': ('Non-mapping value %s is used in a mapping context',
- 'not-a-mapping',
- 'Used when a non-mapping value is used in place where '
- 'mapping is expected'),
- }
+ name = "iterable_check"
+
+ msgs = {
+ "E1133": (
+ "Non-iterable value %s is used in an iterating context",
+ "not-an-iterable",
+ "Used when a non-iterable value is used in place where "
+ "iterable is expected",
+ ),
+ "E1134": (
+ "Non-mapping value %s is used in a mapping context",
+ "not-a-mapping",
+ "Used when a non-mapping value is used in place where "
+ "mapping is expected",
+ ),
+ }
@staticmethod
def _is_asyncio_coroutine(node):
@@ -1415,9 +1571,7 @@ class IterableChecker(BaseChecker):
if not inferred:
return
if not is_iterable(inferred, check_async=check_async):
- self.add_message('not-an-iterable',
- args=node.as_string(),
- node=node)
+ self.add_message("not-an-iterable", args=node.as_string(), node=node)
def _check_mapping(self, node):
if is_inside_abstract_class(node):
@@ -1428,48 +1582,46 @@ class IterableChecker(BaseChecker):
if infered is None or infered is astroid.Uninferable:
return
if not is_mapping(infered):
- self.add_message('not-a-mapping',
- args=node.as_string(),
- node=node)
+ self.add_message("not-a-mapping", args=node.as_string(), node=node)
- @check_messages('not-an-iterable')
+ @check_messages("not-an-iterable")
def visit_for(self, node):
self._check_iterable(node.iter)
- @check_messages('not-an-iterable')
+ @check_messages("not-an-iterable")
def visit_asyncfor(self, node):
self._check_iterable(node.iter, check_async=True)
- @check_messages('not-an-iterable')
+ @check_messages("not-an-iterable")
def visit_yieldfrom(self, node):
# TODO: hack which can be removed once we support decorators inference
if self._is_asyncio_coroutine(node.value):
return
self._check_iterable(node.value)
- @check_messages('not-an-iterable', 'not-a-mapping')
+ @check_messages("not-an-iterable", "not-a-mapping")
def visit_call(self, node):
for stararg in node.starargs:
self._check_iterable(stararg.value)
for kwarg in node.kwargs:
self._check_mapping(kwarg.value)
- @check_messages('not-an-iterable')
+ @check_messages("not-an-iterable")
def visit_listcomp(self, node):
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
- @check_messages('not-an-iterable')
+ @check_messages("not-an-iterable")
def visit_dictcomp(self, node):
for gen in node.generators:
self._check_iterable(gen.iter)
- @check_messages('not-an-iterable')
+ @check_messages("not-an-iterable")
def visit_setcomp(self, node):
for gen in node.generators:
self._check_iterable(gen.iter)
- @check_messages('not-an-iterable')
+ @check_messages("not-an-iterable")
def visit_generatorexp(self, node):
for gen in node.generators:
self._check_iterable(gen.iter)
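
For illustration, the two messages registered above would fire on code like the following minimal, hypothetical snippet (an int supports neither the iteration nor the mapping protocol):

    # not-an-iterable: int defines neither __iter__ nor __getitem__
    for item in 1:
        print(item)

    # not-a-mapping: ** unpacking expects keys() and __getitem__
    def greet(**kwargs):
        return kwargs

    greet(**1)
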
diff --git a/pylint/checkers/utils.py b/pylint/checkers/utils.py
index e0b36bb7b..c20f9423a 100644
--- a/pylint/checkers/utils.py
+++ b/pylint/checkers/utils.py
@@ -36,7 +36,18 @@ import itertools
import re
import sys
import string
-from typing import Optional, Iterable, Tuple, Callable, Set, Union, Match, Dict, List, Type
+from typing import (
+ Optional,
+ Iterable,
+ Tuple,
+ Callable,
+ Set,
+ Union,
+ Match,
+ Dict,
+ List,
+ Type,
+)
import astroid
from astroid import bases as _bases
@@ -44,26 +55,34 @@ from astroid import scoped_nodes
BUILTINS_NAME = builtins.__name__
-COMP_NODE_TYPES = (astroid.ListComp, astroid.SetComp,
- astroid.DictComp, astroid.GeneratorExp)
+COMP_NODE_TYPES = (
+ astroid.ListComp,
+ astroid.SetComp,
+ astroid.DictComp,
+ astroid.GeneratorExp,
+)
PY3K = sys.version_info[0] == 3
if not PY3K:
EXCEPTIONS_MODULE = "exceptions"
else:
EXCEPTIONS_MODULE = "builtins"
-ABC_METHODS = {'abc.abstractproperty', 'abc.abstractmethod',
- 'abc.abstractclassmethod', 'abc.abstractstaticmethod'}
-ITER_METHOD = '__iter__'
-AITER_METHOD = '__aiter__'
-NEXT_METHOD = '__next__'
-GETITEM_METHOD = '__getitem__'
-SETITEM_METHOD = '__setitem__'
-DELITEM_METHOD = '__delitem__'
-CONTAINS_METHOD = '__contains__'
-KEYS_METHOD = 'keys'
-DATACLASS_DECORATOR = 'dataclass'
-DATACLASS_IMPORT = 'dataclasses'
+ABC_METHODS = {
+ "abc.abstractproperty",
+ "abc.abstractmethod",
+ "abc.abstractclassmethod",
+ "abc.abstractstaticmethod",
+}
+ITER_METHOD = "__iter__"
+AITER_METHOD = "__aiter__"
+NEXT_METHOD = "__next__"
+GETITEM_METHOD = "__getitem__"
+SETITEM_METHOD = "__setitem__"
+DELITEM_METHOD = "__delitem__"
+CONTAINS_METHOD = "__contains__"
+KEYS_METHOD = "keys"
+DATACLASS_DECORATOR = "dataclass"
+DATACLASS_IMPORT = "dataclasses"
# Dictionary which maps the number of expected parameters a
# special method can have to a set of special methods.
@@ -76,42 +95,118 @@ DATACLASS_IMPORT = 'dataclasses'
# although it's best to implement it in order to accept
# all of them.
_SPECIAL_METHODS_PARAMS = {
- None: ('__new__', '__init__', '__call__'),
-
- 0: ('__del__', '__repr__', '__str__', '__bytes__', '__hash__', '__bool__',
- '__dir__', '__len__', '__length_hint__', '__iter__', '__reversed__',
- '__neg__', '__pos__', '__abs__', '__invert__', '__complex__', '__int__',
- '__float__', '__neg__', '__pos__', '__abs__', '__complex__', '__int__',
- '__float__', '__index__', '__enter__', '__aenter__', '__getnewargs_ex__',
- '__getnewargs__', '__getstate__', '__reduce__', '__copy__',
- '__unicode__', '__nonzero__', '__await__', '__aiter__', '__anext__',
- '__fspath__'),
-
- 1: ('__format__', '__lt__', '__le__', '__eq__', '__ne__', '__gt__',
- '__ge__', '__getattr__', '__getattribute__', '__delattr__',
- '__delete__', '__instancecheck__', '__subclasscheck__',
- '__getitem__', '__missing__', '__delitem__', '__contains__',
- '__add__', '__sub__', '__mul__', '__truediv__', '__floordiv__',
- '__mod__', '__divmod__', '__lshift__', '__rshift__', '__and__',
- '__xor__', '__or__', '__radd__', '__rsub__', '__rmul__', '__rtruediv__',
- '__rmod__', '__rdivmod__', '__rpow__', '__rlshift__', '__rrshift__',
- '__rand__', '__rxor__', '__ror__', '__iadd__', '__isub__', '__imul__',
- '__itruediv__', '__ifloordiv__', '__imod__', '__ilshift__',
- '__irshift__', '__iand__', '__ixor__', '__ior__', '__ipow__',
- '__setstate__', '__reduce_ex__', '__deepcopy__', '__cmp__',
- '__matmul__', '__rmatmul__', '__div__'),
-
- 2: ('__setattr__', '__get__', '__set__', '__setitem__', '__set_name__'),
-
- 3: ('__exit__', '__aexit__'),
-
- (0, 1): ('__round__', ),
+ None: ("__new__", "__init__", "__call__"),
+ 0: (
+ "__del__",
+ "__repr__",
+ "__str__",
+ "__bytes__",
+ "__hash__",
+ "__bool__",
+ "__dir__",
+ "__len__",
+ "__length_hint__",
+ "__iter__",
+ "__reversed__",
+ "__neg__",
+ "__pos__",
+ "__abs__",
+ "__invert__",
+ "__complex__",
+ "__int__",
+ "__float__",
+ "__neg__",
+ "__pos__",
+ "__abs__",
+ "__complex__",
+ "__int__",
+ "__float__",
+ "__index__",
+ "__enter__",
+ "__aenter__",
+ "__getnewargs_ex__",
+ "__getnewargs__",
+ "__getstate__",
+ "__reduce__",
+ "__copy__",
+ "__unicode__",
+ "__nonzero__",
+ "__await__",
+ "__aiter__",
+ "__anext__",
+ "__fspath__",
+ ),
+ 1: (
+ "__format__",
+ "__lt__",
+ "__le__",
+ "__eq__",
+ "__ne__",
+ "__gt__",
+ "__ge__",
+ "__getattr__",
+ "__getattribute__",
+ "__delattr__",
+ "__delete__",
+ "__instancecheck__",
+ "__subclasscheck__",
+ "__getitem__",
+ "__missing__",
+ "__delitem__",
+ "__contains__",
+ "__add__",
+ "__sub__",
+ "__mul__",
+ "__truediv__",
+ "__floordiv__",
+ "__mod__",
+ "__divmod__",
+ "__lshift__",
+ "__rshift__",
+ "__and__",
+ "__xor__",
+ "__or__",
+ "__radd__",
+ "__rsub__",
+ "__rmul__",
+ "__rtruediv__",
+ "__rmod__",
+ "__rdivmod__",
+ "__rpow__",
+ "__rlshift__",
+ "__rrshift__",
+ "__rand__",
+ "__rxor__",
+ "__ror__",
+ "__iadd__",
+ "__isub__",
+ "__imul__",
+ "__itruediv__",
+ "__ifloordiv__",
+ "__imod__",
+ "__ilshift__",
+ "__irshift__",
+ "__iand__",
+ "__ixor__",
+ "__ior__",
+ "__ipow__",
+ "__setstate__",
+ "__reduce_ex__",
+ "__deepcopy__",
+ "__cmp__",
+ "__matmul__",
+ "__rmatmul__",
+ "__div__",
+ ),
+ 2: ("__setattr__", "__get__", "__set__", "__setitem__", "__set_name__"),
+ 3: ("__exit__", "__aexit__"),
+ (0, 1): ("__round__",),
}
SPECIAL_METHODS_PARAMS = {
name: params
for params, methods in _SPECIAL_METHODS_PARAMS.items()
- for name in methods # type: ignore
+ for name in methods # type: ignore
}
PYMETHODS = set(SPECIAL_METHODS_PARAMS)
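
Reading the inverted table, SPECIAL_METHODS_PARAMS maps each special method to the number of parameters (besides self) it is expected to accept; a few illustrative lookups based on the groups listed above:

    SPECIAL_METHODS_PARAMS["__init__"]   # None   -> any number of parameters
    SPECIAL_METHODS_PARAMS["__len__"]    # 0      -> no parameters besides self
    SPECIAL_METHODS_PARAMS["__eq__"]     # 1      -> exactly one parameter
    SPECIAL_METHODS_PARAMS["__round__"]  # (0, 1) -> zero or one parameter
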
@@ -119,6 +214,7 @@ PYMETHODS = set(SPECIAL_METHODS_PARAMS)
class NoSuchArgumentError(Exception):
pass
+
def is_inside_except(node):
"""Returns true if node is inside the name of an except handler."""
current = node
@@ -138,7 +234,9 @@ def is_inside_lambda(node: astroid.node_classes.NodeNG) -> bool:
return False
-def get_all_elements(node: astroid.node_classes.NodeNG) -> Iterable[astroid.node_classes.NodeNG]:
+def get_all_elements(
+ node: astroid.node_classes.NodeNG
+) -> Iterable[astroid.node_classes.NodeNG]:
"""Recursively returns all atoms in nested lists and tuples."""
if isinstance(node, (astroid.Tuple, astroid.List)):
for child in node.elts:
@@ -148,7 +246,9 @@ def get_all_elements(node: astroid.node_classes.NodeNG) -> Iterable[astroid.node
yield node
-def clobber_in_except(node: astroid.node_classes.NodeNG) -> Tuple[bool, Tuple[str, str]]:
+def clobber_in_except(
+ node: astroid.node_classes.NodeNG
+) -> Tuple[bool, Tuple[str, str]]:
"""Checks if an assignment node in an except handler clobbers an existing
variable.
@@ -156,28 +256,29 @@ def clobber_in_except(node: astroid.node_classes.NodeNG) -> Tuple[bool, Tuple[st
(False, None) otherwise.
"""
if isinstance(node, astroid.AssignAttr):
- return True, (node.attrname, 'object %r' % (node.expr.as_string(),))
+ return True, (node.attrname, "object %r" % (node.expr.as_string(),))
if isinstance(node, astroid.AssignName):
name = node.name
if is_builtin(name):
- return (True, (name, 'builtins'))
+ return (True, (name, "builtins"))
stmts = node.lookup(name)[1]
- if (stmts and not isinstance(stmts[0].assign_type(),
- (astroid.Assign, astroid.AugAssign,
- astroid.ExceptHandler))):
- return True, (name, 'outer scope (line %s)' % stmts[0].fromlineno)
+ if stmts and not isinstance(
+ stmts[0].assign_type(),
+ (astroid.Assign, astroid.AugAssign, astroid.ExceptHandler),
+ ):
+ return True, (name, "outer scope (line %s)" % stmts[0].fromlineno)
return False, None
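
Read against the return values above, a small hypothetical handler shows both outcomes:

    try:
        import json
    except ImportError as list:
        # clobber_in_except(<AssignName "list">) -> (True, ("list", "builtins"))
        # binding the exception to a fresh name such as "err" would instead
        # yield (False, None)
        pass
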
def is_super(node: astroid.node_classes.NodeNG) -> bool:
"""return True if the node is referencing the "super" builtin function
"""
- if getattr(node, 'name', None) == 'super' and \
- node.root().name == BUILTINS_NAME:
+ if getattr(node, "name", None) == "super" and node.root().name == BUILTINS_NAME:
return True
return False
+
def is_error(node: astroid.node_classes.NodeNG) -> bool:
"""return true if the function does nothing but raising an exception"""
for child_node in node.get_children():
@@ -186,28 +287,35 @@ def is_error(node: astroid.node_classes.NodeNG) -> bool:
return False
-builtins = builtins.__dict__.copy() # type: ignore
-SPECIAL_BUILTINS = ('__builtins__',) # '__path__', '__file__')
+builtins = builtins.__dict__.copy() # type: ignore
+SPECIAL_BUILTINS = ("__builtins__",) # '__path__', '__file__')
+
def is_builtin_object(node: astroid.node_classes.NodeNG) -> bool:
"""Returns True if the given node is an object from the __builtin__ module."""
return node and node.root().name == BUILTINS_NAME
+
def is_builtin(name: str) -> bool:
"""return true if <name> could be considered as a builtin defined by python
"""
- return name in builtins or name in SPECIAL_BUILTINS # type: ignore
+ return name in builtins or name in SPECIAL_BUILTINS # type: ignore
-def is_defined_in_scope(var_node: astroid.node_classes.NodeNG,
- varname: str,
- scope: astroid.node_classes.NodeNG) -> bool:
+def is_defined_in_scope(
+ var_node: astroid.node_classes.NodeNG,
+ varname: str,
+ scope: astroid.node_classes.NodeNG,
+) -> bool:
if isinstance(scope, astroid.If):
for node in scope.body:
- if ((isinstance(node, astroid.Assign) and
- any(isinstance(target, astroid.AssignName) and target.name == varname
- for target in node.targets)) or
- (isinstance(node, astroid.Nonlocal) and varname in node.names)):
+ if (
+ isinstance(node, astroid.Assign)
+ and any(
+ isinstance(target, astroid.AssignName) and target.name == varname
+ for target in node.targets
+ )
+ ) or (isinstance(node, astroid.Nonlocal) and varname in node.names):
return True
elif isinstance(scope, (COMP_NODE_TYPES, astroid.For)):
for ass_node in scope.nodes_of_class(astroid.AssignName):
@@ -217,9 +325,7 @@ def is_defined_in_scope(var_node: astroid.node_classes.NodeNG,
for expr, ids in scope.items:
if expr.parent_of(var_node):
break
- if (ids and
- isinstance(ids, astroid.AssignName) and
- ids.name == varname):
+ if ids and isinstance(ids, astroid.AssignName) and ids.name == varname:
return True
elif isinstance(scope, (astroid.Lambda, astroid.FunctionDef)):
if scope.args.is_argument(varname):
@@ -234,7 +340,7 @@ def is_defined_in_scope(var_node: astroid.node_classes.NodeNG,
except astroid.NoDefault:
pass
return True
- if getattr(scope, 'name', None) == varname:
+ if getattr(scope, "name", None) == varname:
return True
elif isinstance(scope, astroid.ExceptHandler):
if isinstance(scope.name, astroid.AssignName):
@@ -269,6 +375,7 @@ def is_defined_before(var_node: astroid.node_classes.NodeNG) -> bool:
_node = _node.previous_sibling()
return False
+
def is_default_argument(node: astroid.node_classes.NodeNG) -> bool:
"""return true if the given Name node is used in function or lambda
default argument's value
@@ -288,16 +395,18 @@ def is_func_decorator(node: astroid.node_classes.NodeNG) -> bool:
while parent is not None:
if isinstance(parent, astroid.Decorators):
return True
- if (parent.is_statement or
- isinstance(parent, (astroid.Lambda,
- scoped_nodes.ComprehensionScope,
- scoped_nodes.ListComp))):
+ if parent.is_statement or isinstance(
+ parent,
+ (astroid.Lambda, scoped_nodes.ComprehensionScope, scoped_nodes.ListComp),
+ ):
break
parent = parent.parent
return False
-def is_ancestor_name(frame: astroid.node_classes.NodeNG,
- node: astroid.node_classes.NodeNG) -> bool:
+
+def is_ancestor_name(
+ frame: astroid.node_classes.NodeNG, node: astroid.node_classes.NodeNG
+) -> bool:
"""return True if `frame` is an astroid.Class node with `node` in the
subtree of its bases attribute
"""
@@ -310,44 +419,49 @@ def is_ancestor_name(frame: astroid.node_classes.NodeNG,
return True
return False
+
def assign_parent(node: astroid.node_classes.NodeNG) -> astroid.node_classes.NodeNG:
"""return the higher parent which is not an AssignName, Tuple or List node
"""
- while node and isinstance(node, (astroid.AssignName,
- astroid.Tuple,
- astroid.List)):
+ while node and isinstance(node, (astroid.AssignName, astroid.Tuple, astroid.List)):
node = node.parent
return node
-def overrides_a_method(class_node: astroid.node_classes.NodeNG,
- name: str) -> bool:
+def overrides_a_method(class_node: astroid.node_classes.NodeNG, name: str) -> bool:
"""return True if <name> is a method overridden from an ancestor"""
for ancestor in class_node.ancestors():
if name in ancestor and isinstance(ancestor[name], astroid.FunctionDef):
return True
return False
+
def check_messages(*messages: str) -> Callable:
"""decorator to store messages that are handled by a checker method"""
def store_messages(func):
func.checks_msgs = messages
return func
+
return store_messages
+
class IncompleteFormatString(Exception):
"""A format string ended in the middle of a format specifier."""
+
class UnsupportedFormatCharacter(Exception):
"""A format character in a format string is not one of the supported
format characters."""
+
def __init__(self, index):
Exception.__init__(self, index)
self.index = index
-def parse_format_string(format_string: str) -> \
- Tuple[Set[str], int, Dict[str, str], List[str]]:
+
+def parse_format_string(
+ format_string: str
+) -> Tuple[Set[str], int, Dict[str, str], List[str]]:
"""Parses a format string, returning a tuple of (keys, num_args), where keys
is the set of mapping keys in the format string, and num_args is the number
of arguments required by the format string. Raises
@@ -357,64 +471,66 @@ def parse_format_string(format_string: str) -> \
key_types = dict()
pos_types = []
num_args = 0
+
def next_char(i):
i += 1
if i == len(format_string):
raise IncompleteFormatString
return (i, format_string[i])
+
i = 0
while i < len(format_string):
char = format_string[i]
- if char == '%':
+ if char == "%":
i, char = next_char(i)
# Parse the mapping key (optional).
key = None
- if char == '(':
+ if char == "(":
depth = 1
i, char = next_char(i)
key_start = i
while depth != 0:
- if char == '(':
+ if char == "(":
depth += 1
- elif char == ')':
+ elif char == ")":
depth -= 1
i, char = next_char(i)
key_end = i - 1
key = format_string[key_start:key_end]
# Parse the conversion flags (optional).
- while char in '#0- +':
+ while char in "#0- +":
i, char = next_char(i)
# Parse the minimum field width (optional).
- if char == '*':
+ if char == "*":
num_args += 1
i, char = next_char(i)
else:
while char in string.digits:
i, char = next_char(i)
# Parse the precision (optional).
- if char == '.':
+ if char == ".":
i, char = next_char(i)
- if char == '*':
+ if char == "*":
num_args += 1
i, char = next_char(i)
else:
while char in string.digits:
i, char = next_char(i)
# Parse the length modifier (optional).
- if char in 'hlL':
+ if char in "hlL":
i, char = next_char(i)
# Parse the conversion type (mandatory).
if PY3K:
- flags = 'diouxXeEfFgGcrs%a'
+ flags = "diouxXeEfFgGcrs%a"
else:
- flags = 'diouxXeEfFgGcrs%'
+ flags = "diouxXeEfFgGcrs%"
if char not in flags:
raise UnsupportedFormatCharacter(i)
if key:
keys.add(key)
key_types[key] = char
- elif char != '%':
+ elif char != "%":
num_args += 1
pos_types.append(char)
i += 1
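
Following the branches above, two illustrative inputs and the (keys, num_args, key_types, pos_types) results they would produce (values sketched from the parsing logic, not taken from a test suite):

    # "%(name)s: %(age)d"  -> {"name", "age"}, 0, {"name": "s", "age": "d"}, []
    # "%s scored %d%%"     -> set(),           2, {},                        ["s", "d"]
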
@@ -425,10 +541,16 @@ def is_attr_protected(attrname: str) -> bool:
"""return True if attribute name is protected (start with _ and some other
details), False otherwise.
"""
- return attrname[0] == '_' and attrname != '_' and not (
- attrname.startswith('__') and attrname.endswith('__'))
+ return (
+ attrname[0] == "_"
+ and attrname != "_"
+ and not (attrname.startswith("__") and attrname.endswith("__"))
+ )
-def node_frame_class(node: astroid.node_classes.NodeNG) -> Optional[astroid.node_classes.NodeNG]:
+
+def node_frame_class(
+ node: astroid.node_classes.NodeNG
+) -> Optional[astroid.node_classes.NodeNG]:
"""return klass node for a method node (or a staticmethod or a
classmethod), return null otherwise
"""
@@ -447,12 +569,13 @@ def is_attr_private(attrname: str) -> Optional[Match[str]]:
"""Check that attribute name is private (at least two leading underscores,
at most one trailing underscore)
"""
- regex = re.compile('^_{2,}.*[^_]+_?$')
+ regex = re.compile("^_{2,}.*[^_]+_?$")
return regex.match(attrname)
-def get_argument_from_call(call_node: astroid.Call,
- position: int = None,
- keyword: str = None) -> astroid.Name:
+
+def get_argument_from_call(
+ call_node: astroid.Call, position: int = None, keyword: str = None
+) -> astroid.Name:
"""Returns the specified argument from a function call.
:param astroid.Call call_node: Node representing a function call to check.
@@ -466,7 +589,7 @@ def get_argument_from_call(call_node: astroid.Call,
the provided keyword.
"""
if position is None and keyword is None:
- raise ValueError('Must specify at least one of: position or keyword.')
+ raise ValueError("Must specify at least one of: position or keyword.")
if position is not None:
try:
return call_node.args[position]
@@ -479,22 +602,26 @@ def get_argument_from_call(call_node: astroid.Call,
raise NoSuchArgumentError
+
def inherit_from_std_ex(node: astroid.node_classes.NodeNG) -> bool:
"""
Return true if the given class node is subclass of
exceptions.Exception.
"""
- if node.name in ('Exception', 'BaseException') \
- and node.root().name == EXCEPTIONS_MODULE:
+ if (
+ node.name in ("Exception", "BaseException")
+ and node.root().name == EXCEPTIONS_MODULE
+ ):
return True
- if not hasattr(node, 'ancestors'):
+ if not hasattr(node, "ancestors"):
return False
- return any(inherit_from_std_ex(parent)
- for parent in node.ancestors(recurs=True))
+ return any(inherit_from_std_ex(parent) for parent in node.ancestors(recurs=True))
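
A minimal sketch of exercising this predicate, assuming astroid and pylint are importable and using astroid.extract_node to build the class node (the class name is made up):

    import astroid
    from pylint.checkers.utils import inherit_from_std_ex

    classdef = astroid.extract_node("class ConfigError(ValueError):\n    pass")
    # ValueError ultimately derives from builtins.Exception, so this holds:
    assert inherit_from_std_ex(classdef)
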
-def error_of_type(handler: astroid.ExceptHandler,
- error_type: Union[str, Type[Exception], Tuple[Type[Exception]]]) -> bool:
+def error_of_type(
+ handler: astroid.ExceptHandler,
+ error_type: Union[str, Type[Exception], Tuple[Type[Exception]]],
+) -> bool:
"""
Check if the given exception handler catches
the given error_type.
@@ -505,13 +632,14 @@ def error_of_type(handler: astroid.ExceptHandler,
The function will return True if the handler catches any of the
given errors.
"""
+
def stringify_error(error):
if not isinstance(error, str):
return error.__name__
return error
if not isinstance(error_type, tuple):
- error_type = (error_type, ) # type: ignore
+ error_type = (error_type,) # type: ignore
expected_errors = {stringify_error(error) for error in error_type} # type: ignore
if not handler.type:
return True
@@ -536,21 +664,25 @@ def decorated_with_property(node: astroid.FunctionDef) -> bool:
def _is_property_decorator(decorator: astroid.Name) -> bool:
for infered in decorator.infer():
if isinstance(infered, astroid.ClassDef):
- if infered.root().name == BUILTINS_NAME and infered.name == 'property':
+ if infered.root().name == BUILTINS_NAME and infered.name == "property":
return True
for ancestor in infered.ancestors():
- if ancestor.name == 'property' and ancestor.root().name == BUILTINS_NAME:
+ if (
+ ancestor.name == "property"
+ and ancestor.root().name == BUILTINS_NAME
+ ):
return True
return False
-def decorated_with(func: astroid.FunctionDef,
- qnames: Iterable[str]) -> bool:
+def decorated_with(func: astroid.FunctionDef, qnames: Iterable[str]) -> bool:
"""Determine if the `func` node has a decorator with the qualified name `qname`."""
decorators = func.decorators.nodes if func.decorators else []
for decorator_node in decorators:
try:
- if any(i is not None and i.qname() in qnames for i in decorator_node.infer()):
+ if any(
+ i is not None and i.qname() in qnames for i in decorator_node.infer()
+ ):
return True
except astroid.InferenceError:
continue
@@ -558,9 +690,9 @@ def decorated_with(func: astroid.FunctionDef,
@lru_cache(maxsize=1024)
-def unimplemented_abstract_methods(node: astroid.node_classes.NodeNG,
- is_abstract_cb: astroid.FunctionDef = None
- ) -> Dict[str, astroid.node_classes.NodeNG]:
+def unimplemented_abstract_methods(
+ node: astroid.node_classes.NodeNG, is_abstract_cb: astroid.FunctionDef = None
+) -> Dict[str, astroid.node_classes.NodeNG]:
"""
Get the unimplemented abstract methods for the given *node*.
@@ -574,7 +706,7 @@ def unimplemented_abstract_methods(node: astroid.node_classes.NodeNG,
"""
if is_abstract_cb is None:
is_abstract_cb = partial(decorated_with, qnames=ABC_METHODS)
- visited = {} # type: Dict[str, astroid.node_classes.NodeNG]
+ visited = {} # type: Dict[str, astroid.node_classes.NodeNG]
try:
mro = reversed(node.mro())
except NotImplementedError:
@@ -617,8 +749,9 @@ def unimplemented_abstract_methods(node: astroid.node_classes.NodeNG,
return visited
-def find_try_except_wrapper_node(node: astroid.node_classes.NodeNG
- ) -> Union[astroid.ExceptHandler, astroid.TryExcept]:
+def find_try_except_wrapper_node(
+ node: astroid.node_classes.NodeNG
+) -> Union[astroid.ExceptHandler, astroid.TryExcept]:
"""Return the ExceptHandler or the TryExcept node in which the node is."""
current = node
ignores = (astroid.ExceptHandler, astroid.TryExcept)
@@ -641,26 +774,30 @@ def is_from_fallback_block(node: astroid.node_classes.NodeNG) -> bool:
handlers = context.parent.handlers
else:
other_body = itertools.chain.from_iterable(
- handler.body for handler in context.handlers)
+ handler.body for handler in context.handlers
+ )
handlers = context.handlers
- has_fallback_imports = any(isinstance(import_node, (astroid.ImportFrom, astroid.Import))
- for import_node in other_body)
+ has_fallback_imports = any(
+ isinstance(import_node, (astroid.ImportFrom, astroid.Import))
+ for import_node in other_body
+ )
ignores_import_error = _except_handlers_ignores_exception(handlers, ImportError)
return ignores_import_error or has_fallback_imports
-def _except_handlers_ignores_exception(handlers: astroid.ExceptHandler,
- exception: Union[str, Type[Exception],
- Tuple[Type[Exception]]]
- ) -> bool:
- func = partial(error_of_type, error_type=(exception, ))
+def _except_handlers_ignores_exception(
+ handlers: astroid.ExceptHandler,
+ exception: Union[str, Type[Exception], Tuple[Type[Exception]]],
+) -> bool:
+ func = partial(error_of_type, error_type=(exception,))
return any(map(func, handlers))
-def get_exception_handlers(node: astroid.node_classes.NodeNG,
- exception: Union[str, Type[Exception]] = Exception
- ) -> List[astroid.ExceptHandler]:
+def get_exception_handlers(
+ node: astroid.node_classes.NodeNG,
+ exception: Union[str, Type[Exception]] = Exception,
+) -> List[astroid.ExceptHandler]:
"""Return the collections of handlers handling the exception in arguments.
Args:
@@ -693,8 +830,10 @@ def is_node_inside_try_except(node: astroid.Raise) -> bool:
return isinstance(context, astroid.TryExcept)
-def node_ignores_exception(node: astroid.node_classes.NodeNG,
- exception: Union[str, Type[Exception]] = Exception) -> bool:
+def node_ignores_exception(
+ node: astroid.node_classes.NodeNG,
+ exception: Union[str, Type[Exception]] = Exception,
+) -> bool:
"""Check if the node is in a TryExcept which handles the given exception.
If the exception is not given, the function is going to look for bare
@@ -717,8 +856,7 @@ def class_is_abstract(node: astroid.ClassDef) -> bool:
return False
-def _supports_protocol_method(value: astroid.node_classes.NodeNG,
- attr: str) -> bool:
+def _supports_protocol_method(value: astroid.node_classes.NodeNG, attr: str) -> bool:
try:
attributes = value.getattr(attr)
except astroid.NotFoundError:
@@ -732,18 +870,19 @@ def _supports_protocol_method(value: astroid.node_classes.NodeNG,
def is_comprehension(node: astroid.node_classes.NodeNG) -> bool:
- comprehensions = (astroid.ListComp,
- astroid.SetComp,
- astroid.DictComp,
- astroid.GeneratorExp)
+ comprehensions = (
+ astroid.ListComp,
+ astroid.SetComp,
+ astroid.DictComp,
+ astroid.GeneratorExp,
+ )
return isinstance(node, comprehensions)
def _supports_mapping_protocol(value: astroid.node_classes.NodeNG) -> bool:
- return (
- _supports_protocol_method(value, GETITEM_METHOD)
- and _supports_protocol_method(value, KEYS_METHOD)
- )
+ return _supports_protocol_method(
+ value, GETITEM_METHOD
+ ) and _supports_protocol_method(value, KEYS_METHOD)
def _supports_membership_test_protocol(value: astroid.node_classes.NodeNG) -> bool:
@@ -751,9 +890,8 @@ def _supports_membership_test_protocol(value: astroid.node_classes.NodeNG) -> bo
def _supports_iteration_protocol(value: astroid.node_classes.NodeNG) -> bool:
- return (
- _supports_protocol_method(value, ITER_METHOD)
- or _supports_protocol_method(value, GETITEM_METHOD)
+ return _supports_protocol_method(value, ITER_METHOD) or _supports_protocol_method(
+ value, GETITEM_METHOD
)
@@ -775,9 +913,9 @@ def _supports_delitem_protocol(value: astroid.node_classes.NodeNG) -> bool:
def _is_abstract_class_name(name: str) -> bool:
lname = name.lower()
- is_mixin = lname.endswith('mixin')
- is_abstract = lname.startswith('abstract')
- is_base = lname.startswith('base') or lname.endswith('base')
+ is_mixin = lname.endswith("mixin")
+ is_abstract = lname.startswith("abstract")
+ is_base = lname.startswith("base") or lname.endswith("base")
return is_mixin or is_abstract or is_base
@@ -786,15 +924,16 @@ def is_inside_abstract_class(node: astroid.node_classes.NodeNG) -> bool:
if isinstance(node, astroid.ClassDef):
if class_is_abstract(node):
return True
- name = getattr(node, 'name', None)
+ name = getattr(node, "name", None)
if name is not None and _is_abstract_class_name(name):
return True
node = node.parent
return False
-def _supports_protocol(value: astroid.node_classes.NodeNG,
- protocol_callback: astroid.FunctionDef) -> bool:
+def _supports_protocol(
+ value: astroid.node_classes.NodeNG, protocol_callback: astroid.FunctionDef
+) -> bool:
if isinstance(value, astroid.ClassDef):
if not has_known_bases(value):
return True
@@ -813,17 +952,18 @@ def _supports_protocol(value: astroid.node_classes.NodeNG,
# TODO: this is not needed in astroid 2.0, where we can
# check the type using a virtual base class instead.
- if (isinstance(value, _bases.Proxy)
- and isinstance(value._proxied, astroid.BaseInstance)
- and has_known_bases(value._proxied)):
+ if (
+ isinstance(value, _bases.Proxy)
+ and isinstance(value._proxied, astroid.BaseInstance)
+ and has_known_bases(value._proxied)
+ ):
value = value._proxied
return protocol_callback(value)
return False
-def is_iterable(value: astroid.node_classes.NodeNG,
- check_async: bool = False) -> bool:
+def is_iterable(value: astroid.node_classes.NodeNG, check_async: bool = False) -> bool:
if check_async:
protocol_check = _supports_async_iteration_protocol
else:
@@ -854,8 +994,9 @@ def supports_delitem(value: astroid.node_classes.NodeNG) -> bool:
# TODO(cpopa): deprecate these or leave them as aliases?
@lru_cache(maxsize=1024)
-def safe_infer(node: astroid.node_classes.NodeNG,
- context=None) -> Optional[astroid.node_classes.NodeNG]:
+def safe_infer(
+ node: astroid.node_classes.NodeNG, context=None
+) -> Optional[astroid.node_classes.NodeNG]:
"""Return the inferred value for the given node.
Return None if inference failed or if there is some ambiguity (more than
@@ -868,15 +1009,14 @@ def safe_infer(node: astroid.node_classes.NodeNG,
return None
try:
next(inferit)
- return None # None if there is ambiguity on the inferred node
+ return None # None if there is ambiguity on the inferred node
except astroid.InferenceError:
- return None # there is some kind of ambiguity
+ return None # there is some kind of ambiguity
except StopIteration:
return value
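
A rough sketch of the two behaviours, assuming astroid and pylint are importable (extract_node's trailing "#@" marker selects the name expression to infer):

    import astroid
    from pylint.checkers.utils import safe_infer

    single = astroid.extract_node("x = 42\nx  #@")
    assert safe_infer(single).value == 42   # exactly one inferred value

    multiple = astroid.extract_node(
        "import random\n"
        "if random.random() > 0.5:\n"
        "    y = 1\n"
        "else:\n"
        "    y = 'one'\n"
        "y  #@"
    )
    assert safe_infer(multiple) is None     # two candidates -> ambiguity -> None
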
-def has_known_bases(klass: astroid.ClassDef,
- context=None) -> bool:
+def has_known_bases(klass: astroid.ClassDef, context=None) -> bool:
"""Return true if all base classes of a class could be inferred."""
try:
return klass._all_bases_known
@@ -885,9 +1025,11 @@ def has_known_bases(klass: astroid.ClassDef,
for base in klass.bases:
result = safe_infer(base, context=context)
# TODO: check for A->B->A->B pattern in class structure too?
- if (not isinstance(result, astroid.ClassDef) or
- result is klass or
- not has_known_bases(result, context=context)):
+ if (
+ not isinstance(result, astroid.ClassDef)
+ or result is klass
+ or not has_known_bases(result, context=context)
+ ):
klass._all_bases_known = False
return False
klass._all_bases_known = True
@@ -895,10 +1037,11 @@ def has_known_bases(klass: astroid.ClassDef,
def is_none(node: astroid.node_classes.NodeNG) -> bool:
- return (node is None or
- (isinstance(node, astroid.Const) and node.value is None) or
- (isinstance(node, astroid.Name) and node.name == 'None')
- )
+ return (
+ node is None
+ or (isinstance(node, astroid.Const) and node.value is None)
+ or (isinstance(node, astroid.Name) and node.name == "None")
+ )
def node_type(node: astroid.node_classes.NodeNG) -> Optional[type]:
@@ -926,8 +1069,8 @@ def is_registered_in_singledispatch_function(node: astroid.FunctionDef) -> bool:
"""Check if the given function node is a singledispatch function."""
singledispatch_qnames = (
- 'functools.singledispatch',
- 'singledispatch.singledispatch'
+ "functools.singledispatch",
+ "singledispatch.singledispatch",
)
if not isinstance(node, astroid.FunctionDef):
@@ -940,7 +1083,7 @@ def is_registered_in_singledispatch_function(node: astroid.FunctionDef) -> bool:
continue
func = decorator.func
- if not isinstance(func, astroid.Attribute) or func.attrname != 'register':
+ if not isinstance(func, astroid.Attribute) or func.attrname != "register":
continue
try:
@@ -962,17 +1105,17 @@ def get_node_last_lineno(node: astroid.node_classes.NodeNG) -> int:
child statement recursively.
"""
# 'finalbody' is always the last clause in a try statement, if present
- if getattr(node, 'finalbody', False):
+ if getattr(node, "finalbody", False):
return get_node_last_lineno(node.finalbody[-1])
# For if, while, and for statements 'orelse' is always the last clause.
# For try statements 'orelse' is the last in the absence of a 'finalbody'
- if getattr(node, 'orelse', False):
+ if getattr(node, "orelse", False):
return get_node_last_lineno(node.orelse[-1])
# try statements have the 'handlers' last if there is no 'orelse' or 'finalbody'
- if getattr(node, 'handlers', False):
+ if getattr(node, "handlers", False):
return get_node_last_lineno(node.handlers[-1])
# All compound statements have a 'body'
- if getattr(node, 'body', False):
+ if getattr(node, "body", False):
return get_node_last_lineno(node.body[-1])
# Not a compound statement
return node.lineno
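
Given the clause priority documented above (finalbody, then orelse, then handlers, then body), a hypothetical try statement illustrates the recursion:

    # 1  try:
    # 2      risky()
    # 3  except ValueError:
    # 4      handle()
    # 5  else:
    # 6      ok()
    # 7  finally:
    # 8      cleanup()
    #
    # get_node_last_lineno(<Try node>) -> 8, via the last statement of finalbody
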
@@ -997,7 +1140,7 @@ def is_enum_class(node: astroid.ClassDef) -> bool:
if not isinstance(ancestor, astroid.ClassDef):
continue
- if ancestor.name == 'Enum' and ancestor.root().name == 'enum':
+ if ancestor.name == "Enum" and ancestor.root().name == "enum":
return True
return False
@@ -1032,14 +1175,17 @@ def is_dataclass(node: astroid.ClassDef) -> bool:
def is_postponed_evaluation_enabled(node: astroid.node_classes.NodeNG) -> bool:
"""Check if the postponed evaluation of annotations is enabled"""
- name = 'annotations'
+ name = "annotations"
module = node.root()
stmt = module.locals.get(name)
- return stmt and isinstance(stmt[0], astroid.ImportFrom) and stmt[0].modname == '__future__'
+ return (
+ stmt
+ and isinstance(stmt[0], astroid.ImportFrom)
+ and stmt[0].modname == "__future__"
+ )
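
A small sketch of what this predicate looks for, assuming astroid and pylint are importable (the annotation string is arbitrary):

    import astroid
    from pylint.checkers.utils import is_postponed_evaluation_enabled

    module = astroid.parse(
        "from __future__ import annotations\n"
        "def f(x: 'SomeType') -> 'SomeType':\n"
        "    return x\n"
    )
    # the __future__ import binds "annotations" in module.locals
    assert is_postponed_evaluation_enabled(module.body[-1])
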
-def is_subclass_of(node_a: astroid.ClassDef,
- node_b: astroid.ClassDef) -> bool:
+def is_subclass_of(node_a: astroid.ClassDef, node_b: astroid.ClassDef) -> bool:
"""
Check if first node is a subclass of second node.
:param node_a: Node to check for subclass.
diff --git a/pylint/checkers/variables.py b/pylint/checkers/variables.py
index df7a14bb1..e9c7ccb23 100644
--- a/pylint/checkers/variables.py
+++ b/pylint/checkers/variables.py
@@ -55,17 +55,15 @@ from pylint.checkers import utils
SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
-FUTURE = '__future__'
+FUTURE = "__future__"
# regexp for ignored argument name
-IGNORED_ARGUMENT_NAMES = re.compile('_.*|^ignored_|^unused_')
+IGNORED_ARGUMENT_NAMES = re.compile("_.*|^ignored_|^unused_")
PY3K = sys.version_info >= (3, 0)
# In Python 3.7 abc has a Python implementation which is preferred
# by astroid. Unfortunately this also messes up our explicit checks
# for `abc`
-METACLASS_NAME_TRANSFORMS = {
- '_py_abc': 'abc',
-}
-TYPING_TYPE_CHECKS_GUARDS = frozenset({'typing.TYPE_CHECKING', 'TYPE_CHECKING'})
+METACLASS_NAME_TRANSFORMS = {"_py_abc": "abc"}
+TYPING_TYPE_CHECKS_GUARDS = frozenset({"typing.TYPE_CHECKING", "TYPE_CHECKING"})
def _is_from_future_import(stmt, name):
@@ -76,17 +74,16 @@ def _is_from_future_import(stmt, name):
return None
for local_node in module.locals.get(name, []):
- if (isinstance(local_node, astroid.ImportFrom)
- and local_node.modname == FUTURE):
+ if isinstance(local_node, astroid.ImportFrom) and local_node.modname == FUTURE:
return True
return None
def in_for_else_branch(parent, stmt):
"""Returns True if stmt in inside the else branch for a parent For stmt."""
- return (isinstance(parent, astroid.For) and
- any(else_stmt.parent_of(stmt) or else_stmt == stmt
- for else_stmt in parent.orelse))
+ return isinstance(parent, astroid.For) and any(
+ else_stmt.parent_of(stmt) or else_stmt == stmt for else_stmt in parent.orelse
+ )
@lru_cache(maxsize=1000)
@@ -106,21 +103,23 @@ def overridden_method(klass, name):
return meth_node
return None
+
def _get_unpacking_extra_info(node, infered):
"""return extra information to add to the message for unpacking-non-sequence
and unbalanced-tuple-unpacking errors
"""
- more = ''
+ more = ""
infered_module = infered.root().name
if node.root().name == infered_module:
if node.lineno == infered.lineno:
- more = ' %s' % infered.as_string()
+ more = " %s" % infered.as_string()
elif infered.lineno:
- more = ' defined at line %s' % infered.lineno
+ more = " defined at line %s" % infered.lineno
elif infered.lineno:
- more = ' defined at line %s of %s' % (infered.lineno, infered_module)
+ more = " defined at line %s of %s" % (infered.lineno, infered_module)
return more
+
def _detect_global_scope(node, frame, defframe):
""" Detect that the given frames shares a global
scope.
@@ -150,11 +149,11 @@ def _detect_global_scope(node, frame, defframe):
# the `->` part of annotations. The same goes
# for annotations of function arguments, they'll have
# their parent the Arguments node.
- if not isinstance(node.parent,
- (astroid.FunctionDef, astroid.Arguments)):
+ if not isinstance(node.parent, (astroid.FunctionDef, astroid.Arguments)):
return False
- elif any(not isinstance(f, (astroid.ClassDef, astroid.Module))
- for f in (frame, defframe)):
+ elif any(
+ not isinstance(f, (astroid.ClassDef, astroid.Module)) for f in (frame, defframe)
+ ):
# Not interested in other frames, since they are already
# not in a global scope.
return False
@@ -185,6 +184,7 @@ def _detect_global_scope(node, frame, defframe):
# and the definition of the first depends on the second.
return frame.lineno < defframe.lineno
+
def _fix_dot_imports(not_consumed):
""" Try to fix imports with multiple dots, by returning a dictionary
with the import names expanded. The function unflattens root imports,
@@ -194,9 +194,11 @@ def _fix_dot_imports(not_consumed):
# TODO: this should be improved in issue astroid #46
names = {}
for name, stmts in not_consumed.items():
- if any(isinstance(stmt, astroid.AssignName)
- and isinstance(stmt.assign_type(), astroid.AugAssign)
- for stmt in stmts):
+ if any(
+ isinstance(stmt, astroid.AssignName)
+ and isinstance(stmt.assign_type(), astroid.AugAssign)
+ for stmt in stmts
+ ):
continue
for stmt in stmts:
if not isinstance(stmt, (astroid.ImportFrom, astroid.Import)):
@@ -217,6 +219,7 @@ def _fix_dot_imports(not_consumed):
names[second_name] = stmt
return sorted(names.items(), key=lambda a: a[1].fromlineno)
+
def _find_frame_imports(name, frame):
"""
Detect imports in the frame, with the required
@@ -262,130 +265,185 @@ def _assigned_locally(name_node):
def _has_locals_call_after_node(stmt, scope):
- skip_nodes = (astroid.FunctionDef, astroid.ClassDef, astroid.Import, astroid.ImportFrom)
+ skip_nodes = (
+ astroid.FunctionDef,
+ astroid.ClassDef,
+ astroid.Import,
+ astroid.ImportFrom,
+ )
for call in scope.nodes_of_class(astroid.Call, skip_klass=skip_nodes):
inferred = utils.safe_infer(call.func)
- if utils.is_builtin_object(inferred) and getattr(inferred, 'name', None) == 'locals':
+ if (
+ utils.is_builtin_object(inferred)
+ and getattr(inferred, "name", None) == "locals"
+ ):
if stmt.lineno < call.lineno:
return True
return False
MSGS = {
- 'E0601': ('Using variable %r before assignment',
- 'used-before-assignment',
- 'Used when a local variable is accessed before it\'s '
- 'assignment.'),
- 'E0602': ('Undefined variable %r',
- 'undefined-variable',
- 'Used when an undefined variable is accessed.'),
- 'E0603': ('Undefined variable name %r in __all__',
- 'undefined-all-variable',
- 'Used when an undefined variable name is referenced in __all__.'),
- 'E0604': ('Invalid object %r in __all__, must contain only strings',
- 'invalid-all-object',
- 'Used when an invalid (non-string) object occurs in __all__.'),
- 'E0611': ('No name %r in module %r',
- 'no-name-in-module',
- 'Used when a name cannot be found in a module.'),
-
- 'W0601': ('Global variable %r undefined at the module level',
- 'global-variable-undefined',
- 'Used when a variable is defined through the "global" statement '
- 'but the variable is not defined in the module scope.'),
- 'W0602': ('Using global for %r but no assignment is done',
- 'global-variable-not-assigned',
- 'Used when a variable is defined through the "global" statement '
- 'but no assignment to this variable is done.'),
- 'W0603': ('Using the global statement', # W0121
- 'global-statement',
- 'Used when you use the "global" statement to update a global '
- 'variable. Pylint just try to discourage this '
- 'usage. That doesn\'t mean you cannot use it !'),
- 'W0604': ('Using the global statement at the module level', # W0103
- 'global-at-module-level',
- 'Used when you use the "global" statement at the module level '
- 'since it has no effect'),
- 'W0611': ('Unused %s',
- 'unused-import',
- 'Used when an imported module or variable is not used.'),
- 'W0612': ('Unused variable %r',
- 'unused-variable',
- 'Used when a variable is defined but not used.'),
- 'W0613': ('Unused argument %r',
- 'unused-argument',
- 'Used when a function or method argument is not used.'),
- 'W0614': ('Unused import %s from wildcard import',
- 'unused-wildcard-import',
- 'Used when an imported module or variable is not used from a '
- '`\'from X import *\'` style import.'),
-
- 'W0621': ('Redefining name %r from outer scope (line %s)',
- 'redefined-outer-name',
- 'Used when a variable\'s name hides a name defined in the outer '
- 'scope.'),
- 'W0622': ('Redefining built-in %r',
- 'redefined-builtin',
- 'Used when a variable or function override a built-in.'),
- 'W0623': ('Redefining name %r from %s in exception handler',
- 'redefine-in-handler',
- 'Used when an exception handler assigns the exception '
- 'to an existing name'),
-
- 'W0631': ('Using possibly undefined loop variable %r',
- 'undefined-loop-variable',
- 'Used when a loop variable (i.e. defined by a for loop or '
- 'a list comprehension or a generator expression) is used outside '
- 'the loop.'),
-
- 'E0632': ('Possible unbalanced tuple unpacking with '
- 'sequence%s: '
- 'left side has %d label(s), right side has %d value(s)',
- 'unbalanced-tuple-unpacking',
- 'Used when there is an unbalanced tuple unpacking in assignment',
- {'old_names': [('W0632', 'unbalanced-tuple-unpacking')]}),
-
- 'E0633': ('Attempting to unpack a non-sequence%s',
- 'unpacking-non-sequence',
- 'Used when something which is not '
- 'a sequence is used in an unpack assignment',
- {'old_names': [('W0633', 'unpacking-non-sequence')]}),
-
- 'W0640': ('Cell variable %s defined in loop',
- 'cell-var-from-loop',
- 'A variable used in a closure is defined in a loop. '
- 'This will result in all closures using the same value for '
- 'the closed-over variable.'),
-
- 'W0641': ('Possibly unused variable %r',
- 'possibly-unused-variable',
- 'Used when a variable is defined but might not be used. '
- 'The possibility comes from the fact that locals() might be used, '
- 'which could consume or not the said variable'),
- 'W0642': ('Invalid assignment to %s in method',
- 'self-cls-assignment',
- 'Invalid assignment to self or cls in instance or class method '
- 'respectively.'),
- }
-
-
-ScopeConsumer = collections.namedtuple("ScopeConsumer", "to_consume consumed scope_type")
+ "E0601": (
+ "Using variable %r before assignment",
+ "used-before-assignment",
+ "Used when a local variable is accessed before it's " "assignment.",
+ ),
+ "E0602": (
+ "Undefined variable %r",
+ "undefined-variable",
+ "Used when an undefined variable is accessed.",
+ ),
+ "E0603": (
+ "Undefined variable name %r in __all__",
+ "undefined-all-variable",
+ "Used when an undefined variable name is referenced in __all__.",
+ ),
+ "E0604": (
+ "Invalid object %r in __all__, must contain only strings",
+ "invalid-all-object",
+ "Used when an invalid (non-string) object occurs in __all__.",
+ ),
+ "E0611": (
+ "No name %r in module %r",
+ "no-name-in-module",
+ "Used when a name cannot be found in a module.",
+ ),
+ "W0601": (
+ "Global variable %r undefined at the module level",
+ "global-variable-undefined",
+ 'Used when a variable is defined through the "global" statement '
+ "but the variable is not defined in the module scope.",
+ ),
+ "W0602": (
+ "Using global for %r but no assignment is done",
+ "global-variable-not-assigned",
+ 'Used when a variable is defined through the "global" statement '
+ "but no assignment to this variable is done.",
+ ),
+ "W0603": (
+ "Using the global statement", # W0121
+ "global-statement",
+ 'Used when you use the "global" statement to update a global '
+ "variable. Pylint just try to discourage this "
+ "usage. That doesn't mean you cannot use it !",
+ ),
+ "W0604": (
+ "Using the global statement at the module level", # W0103
+ "global-at-module-level",
+ 'Used when you use the "global" statement at the module level '
+ "since it has no effect",
+ ),
+ "W0611": (
+ "Unused %s",
+ "unused-import",
+ "Used when an imported module or variable is not used.",
+ ),
+ "W0612": (
+ "Unused variable %r",
+ "unused-variable",
+ "Used when a variable is defined but not used.",
+ ),
+ "W0613": (
+ "Unused argument %r",
+ "unused-argument",
+ "Used when a function or method argument is not used.",
+ ),
+ "W0614": (
+ "Unused import %s from wildcard import",
+ "unused-wildcard-import",
+ "Used when an imported module or variable is not used from a "
+ "`'from X import *'` style import.",
+ ),
+ "W0621": (
+ "Redefining name %r from outer scope (line %s)",
+ "redefined-outer-name",
+ "Used when a variable's name hides a name defined in the outer " "scope.",
+ ),
+ "W0622": (
+ "Redefining built-in %r",
+ "redefined-builtin",
+ "Used when a variable or function override a built-in.",
+ ),
+ "W0623": (
+ "Redefining name %r from %s in exception handler",
+ "redefine-in-handler",
+ "Used when an exception handler assigns the exception " "to an existing name",
+ ),
+ "W0631": (
+ "Using possibly undefined loop variable %r",
+ "undefined-loop-variable",
+ "Used when a loop variable (i.e. defined by a for loop or "
+ "a list comprehension or a generator expression) is used outside "
+ "the loop.",
+ ),
+ "E0632": (
+ "Possible unbalanced tuple unpacking with "
+ "sequence%s: "
+ "left side has %d label(s), right side has %d value(s)",
+ "unbalanced-tuple-unpacking",
+ "Used when there is an unbalanced tuple unpacking in assignment",
+ {"old_names": [("W0632", "unbalanced-tuple-unpacking")]},
+ ),
+ "E0633": (
+ "Attempting to unpack a non-sequence%s",
+ "unpacking-non-sequence",
+ "Used when something which is not "
+ "a sequence is used in an unpack assignment",
+ {"old_names": [("W0633", "unpacking-non-sequence")]},
+ ),
+ "W0640": (
+ "Cell variable %s defined in loop",
+ "cell-var-from-loop",
+ "A variable used in a closure is defined in a loop. "
+ "This will result in all closures using the same value for "
+ "the closed-over variable.",
+ ),
+ "W0641": (
+ "Possibly unused variable %r",
+ "possibly-unused-variable",
+ "Used when a variable is defined but might not be used. "
+ "The possibility comes from the fact that locals() might be used, "
+ "which could consume or not the said variable",
+ ),
+ "W0642": (
+ "Invalid assignment to %s in method",
+ "self-cls-assignment",
+ "Invalid assignment to self or cls in instance or class method "
+ "respectively.",
+ ),
+}
+
+
+ScopeConsumer = collections.namedtuple(
+ "ScopeConsumer", "to_consume consumed scope_type"
+)
class NamesConsumer:
"""
A simple class to handle consumed, to consume and scope type info of node locals
"""
+
def __init__(self, node, scope_type):
self._atomic = ScopeConsumer(copy.copy(node.locals), {}, scope_type)
def __repr__(self):
msg = "\nto_consume : {:s}\n".format(
- ", ".join(["{}->{}".format(key, val)
- for key, val in self._atomic.to_consume.items()]))
+ ", ".join(
+ [
+ "{}->{}".format(key, val)
+ for key, val in self._atomic.to_consume.items()
+ ]
+ )
+ )
msg += "consumed : {:s}\n".format(
- ", ".join(["{}->{}".format(key, val)
- for key, val in self._atomic.consumed.items()]))
+ ", ".join(
+ [
+ "{}->{}".format(key, val)
+ for key, val in self._atomic.consumed.items()
+ ]
+ )
+ )
msg += "scope_type : {:s}\n".format(self._atomic.scope_type)
return msg
@@ -417,10 +475,13 @@ class NamesConsumer:
name = node.name
parent_node = node.parent
found_node = self.to_consume.get(name)
- if (found_node and isinstance(parent_node, astroid.Assign)
- and parent_node == found_node[0].parent):
+ if (
+ found_node
+ and isinstance(parent_node, astroid.Assign)
+ and parent_node == found_node[0].parent
+ ):
lhs = found_node[0].parent.targets[0]
- if lhs.name == name: # this name is defined in this very statement
+ if lhs.name == name: # this name is defined in this very statement
found_node = None
return found_node
@@ -437,55 +498,94 @@ class VariablesChecker(BaseChecker):
__implements__ = IAstroidChecker
- name = 'variables'
+ name = "variables"
msgs = MSGS
priority = -1
- options = (("init-import",
- {'default': 0, 'type' : 'yn', 'metavar' : '<y_or_n>',
- 'help' : 'Tells whether we should check for unused import in '
- '__init__ files.'}),
- ("dummy-variables-rgx",
- {'default': '_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_',
- 'type' :'regexp', 'metavar' : '<regexp>',
- 'help' : 'A regular expression matching the name of dummy '
- 'variables (i.e. expected to not be used).'}),
- ("additional-builtins",
- {'default': (), 'type' : 'csv',
- 'metavar' : '<comma separated list>',
- 'help' : 'List of additional names supposed to be defined in '
- 'builtins. Remember that you should avoid defining new builtins '
- 'when possible.'
- }),
- ("callbacks",
- {'default' : ('cb_', '_cb'), 'type' : 'csv',
- 'metavar' : '<callbacks>',
- 'help' : 'List of strings which can identify a callback '
- 'function by name. A callback name must start or '
- 'end with one of those strings.'}
- ),
- ("redefining-builtins-modules",
- {'default': ('six.moves', 'past.builtins', 'future.builtins', 'builtins', 'io'),
- 'type': 'csv',
- 'metavar': '<comma separated list>',
- 'help': 'List of qualified module names which can have objects '
- 'that can redefine builtins.'}
- ),
- ('ignored-argument-names',
- {'default' : IGNORED_ARGUMENT_NAMES,
- 'type' :'regexp', 'metavar' : '<regexp>',
- 'help' : 'Argument names that match this expression will be '
- 'ignored. Default to name with leading underscore.'}
- ),
- ('allow-global-unused-variables',
- {'default': True,
- 'type': 'yn', 'metavar': '<y_or_n>',
- 'help': 'Tells whether unused global variables should be treated as a violation.'}
- ),
- )
+ options = (
+ (
+ "init-import",
+ {
+ "default": 0,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": "Tells whether we should check for unused import in "
+ "__init__ files.",
+ },
+ ),
+ (
+ "dummy-variables-rgx",
+ {
+ "default": "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_",
+ "type": "regexp",
+ "metavar": "<regexp>",
+ "help": "A regular expression matching the name of dummy "
+ "variables (i.e. expected to not be used).",
+ },
+ ),
+ (
+ "additional-builtins",
+ {
+ "default": (),
+ "type": "csv",
+ "metavar": "<comma separated list>",
+ "help": "List of additional names supposed to be defined in "
+ "builtins. Remember that you should avoid defining new builtins "
+ "when possible.",
+ },
+ ),
+ (
+ "callbacks",
+ {
+ "default": ("cb_", "_cb"),
+ "type": "csv",
+ "metavar": "<callbacks>",
+ "help": "List of strings which can identify a callback "
+ "function by name. A callback name must start or "
+ "end with one of those strings.",
+ },
+ ),
+ (
+ "redefining-builtins-modules",
+ {
+ "default": (
+ "six.moves",
+ "past.builtins",
+ "future.builtins",
+ "builtins",
+ "io",
+ ),
+ "type": "csv",
+ "metavar": "<comma separated list>",
+ "help": "List of qualified module names which can have objects "
+ "that can redefine builtins.",
+ },
+ ),
+ (
+ "ignored-argument-names",
+ {
+ "default": IGNORED_ARGUMENT_NAMES,
+ "type": "regexp",
+ "metavar": "<regexp>",
+ "help": "Argument names that match this expression will be "
+ "ignored. Default to name with leading underscore.",
+ },
+ ),
+ (
+ "allow-global-unused-variables",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": "Tells whether unused global variables should be treated as a violation.",
+ },
+ ),
+ )
def __init__(self, linter=None):
BaseChecker.__init__(self, linter)
- self._to_consume = None # list of tuples: (to_consume:dict, consumed:dict, scope_type:str)
+ self._to_consume = (
+ None
+ ) # list of tuples: (to_consume:dict, consumed:dict, scope_type:str)
self._checking_mod_attr = None
self._loop_variables = []
self._type_annotation_names = []
@@ -494,19 +594,21 @@ class VariablesChecker(BaseChecker):
# Relying on other checker's options, which might not have been initialized yet.
@decorators.cachedproperty
def _analyse_fallback_blocks(self):
- return get_global_option(self, 'analyse-fallback-blocks', default=False)
+ return get_global_option(self, "analyse-fallback-blocks", default=False)
@decorators.cachedproperty
def _ignored_modules(self):
- return get_global_option(self, 'ignored-modules', default=[])
+ return get_global_option(self, "ignored-modules", default=[])
@decorators.cachedproperty
def _allow_global_unused_variables(self):
- return get_global_option(self, 'allow-global-unused-variables', default=True)
+ return get_global_option(self, "allow-global-unused-variables", default=True)
- @utils.check_messages('redefined-outer-name')
+ @utils.check_messages("redefined-outer-name")
def visit_for(self, node):
- assigned_to = [var.name for var in node.target.nodes_of_class(astroid.AssignName)]
+ assigned_to = [
+ var.name for var in node.target.nodes_of_class(astroid.AssignName)
+ ]
# Only check variables that are used
dummy_rgx = self.config.dummy_variables_rgx
@@ -514,18 +616,19 @@ class VariablesChecker(BaseChecker):
for variable in assigned_to:
for outer_for, outer_variables in self._loop_variables:
- if (variable in outer_variables
- and not in_for_else_branch(outer_for, node)):
+ if variable in outer_variables and not in_for_else_branch(
+ outer_for, node
+ ):
self.add_message(
- 'redefined-outer-name',
+ "redefined-outer-name",
args=(variable, outer_for.fromlineno),
- node=node
+ node=node,
)
break
self._loop_variables.append((node, assigned_to))
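
For illustration, the nested-loop pattern this visitor reports as redefined-outer-name looks like the following (hypothetical but runnable snippet):

    for item in ("a", "b"):
        for item in range(3):   # reuses "item" from the enclosing for loop
            print(item)
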
- @utils.check_messages('redefined-outer-name')
+ @utils.check_messages("redefined-outer-name")
def leave_for(self, node):
self._loop_variables.pop()
self._store_type_annotation_names(node)
@@ -534,25 +637,30 @@ class VariablesChecker(BaseChecker):
"""visit module : update consumption analysis variable
checks globals doesn't overrides builtins
"""
- self._to_consume = [NamesConsumer(node, 'module')]
+ self._to_consume = [NamesConsumer(node, "module")]
self._postponed_evaluation_enabled = is_postponed_evaluation_enabled(node)
for name, stmts in node.locals.items():
if utils.is_builtin(name) and not utils.is_inside_except(stmts[0]):
- if self._should_ignore_redefined_builtin(stmts[0]) or name == '__doc__':
+ if self._should_ignore_redefined_builtin(stmts[0]) or name == "__doc__":
continue
- self.add_message('redefined-builtin', args=name, node=stmts[0])
+ self.add_message("redefined-builtin", args=name, node=stmts[0])
- @utils.check_messages('unused-import', 'unused-wildcard-import',
- 'redefined-builtin', 'undefined-all-variable',
- 'invalid-all-object', 'unused-variable')
+ @utils.check_messages(
+ "unused-import",
+ "unused-wildcard-import",
+ "redefined-builtin",
+ "undefined-all-variable",
+ "invalid-all-object",
+ "unused-variable",
+ )
def leave_module(self, node):
"""leave module: check globals
"""
assert len(self._to_consume) == 1
not_consumed = self._to_consume.pop().to_consume
# attempt to check for __all__ if defined
- if '__all__' in node.locals:
+ if "__all__" in node.locals:
self._check_all(node, not_consumed)
# check for unused globals
@@ -565,11 +673,11 @@ class VariablesChecker(BaseChecker):
self._check_imports(not_consumed)
def _check_all(self, node, not_consumed):
- assigned = next(node.igetattr('__all__'))
+ assigned = next(node.igetattr("__all__"))
if assigned is astroid.Uninferable:
return
- for elt in getattr(assigned, 'elts', ()):
+ for elt in getattr(assigned, "elts", ()):
try:
elt_name = next(elt.infer())
except astroid.InferenceError:
@@ -579,10 +687,10 @@ class VariablesChecker(BaseChecker):
if not elt_name.parent:
continue
- if (not isinstance(elt_name, astroid.Const)
- or not isinstance(elt_name.value, str)):
- self.add_message('invalid-all-object',
- args=elt.as_string(), node=elt)
+ if not isinstance(elt_name, astroid.Const) or not isinstance(
+ elt_name.value, str
+ ):
+ self.add_message("invalid-all-object", args=elt.as_string(), node=elt)
continue
elt_name = elt_name.value
@@ -593,19 +701,19 @@ class VariablesChecker(BaseChecker):
if elt_name not in node.locals:
if not node.package:
- self.add_message('undefined-all-variable',
- args=(elt_name, ),
- node=elt)
+ self.add_message(
+ "undefined-all-variable", args=(elt_name,), node=elt
+ )
else:
basename = os.path.splitext(node.file)[0]
- if os.path.basename(basename) == '__init__':
+ if os.path.basename(basename) == "__init__":
name = node.name + "." + elt_name
try:
modutils.file_from_modpath(name.split("."))
except ImportError:
- self.add_message('undefined-all-variable',
- args=(elt_name, ),
- node=elt)
+ self.add_message(
+ "undefined-all-variable", args=(elt_name,), node=elt
+ )
except SyntaxError:
# don't yield a syntax-error warning,
# because it will be later yielded
@@ -625,7 +733,7 @@ class VariablesChecker(BaseChecker):
return
for name, nodes in not_consumed.items():
for node in nodes:
- self.add_message('unused-variable', args=(name,), node=node)
+ self.add_message("unused-variable", args=(name,), node=node)
def _check_imports(self, not_consumed):
local_names = _fix_dot_imports(not_consumed)
@@ -642,11 +750,12 @@ class VariablesChecker(BaseChecker):
continue
checked.add(real_name)
- if (isinstance(stmt, astroid.Import) or
- (isinstance(stmt, astroid.ImportFrom) and
- not stmt.modname)):
- if (isinstance(stmt, astroid.ImportFrom) and
- SPECIAL_OBJ.search(imported_name)):
+ if isinstance(stmt, astroid.Import) or (
+ isinstance(stmt, astroid.ImportFrom) and not stmt.modname
+ ):
+ if isinstance(stmt, astroid.ImportFrom) and SPECIAL_OBJ.search(
+ imported_name
+ ):
# Filter special objects (__doc__, __all__) etc.,
# because they can be imported for exporting.
continue
@@ -657,9 +766,8 @@ class VariablesChecker(BaseChecker):
else:
msg = "%s imported as %s" % (imported_name, as_name)
if not self._is_type_checking_import(stmt):
- self.add_message('unused-import', args=msg, node=stmt)
- elif (isinstance(stmt, astroid.ImportFrom)
- and stmt.modname != FUTURE):
+ self.add_message("unused-import", args=msg, node=stmt)
+ elif isinstance(stmt, astroid.ImportFrom) and stmt.modname != FUTURE:
if SPECIAL_OBJ.search(imported_name):
# Filter special objects (__doc__, __all__) etc.,
@@ -675,9 +783,8 @@ class VariablesChecker(BaseChecker):
# Most likely a typing import if it wasn't used so far.
continue
- if imported_name == '*':
- self.add_message('unused-wildcard-import',
- args=name, node=stmt)
+ if imported_name == "*":
+ self.add_message("unused-wildcard-import", args=name, node=stmt)
else:
if as_name is None:
msg = "%s imported from %s" % (imported_name, stmt.modname)
@@ -685,13 +792,13 @@ class VariablesChecker(BaseChecker):
fields = (imported_name, stmt.modname, as_name)
msg = "%s imported from %s as %s" % fields
if not self._is_type_checking_import(stmt):
- self.add_message('unused-import', args=msg, node=stmt)
+ self.add_message("unused-import", args=msg, node=stmt)
del self._to_consume
def visit_classdef(self, node):
"""visit class: update consumption analysis variable
"""
- self._to_consume.append(NamesConsumer(node, 'class'))
+ self._to_consume.append(NamesConsumer(node, "class"))
def leave_classdef(self, _):
"""leave class: update consumption analysis variable
@@ -702,7 +809,7 @@ class VariablesChecker(BaseChecker):
def visit_lambda(self, node):
"""visit lambda: update consumption analysis variable
"""
- self._to_consume.append(NamesConsumer(node, 'lambda'))
+ self._to_consume.append(NamesConsumer(node, "lambda"))
def leave_lambda(self, _):
"""leave lambda: update consumption analysis variable
@@ -713,7 +820,7 @@ class VariablesChecker(BaseChecker):
def visit_generatorexp(self, node):
"""visit genexpr: update consumption analysis variable
"""
- self._to_consume.append(NamesConsumer(node, 'comprehension'))
+ self._to_consume.append(NamesConsumer(node, "comprehension"))
def leave_generatorexp(self, _):
"""leave genexpr: update consumption analysis variable
@@ -724,7 +831,7 @@ class VariablesChecker(BaseChecker):
def visit_dictcomp(self, node):
"""visit dictcomp: update consumption analysis variable
"""
- self._to_consume.append(NamesConsumer(node, 'comprehension'))
+ self._to_consume.append(NamesConsumer(node, "comprehension"))
def leave_dictcomp(self, _):
"""leave dictcomp: update consumption analysis variable
@@ -735,7 +842,7 @@ class VariablesChecker(BaseChecker):
def visit_setcomp(self, node):
"""visit setcomp: update consumption analysis variable
"""
- self._to_consume.append(NamesConsumer(node, 'comprehension'))
+ self._to_consume.append(NamesConsumer(node, "comprehension"))
def leave_setcomp(self, _):
"""leave setcomp: update consumption analysis variable
@@ -746,9 +853,11 @@ class VariablesChecker(BaseChecker):
def visit_functiondef(self, node):
"""visit function: update consumption analysis variable and check locals
"""
- self._to_consume.append(NamesConsumer(node, 'function'))
- if not (self.linter.is_message_enabled('redefined-outer-name') or
- self.linter.is_message_enabled('redefined-builtin')):
+ self._to_consume.append(NamesConsumer(node, "function"))
+ if not (
+ self.linter.is_message_enabled("redefined-outer-name")
+ or self.linter.is_message_enabled("redefined-builtin")
+ ):
return
globs = node.root().globals
for name, stmt in node.items():
@@ -756,25 +865,32 @@ class VariablesChecker(BaseChecker):
continue
if name in globs and not isinstance(stmt, astroid.Global):
definition = globs[name][0]
- if (isinstance(definition, astroid.ImportFrom)
- and definition.modname == FUTURE):
+ if (
+ isinstance(definition, astroid.ImportFrom)
+ and definition.modname == FUTURE
+ ):
# It is a __future__ directive, not a symbol.
continue
line = definition.fromlineno
if not self._is_name_ignored(stmt, name):
- self.add_message('redefined-outer-name',
- args=(name, line), node=stmt)
+ self.add_message(
+ "redefined-outer-name", args=(name, line), node=stmt
+ )
- elif utils.is_builtin(name) and not self._should_ignore_redefined_builtin(stmt):
+ elif utils.is_builtin(name) and not self._should_ignore_redefined_builtin(
+ stmt
+ ):
# do not print Redefining builtin for additional builtins
- self.add_message('redefined-builtin', args=name, node=stmt)
+ self.add_message("redefined-builtin", args=name, node=stmt)
def _is_name_ignored(self, stmt, name):
authorized_rgx = self.config.dummy_variables_rgx
- if (isinstance(stmt, astroid.AssignName) and
- isinstance(stmt.parent, astroid.Arguments) or
- isinstance(stmt, astroid.Arguments)):
+ if (
+ isinstance(stmt, astroid.AssignName)
+ and isinstance(stmt.parent, astroid.Arguments)
+ or isinstance(stmt, astroid.Arguments)
+ ):
regex = self.config.ignored_argument_names
else:
regex = authorized_rgx
@@ -785,10 +901,12 @@ class VariablesChecker(BaseChecker):
if self._is_name_ignored(stmt, name):
return
# Ignore names that were added dynamically to the Function scope
- if (isinstance(node, astroid.FunctionDef)
- and name == '__class__'
- and len(node.locals['__class__']) == 1
- and isinstance(node.locals['__class__'][0], astroid.ClassDef)):
+ if (
+ isinstance(node, astroid.FunctionDef)
+ and name == "__class__"
+ and len(node.locals["__class__"]) == 1
+ and isinstance(node.locals["__class__"][0], astroid.ClassDef)
+ ):
return
# Ignore names imported by the global statement.
@@ -798,14 +916,15 @@ class VariablesChecker(BaseChecker):
if global_names and _import_name_is_global(stmt, global_names):
return
- argnames = list(itertools.chain(
- node.argnames(),
- [arg.name for arg in node.args.kwonlyargs]
- ))
+ argnames = list(
+ itertools.chain(node.argnames(), [arg.name for arg in node.args.kwonlyargs])
+ )
is_method = node.is_method()
klass = node.parent.frame()
if is_method and isinstance(klass, astroid.ClassDef):
- confidence = INFERENCE if utils.has_known_bases(klass) else INFERENCE_FAILURE
+ confidence = (
+ INFERENCE if utils.has_known_bases(klass) else INFERENCE_FAILURE
+ )
else:
confidence = HIGH
@@ -813,53 +932,62 @@ class VariablesChecker(BaseChecker):
if name in argnames:
if is_method:
# Don't warn for the first argument of a (non static) method
- if node.type != 'staticmethod' and name == argnames[0]:
+ if node.type != "staticmethod" and name == argnames[0]:
return
# Don't warn for argument of an overridden method
overridden = overridden_method(klass, node.name)
if overridden is not None and name in overridden.argnames():
return
- if node.name in utils.PYMETHODS and node.name not in ('__init__', '__new__'):
+ if node.name in utils.PYMETHODS and node.name not in (
+ "__init__",
+ "__new__",
+ ):
return
# Don't check callback arguments
- if any(node.name.startswith(cb) or node.name.endswith(cb)
- for cb in self.config.callbacks):
+ if any(
+ node.name.startswith(cb) or node.name.endswith(cb)
+ for cb in self.config.callbacks
+ ):
return
# Don't check arguments of singledispatch.register function.
if utils.is_registered_in_singledispatch_function(node):
return
- self.add_message('unused-argument', args=name, node=stmt,
- confidence=confidence)
+ self.add_message(
+ "unused-argument", args=name, node=stmt, confidence=confidence
+ )
else:
if stmt.parent and isinstance(stmt.parent, astroid.Assign):
if name in nonlocal_names:
return
-
if isinstance(stmt, (astroid.Import, astroid.ImportFrom)):
# Need the complete name, which we don't have in .locals.
qname, asname = stmt.names[0]
name = asname or qname
if _has_locals_call_after_node(stmt, node.scope()):
- message_name = 'possibly-unused-variable'
+ message_name = "possibly-unused-variable"
else:
if isinstance(stmt, astroid.Import):
if asname is not None:
msg = "%s imported as %s" % (qname, asname)
else:
msg = "import %s" % name
- self.add_message('unused-import', args=msg, node=stmt)
+ self.add_message("unused-import", args=msg, node=stmt)
return
elif isinstance(stmt, astroid.ImportFrom):
if asname is not None:
- msg = "%s imported from %s as %s" % (qname, stmt.modname, asname)
+ msg = "%s imported from %s as %s" % (
+ qname,
+ stmt.modname,
+ asname,
+ )
else:
msg = "%s imported from %s" % (name, stmt.modname)
- self.add_message('unused-import', args=msg, node=stmt)
+ self.add_message("unused-import", args=msg, node=stmt)
return
else:
- message_name = 'unused-variable'
+ message_name = "unused-variable"
self.add_message(message_name, args=name, node=stmt)
def leave_functiondef(self, node):
@@ -871,9 +999,11 @@ class VariablesChecker(BaseChecker):
self._store_type_annotation_node(argument_annotation)
not_consumed = self._to_consume.pop().to_consume
- if not (self.linter.is_message_enabled('unused-variable') or
- self.linter.is_message_enabled('possibly-unused-variable') or
- self.linter.is_message_enabled('unused-argument')):
+ if not (
+ self.linter.is_message_enabled("unused-variable")
+ or self.linter.is_message_enabled("possibly-unused-variable")
+ or self.linter.is_message_enabled("unused-argument")
+ ):
return
# Don't check arguments of function which are only raising an exception.
@@ -893,14 +1023,18 @@ class VariablesChecker(BaseChecker):
visit_asyncfunctiondef = visit_functiondef
leave_asyncfunctiondef = leave_functiondef
- @utils.check_messages('global-variable-undefined', 'global-variable-not-assigned',
- 'global-statement', 'global-at-module-level',
- 'redefined-builtin')
+ @utils.check_messages(
+ "global-variable-undefined",
+ "global-variable-not-assigned",
+ "global-statement",
+ "global-at-module-level",
+ "redefined-builtin",
+ )
def visit_global(self, node):
"""check names imported exists in the global scope"""
frame = node.frame()
if isinstance(frame, astroid.Module):
- self.add_message('global-at-module-level', node=node)
+ self.add_message("global-at-module-level", node=node)
return
module = frame.root()
@@ -915,17 +1049,19 @@ class VariablesChecker(BaseChecker):
not_defined_locally_by_import = not any(
isinstance(local, astroid.node_classes.Import)
- for local in locals_.get(name, ()))
+ for local in locals_.get(name, ())
+ )
if not assign_nodes and not_defined_locally_by_import:
- self.add_message('global-variable-not-assigned',
- args=name, node=node)
+ self.add_message("global-variable-not-assigned", args=name, node=node)
default_message = False
continue
for anode in assign_nodes:
- if (isinstance(anode, astroid.AssignName)
- and anode.name in module.special_attributes):
- self.add_message('redefined-builtin', args=name, node=node)
+ if (
+ isinstance(anode, astroid.AssignName)
+ and anode.name in module.special_attributes
+ ):
+ self.add_message("redefined-builtin", args=name, node=node)
break
if anode.frame() is module:
# module level assignment
@@ -933,16 +1069,18 @@ class VariablesChecker(BaseChecker):
else:
if not_defined_locally_by_import:
# global undefined at the module scope
- self.add_message('global-variable-undefined', args=name, node=node)
+ self.add_message("global-variable-undefined", args=name, node=node)
default_message = False
if default_message:
- self.add_message('global-statement', node=node)
+ self.add_message("global-statement", node=node)
def _check_late_binding_closure(self, node, assignment_node):
def _is_direct_lambda_call():
- return (isinstance(node_scope.parent, astroid.Call)
- and node_scope.parent.func is node_scope)
+ return (
+ isinstance(node_scope.parent, astroid.Call)
+ and node_scope.parent.func is node_scope
+ )
node_scope = node.scope()
if not isinstance(node_scope, (astroid.Lambda, astroid.FunctionDef)):
@@ -952,7 +1090,7 @@ class VariablesChecker(BaseChecker):
if isinstance(assignment_node, astroid.Comprehension):
if assignment_node.parent.parent_of(node.scope()):
- self.add_message('cell-var-from-loop', node=node, args=node.name)
+ self.add_message("cell-var-from-loop", node=node, args=node.name)
else:
assign_scope = assignment_node.scope()
maybe_for = assignment_node
@@ -961,17 +1099,18 @@ class VariablesChecker(BaseChecker):
break
maybe_for = maybe_for.parent
else:
- if (maybe_for.parent_of(node_scope)
- and not _is_direct_lambda_call()
- and not isinstance(node_scope.statement(), astroid.Return)):
- self.add_message('cell-var-from-loop', node=node, args=node.name)
+ if (
+ maybe_for.parent_of(node_scope)
+ and not _is_direct_lambda_call()
+ and not isinstance(node_scope.statement(), astroid.Return)
+ ):
+ self.add_message("cell-var-from-loop", node=node, args=node.name)
def _loopvar_name(self, node, name):
# filter variables according to node's scope
- if not self.linter.is_message_enabled('undefined-loop-variable'):
+ if not self.linter.is_message_enabled("undefined-loop-variable"):
return
- astmts = [stmt for stmt in node.lookup(name)[1]
- if hasattr(stmt, 'assign_type')]
+ astmts = [stmt for stmt in node.lookup(name)[1] if hasattr(stmt, "assign_type")]
# filter variables according to their respective scope test is_statement
# and parent to avoid #74747. This is not a total fix, which would
# introduce a mechanism similar to special attribute lookup in
@@ -979,14 +1118,18 @@ class VariablesChecker(BaseChecker):
# scope lookup rules would need to be changed to return the initial
# assignment (which does not exist in code per se) as well as any later
# modifications.
- if not astmts or (astmts[0].is_statement or astmts[0].parent) \
- and astmts[0].statement().parent_of(node):
+ if (
+ not astmts
+ or (astmts[0].is_statement or astmts[0].parent)
+ and astmts[0].statement().parent_of(node)
+ ):
_astmts = []
else:
_astmts = astmts[:1]
for i, stmt in enumerate(astmts[1:]):
- if (astmts[i].statement().parent_of(stmt)
- and not in_for_else_branch(astmts[i].statement(), stmt)):
+ if astmts[i].statement().parent_of(stmt) and not in_for_else_branch(
+ astmts[i].statement(), stmt
+ ):
continue
_astmts.append(stmt)
astmts = _astmts
@@ -994,19 +1137,23 @@ class VariablesChecker(BaseChecker):
return
assign = astmts[0].assign_type()
- if not (isinstance(assign, (astroid.For, astroid.Comprehension, astroid.GeneratorExp))
- and assign.statement() is not node.statement()):
+ if not (
+ isinstance(
+ assign, (astroid.For, astroid.Comprehension, astroid.GeneratorExp)
+ )
+ and assign.statement() is not node.statement()
+ ):
return
# For functions we can do more by inferring the length of the iterated object
if not isinstance(assign, astroid.For):
- self.add_message('undefined-loop-variable', args=name, node=node)
+ self.add_message("undefined-loop-variable", args=name, node=node)
return
try:
inferred = next(assign.iter.infer())
except astroid.InferenceError:
- self.add_message('undefined-loop-variable', args=name, node=node)
+ self.add_message("undefined-loop-variable", args=name, node=node)
else:
sequences = (
astroid.List,
@@ -1016,24 +1163,24 @@ class VariablesChecker(BaseChecker):
objects.FrozenSet,
)
if not isinstance(inferred, sequences):
- self.add_message('undefined-loop-variable', args=name, node=node)
+ self.add_message("undefined-loop-variable", args=name, node=node)
return
- elements = getattr(inferred, 'elts', getattr(inferred, 'items', []))
+ elements = getattr(inferred, "elts", getattr(inferred, "items", []))
if not elements:
- self.add_message('undefined-loop-variable', args=name, node=node)
+ self.add_message("undefined-loop-variable", args=name, node=node)
def _should_ignore_redefined_builtin(self, stmt):
if not isinstance(stmt, astroid.ImportFrom):
return False
return stmt.modname in self.config.redefining_builtins_modules
- @utils.check_messages('redefine-in-handler')
+ @utils.check_messages("redefine-in-handler")
def visit_excepthandler(self, node):
for name in utils.get_all_elements(node.name):
clobbering, args = utils.clobber_in_except(name)
if clobbering:
- self.add_message('redefine-in-handler', args=args, node=name)
+ self.add_message("redefine-in-handler", args=args, node=name)
def visit_assignname(self, node):
if isinstance(node.assign_type(), astroid.AugAssign):
@@ -1045,24 +1192,30 @@ class VariablesChecker(BaseChecker):
@staticmethod
def _defined_in_function_definition(node, frame):
in_annotation_or_default = False
- if (isinstance(frame, astroid.FunctionDef) and
- node.statement() is frame):
+ if isinstance(frame, astroid.FunctionDef) and node.statement() is frame:
in_annotation_or_default = (
- (
- PY3K and (node in frame.args.annotations
- or node in frame.args.kwonlyargs_annotations
- or node is frame.args.varargannotation
- or node is frame.args.kwargannotation)
+ PY3K
+ and (
+ node in frame.args.annotations
+ or node in frame.args.kwonlyargs_annotations
+ or node is frame.args.varargannotation
+ or node is frame.args.kwargannotation
)
- or
- frame.args.parent_of(node)
- )
+ ) or frame.args.parent_of(node)
return in_annotation_or_default
@staticmethod
- def _is_variable_violation(node, name, defnode, stmt, defstmt,
- frame, defframe, base_scope_type,
- recursive_klass):
+ def _is_variable_violation(
+ node,
+ name,
+ defnode,
+ stmt,
+ defstmt,
+ frame,
+ defframe,
+ base_scope_type,
+ recursive_klass,
+ ):
# node: Node to check for violation
# name: name of node to check violation for
# frame: Scope of statement of node
@@ -1083,25 +1236,28 @@ class VariablesChecker(BaseChecker):
# skip this lookup if name is assigned later in function scope/lambda
# Note: the node.frame() is not the same as the `frame` argument which is
# equivalent to frame.statement().scope()
- forbid_lookup = ((isinstance(frame, astroid.FunctionDef) or
- isinstance(node.frame(), astroid.Lambda)) and
- _assigned_locally(node))
+ forbid_lookup = (
+ isinstance(frame, astroid.FunctionDef)
+ or isinstance(node.frame(), astroid.Lambda)
+ ) and _assigned_locally(node)
if not forbid_lookup and defframe.root().lookup(name)[1]:
maybee0601 = False
- use_outer_definition = (
- stmt == defstmt
- and not isinstance(defnode, astroid.node_classes.Comprehension)
+ use_outer_definition = stmt == defstmt and not isinstance(
+ defnode, astroid.node_classes.Comprehension
)
else:
# check if we have a nonlocal
if name in defframe.locals:
- maybee0601 = not any(isinstance(child, astroid.Nonlocal)
- and name in child.names
- for child in defframe.get_children())
+ maybee0601 = not any(
+ isinstance(child, astroid.Nonlocal) and name in child.names
+ for child in defframe.get_children()
+ )
- if (base_scope_type == 'lambda' and
- isinstance(frame, astroid.ClassDef)
- and name in frame.locals):
+ if (
+ base_scope_type == "lambda"
+ and isinstance(frame, astroid.ClassDef)
+ and name in frame.locals
+ ):
# This rule verifies that if the definition node of the
# checked name is an Arguments node and if the name
@@ -1114,20 +1270,25 @@ class VariablesChecker(BaseChecker):
#
# In this case, maybee0601 should be False, otherwise
# it should be True.
- maybee0601 = not (isinstance(defnode, astroid.Arguments) and
- node in defnode.defaults and
- frame.locals[name][0].fromlineno < defstmt.fromlineno)
- elif (isinstance(defframe, astroid.ClassDef) and
- isinstance(frame, astroid.FunctionDef)):
+ maybee0601 = not (
+ isinstance(defnode, astroid.Arguments)
+ and node in defnode.defaults
+ and frame.locals[name][0].fromlineno < defstmt.fromlineno
+ )
+ elif isinstance(defframe, astroid.ClassDef) and isinstance(
+ frame, astroid.FunctionDef
+ ):
# Special rule for function return annotations,
# which uses the same name as the class where
# the function lives.
- if (PY3K and node is frame.returns and
- defframe.parent_of(frame.returns)):
+ if PY3K and node is frame.returns and defframe.parent_of(frame.returns):
maybee0601 = annotation_return = True
- if (maybee0601 and defframe.name in defframe.locals and
- defframe.locals[name][0].lineno < frame.lineno):
+ if (
+ maybee0601
+ and defframe.name in defframe.locals
+ and defframe.locals[name][0].lineno < frame.lineno
+ ):
# Detect class assignments with the same
# name as the class. In this case, no warning
# should be raised.
@@ -1139,10 +1300,12 @@ class VariablesChecker(BaseChecker):
else:
maybee0601 = maybee0601 and stmt.fromlineno <= defstmt.fromlineno
if maybee0601 and stmt.fromlineno == defstmt.fromlineno:
- if (isinstance(defframe, astroid.FunctionDef)
- and frame is defframe
- and defframe.parent_of(node)
- and stmt is not defstmt):
+ if (
+ isinstance(defframe, astroid.FunctionDef)
+ and frame is defframe
+ and defframe.parent_of(node)
+ and stmt is not defstmt
+ ):
# Single statement function, with the statement on the
# same line as the function definition
maybee0601 = False
@@ -1181,8 +1344,10 @@ class VariablesChecker(BaseChecker):
frame_locals = frame.parent.scope().locals
else:
frame_locals = frame.locals
- return not ((isinstance(frame, astroid.ClassDef) or in_annotation_or_default) and
- name in frame_locals)
+ return not (
+ (isinstance(frame, astroid.ClassDef) or in_annotation_or_default)
+ and name in frame_locals
+ )
@utils.check_messages(*MSGS)
def visit_name(self, node):
@@ -1192,7 +1357,7 @@ class VariablesChecker(BaseChecker):
stmt = node.statement()
if stmt.fromlineno is None:
# name node from an astroid built from live code, skip
- assert not stmt.root().file.endswith('.py')
+ assert not stmt.root().file.endswith(".py")
return
name = node.name
@@ -1200,8 +1365,11 @@ class VariablesChecker(BaseChecker):
# if the name node is used as a function default argument's value or as
# a decorator, then start from the parent frame of the function instead
# of the function frame - and thus open an inner class scope
- if (utils.is_default_argument(node) or utils.is_func_decorator(node)
- or utils.is_ancestor_name(frame, node)):
+ if (
+ utils.is_default_argument(node)
+ or utils.is_func_decorator(node)
+ or utils.is_ancestor_name(frame, node)
+ ):
start_index = len(self._to_consume) - 2
else:
start_index = len(self._to_consume) - 1
@@ -1215,18 +1383,22 @@ class VariablesChecker(BaseChecker):
# the globals one in function members when there are some common
# names. The only exception is when the starting scope is a
# comprehension and its direct outer scope is a class
- if current_consumer.scope_type == 'class' and i != start_index and not (
- base_scope_type == 'comprehension' and i == start_index-1):
+ if (
+ current_consumer.scope_type == "class"
+ and i != start_index
+ and not (base_scope_type == "comprehension" and i == start_index - 1)
+ ):
if self._ignore_class_scope(node):
continue
# the name has already been consumed, only check it's not a loop
# variable used outside the loop
# avoid the case where there are homonyms inside function scope and
- # comprehension current scope (avoid bug #1731)
+ #  comprehension current scope (avoid bug #1731)
if name in current_consumer.consumed and not (
- current_consumer.scope_type == 'comprehension'
- and self._has_homonym_in_upper_function_scope(node, i)):
+ current_consumer.scope_type == "comprehension"
+ and self._has_homonym_in_upper_function_scope(node, i)
+ ):
defnode = utils.assign_parent(current_consumer.consumed[name][0])
self._check_late_binding_closure(node, defnode)
self._loopvar_name(node, name)
@@ -1244,15 +1416,21 @@ class VariablesChecker(BaseChecker):
defstmt = defnode.statement()
defframe = defstmt.frame()
# The class reuses itself in the class scope.
- recursive_klass = (frame is defframe and
- defframe.parent_of(node) and
- isinstance(defframe, astroid.ClassDef) and
- node.name == defframe.name)
-
- if (recursive_klass and
- utils.is_inside_lambda(node) and
- (not utils.is_default_argument(node)
- or node.scope().parent.scope() is not defframe)):
+ recursive_klass = (
+ frame is defframe
+ and defframe.parent_of(node)
+ and isinstance(defframe, astroid.ClassDef)
+ and node.name == defframe.name
+ )
+
+ if (
+ recursive_klass
+ and utils.is_inside_lambda(node)
+ and (
+ not utils.is_default_argument(node)
+ or node.scope().parent.scope() is not defframe
+ )
+ ):
# Self-referential class references are fine in lambdas --
# As long as they are not part of the default argument directly
# under the scope of the parent self-referring class.
@@ -1273,42 +1451,59 @@ class VariablesChecker(BaseChecker):
break
maybee0601, annotation_return, use_outer_definition = self._is_variable_violation(
- node, name, defnode, stmt, defstmt,
- frame, defframe,
- base_scope_type, recursive_klass)
+ node,
+ name,
+ defnode,
+ stmt,
+ defstmt,
+ frame,
+ defframe,
+ base_scope_type,
+ recursive_klass,
+ )
if use_outer_definition:
continue
- if (maybee0601
- and not utils.is_defined_before(node)
- and not astroid.are_exclusive(stmt, defstmt, ('NameError',))):
+ if (
+ maybee0601
+ and not utils.is_defined_before(node)
+ and not astroid.are_exclusive(stmt, defstmt, ("NameError",))
+ ):
# Used and defined in the same place, e.g `x += 1` and `del x`
- defined_by_stmt = (
- defstmt is stmt
- and isinstance(node, (astroid.DelName, astroid.AssignName))
+ defined_by_stmt = defstmt is stmt and isinstance(
+ node, (astroid.DelName, astroid.AssignName)
)
- if (recursive_klass
- or defined_by_stmt
- or annotation_return
- or isinstance(defstmt, astroid.Delete)):
+ if (
+ recursive_klass
+ or defined_by_stmt
+ or annotation_return
+ or isinstance(defstmt, astroid.Delete)
+ ):
if not utils.node_ignores_exception(node, NameError):
# Handle postponed evaluation of annotations
- if not (self._postponed_evaluation_enabled
- and annotation_return
- and name in node.root().locals):
- self.add_message('undefined-variable', args=name,
- node=node)
- elif base_scope_type != 'lambda':
+ if not (
+ self._postponed_evaluation_enabled
+ and annotation_return
+ and name in node.root().locals
+ ):
+ self.add_message(
+ "undefined-variable", args=name, node=node
+ )
+ elif base_scope_type != "lambda":
# E0601 may *not* occur in lambda scope.
# Handle postponed evaluation of annotations
- if not (self._postponed_evaluation_enabled
- and isinstance(stmt, astroid.FunctionDef)):
- self.add_message('used-before-assignment', args=name, node=node)
- elif base_scope_type == 'lambda':
+ if not (
+ self._postponed_evaluation_enabled
+ and isinstance(stmt, astroid.FunctionDef)
+ ):
+ self.add_message(
+ "used-before-assignment", args=name, node=node
+ )
+ elif base_scope_type == "lambda":
# E0601 can occur in class-level scope in lambdas, as in
# the following example:
# class A:
@@ -1321,14 +1516,15 @@ class VariablesChecker(BaseChecker):
# class A:
# x = 42
# y = lambda attr=x: attr
- self.add_message('used-before-assignment',
- args=name, node=node)
+ self.add_message(
+ "used-before-assignment", args=name, node=node
+ )
else:
- self.add_message('undefined-variable',
- args=name, node=node)
- elif current_consumer.scope_type == 'lambda':
- self.add_message('undefined-variable',
- node=node, args=name)
+ self.add_message(
+ "undefined-variable", args=name, node=node
+ )
+ elif current_consumer.scope_type == "lambda":
+ self.add_message("undefined-variable", node=node, args=name)
current_consumer.mark_as_consumed(name, found_node)
# check it's not a loop variable used outside the loop
@@ -1337,10 +1533,13 @@ class VariablesChecker(BaseChecker):
else:
# we have not found the name, if it isn't a builtin, that's an
# undefined name !
- if not (name in astroid.Module.scope_attrs or utils.is_builtin(name)
- or name in self.config.additional_builtins):
+ if not (
+ name in astroid.Module.scope_attrs
+ or utils.is_builtin(name)
+ or name in self.config.additional_builtins
+ ):
if not utils.node_ignores_exception(node, NameError):
- self.add_message('undefined-variable', args=name, node=node)
+ self.add_message("undefined-variable", args=name, node=node)
def _has_homonym_in_upper_function_scope(self, node, index):
"""
@@ -1355,12 +1554,12 @@ class VariablesChecker(BaseChecker):
and if that scope is a function
:rtype: bool
"""
- for _consumer in self._to_consume[index-1::-1]:
- if _consumer.scope_type == 'function' and node.name in _consumer.to_consume:
+ for _consumer in self._to_consume[index - 1 :: -1]:
+ if _consumer.scope_type == "function" and node.name in _consumer.to_consume:
return True
return False
- @utils.check_messages('no-name-in-module')
+ @utils.check_messages("no-name-in-module")
def visit_import(self, node):
"""check modules attribute accesses"""
if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node):
@@ -1369,14 +1568,14 @@ class VariablesChecker(BaseChecker):
return
for name, _ in node.names:
- parts = name.split('.')
+ parts = name.split(".")
try:
module = next(node.infer_name_module(parts[0]))
except astroid.ResolveError:
continue
self._check_module_attrs(node, module, parts[1:])
- @utils.check_messages('no-name-in-module')
+ @utils.check_messages("no-name-in-module")
def visit_importfrom(self, node):
"""check modules attribute accesses"""
if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node):
@@ -1384,7 +1583,7 @@ class VariablesChecker(BaseChecker):
# handled by the client code.
return
- name_parts = node.modname.split('.')
+ name_parts = node.modname.split(".")
try:
module = node.do_import_module(name_parts[0])
except astroid.AstroidBuildingException:
@@ -1393,13 +1592,13 @@ class VariablesChecker(BaseChecker):
if not module:
return
for name, _ in node.names:
- if name == '*':
+ if name == "*":
continue
- self._check_module_attrs(node, module, name.split('.'))
+ self._check_module_attrs(node, module, name.split("."))
@utils.check_messages(
- 'unbalanced-tuple-unpacking', 'unpacking-non-sequence',
- 'self-cls-assignment')
+ "unbalanced-tuple-unpacking", "unpacking-non-sequence", "self-cls-assignment"
+ )
def visit_assign(self, node):
"""Check unbalanced tuple unpacking for assignments
and unpacking non-sequences as well as in case self/cls
@@ -1422,9 +1621,14 @@ class VariablesChecker(BaseChecker):
if isinstance(type_annotation, astroid.Name):
self._type_annotation_names.append(type_annotation.name)
else:
- self._type_annotation_names.extend(list(
- (annotation.name for annotation in type_annotation.nodes_of_class(astroid.Name))
- ))
+ self._type_annotation_names.extend(
+ list(
+ (
+ annotation.name
+ for annotation in type_annotation.nodes_of_class(astroid.Name)
+ )
+ )
+ )
def _store_type_annotation_names(self, node):
type_annotation = node.type_annotation
@@ -1437,30 +1641,37 @@ class VariablesChecker(BaseChecker):
def _check_self_cls_assign(self, node):
"""Check that self/cls don't get assigned"""
- assign_names = {target.name for target in node.targets
- if isinstance(target, astroid.AssignName)}
+ assign_names = {
+ target.name
+ for target in node.targets
+ if isinstance(target, astroid.AssignName)
+ }
scope = node.scope()
nonlocals_with_same_name = any(
- child for child in scope.body
+ child
+ for child in scope.body
if isinstance(child, astroid.Nonlocal) and assign_names & set(child.names)
)
if nonlocals_with_same_name:
scope = node.scope().parent.scope()
- if not (isinstance(scope, astroid.scoped_nodes.FunctionDef) and
- scope.is_method() and
- "builtins.staticmethod" not in scope.decoratornames()):
+ if not (
+ isinstance(scope, astroid.scoped_nodes.FunctionDef)
+ and scope.is_method()
+ and "builtins.staticmethod" not in scope.decoratornames()
+ ):
return
argument_names = scope.argnames()
if not argument_names:
return
self_cls_name = argument_names[0]
target_assign_names = (
- target.name for target in node.targets
- if isinstance(target, astroid.node_classes.AssignName))
+ target.name
+ for target in node.targets
+ if isinstance(target, astroid.node_classes.AssignName)
+ )
if self_cls_name in target_assign_names:
- self.add_message(
- 'self-cls-assignment', node=node, args=(self_cls_name))
+ self.add_message("self-cls-assignment", node=node, args=(self_cls_name))
def _check_unpacking(self, infered, node, targets):
""" Check for unbalanced tuple unpacking
@@ -1472,9 +1683,11 @@ class VariablesChecker(BaseChecker):
return
if infered is astroid.Uninferable:
return
- if (isinstance(infered.parent, astroid.Arguments) and
- isinstance(node.value, astroid.Name) and
- node.value.name == infered.parent.vararg):
+ if (
+ isinstance(infered.parent, astroid.Arguments)
+ and isinstance(node.value, astroid.Name)
+ and node.value.name == infered.parent.vararg
+ ):
# Variable-length argument, we can't determine the length.
return
if isinstance(infered, (astroid.Tuple, astroid.List)):
@@ -1482,19 +1695,25 @@ class VariablesChecker(BaseChecker):
values = infered.itered()
if len(targets) != len(values):
# Check if we have starred nodes.
- if any(isinstance(target, astroid.Starred)
- for target in targets):
+ if any(isinstance(target, astroid.Starred) for target in targets):
return
- self.add_message('unbalanced-tuple-unpacking', node=node,
- args=(_get_unpacking_extra_info(node, infered),
- len(targets),
- len(values)))
+ self.add_message(
+ "unbalanced-tuple-unpacking",
+ node=node,
+ args=(
+ _get_unpacking_extra_info(node, infered),
+ len(targets),
+ len(values),
+ ),
+ )
# attempt to check unpacking may be possible (i.e. RHS is iterable)
else:
if not utils.is_iterable(infered):
- self.add_message('unpacking-non-sequence', node=node,
- args=(_get_unpacking_extra_info(node, infered),))
-
+ self.add_message(
+ "unpacking-non-sequence",
+ node=node,
+ args=(_get_unpacking_extra_info(node, infered),),
+ )
def _check_module_attrs(self, node, module, module_names):
"""check that module_names (list of string) are accessible through the
@@ -1504,7 +1723,7 @@ class VariablesChecker(BaseChecker):
assert isinstance(module, astroid.Module), module
while module_names:
name = module_names.pop(0)
- if name == '__dict__':
+ if name == "__dict__":
module = None
break
try:
@@ -1514,17 +1733,19 @@ class VariablesChecker(BaseChecker):
except astroid.NotFoundError:
if module.name in self._ignored_modules:
return None
- self.add_message('no-name-in-module',
- args=(name, module.name), node=node)
+ self.add_message(
+ "no-name-in-module", args=(name, module.name), node=node
+ )
return None
except astroid.InferenceError:
return None
if module_names:
# FIXME: other message if name is not the last part of
# module_names?
- modname = module.name if module else '__dict__'
- self.add_message('no-name-in-module', node=node,
- args=('.'.join(module_names), modname))
+ modname = module.name if module else "__dict__"
+ self.add_message(
+ "no-name-in-module", node=node, args=(".".join(module_names), modname)
+ )
return None
if isinstance(module, astroid.Module):
return module
@@ -1532,13 +1753,14 @@ class VariablesChecker(BaseChecker):
class VariablesChecker3k(VariablesChecker):
- '''Modified variables checker for 3k'''
+ """Modified variables checker for 3k"""
+
# listcomp have now also their scope
def visit_listcomp(self, node):
"""visit dictcomp: update consumption analysis variable
"""
- self._to_consume.append(NamesConsumer(node, 'comprehension'))
+ self._to_consume.append(NamesConsumer(node, "comprehension"))
def leave_listcomp(self, _):
"""leave dictcomp: update consumption analysis variable
@@ -1599,13 +1821,13 @@ class VariablesChecker3k(VariablesChecker):
name = klass._metaclass.as_string()
if name is not None:
- if not (name in astroid.Module.scope_attrs or
- utils.is_builtin(name) or
- name in self.config.additional_builtins or
- name in parent_node.locals):
- self.add_message('undefined-variable',
- node=klass,
- args=(name,))
+ if not (
+ name in astroid.Module.scope_attrs
+ or utils.is_builtin(name)
+ or name in self.config.additional_builtins
+ or name in parent_node.locals
+ ):
+ self.add_message("undefined-variable", node=klass, args=(name,))
return consumed
diff --git a/pylint/config.py b/pylint/config.py
index 7195ac4e8..290627b2d 100644
--- a/pylint/config.py
+++ b/pylint/config.py
@@ -58,20 +58,20 @@ import configparser
from pylint import utils
-USER_HOME = os.path.expanduser('~')
-if 'PYLINTHOME' in os.environ:
- PYLINT_HOME = os.environ['PYLINTHOME']
- if USER_HOME == '~':
+USER_HOME = os.path.expanduser("~")
+if "PYLINTHOME" in os.environ:
+ PYLINT_HOME = os.environ["PYLINTHOME"]
+ if USER_HOME == "~":
USER_HOME = os.path.dirname(PYLINT_HOME)
-elif USER_HOME == '~':
+elif USER_HOME == "~":
PYLINT_HOME = ".pylint.d"
else:
- PYLINT_HOME = os.path.join(USER_HOME, '.pylint.d')
+ PYLINT_HOME = os.path.join(USER_HOME, ".pylint.d")
def _get_pdata_path(base_name, recurs):
- base_name = base_name.replace(os.sep, '_')
- return os.path.join(PYLINT_HOME, "%s%s%s"%(base_name, recurs, '.stats'))
+ base_name = base_name.replace(os.sep, "_")
+ return os.path.join(PYLINT_HOME, "%s%s%s" % (base_name, recurs, ".stats"))
def load_results(base):
@@ -79,64 +79,68 @@ def load_results(base):
try:
with open(data_file, _PICK_LOAD) as stream:
return pickle.load(stream)
- except Exception: # pylint: disable=broad-except
+ except Exception: # pylint: disable=broad-except
return {}
+
if sys.version_info < (3, 0):
- _PICK_DUMP, _PICK_LOAD = 'w', 'r'
+ _PICK_DUMP, _PICK_LOAD = "w", "r"
else:
- _PICK_DUMP, _PICK_LOAD = 'wb', 'rb'
+ _PICK_DUMP, _PICK_LOAD = "wb", "rb"
+
def save_results(results, base):
if not os.path.exists(PYLINT_HOME):
try:
os.mkdir(PYLINT_HOME)
except OSError:
- print('Unable to create directory %s' % PYLINT_HOME, file=sys.stderr)
+ print("Unable to create directory %s" % PYLINT_HOME, file=sys.stderr)
data_file = _get_pdata_path(base, 1)
try:
with open(data_file, _PICK_DUMP) as stream:
pickle.dump(results, stream)
except (IOError, OSError) as ex:
- print('Unable to create file %s: %s' % (data_file, ex), file=sys.stderr)
+ print("Unable to create file %s: %s" % (data_file, ex), file=sys.stderr)
def find_pylintrc():
"""search the pylint rc file and return its path if it find it, else None
"""
# is there a pylint rc file in the current directory?
- if os.path.exists('pylintrc'):
- return os.path.abspath('pylintrc')
- if os.path.exists('.pylintrc'):
- return os.path.abspath('.pylintrc')
- if os.path.isfile('__init__.py'):
+ if os.path.exists("pylintrc"):
+ return os.path.abspath("pylintrc")
+ if os.path.exists(".pylintrc"):
+ return os.path.abspath(".pylintrc")
+ if os.path.isfile("__init__.py"):
curdir = os.path.abspath(os.getcwd())
- while os.path.isfile(os.path.join(curdir, '__init__.py')):
- curdir = os.path.abspath(os.path.join(curdir, '..'))
- if os.path.isfile(os.path.join(curdir, 'pylintrc')):
- return os.path.join(curdir, 'pylintrc')
- if os.path.isfile(os.path.join(curdir, '.pylintrc')):
- return os.path.join(curdir, '.pylintrc')
- if 'PYLINTRC' in os.environ and os.path.exists(os.environ['PYLINTRC']):
- pylintrc = os.environ['PYLINTRC']
+ while os.path.isfile(os.path.join(curdir, "__init__.py")):
+ curdir = os.path.abspath(os.path.join(curdir, ".."))
+ if os.path.isfile(os.path.join(curdir, "pylintrc")):
+ return os.path.join(curdir, "pylintrc")
+ if os.path.isfile(os.path.join(curdir, ".pylintrc")):
+ return os.path.join(curdir, ".pylintrc")
+ if "PYLINTRC" in os.environ and os.path.exists(os.environ["PYLINTRC"]):
+ pylintrc = os.environ["PYLINTRC"]
else:
- user_home = os.path.expanduser('~')
- if user_home in ('~', '/root'):
+ user_home = os.path.expanduser("~")
+ if user_home in ("~", "/root"):
pylintrc = ".pylintrc"
else:
- pylintrc = os.path.join(user_home, '.pylintrc')
+ pylintrc = os.path.join(user_home, ".pylintrc")
if not os.path.isfile(pylintrc):
- pylintrc = os.path.join(user_home, '.config', 'pylintrc')
+ pylintrc = os.path.join(user_home, ".config", "pylintrc")
if not os.path.isfile(pylintrc):
- if os.path.isfile('/etc/pylintrc'):
- pylintrc = '/etc/pylintrc'
+ if os.path.isfile("/etc/pylintrc"):
+ pylintrc = "/etc/pylintrc"
else:
pylintrc = None
return pylintrc
+
PYLINTRC = find_pylintrc()
-ENV_HELP = '''
+ENV_HELP = (
+ """
The following environment variables are used:
* PYLINTHOME
Path to the directory where persistent data for the run will be stored. If
@@ -145,7 +149,9 @@ directory).
* PYLINTRC
Path to the configuration file. See the documentation for the method used
to search for configuration file.
-''' % globals() # type: ignore
+"""
+ % globals()
+) # type: ignore
class UnsupportedAction(Exception):
@@ -167,6 +173,7 @@ def _choice_validator(choices, name, value):
raise optparse.OptionValueError(msg % (name, value, choices))
return value
+
# pylint: disable=unused-argument
def _csv_validator(_, name, value):
return utils._check_csv(value)
@@ -174,20 +181,22 @@ def _csv_validator(_, name, value):
# pylint: disable=unused-argument
def _regexp_validator(_, name, value):
- if hasattr(value, 'pattern'):
+ if hasattr(value, "pattern"):
return value
return re.compile(value)
+
# pylint: disable=unused-argument
def _regexp_csv_validator(_, name, value):
return [_regexp_validator(_, name, val) for val in _csv_validator(_, name, value)]
+
def _yn_validator(opt, _, value):
if isinstance(value, int):
return bool(value)
- if value in ('y', 'yes'):
+ if value in ("y", "yes"):
return True
- if value in ('n', 'no'):
+ if value in ("n", "no"):
return False
msg = "option %s: invalid yn value %r, should be in (y, yes, n, no)"
raise optparse.OptionValueError(msg % (opt, value))
@@ -201,18 +210,20 @@ def _non_empty_string_validator(opt, _, value):
VALIDATORS = {
- 'string': utils._unquote,
- 'int': int,
- 'regexp': re.compile,
- 'regexp_csv': _regexp_csv_validator,
- 'csv': _csv_validator,
- 'yn': _yn_validator,
- 'choice': lambda opt, name, value: _choice_validator(opt['choices'], name, value),
- 'multiple_choice': lambda opt, name, value: _multiple_choice_validator(opt['choices'],
- name, value),
- 'non_empty_string': _non_empty_string_validator,
+ "string": utils._unquote,
+ "int": int,
+ "regexp": re.compile,
+ "regexp_csv": _regexp_csv_validator,
+ "csv": _csv_validator,
+ "yn": _yn_validator,
+ "choice": lambda opt, name, value: _choice_validator(opt["choices"], name, value),
+ "multiple_choice": lambda opt, name, value: _multiple_choice_validator(
+ opt["choices"], name, value
+ ),
+ "non_empty_string": _non_empty_string_validator,
}
+
def _call_validator(opttype, optdict, option, value):
if opttype not in VALIDATORS:
raise Exception('Unsupported type "%s"' % opttype)
@@ -222,17 +233,18 @@ def _call_validator(opttype, optdict, option, value):
try:
return VALIDATORS[opttype](value)
except Exception:
- raise optparse.OptionValueError('%s value (%r) should be of type %s' %
- (option, value, opttype))
+ raise optparse.OptionValueError(
+ "%s value (%r) should be of type %s" % (option, value, opttype)
+ )
-def _validate(value, optdict, name=''):
+def _validate(value, optdict, name=""):
"""return a validated value for an option according to its type
optional argument name is only used for error message formatting
"""
try:
- _type = optdict['type']
+ _type = optdict["type"]
except KeyError:
# FIXME
return value
@@ -240,9 +252,12 @@ def _validate(value, optdict, name=''):
def _level_options(group, outputlevel):
- return [option for option in group.option_list
- if (getattr(option, 'level', 0) or 0) <= outputlevel
- and option.help is not optparse.SUPPRESS_HELP]
+ return [
+ option
+ for option in group.option_list
+ if (getattr(option, "level", 0) or 0) <= outputlevel
+ and option.help is not optparse.SUPPRESS_HELP
+ ]
def _expand_default(self, option):
@@ -284,17 +299,22 @@ def _multiple_choices_validating_option(opt, name, value):
# pylint: disable=no-member
class Option(optparse.Option):
- TYPES = optparse.Option.TYPES + ('regexp', 'regexp_csv', 'csv', 'yn',
- 'multiple_choice',
- 'non_empty_string')
- ATTRS = optparse.Option.ATTRS + ['hide', 'level']
+ TYPES = optparse.Option.TYPES + (
+ "regexp",
+ "regexp_csv",
+ "csv",
+ "yn",
+ "multiple_choice",
+ "non_empty_string",
+ )
+ ATTRS = optparse.Option.ATTRS + ["hide", "level"]
TYPE_CHECKER = copy.copy(optparse.Option.TYPE_CHECKER)
- TYPE_CHECKER['regexp'] = _regexp_validator
- TYPE_CHECKER['regexp_csv'] = _regexp_csv_validator
- TYPE_CHECKER['csv'] = _csv_validator
- TYPE_CHECKER['yn'] = _yn_validator
- TYPE_CHECKER['multiple_choice'] = _multiple_choices_validating_option
- TYPE_CHECKER['non_empty_string'] = _non_empty_string_validator
+ TYPE_CHECKER["regexp"] = _regexp_validator
+ TYPE_CHECKER["regexp_csv"] = _regexp_csv_validator
+ TYPE_CHECKER["csv"] = _csv_validator
+ TYPE_CHECKER["yn"] = _yn_validator
+ TYPE_CHECKER["multiple_choice"] = _multiple_choices_validating_option
+ TYPE_CHECKER["non_empty_string"] = _non_empty_string_validator
def __init__(self, *opts, **attrs):
optparse.Option.__init__(self, *opts, **attrs)
@@ -305,23 +325,27 @@ class Option(optparse.Option):
if self.type in ("choice", "multiple_choice"):
if self.choices is None:
raise optparse.OptionError(
- "must supply a list of choices for type 'choice'", self)
+ "must supply a list of choices for type 'choice'", self
+ )
elif not isinstance(self.choices, (tuple, list)):
raise optparse.OptionError(
"choices must be a list of strings ('%s' supplied)"
- % str(type(self.choices)).split("'")[1], self)
+ % str(type(self.choices)).split("'")[1],
+ self,
+ )
elif self.choices is not None:
raise optparse.OptionError(
- "must not supply choices for type %r" % self.type, self)
+ "must not supply choices for type %r" % self.type, self
+ )
# pylint: disable=unsupported-assignment-operation
- optparse.Option.CHECK_METHODS[2] = _check_choice # type: ignore
+ optparse.Option.CHECK_METHODS[2] = _check_choice # type: ignore
def process(self, opt, value, values, parser):
# First, convert the value(s) to the right type. Howl if any
# value(s) are bogus.
value = self.convert_value(opt, value)
- if self.type == 'named':
+ if self.type == "named":
existent = getattr(values, self.dest)
if existent:
existent.update(value)
@@ -329,19 +353,17 @@ class Option(optparse.Option):
# And then take whatever action is expected of us.
# This is a separate method to make life easier for
# subclasses to add new actions.
- return self.take_action(
- self.action, self.dest, opt, value, values, parser)
+ return self.take_action(self.action, self.dest, opt, value, values, parser)
class OptionParser(optparse.OptionParser):
-
def __init__(self, option_class, *args, **kwargs):
optparse.OptionParser.__init__(self, option_class=Option, *args, **kwargs)
def format_option_help(self, formatter=None):
if formatter is None:
formatter = self.formatter
- outputlevel = getattr(formatter, 'output_level', 0)
+ outputlevel = getattr(formatter, "output_level", 0)
formatter.store_option_strings(self)
result = []
result.append(formatter.format_heading("Options"))
@@ -351,7 +373,8 @@ class OptionParser(optparse.OptionParser):
result.append("\n")
for group in self.option_groups:
if group.level <= outputlevel and (
- group.description or _level_options(group, outputlevel)):
+ group.description or _level_options(group, outputlevel)
+ ):
result.append(group.format_help(formatter))
result.append("\n")
formatter.dedent()
@@ -367,14 +390,15 @@ class OptionParser(optparse.OptionParser):
# pylint: disable=abstract-method; by design?
class _ManHelpFormatter(optparse.HelpFormatter):
-
- def __init__(self, indent_increment=0, max_help_position=24,
- width=79, short_first=0):
+ def __init__(
+ self, indent_increment=0, max_help_position=24, width=79, short_first=0
+ ):
optparse.HelpFormatter.__init__(
- self, indent_increment, max_help_position, width, short_first)
+ self, indent_increment, max_help_position, width, short_first
+ )
def format_heading(self, heading):
- return '.SH %s\n' % heading.upper()
+ return ".SH %s\n" % heading.upper()
def format_description(self, description):
return description
@@ -386,14 +410,17 @@ class _ManHelpFormatter(optparse.HelpFormatter):
optstring = self.format_option_strings(option)
if option.help:
help_text = self.expand_default(option)
- help_string = ' '.join([l.strip() for l in help_text.splitlines()])
- help_string = help_string.replace('\\', '\\\\')
- help_string = help_string.replace('[current:', '[default:')
+ help_string = " ".join([l.strip() for l in help_text.splitlines()])
+ help_string = help_string.replace("\\", "\\\\")
+ help_string = help_string.replace("[current:", "[default:")
else:
- help_string = ''
- return '''.IP "%s"
+ help_string = ""
+ return """.IP "%s"
%s
-''' % (optstring, help_string)
+""" % (
+ optstring,
+ help_string,
+ )
def format_head(self, optparser, pkginfo, section=1):
long_desc = ""
@@ -405,48 +432,59 @@ class _ManHelpFormatter(optparse.HelpFormatter):
short_desc = self.format_short_description(pgm, pkginfo.description)
if hasattr(pkginfo, "long_desc"):
long_desc = self.format_long_description(pgm, pkginfo.long_desc)
- return '%s\n%s\n%s\n%s' % (self.format_title(pgm, section),
- short_desc, self.format_synopsis(pgm),
- long_desc)
+ return "%s\n%s\n%s\n%s" % (
+ self.format_title(pgm, section),
+ short_desc,
+ self.format_synopsis(pgm),
+ long_desc,
+ )
@staticmethod
def format_title(pgm, section):
- date = '%d-%02d-%02d' % time.localtime()[:3]
+ date = "%d-%02d-%02d" % time.localtime()[:3]
return '.TH %s %s "%s" %s' % (pgm, section, date, pgm)
@staticmethod
def format_short_description(pgm, short_desc):
- return '''.SH NAME
+ return """.SH NAME
.B %s
\\- %s
-''' % (pgm, short_desc.strip())
+""" % (
+ pgm,
+ short_desc.strip(),
+ )
@staticmethod
def format_synopsis(pgm):
- return '''.SH SYNOPSIS
+ return (
+ """.SH SYNOPSIS
.B %s
[
.I OPTIONS
] [
.I <arguments>
]
-''' % pgm
+"""
+ % pgm
+ )
@staticmethod
def format_long_description(pgm, long_desc):
- long_desc = '\n'.join(line.lstrip()
- for line in long_desc.splitlines())
- long_desc = long_desc.replace('\n.\n', '\n\n')
+ long_desc = "\n".join(line.lstrip() for line in long_desc.splitlines())
+ long_desc = long_desc.replace("\n.\n", "\n\n")
if long_desc.lower().startswith(pgm):
- long_desc = long_desc[len(pgm):]
- return '''.SH DESCRIPTION
+ long_desc = long_desc[len(pgm) :]
+ return """.SH DESCRIPTION
.B %s
%s
-''' % (pgm, long_desc.strip())
+""" % (
+ pgm,
+ long_desc.strip(),
+ )
@staticmethod
def format_tail(pkginfo):
- tail = '''.SH SEE ALSO
+ tail = """.SH SEE ALSO
/usr/share/doc/pythonX.Y-%s/
.SH BUGS
@@ -455,14 +493,21 @@ Please report bugs on the project\'s mailing list:
.SH AUTHOR
%s <%s>
-''' % (getattr(pkginfo, 'debian_name', pkginfo.modname),
- pkginfo.mailinglist, pkginfo.author, pkginfo.author_email)
+""" % (
+ getattr(pkginfo, "debian_name", pkginfo.modname),
+ pkginfo.mailinglist,
+ pkginfo.author,
+ pkginfo.author_email,
+ )
if hasattr(pkginfo, "copyright"):
- tail += '''
+ tail += (
+ """
.SH COPYRIGHT
%s
-''' % pkginfo.copyright
+"""
+ % pkginfo.copyright
+ )
return tail
@@ -483,9 +528,11 @@ class OptionsManagerMixIn:
# verbosity
self._maxlevel = 0
- def reset_parsers(self, usage='', version=None):
+ def reset_parsers(self, usage="", version=None):
# configuration file parser
- self.cfgfile_parser = configparser.ConfigParser(inline_comment_prefixes=('#', ';'))
+ self.cfgfile_parser = configparser.ConfigParser(
+ inline_comment_prefixes=("#", ";")
+ )
# command line parser
self.cmdline_parser = OptionParser(Option, usage=usage, version=version)
self.cmdline_parser.options_manager = self
@@ -500,19 +547,27 @@ class OptionsManagerMixIn:
break
else:
self.options_providers.append(provider)
- non_group_spec_options = [option for option in provider.options
- if 'group' not in option[1]]
- groups = getattr(provider, 'option_groups', ())
+ non_group_spec_options = [
+ option for option in provider.options if "group" not in option[1]
+ ]
+ groups = getattr(provider, "option_groups", ())
if own_group and non_group_spec_options:
- self.add_option_group(provider.name.upper(), provider.__doc__,
- non_group_spec_options, provider)
+ self.add_option_group(
+ provider.name.upper(),
+ provider.__doc__,
+ non_group_spec_options,
+ provider,
+ )
else:
for opt, optdict in non_group_spec_options:
self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
for gname, gdoc in groups:
gname = gname.upper()
- goptions = [option for option in provider.options
- if option[1].get('group', '').upper() == gname]
+ goptions = [
+ option
+ for option in provider.options
+ if option[1].get("group", "").upper() == gname
+ ]
self.add_option_group(gname, gdoc, goptions, provider)
def add_option_group(self, group_name, _, options, provider):
@@ -520,14 +575,17 @@ class OptionsManagerMixIn:
if group_name in self._mygroups:
group = self._mygroups[group_name]
else:
- group = optparse.OptionGroup(self.cmdline_parser,
- title=group_name.capitalize())
+ group = optparse.OptionGroup(
+ self.cmdline_parser, title=group_name.capitalize()
+ )
self.cmdline_parser.add_option_group(group)
group.level = provider.level
self._mygroups[group_name] = group
# add section to the config file
- if group_name != "DEFAULT" and \
- group_name not in self.cfgfile_parser._sections:
+ if (
+ group_name != "DEFAULT"
+ and group_name not in self.cfgfile_parser._sections
+ ):
self.cfgfile_parser.add_section(group_name)
# add provider's specific options
for opt, optdict in options:
@@ -544,24 +602,26 @@ class OptionsManagerMixIn:
use with optik/optparse
"""
optdict = copy.copy(optdict)
- if 'action' in optdict:
+ if "action" in optdict:
self._nocallback_options[provider] = opt
else:
- optdict['action'] = 'callback'
- optdict['callback'] = self.cb_set_provider_option
+ optdict["action"] = "callback"
+ optdict["callback"] = self.cb_set_provider_option
# default is handled here and *must not* be given to optik if you
# want the whole machinery to work
- if 'default' in optdict:
- if ('help' in optdict
- and optdict.get('default') is not None
- and optdict['action'] not in ('store_true', 'store_false')):
- optdict['help'] += ' [current: %default]'
- del optdict['default']
- args = ['--' + str(opt)]
- if 'short' in optdict:
- self._short_options[optdict['short']] = opt
- args.append('-' + optdict['short'])
- del optdict['short']
+ if "default" in optdict:
+ if (
+ "help" in optdict
+ and optdict.get("default") is not None
+ and optdict["action"] not in ("store_true", "store_false")
+ ):
+ optdict["help"] += " [current: %default]"
+ del optdict["default"]
+ args = ["--" + str(opt)]
+ if "short" in optdict:
+ self._short_options[optdict["short"]] = opt
+ args.append("-" + optdict["short"])
+ del optdict["short"]
# cleanup option definition dict before giving it to optik
for key in list(optdict.keys()):
if key not in self._optik_option_attrs:
@@ -570,7 +630,7 @@ class OptionsManagerMixIn:
def cb_set_provider_option(self, option, opt, value, parser):
"""optik callback for option setting"""
- if opt.startswith('--'):
+ if opt.startswith("--"):
# remove -- on long option
opt = opt[2:]
else:
@@ -597,9 +657,11 @@ class OptionsManagerMixIn:
section = provider.name
if section in skipsections:
continue
- options = [(n, d, v) for (n, d, v) in options
- if d.get('type') is not None
- and not d.get('deprecated')]
+ options = [
+ (n, d, v)
+ for (n, d, v) in options
+ if d.get("type") is not None and not d.get("deprecated")
+ ]
if not options:
continue
if section not in sections:
@@ -610,16 +672,21 @@ class OptionsManagerMixIn:
printed = False
for section in sections:
if printed:
- print('\n', file=stream)
- utils.format_section(stream, section.upper(),
- sorted(options_by_section[section]))
+ print("\n", file=stream)
+ utils.format_section(
+ stream, section.upper(), sorted(options_by_section[section])
+ )
printed = True
def generate_manpage(self, pkginfo, section=1, stream=None):
with _patch_optparse():
- _generate_manpage(self.cmdline_parser, pkginfo,
- section, stream=stream or sys.stdout,
- level=self._maxlevel)
+ _generate_manpage(
+ self.cmdline_parser,
+ pkginfo,
+ section,
+ stream=stream or sys.stdout,
+ level=self._maxlevel,
+ )
def load_provider_defaults(self):
"""initialize configuration using default values"""
@@ -632,16 +699,16 @@ class OptionsManagerMixIn:
"""
helplevel = 1
while helplevel <= self._maxlevel:
- opt = '-'.join(['long'] * helplevel) + '-help'
+ opt = "-".join(["long"] * helplevel) + "-help"
if opt in self._all_options:
- break # already processed
+ break # already processed
# pylint: disable=unused-argument
def helpfunc(option, opt, val, p, level=helplevel):
print(self.help(level))
sys.exit(0)
- helpmsg = '%s verbose help.' % ' '.join(['more'] * helplevel)
- optdict = {'action': 'callback', 'callback': helpfunc,
- 'help': helpmsg}
+
+ helpmsg = "%s verbose help." % " ".join(["more"] * helplevel)
+ optdict = {"action": "callback", "callback": helpfunc, "help": helpmsg}
provider = self.options_providers[0]
self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
provider.options += ((opt, optdict),)
@@ -658,7 +725,7 @@ class OptionsManagerMixIn:
parser = self.cfgfile_parser
# Use this encoding in order to strip the BOM marker, if any.
- with io.open(config_file, 'r', encoding='utf_8_sig') as fp:
+ with io.open(config_file, "r", encoding="utf_8_sig") as fp:
parser.read_file(fp)
# normalize sections' title
@@ -670,9 +737,9 @@ class OptionsManagerMixIn:
return
if use_config_file:
- msg = 'Using config file {}'.format(os.path.abspath(config_file))
+ msg = "Using config file {}".format(os.path.abspath(config_file))
else:
- msg = 'No config file found, using default configuration'
+ msg = "No config file found, using default configuration"
print(msg, file=sys.stderr)
def load_config_file(self):
@@ -694,7 +761,7 @@ class OptionsManagerMixIn:
def load_configuration_from_config(self, config):
for opt, opt_value in config.items():
- opt = opt.replace('_', '-')
+ opt = opt.replace("_", "-")
provider = self._all_options[opt]
provider.set_option(opt, opt_value)
@@ -720,9 +787,9 @@ class OptionsManagerMixIn:
def add_help_section(self, title, description, level=0):
"""add a dummy option section for help purpose """
- group = optparse.OptionGroup(self.cmdline_parser,
- title=title.capitalize(),
- description=description)
+ group = optparse.OptionGroup(
+ self.cmdline_parser, title=title.capitalize(), description=description
+ )
group.level = level
self._maxlevel = max(self._maxlevel, level)
self.cmdline_parser.add_option_group(group)
@@ -739,7 +806,7 @@ class OptionsProviderMixIn:
# those attributes should be overridden
priority = -1
- name = 'default'
+ name = "default"
options = ()
level = 0
@@ -750,19 +817,19 @@ class OptionsProviderMixIn:
def load_defaults(self):
"""initialize the provider using default values"""
for opt, optdict in self.options:
- action = optdict.get('action')
- if action != 'callback':
+ action = optdict.get("action")
+ if action != "callback":
# callback actions have no default
if optdict is None:
optdict = self.get_option_def(opt)
- default = optdict.get('default')
+ default = optdict.get("default")
self.set_option(opt, default, action, optdict)
def option_attrname(self, opt, optdict=None):
"""get the config attribute corresponding to opt"""
if optdict is None:
optdict = self.get_option_def(opt)
- return optdict.get('dest', opt.replace('-', '_'))
+ return optdict.get("dest", opt.replace("-", "_"))
def option_value(self, opt):
"""get the current value for the given option"""
@@ -775,14 +842,14 @@ class OptionsProviderMixIn:
if value is not None:
value = _validate(value, optdict, optname)
if action is None:
- action = optdict.get('action', 'store')
- if action == 'store':
+ action = optdict.get("action", "store")
+ if action == "store":
setattr(self.config, self.option_attrname(optname, optdict), value)
- elif action in ('store_true', 'count'):
+ elif action in ("store_true", "count"):
setattr(self.config, self.option_attrname(optname, optdict), 0)
- elif action == 'store_false':
+ elif action == "store_false":
setattr(self.config, self.option_attrname(optname, optdict), 1)
- elif action == 'append':
+ elif action == "append":
optname = self.option_attrname(optname, optdict)
_list = getattr(self.config, optname, None)
if _list is None:
@@ -796,8 +863,8 @@ class OptionsProviderMixIn:
setattr(self.config, optname, _list + (value,))
else:
_list.append(value)
- elif action == 'callback':
- optdict['callback'](None, optname, value, None)
+ elif action == "callback":
+ optdict["callback"](None, optname, value, None)
else:
raise UnsupportedAction(action)
@@ -807,8 +874,9 @@ class OptionsProviderMixIn:
for option in self.options:
if option[0] == opt:
return option[1]
- raise optparse.OptionError('no such option %s in section %r'
- % (opt, self.name), opt)
+ raise optparse.OptionError(
+ "no such option %s in section %r" % (opt, self.name), opt
+ )
def options_by_section(self):
"""return an iterator on options grouped by section
@@ -817,8 +885,9 @@ class OptionsProviderMixIn:
"""
sections = {}
for optname, optdict in self.options:
- sections.setdefault(optdict.get('group'), []).append(
- (optname, optdict, self.option_value(optname)))
+ sections.setdefault(optdict.get("group"), []).append(
+ (optname, optdict, self.option_value(optname))
+ )
if None in sections:
yield None, sections.pop(None)
for section, options in sorted(sections.items()):
@@ -835,16 +904,17 @@ class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn):
"""basic mixin for simple configurations which don't need the
manager / providers model
"""
+
def __init__(self, *args, **kwargs):
if not args:
- kwargs.setdefault('usage', '')
+ kwargs.setdefault("usage", "")
OptionsManagerMixIn.__init__(self, *args, **kwargs)
OptionsProviderMixIn.__init__(self)
- if not getattr(self, 'option_groups', None):
+ if not getattr(self, "option_groups", None):
self.option_groups = []
for _, optdict in self.options:
try:
- gdef = (optdict['group'].upper(), '')
+ gdef = (optdict["group"].upper(), "")
except KeyError:
continue
if gdef not in self.option_groups:
@@ -852,8 +922,7 @@ class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn):
self.register_options_provider(self, own_group=False)
-def _generate_manpage(optparser, pkginfo, section=1,
- stream=sys.stdout, level=0):
+def _generate_manpage(optparser, pkginfo, section=1, stream=sys.stdout, level=0):
formatter = _ManHelpFormatter()
formatter.output_level = level
formatter.parser = optparser
diff --git a/pylint/epylint.py b/pylint/epylint.py
index 0956df3d3..50c3a99f9 100755
--- a/pylint/epylint.py
+++ b/pylint/epylint.py
@@ -63,12 +63,13 @@ from io import StringIO
def _get_env():
- '''Extracts the environment PYTHONPATH and appends the current sys.path to
- those.'''
+ """Extracts the environment PYTHONPATH and appends the current sys.path to
+ those."""
env = dict(os.environ)
- env['PYTHONPATH'] = os.pathsep.join(sys.path)
+ env["PYTHONPATH"] = os.pathsep.join(sys.path)
return env
+
def lint(filename, options=()):
"""Pylint the given file.
@@ -90,18 +91,27 @@ def lint(filename, options=()):
parent_path = osp.dirname(full_path)
child_path = osp.basename(full_path)
- while parent_path != "/" and osp.exists(osp.join(parent_path, '__init__.py')):
+ while parent_path != "/" and osp.exists(osp.join(parent_path, "__init__.py")):
child_path = osp.join(osp.basename(parent_path), child_path)
parent_path = osp.dirname(parent_path)
# Start pylint
# Ensure we use the python and pylint associated with the running epylint
run_cmd = "import sys; from pylint.lint import Run; Run(sys.argv[1:])"
- cmd = [sys.executable, "-c", run_cmd] + [
- '--msg-template', '{path}:{line}: {category} ({msg_id}, {symbol}, {obj}) {msg}',
- '-r', 'n', child_path] + list(options)
- process = Popen(cmd, stdout=PIPE, cwd=parent_path, env=_get_env(),
- universal_newlines=True)
+ cmd = (
+ [sys.executable, "-c", run_cmd]
+ + [
+ "--msg-template",
+ "{path}:{line}: {category} ({msg_id}, {symbol}, {obj}) {msg}",
+ "-r",
+ "n",
+ child_path,
+ ]
+ + list(options)
+ )
+ process = Popen(
+ cmd, stdout=PIPE, cwd=parent_path, env=_get_env(), universal_newlines=True
+ )
for line in process.stdout:
# remove pylintrc warning
@@ -112,13 +122,13 @@ def lint(filename, options=()):
parts = line.split(":")
if parts and parts[0] == child_path:
line = ":".join([filename] + parts[1:])
- print(line, end=' ')
+ print(line, end=" ")
process.wait()
return process.returncode
-def py_run(command_options='', return_std=False, stdout=None, stderr=None):
+def py_run(command_options="", return_std=False, stdout=None, stderr=None):
"""Run pylint from python
``command_options`` is a string containing ``pylint`` command line options;
@@ -141,7 +151,7 @@ def py_run(command_options='', return_std=False, stdout=None, stderr=None):
"""
# Create command line to call pylint
epylint_part = [sys.executable, "-c", "from pylint import epylint;epylint.Run()"]
- options = shlex.split(command_options, posix=not sys.platform.startswith('win'))
+ options = shlex.split(command_options, posix=not sys.platform.startswith("win"))
cli = epylint_part + options
# Providing standard output and/or error if not set
@@ -156,8 +166,14 @@ def py_run(command_options='', return_std=False, stdout=None, stderr=None):
else:
stderr = sys.stderr
# Call pylint in a subprocess
- process = Popen(cli, shell=False, stdout=stdout, stderr=stderr,
- env=_get_env(), universal_newlines=True)
+ process = Popen(
+ cli,
+ shell=False,
+ stdout=stdout,
+ stderr=stderr,
+ env=_get_env(),
+ universal_newlines=True,
+ )
proc_stdout, proc_stderr = process.communicate()
# Return standard output and error
if return_std:
@@ -176,5 +192,5 @@ def Run():
sys.exit(lint(sys.argv[1], sys.argv[2:]))
-if __name__ == '__main__':
+if __name__ == "__main__":
Run()
diff --git a/pylint/exceptions.py b/pylint/exceptions.py
index aaa37ecd7..57353a466 100644
--- a/pylint/exceptions.py
+++ b/pylint/exceptions.py
@@ -12,11 +12,14 @@
class InvalidMessageError(Exception):
"""raised when a message creation, registration or addition is rejected"""
+
class UnknownMessageError(Exception):
"""raised when an unregistered message id is encountered"""
+
class EmptyReportError(Exception):
"""raised when a report is empty and so should not be displayed"""
+
class InvalidReporterError(Exception):
"""raised when selected reporter is invalid (e.g. not found)"""
diff --git a/pylint/extensions/_check_docs_utils.py b/pylint/extensions/_check_docs_utils.py
index 161d3165f..2aaa70962 100644
--- a/pylint/extensions/_check_docs_utils.py
+++ b/pylint/extensions/_check_docs_utils.py
@@ -33,7 +33,7 @@ def space_indentation(s):
:rtype: int
:return: number of leading spaces
"""
- return len(s) - len(s.lstrip(' '))
+ return len(s) - len(s.lstrip(" "))
def get_setters_property_name(node):
@@ -48,9 +48,11 @@ def get_setters_property_name(node):
"""
decorators = node.decorators.nodes if node.decorators else []
for decorator in decorators:
- if (isinstance(decorator, astroid.Attribute) and
- decorator.attrname == "setter" and
- isinstance(decorator.expr, astroid.Name)):
+ if (
+ isinstance(decorator, astroid.Attribute)
+ and decorator.attrname == "setter"
+ and isinstance(decorator.expr, astroid.Name)
+ ):
return decorator.expr.name
return None
@@ -117,8 +119,7 @@ def possible_exc_types(node):
inferred = utils.safe_infer(node.exc)
if inferred:
excs = [inferred.name]
- elif (isinstance(node.exc, astroid.Call) and
- isinstance(node.exc.func, astroid.Name)):
+ elif isinstance(node.exc, astroid.Call) and isinstance(node.exc.func, astroid.Name):
target = utils.safe_infer(node.exc.func)
if isinstance(target, astroid.ClassDef):
excs = [target.name]
@@ -129,8 +130,11 @@ def possible_exc_types(node):
continue
val = utils.safe_infer(ret.value)
- if (val and isinstance(val, (astroid.Instance, astroid.ClassDef))
- and utils.inherit_from_std_ex(val)):
+ if (
+ val
+ and isinstance(val, (astroid.Instance, astroid.ClassDef))
+ and utils.inherit_from_std_ex(val)
+ ):
excs.append(val.name)
elif node.exc is None:
handler = node.parent
@@ -139,9 +143,7 @@ def possible_exc_types(node):
if handler and handler.type:
inferred_excs = astroid.unpack_infer(handler.type)
- excs = (exc.name for exc in inferred_excs
- if exc is not astroid.Uninferable)
-
+ excs = (exc.name for exc in inferred_excs if exc is not astroid.Uninferable)
try:
return {exc for exc in excs if not utils.node_ignores_exception(node, exc)}
@@ -149,9 +151,13 @@ def possible_exc_types(node):
return set()
-def docstringify(docstring, default_type='default'):
- for docstring_type in [SphinxDocstring, EpytextDocstring,
- GoogleDocstring, NumpyDocstring]:
+def docstringify(docstring, default_type="default"):
+ for docstring_type in [
+ SphinxDocstring,
+ EpytextDocstring,
+ GoogleDocstring,
+ NumpyDocstring,
+ ]:
instance = docstring_type(docstring)
if instance.is_valid():
return instance
@@ -161,9 +167,12 @@ def docstringify(docstring, default_type='default'):
class Docstring:
- re_for_parameters_see = re.compile(r"""
+ re_for_parameters_see = re.compile(
+ r"""
For\s+the\s+(other)?\s*parameters\s*,\s+see
- """, re.X | re.S)
+ """,
+ re.X | re.S,
+ )
supports_yields = None
"""True if the docstring supports a "yield" section.
@@ -217,12 +226,16 @@ class SphinxDocstring(Docstring):
re_simple_container_type = r"""
{type} # a container type
[\(\[] [^\n\s]+ [\)\]] # with the contents of the container
- """.format(type=re_type)
+ """.format(
+ type=re_type
+ )
re_xref = r"""
(?::\w+:)? # optional tag
`{}` # what to reference
- """.format(re_type)
+ """.format(
+ re_type
+ )
re_param_raw = r"""
: # initial colon
@@ -241,7 +254,9 @@ class SphinxDocstring(Docstring):
(\w+) # Parameter name
\s* # whitespace
: # final colon
- """.format(type=re_type, container_type=re_simple_container_type)
+ """.format(
+ type=re_type, container_type=re_simple_container_type
+ )
re_param_in_docstring = re.compile(re_param_raw, re.X | re.S)
re_type_raw = r"""
@@ -250,17 +265,19 @@ class SphinxDocstring(Docstring):
({type}) # Parameter name
\s* # whitespace
: # final colon
- """.format(type=re_type)
+ """.format(
+ type=re_type
+ )
re_type_in_docstring = re.compile(re_type_raw, re.X | re.S)
re_property_type_raw = r"""
:type: # Sphinx keyword
\s+ # whitespace
{type} # type declaration
- """.format(type=re_type)
- re_property_type_in_docstring = re.compile(
- re_property_type_raw, re.X | re.S
+ """.format(
+ type=re_type
)
+ re_property_type_in_docstring = re.compile(re_property_type_raw, re.X | re.S)
re_raise_raw = r"""
: # initial colon
@@ -278,7 +295,9 @@ class SphinxDocstring(Docstring):
(\w+) # Parameter name
\s* # whitespace
: # final colon
- """.format(type=re_type)
+ """.format(
+ type=re_type
+ )
re_raise_in_docstring = re.compile(re_raise_raw, re.X | re.S)
re_rtype_in_docstring = re.compile(r":rtype:")
@@ -288,11 +307,13 @@ class SphinxDocstring(Docstring):
supports_yields = False
def is_valid(self):
- return bool(self.re_param_in_docstring.search(self.doc) or
- self.re_raise_in_docstring.search(self.doc) or
- self.re_rtype_in_docstring.search(self.doc) or
- self.re_returns_in_docstring.search(self.doc) or
- self.re_property_type_in_docstring.search(self.doc))
+ return bool(
+ self.re_param_in_docstring.search(self.doc)
+ or self.re_raise_in_docstring.search(self.doc)
+ or self.re_rtype_in_docstring.search(self.doc)
+ or self.re_returns_in_docstring.search(self.doc)
+ or self.re_property_type_in_docstring.search(self.doc)
+ )
def exceptions(self):
types = set()
@@ -327,7 +348,7 @@ class SphinxDocstring(Docstring):
# The summary line is the return doc,
# so the first line must not be a known directive.
- return not self.doc.lstrip().startswith(':')
+ return not self.doc.lstrip().startswith(":")
def has_property_type(self):
if not self.doc:
@@ -360,29 +381,33 @@ class EpytextDocstring(SphinxDocstring):
https://www.jetbrains.com/help/pycharm/2016.1/creating-documentation-comments.html#d848203e314
https://www.jetbrains.com/help/pycharm/2016.1/using-docstrings-to-specify-types.html
"""
+
re_param_in_docstring = re.compile(
- SphinxDocstring.re_param_raw.replace(':', '@', 1),
- re.X | re.S)
+ SphinxDocstring.re_param_raw.replace(":", "@", 1), re.X | re.S
+ )
re_type_in_docstring = re.compile(
- SphinxDocstring.re_type_raw.replace(':', '@', 1),
- re.X | re.S)
+ SphinxDocstring.re_type_raw.replace(":", "@", 1), re.X | re.S
+ )
re_property_type_in_docstring = re.compile(
- SphinxDocstring.re_property_type_raw.replace(':', '@', 1),
- re.X | re.S)
+ SphinxDocstring.re_property_type_raw.replace(":", "@", 1), re.X | re.S
+ )
re_raise_in_docstring = re.compile(
- SphinxDocstring.re_raise_raw.replace(':', '@', 1),
- re.X | re.S)
+ SphinxDocstring.re_raise_raw.replace(":", "@", 1), re.X | re.S
+ )
- re_rtype_in_docstring = re.compile(r"""
+ re_rtype_in_docstring = re.compile(
+ r"""
@ # initial "at" symbol
(?: # Epytext keyword
rtype|returntype
)
: # final colon
- """, re.X | re.S)
+ """,
+ re.X | re.S,
+ )
re_returns_in_docstring = re.compile(r"@returns?:")
@@ -394,7 +419,7 @@ class EpytextDocstring(SphinxDocstring):
if self.has_property_type():
# The summary line is the return doc,
# so the first line must not be a known directive.
- return not self.doc.lstrip().startswith('@')
+ return not self.doc.lstrip().startswith("@")
return False
@@ -407,12 +432,16 @@ class GoogleDocstring(Docstring):
re_container_type = r"""
(?:{type}|{xref}) # a container type
[\(\[] [^\n]+ [\)\]] # with the contents of the container
- """.format(type=re_type, xref=re_xref)
+ """.format(
+ type=re_type, xref=re_xref
+ )
re_multiple_type = r"""
(?:{container_type}|{type}|{xref})
(?:\s+or\s+(?:{container_type}|{type}|{xref}))*
- """.format(type=re_type, xref=re_xref, container_type=re_container_type)
+ """.format(
+ type=re_type, xref=re_xref, container_type=re_container_type
+ )
_re_section_template = r"""
^([ ]*) {0} \s*: \s*$ # Google parameter header
@@ -421,15 +450,16 @@ class GoogleDocstring(Docstring):
re_param_section = re.compile(
_re_section_template.format(r"(?:Args|Arguments|Parameters)"),
- re.X | re.S | re.M
+ re.X | re.S | re.M,
)
re_keyword_param_section = re.compile(
_re_section_template.format(r"Keyword\s(?:Args|Arguments|Parameters)"),
- re.X | re.S | re.M
+ re.X | re.S | re.M,
)
- re_param_line = re.compile(r"""
+ re_param_line = re.compile(
+ r"""
\s* \*{{0,2}}(\w+) # identifier potentially with asterisks
\s* ( [(]
{type}
@@ -437,41 +467,51 @@ class GoogleDocstring(Docstring):
[)] )? \s* : # optional type declaration
\s* (.*) # beginning of optional description
""".format(
- type=re_multiple_type,
- ), re.X | re.S | re.M)
+ type=re_multiple_type
+ ),
+ re.X | re.S | re.M,
+ )
re_raise_section = re.compile(
- _re_section_template.format(r"Raises"),
- re.X | re.S | re.M
+ _re_section_template.format(r"Raises"), re.X | re.S | re.M
)
- re_raise_line = re.compile(r"""
+ re_raise_line = re.compile(
+ r"""
\s* ({type}) \s* : # identifier
\s* (.*) # beginning of optional description
- """.format(type=re_type), re.X | re.S | re.M)
+ """.format(
+ type=re_type
+ ),
+ re.X | re.S | re.M,
+ )
re_returns_section = re.compile(
- _re_section_template.format(r"Returns?"),
- re.X | re.S | re.M
+ _re_section_template.format(r"Returns?"), re.X | re.S | re.M
)
- re_returns_line = re.compile(r"""
+ re_returns_line = re.compile(
+ r"""
\s* ({type}:)? # identifier
\s* (.*) # beginning of description
""".format(
- type=re_multiple_type,
- ), re.X | re.S | re.M)
+ type=re_multiple_type
+ ),
+ re.X | re.S | re.M,
+ )
- re_property_returns_line = re.compile(r"""
+ re_property_returns_line = re.compile(
+ r"""
^{type}: # indentifier
\s* (.*) # Summary line / description
""".format(
- type=re_multiple_type,
- ), re.X | re.S | re.M)
+ type=re_multiple_type
+ ),
+ re.X | re.S | re.M,
+ )
re_yields_section = re.compile(
- _re_section_template.format(r"Yields?"),
- re.X | re.S | re.M
+ _re_section_template.format(r"Yields?"), re.X | re.S | re.M
)
re_yields_line = re_returns_line
@@ -479,11 +519,13 @@ class GoogleDocstring(Docstring):
supports_yields = True
def is_valid(self):
- return bool(self.re_param_section.search(self.doc) or
- self.re_raise_section.search(self.doc) or
- self.re_returns_section.search(self.doc) or
- self.re_yields_section.search(self.doc) or
- self.re_property_returns_line.search(self._first_line()))
+ return bool(
+ self.re_param_section.search(self.doc)
+ or self.re_raise_section.search(self.doc)
+ or self.re_returns_section.search(self.doc)
+ or self.re_yields_section.search(self.doc)
+ or self.re_property_returns_line.search(self._first_line())
+ )
def has_params(self):
if not self.doc:
@@ -527,10 +569,12 @@ class GoogleDocstring(Docstring):
# The summary line is the return doc,
# so the first line must not be a known directive.
first_line = self._first_line()
- return not bool(self.re_param_section.search(first_line) or
- self.re_raise_section.search(first_line) or
- self.re_returns_section.search(first_line) or
- self.re_yields_section.search(first_line))
+ return not bool(
+ self.re_param_section.search(first_line)
+ or self.re_raise_section.search(first_line)
+ or self.re_returns_section.search(first_line)
+ or self.re_yields_section.search(first_line)
+ )
def has_property_type(self):
if not self.doc:
@@ -609,7 +653,7 @@ class GoogleDocstring(Docstring):
return params_with_doc, params_with_type
def _first_line(self):
- return self.doc.lstrip().split('\n', 1)[0]
+ return self.doc.lstrip().split("\n", 1)[0]
@staticmethod
def min_section_indent(section_match):
@@ -670,45 +714,53 @@ class NumpyDocstring(GoogleDocstring):
re_param_section = re.compile(
_re_section_template.format(r"(?:Args|Arguments|Parameters)"),
- re.X | re.S | re.M
+ re.X | re.S | re.M,
)
- re_param_line = re.compile(r"""
+ re_param_line = re.compile(
+ r"""
\s* (\w+) # identifier
\s* :
\s* (?:({type})(?:,\s+optional)?)? # optional type declaration
\n # description starts on a new line
\s* (.*) # description
""".format(
- type=GoogleDocstring.re_multiple_type,
- ), re.X | re.S)
+ type=GoogleDocstring.re_multiple_type
+ ),
+ re.X | re.S,
+ )
re_raise_section = re.compile(
- _re_section_template.format(r"Raises"),
- re.X | re.S | re.M
+ _re_section_template.format(r"Raises"), re.X | re.S | re.M
)
- re_raise_line = re.compile(r"""
+ re_raise_line = re.compile(
+ r"""
\s* ({type})$ # type declaration
\s* (.*) # optional description
- """.format(type=GoogleDocstring.re_type), re.X | re.S | re.M)
+ """.format(
+ type=GoogleDocstring.re_type
+ ),
+ re.X | re.S | re.M,
+ )
re_returns_section = re.compile(
- _re_section_template.format(r"Returns?"),
- re.X | re.S | re.M
+ _re_section_template.format(r"Returns?"), re.X | re.S | re.M
)
- re_returns_line = re.compile(r"""
+ re_returns_line = re.compile(
+ r"""
\s* (?:\w+\s+:\s+)? # optional name
({type})$ # type declaration
\s* (.*) # optional description
""".format(
- type=GoogleDocstring.re_multiple_type,
- ), re.X | re.S | re.M)
+ type=GoogleDocstring.re_multiple_type
+ ),
+ re.X | re.S | re.M,
+ )
re_yields_section = re.compile(
- _re_section_template.format(r"Yields?"),
- re.X | re.S | re.M
+ _re_section_template.format(r"Yields?"), re.X | re.S | re.M
)
re_yields_line = re_returns_line
@@ -721,15 +773,15 @@ class NumpyDocstring(GoogleDocstring):
@staticmethod
def _is_section_header(line):
- return bool(re.match(r'\s*-+$', line))
+ return bool(re.match(r"\s*-+$", line))
DOCSTRING_TYPES = {
- 'sphinx': SphinxDocstring,
- 'epytext': EpytextDocstring,
- 'google': GoogleDocstring,
- 'numpy': NumpyDocstring,
- 'default': Docstring,
+ "sphinx": SphinxDocstring,
+ "epytext": EpytextDocstring,
+ "google": GoogleDocstring,
+ "numpy": NumpyDocstring,
+ "default": Docstring,
}
"""A map of the name of the docstring type to its class.
diff --git a/pylint/extensions/bad_builtin.py b/pylint/extensions/bad_builtin.py
index 9876922eb..2609d3168 100644
--- a/pylint/extensions/bad_builtin.py
+++ b/pylint/extensions/bad_builtin.py
@@ -12,37 +12,43 @@ from pylint.checkers.utils import check_messages
from pylint.interfaces import IAstroidChecker
-BAD_FUNCTIONS = ['map', 'filter']
+BAD_FUNCTIONS = ["map", "filter"]
if sys.version_info < (3, 0):
- BAD_FUNCTIONS.append('input')
+ BAD_FUNCTIONS.append("input")
# Some hints regarding the use of bad builtins.
-BUILTIN_HINTS = {
- 'map': 'Using a list comprehension can be clearer.',
-}
-BUILTIN_HINTS['filter'] = BUILTIN_HINTS['map']
+BUILTIN_HINTS = {"map": "Using a list comprehension can be clearer."}
+BUILTIN_HINTS["filter"] = BUILTIN_HINTS["map"]
class BadBuiltinChecker(BaseChecker):
- __implements__ = (IAstroidChecker, )
- name = 'deprecated_builtins'
- msgs = {'W0141': ('Used builtin function %s',
- 'bad-builtin',
- 'Used when a black listed builtin function is used (see the '
- 'bad-function option). Usual black listed functions are the ones '
- 'like map, or filter , where Python offers now some cleaner '
- 'alternative like list comprehension.'),
- }
+ __implements__ = (IAstroidChecker,)
+ name = "deprecated_builtins"
+ msgs = {
+ "W0141": (
+ "Used builtin function %s",
+ "bad-builtin",
+ "Used when a black listed builtin function is used (see the "
+ "bad-function option). Usual black listed functions are the ones "
+ "like map, or filter , where Python offers now some cleaner "
+ "alternative like list comprehension.",
+ )
+ }
- options = (('bad-functions',
- {'default' : BAD_FUNCTIONS,
- 'type' :'csv', 'metavar' : '<builtin function names>',
- 'help' : 'List of builtins function names that should not be '
- 'used, separated by a comma'}
- ),
- )
+ options = (
+ (
+ "bad-functions",
+ {
+ "default": BAD_FUNCTIONS,
+ "type": "csv",
+ "metavar": "<builtin function names>",
+ "help": "List of builtins function names that should not be "
+ "used, separated by a comma",
+ },
+ ),
+ )
- @check_messages('bad-builtin')
+ @check_messages("bad-builtin")
def visit_call(self, node):
if isinstance(node.func, astroid.Name):
name = node.func.name
@@ -55,7 +61,7 @@ class BadBuiltinChecker(BaseChecker):
args = "%r. %s" % (name, hint)
else:
args = repr(name)
- self.add_message('bad-builtin', node=node, args=args)
+ self.add_message("bad-builtin", node=node, args=args)
def register(linter):
diff --git a/pylint/extensions/check_docs.py b/pylint/extensions/check_docs.py
index a01d6fa47..7f7f643d7 100644
--- a/pylint/extensions/check_docs.py
+++ b/pylint/extensions/check_docs.py
@@ -16,6 +16,8 @@ def register(linter):
:param linter: Main interface object for Pylint plugins
:type linter: Pylint object
"""
- warnings.warn("This plugin is deprecated, use pylint.extensions.docparams instead.",
- DeprecationWarning)
+ warnings.warn(
+ "This plugin is deprecated, use pylint.extensions.docparams instead.",
+ DeprecationWarning,
+ )
linter.register_checker(docparams.DocstringParameterChecker(linter))
diff --git a/pylint/extensions/check_elif.py b/pylint/extensions/check_elif.py
index 75c6f0628..a259361ec 100644
--- a/pylint/extensions/check_elif.py
+++ b/pylint/extensions/check_elif.py
@@ -18,13 +18,16 @@ class ElseifUsedChecker(BaseTokenChecker):
"""
__implements__ = (ITokenChecker, IAstroidChecker)
- name = 'else_if_used'
- msgs = {'R5501': ('Consider using "elif" instead of "else if"',
- 'else-if-used',
- 'Used when an else statement is immediately followed by '
- 'an if statement and does not contain statements that '
- 'would be unrelated to it.'),
- }
+ name = "else_if_used"
+ msgs = {
+ "R5501": (
+ 'Consider using "elif" instead of "else if"',
+ "else-if-used",
+ "Used when an else statement is immediately followed by "
+ "an if statement and does not contain statements that "
+ "would be unrelated to it.",
+ )
+ }
def __init__(self, linter=None):
BaseTokenChecker.__init__(self, linter)
@@ -37,9 +40,9 @@ class ElseifUsedChecker(BaseTokenChecker):
def process_tokens(self, tokens):
# Process tokens and look for 'if' or 'elif'
for _, token, _, _, _ in tokens:
- if token == 'elif':
+ if token == "elif":
self._elifs.append(True)
- elif token == 'if':
+ elif token == "if":
self._elifs.append(False)
def leave_module(self, _):
@@ -51,14 +54,14 @@ class ElseifUsedChecker(BaseTokenChecker):
def visit_comprehension(self, node):
self._if_counter += len(node.ifs)
- @check_messages('else-if-used')
+ @check_messages("else-if-used")
def visit_if(self, node):
if isinstance(node.parent, astroid.If):
orelse = node.parent.orelse
# current if node must directly follow an "else"
if orelse and orelse == [node]:
if not self._elifs[self._if_counter]:
- self.add_message('else-if-used', node=node)
+ self.add_message("else-if-used", node=node)
self._if_counter += 1
diff --git a/pylint/extensions/comparetozero.py b/pylint/extensions/comparetozero.py
index 1ee4a2142..16c94280f 100644
--- a/pylint/extensions/comparetozero.py
+++ b/pylint/extensions/comparetozero.py
@@ -30,23 +30,26 @@ class CompareToZeroChecker(checkers.BaseChecker):
__implements__ = (interfaces.IAstroidChecker,)
# configuration section name
- name = 'compare-to-zero'
- msgs = {'C2001': ('Avoid comparisons to zero',
- 'compare-to-zero',
- 'Used when Pylint detects comparison to a 0 constant.'),
- }
+ name = "compare-to-zero"
+ msgs = {
+ "C2001": (
+ "Avoid comparisons to zero",
+ "compare-to-zero",
+ "Used when Pylint detects comparison to a 0 constant.",
+ )
+ }
priority = -2
options = ()
- @utils.check_messages('compare-to-zero')
+ @utils.check_messages("compare-to-zero")
def visit_compare(self, node):
- _operators = ['!=', '==', 'is not', 'is']
+ _operators = ["!=", "==", "is not", "is"]
# note: astroid.Compare has the left most operand in node.left
# while the rest are a list of tuples in node.ops
# the format of the tuple is ('compare operator sign', node)
# here we squash everything into `ops` to make it easier for processing later
- ops = [('', node.left)]
+ ops = [("", node.left)]
ops.extend(node.ops)
ops = list(itertools.chain(*ops))
@@ -57,14 +60,14 @@ class CompareToZeroChecker(checkers.BaseChecker):
error_detected = False
# 0 ?? X
- if _is_constant_zero(op_1) and op_2 in _operators + ['<']:
+ if _is_constant_zero(op_1) and op_2 in _operators + ["<"]:
error_detected = True
# X ?? 0
- elif op_2 in _operators + ['>'] and _is_constant_zero(op_3):
+ elif op_2 in _operators + [">"] and _is_constant_zero(op_3):
error_detected = True
if error_detected:
- self.add_message('compare-to-zero', node=node)
+ self.add_message("compare-to-zero", node=node)
def register(linter):
diff --git a/pylint/extensions/docparams.py b/pylint/extensions/docparams.py
index 67fcc82af..d55515a56 100644
--- a/pylint/extensions/docparams.py
+++ b/pylint/extensions/docparams.py
@@ -49,95 +49,141 @@ class DocstringParameterChecker(BaseChecker):
:param linter: linter object
:type linter: :class:`pylint.lint.PyLinter`
"""
+
__implements__ = IAstroidChecker
- name = 'parameter_documentation'
+ name = "parameter_documentation"
msgs = {
- 'W9005': ('"%s" has constructor parameters documented in class and __init__',
- 'multiple-constructor-doc',
- 'Please remove parameter declarations in the class or constructor.'),
- 'W9006': ('"%s" not documented as being raised',
- 'missing-raises-doc',
- 'Please document exceptions for all raised exception types.'),
- 'W9008': ('Redundant returns documentation',
- 'redundant-returns-doc',
- 'Please remove the return/rtype documentation from this method.'),
- 'W9010': ('Redundant yields documentation',
- 'redundant-yields-doc',
- 'Please remove the yields documentation from this method.'),
- 'W9011': ('Missing return documentation',
- 'missing-return-doc',
- 'Please add documentation about what this method returns.',
- {'old_names': [('W9007', 'missing-returns-doc')]}),
- 'W9012': ('Missing return type documentation',
- 'missing-return-type-doc',
- 'Please document the type returned by this method.',
- # we can't use the same old_name for two different warnings
- # {'old_names': [('W9007', 'missing-returns-doc')]},
- ),
- 'W9013': ('Missing yield documentation',
- 'missing-yield-doc',
- 'Please add documentation about what this generator yields.',
- {'old_names': [('W9009', 'missing-yields-doc')]}),
- 'W9014': ('Missing yield type documentation',
- 'missing-yield-type-doc',
- 'Please document the type yielded by this method.',
- # we can't use the same old_name for two different warnings
- # {'old_names': [('W9009', 'missing-yields-doc')]},
- ),
- 'W9015': ('"%s" missing in parameter documentation',
- 'missing-param-doc',
- 'Please add parameter declarations for all parameters.',
- {'old_names': [('W9003', 'missing-param-doc')]}),
- 'W9016': ('"%s" missing in parameter type documentation',
- 'missing-type-doc',
- 'Please add parameter type declarations for all parameters.',
- {'old_names': [('W9004', 'missing-type-doc')]}),
- 'W9017': ('"%s" differing in parameter documentation',
- 'differing-param-doc',
- 'Please check parameter names in declarations.',
- ),
- 'W9018': ('"%s" differing in parameter type documentation',
- 'differing-type-doc',
- 'Please check parameter names in type declarations.',
- ),
+ "W9005": (
+ '"%s" has constructor parameters documented in class and __init__',
+ "multiple-constructor-doc",
+ "Please remove parameter declarations in the class or constructor.",
+ ),
+ "W9006": (
+ '"%s" not documented as being raised',
+ "missing-raises-doc",
+ "Please document exceptions for all raised exception types.",
+ ),
+ "W9008": (
+ "Redundant returns documentation",
+ "redundant-returns-doc",
+ "Please remove the return/rtype documentation from this method.",
+ ),
+ "W9010": (
+ "Redundant yields documentation",
+ "redundant-yields-doc",
+ "Please remove the yields documentation from this method.",
+ ),
+ "W9011": (
+ "Missing return documentation",
+ "missing-return-doc",
+ "Please add documentation about what this method returns.",
+ {"old_names": [("W9007", "missing-returns-doc")]},
+ ),
+ "W9012": (
+ "Missing return type documentation",
+ "missing-return-type-doc",
+ "Please document the type returned by this method.",
+ # we can't use the same old_name for two different warnings
+ # {'old_names': [('W9007', 'missing-returns-doc')]},
+ ),
+ "W9013": (
+ "Missing yield documentation",
+ "missing-yield-doc",
+ "Please add documentation about what this generator yields.",
+ {"old_names": [("W9009", "missing-yields-doc")]},
+ ),
+ "W9014": (
+ "Missing yield type documentation",
+ "missing-yield-type-doc",
+ "Please document the type yielded by this method.",
+ # we can't use the same old_name for two different warnings
+ # {'old_names': [('W9009', 'missing-yields-doc')]},
+ ),
+ "W9015": (
+ '"%s" missing in parameter documentation',
+ "missing-param-doc",
+ "Please add parameter declarations for all parameters.",
+ {"old_names": [("W9003", "missing-param-doc")]},
+ ),
+ "W9016": (
+ '"%s" missing in parameter type documentation',
+ "missing-type-doc",
+ "Please add parameter type declarations for all parameters.",
+ {"old_names": [("W9004", "missing-type-doc")]},
+ ),
+ "W9017": (
+ '"%s" differing in parameter documentation',
+ "differing-param-doc",
+ "Please check parameter names in declarations.",
+ ),
+ "W9018": (
+ '"%s" differing in parameter type documentation',
+ "differing-type-doc",
+ "Please check parameter names in type declarations.",
+ ),
}
- options = (('accept-no-param-doc',
- {'default': True, 'type' : 'yn', 'metavar' : '<y or n>',
- 'help': 'Whether to accept totally missing parameter '
- 'documentation in the docstring of a function that has '
- 'parameters.'
- }),
- ('accept-no-raise-doc',
- {'default': True, 'type' : 'yn', 'metavar' : '<y or n>',
- 'help': 'Whether to accept totally missing raises '
- 'documentation in the docstring of a function that '
- 'raises an exception.'
- }),
- ('accept-no-return-doc',
- {'default': True, 'type' : 'yn', 'metavar' : '<y or n>',
- 'help': 'Whether to accept totally missing return '
- 'documentation in the docstring of a function that '
- 'returns a statement.'
- }),
- ('accept-no-yields-doc',
- {'default': True, 'type' : 'yn', 'metavar': '<y or n>',
- 'help': 'Whether to accept totally missing yields '
- 'documentation in the docstring of a generator.'
- }),
- ('default-docstring-type',
- {'type': 'choice', 'default': 'default',
- 'choices': list(utils.DOCSTRING_TYPES),
- 'help': 'If the docstring type cannot be guessed '
- 'the specified docstring type will be used.'
- }),
- )
+ options = (
+ (
+ "accept-no-param-doc",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y or n>",
+ "help": "Whether to accept totally missing parameter "
+ "documentation in the docstring of a function that has "
+ "parameters.",
+ },
+ ),
+ (
+ "accept-no-raise-doc",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y or n>",
+ "help": "Whether to accept totally missing raises "
+ "documentation in the docstring of a function that "
+ "raises an exception.",
+ },
+ ),
+ (
+ "accept-no-return-doc",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y or n>",
+ "help": "Whether to accept totally missing return "
+ "documentation in the docstring of a function that "
+ "returns a statement.",
+ },
+ ),
+ (
+ "accept-no-yields-doc",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y or n>",
+ "help": "Whether to accept totally missing yields "
+ "documentation in the docstring of a generator.",
+ },
+ ),
+ (
+ "default-docstring-type",
+ {
+ "type": "choice",
+ "default": "default",
+ "choices": list(utils.DOCSTRING_TYPES),
+ "help": "If the docstring type cannot be guessed "
+ "the specified docstring type will be used.",
+ },
+ ),
+ )
priority = -2
- constructor_names = {'__init__', '__new__'}
- not_needed_param_in_docstring = {'self', 'cls'}
+ constructor_names = {"__init__", "__new__"}
+ not_needed_param_in_docstring = {"self", "cls"}
def visit_functiondef(self, node):
"""Called for function and method definitions (def).
@@ -145,9 +191,7 @@ class DocstringParameterChecker(BaseChecker):
:param node: Node for a function or method definition in the AST
:type node: :class:`astroid.scoped_nodes.Function`
"""
- node_doc = utils.docstringify(
- node.doc, self.config.default_docstring_type,
- )
+ node_doc = utils.docstringify(node.doc, self.config.default_docstring_type)
self.check_functiondef_params(node, node_doc)
self.check_functiondef_returns(node, node_doc)
self.check_functiondef_yields(node, node_doc)
@@ -158,50 +202,49 @@ class DocstringParameterChecker(BaseChecker):
class_node = checker_utils.node_frame_class(node)
if class_node is not None:
class_doc = utils.docstringify(
- class_node.doc, self.config.default_docstring_type,
+ class_node.doc, self.config.default_docstring_type
)
self.check_single_constructor_params(class_doc, node_doc, class_node)
# __init__ or class docstrings can have no parameters documented
# as long as the other documents them.
node_allow_no_param = (
- class_doc.has_params() or
- class_doc.params_documented_elsewhere() or
- None
+ class_doc.has_params()
+ or class_doc.params_documented_elsewhere()
+ or None
)
class_allow_no_param = (
- node_doc.has_params() or
- node_doc.params_documented_elsewhere() or
- None
+ node_doc.has_params()
+ or node_doc.params_documented_elsewhere()
+ or None
)
self.check_arguments_in_docstring(
- class_doc, node.args, class_node, class_allow_no_param)
+ class_doc, node.args, class_node, class_allow_no_param
+ )
self.check_arguments_in_docstring(
- node_doc, node.args, node, node_allow_no_param)
+ node_doc, node.args, node, node_allow_no_param
+ )
def check_functiondef_returns(self, node, node_doc):
- if ((not node_doc.supports_yields and node.is_generator())
- or node.is_abstract()):
+ if (not node_doc.supports_yields and node.is_generator()) or node.is_abstract():
return
return_nodes = node.nodes_of_class(astroid.Return)
- if ((node_doc.has_returns() or node_doc.has_rtype()) and
- not any(utils.returns_something(ret_node) for ret_node in return_nodes)):
- self.add_message(
- 'redundant-returns-doc',
- node=node)
+ if (node_doc.has_returns() or node_doc.has_rtype()) and not any(
+ utils.returns_something(ret_node) for ret_node in return_nodes
+ ):
+ self.add_message("redundant-returns-doc", node=node)
def check_functiondef_yields(self, node, node_doc):
if not node_doc.supports_yields or node.is_abstract():
return
- if ((node_doc.has_yields() or node_doc.has_yields_type()) and
- not node.is_generator()):
- self.add_message(
- 'redundant-yields-doc',
- node=node)
+ if (
+ node_doc.has_yields() or node_doc.has_yields_type()
+ ) and not node.is_generator():
+ self.add_message("redundant-yields-doc", node=node)
def visit_raise(self, node):
func_node = node.frame()
@@ -219,9 +262,7 @@ class DocstringParameterChecker(BaseChecker):
if property_:
func_node = property_
- doc = utils.docstringify(
- func_node.doc, self.config.default_docstring_type,
- )
+ doc = utils.docstringify(func_node.doc, self.config.default_docstring_type)
if not doc.is_valid():
if doc.doc:
self._handle_no_raise_doc(expected_excs, func_node)
@@ -239,39 +280,27 @@ class DocstringParameterChecker(BaseChecker):
if not isinstance(func_node, astroid.FunctionDef):
return
- doc = utils.docstringify(
- func_node.doc, self.config.default_docstring_type,
- )
+ doc = utils.docstringify(func_node.doc, self.config.default_docstring_type)
if not doc.is_valid() and self.config.accept_no_return_doc:
return
is_property = checker_utils.decorated_with_property(func_node)
- if not (doc.has_returns() or
- (doc.has_property_returns() and is_property)):
- self.add_message(
- 'missing-return-doc',
- node=func_node
- )
+ if not (doc.has_returns() or (doc.has_property_returns() and is_property)):
+ self.add_message("missing-return-doc", node=func_node)
if func_node.returns:
return
- if not (doc.has_rtype() or
- (doc.has_property_type() and is_property)):
- self.add_message(
- 'missing-return-type-doc',
- node=func_node
- )
+ if not (doc.has_rtype() or (doc.has_property_type() and is_property)):
+ self.add_message("missing-return-type-doc", node=func_node)
def visit_yield(self, node):
func_node = node.frame()
if not isinstance(func_node, astroid.FunctionDef):
return
- doc = utils.docstringify(
- func_node.doc, self.config.default_docstring_type,
- )
+ doc = utils.docstringify(func_node.doc, self.config.default_docstring_type)
if not doc.is_valid() and self.config.accept_no_yields_doc:
return
@@ -283,22 +312,17 @@ class DocstringParameterChecker(BaseChecker):
doc_has_yields_type = doc.has_rtype()
if not doc_has_yields:
- self.add_message(
- 'missing-yield-doc',
- node=func_node
- )
+ self.add_message("missing-yield-doc", node=func_node)
if not doc_has_yields_type:
- self.add_message(
- 'missing-yield-type-doc',
- node=func_node
- )
+ self.add_message("missing-yield-type-doc", node=func_node)
def visit_yieldfrom(self, node):
self.visit_yield(node)
- def check_arguments_in_docstring(self, doc, arguments_node, warning_node,
- accept_no_param_doc=None):
+ def check_arguments_in_docstring(
+ self, doc, arguments_node, warning_node, accept_no_param_doc=None
+ ):
"""Check that all parameters in a function, method or class constructor
on the one hand and the parameters mentioned in the parameter
documentation (e.g. the Sphinx tags 'param' and 'type') on the other
@@ -344,8 +368,7 @@ class DocstringParameterChecker(BaseChecker):
# Collect the function arguments.
expected_argument_names = {arg.name for arg in arguments_node.args}
expected_argument_names.update(arg.name for arg in arguments_node.kwonlyargs)
- not_needed_type_in_docstring = (
- self.not_needed_param_in_docstring.copy())
+ not_needed_type_in_docstring = self.not_needed_param_in_docstring.copy()
if arguments_node.vararg is not None:
expected_argument_names.add(arguments_node.vararg)
@@ -356,12 +379,10 @@ class DocstringParameterChecker(BaseChecker):
params_with_doc, params_with_type = doc.match_param_docs()
# Tolerate no parameter documentation at all.
- if (not params_with_doc and not params_with_type
- and accept_no_param_doc):
+ if not params_with_doc and not params_with_type and accept_no_param_doc:
tolerate_missing_params = True
- def _compare_missing_args(found_argument_names, message_id,
- not_needed_names):
+ def _compare_missing_args(found_argument_names, message_id, not_needed_names):
"""Compare the found argument names with the expected ones and
generate a message if there are arguments missing.
@@ -375,17 +396,16 @@ class DocstringParameterChecker(BaseChecker):
"""
if not tolerate_missing_params:
missing_argument_names = (
- (expected_argument_names - found_argument_names)
- - not_needed_names)
+ expected_argument_names - found_argument_names
+ ) - not_needed_names
if missing_argument_names:
self.add_message(
message_id,
- args=(', '.join(
- sorted(missing_argument_names)),),
- node=warning_node)
+ args=(", ".join(sorted(missing_argument_names)),),
+ node=warning_node,
+ )
- def _compare_different_args(found_argument_names, message_id,
- not_needed_names):
+ def _compare_different_args(found_argument_names, message_id, not_needed_names):
"""Compare the found argument names with the expected ones and
generate a message if there are extra arguments found.
@@ -399,36 +419,41 @@ class DocstringParameterChecker(BaseChecker):
"""
differing_argument_names = (
(expected_argument_names ^ found_argument_names)
- - not_needed_names - expected_argument_names)
+ - not_needed_names
+ - expected_argument_names
+ )
if differing_argument_names:
self.add_message(
message_id,
- args=(', '.join(
- sorted(differing_argument_names)),),
- node=warning_node)
+ args=(", ".join(sorted(differing_argument_names)),),
+ node=warning_node,
+ )
- _compare_missing_args(params_with_doc, 'missing-param-doc',
- self.not_needed_param_in_docstring)
+ _compare_missing_args(
+ params_with_doc, "missing-param-doc", self.not_needed_param_in_docstring
+ )
for index, arg_name in enumerate(arguments_node.args):
if arguments_node.annotations[index]:
params_with_type.add(arg_name.name)
- _compare_missing_args(params_with_type, 'missing-type-doc',
- not_needed_type_in_docstring)
+ _compare_missing_args(
+ params_with_type, "missing-type-doc", not_needed_type_in_docstring
+ )
- _compare_different_args(params_with_doc, 'differing-param-doc',
- self.not_needed_param_in_docstring)
- _compare_different_args(params_with_type, 'differing-type-doc',
- not_needed_type_in_docstring)
+ _compare_different_args(
+ params_with_doc, "differing-param-doc", self.not_needed_param_in_docstring
+ )
+ _compare_different_args(
+ params_with_type, "differing-type-doc", not_needed_type_in_docstring
+ )
def check_single_constructor_params(self, class_doc, init_doc, class_node):
if class_doc.has_params() and init_doc.has_params():
self.add_message(
- 'multiple-constructor-doc',
- args=(class_node.name,),
- node=class_node)
+ "multiple-constructor-doc", args=(class_node.name,), node=class_node
+ )
def _handle_no_raise_doc(self, excs, node):
if self.config.accept_no_raise_doc:
@@ -448,7 +473,7 @@ class DocstringParameterChecker(BaseChecker):
"""
if node.is_abstract():
try:
- missing_excs.remove('NotImplementedError')
+ missing_excs.remove("NotImplementedError")
except KeyError:
pass
@@ -456,9 +481,9 @@ class DocstringParameterChecker(BaseChecker):
return
self.add_message(
- 'missing-raises-doc',
- args=(', '.join(sorted(missing_excs)),),
- node=node)
+ "missing-raises-doc", args=(", ".join(sorted(missing_excs)),), node=node
+ )
+
def register(linter):
"""Required method to auto register this checker.
diff --git a/pylint/extensions/docstyle.py b/pylint/extensions/docstyle.py
index a1cf8c08e..1f50a414e 100644
--- a/pylint/extensions/docstyle.py
+++ b/pylint/extensions/docstyle.py
@@ -17,35 +17,43 @@ class DocStringStyleChecker(checkers.BaseChecker):
"""Checks format of docstrings based on PEP 0257"""
__implements__ = IAstroidChecker
- name = 'docstyle'
+ name = "docstyle"
msgs = {
- 'C0198': ('Bad docstring quotes in %s, expected """, given %s',
- 'bad-docstring-quotes',
- 'Used when a docstring does not have triple double quotes.'),
- 'C0199': ('First line empty in %s docstring',
- 'docstring-first-line-empty',
- 'Used when a blank line is found at the beginning of a docstring.'),
- }
-
- @check_messages('docstring-first-line-empty', 'bad-docstring-quotes')
+ "C0198": (
+ 'Bad docstring quotes in %s, expected """, given %s',
+ "bad-docstring-quotes",
+ "Used when a docstring does not have triple double quotes.",
+ ),
+ "C0199": (
+ "First line empty in %s docstring",
+ "docstring-first-line-empty",
+ "Used when a blank line is found at the beginning of a docstring.",
+ ),
+ }
+
+ @check_messages("docstring-first-line-empty", "bad-docstring-quotes")
def visit_module(self, node):
- self._check_docstring('module', node)
+ self._check_docstring("module", node)
def visit_classdef(self, node):
- self._check_docstring('class', node)
+ self._check_docstring("class", node)
def visit_functiondef(self, node):
- ftype = 'method' if node.is_method() else 'function'
+ ftype = "method" if node.is_method() else "function"
self._check_docstring(ftype, node)
visit_asyncfunctiondef = visit_functiondef
def _check_docstring(self, node_type, node):
docstring = node.doc
- if docstring and docstring[0] == '\n':
- self.add_message('docstring-first-line-empty', node=node,
- args=(node_type,), confidence=HIGH)
+ if docstring and docstring[0] == "\n":
+ self.add_message(
+ "docstring-first-line-empty",
+ node=node,
+ args=(node_type,),
+ confidence=HIGH,
+ )
# Use "linecache", instead of node.as_string(), because the latter
# looses the original form of the docstrings.
@@ -55,17 +63,21 @@ class DocStringStyleChecker(checkers.BaseChecker):
line = linecache.getline(node.root().file, lineno).lstrip()
if line and line.find('"""') == 0:
return
- if line and '\'\'\'' in line:
- quotes = '\'\'\''
+ if line and "'''" in line:
+ quotes = "'''"
elif line and line[0] == '"':
quotes = '"'
- elif line and line[0] == '\'':
- quotes = '\''
+ elif line and line[0] == "'":
+ quotes = "'"
else:
quotes = False
if quotes:
- self.add_message('bad-docstring-quotes', node=node,
- args=(node_type, quotes), confidence=HIGH)
+ self.add_message(
+ "bad-docstring-quotes",
+ node=node,
+ args=(node_type, quotes),
+ confidence=HIGH,
+ )
def register(linter):
diff --git a/pylint/extensions/emptystring.py b/pylint/extensions/emptystring.py
index a3ff209c6..ddca8f7d5 100644
--- a/pylint/extensions/emptystring.py
+++ b/pylint/extensions/emptystring.py
@@ -17,7 +17,7 @@ from pylint.checkers import utils
def _is_constant_empty_str(node):
- return isinstance(node, astroid.Const) and node.value == ''
+ return isinstance(node, astroid.Const) and node.value == ""
class CompareToEmptyStringChecker(checkers.BaseChecker):
@@ -30,23 +30,26 @@ class CompareToEmptyStringChecker(checkers.BaseChecker):
__implements__ = (interfaces.IAstroidChecker,)
# configuration section name
- name = 'compare-to-empty-string'
- msgs = {'C1901': ('Avoid comparisons to empty string',
- 'compare-to-empty-string',
- 'Used when Pylint detects comparison to an empty string constant.'),
- }
+ name = "compare-to-empty-string"
+ msgs = {
+ "C1901": (
+ "Avoid comparisons to empty string",
+ "compare-to-empty-string",
+ "Used when Pylint detects comparison to an empty string constant.",
+ )
+ }
priority = -2
options = ()
- @utils.check_messages('compare-to-empty-string')
+ @utils.check_messages("compare-to-empty-string")
def visit_compare(self, node):
- _operators = ['!=', '==', 'is not', 'is']
+ _operators = ["!=", "==", "is not", "is"]
# note: astroid.Compare has the left most operand in node.left
# while the rest are a list of tuples in node.ops
# the format of the tuple is ('compare operator sign', node)
# here we squash everything into `ops` to make it easier for processing later
- ops = [('', node.left)]
+ ops = [("", node.left)]
ops.extend(node.ops)
ops = list(itertools.chain(*ops))
@@ -64,7 +67,7 @@ class CompareToEmptyStringChecker(checkers.BaseChecker):
error_detected = True
if error_detected:
- self.add_message('compare-to-empty-string', node=node)
+ self.add_message("compare-to-empty-string", node=node)
def register(linter):
diff --git a/pylint/extensions/mccabe.py b/pylint/extensions/mccabe.py
index b8d601da7..41eca280f 100644
--- a/pylint/extensions/mccabe.py
+++ b/pylint/extensions/mccabe.py
@@ -9,8 +9,10 @@
from __future__ import absolute_import
-from mccabe import PathGraph as Mccabe_PathGraph, \
- PathGraphingAstVisitor as Mccabe_PathGraphingAstVisitor
+from mccabe import (
+ PathGraph as Mccabe_PathGraph,
+ PathGraphingAstVisitor as Mccabe_PathGraphingAstVisitor,
+)
from pylint import checkers
from pylint.checkers.utils import check_messages
from pylint.interfaces import HIGH, IAstroidChecker
@@ -18,7 +20,7 @@ from pylint.interfaces import HIGH, IAstroidChecker
class PathGraph(Mccabe_PathGraph):
def __init__(self, node):
- super(PathGraph, self).__init__(name='', entity='', lineno=1)
+ super(PathGraph, self).__init__(name="", entity="", lineno=1)
self.root = node
@@ -37,7 +39,7 @@ class PathGraphingAstVisitor(Mccabe_PathGraphingAstVisitor):
meth = self._cache.get(klass)
if meth is None:
className = klass.__name__
- meth = getattr(self.visitor, 'visit' + className, self.default)
+ meth = getattr(self.visitor, "visit" + className, self.default)
self._cache[klass] = meth
return meth(node, *args)
@@ -64,10 +66,31 @@ class PathGraphingAstVisitor(Mccabe_PathGraphingAstVisitor):
def visitSimpleStatement(self, node):
self._append_node(node)
- visitAssert = visitAssign = visitAugAssign = visitDelete = visitPrint = \
- visitRaise = visitYield = visitImport = visitCall = visitSubscript = \
- visitPass = visitContinue = visitBreak = visitGlobal = visitReturn = \
- visitExpr = visitAwait = visitSimpleStatement
+ visitAssert = (
+ visitAssign
+ ) = (
+ visitAugAssign
+ ) = (
+ visitDelete
+ ) = (
+ visitPrint
+ ) = (
+ visitRaise
+ ) = (
+ visitYield
+ ) = (
+ visitImport
+ ) = (
+ visitCall
+ ) = (
+ visitSubscript
+ ) = (
+ visitPass
+ ) = (
+ visitContinue
+ ) = (
+ visitBreak
+ ) = visitGlobal = visitReturn = visitExpr = visitAwait = visitSimpleStatement
def visitWith(self, node):
self._append_node(node)
@@ -94,7 +117,9 @@ class PathGraphingAstVisitor(Mccabe_PathGraphingAstVisitor):
self._append_node(node)
self._subgraph_parse(node, node, extra_blocks)
- def _subgraph_parse(self, node, pathnode, extra_blocks): # pylint: disable=unused-argument
+ def _subgraph_parse(
+ self, node, pathnode, extra_blocks
+ ): # pylint: disable=unused-argument
"""parse the body and any `else` block of `if` and `for` statements"""
loose_ends = []
self.tail = node
@@ -124,25 +149,29 @@ class McCabeMethodChecker(checkers.BaseChecker):
"""
__implements__ = IAstroidChecker
- name = 'design'
+ name = "design"
msgs = {
- 'R1260': (
+ "R1260": (
"%s is too complex. The McCabe rating is %d",
- 'too-complex',
- 'Used when a method or function is too complex based on '
- 'McCabe Complexity Cyclomatic'),
+ "too-complex",
+ "Used when a method or function is too complex based on "
+ "McCabe Complexity Cyclomatic",
+ )
}
options = (
- ('max-complexity', {
- 'default': 10,
- 'type': 'int',
- 'metavar': '<int>',
- 'help': 'McCabe complexity cyclomatic threshold',
- }),
+ (
+ "max-complexity",
+ {
+ "default": 10,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "McCabe complexity cyclomatic threshold",
+ },
+ ),
)
- @check_messages('too-complex')
+ @check_messages("too-complex")
def visit_module(self, node):
"""visit an astroid.Module node to check too complex rating and
add message if is greather than max_complexity stored from options"""
@@ -152,15 +181,15 @@ class McCabeMethodChecker(checkers.BaseChecker):
for graph in visitor.graphs.values():
complexity = graph.complexity()
node = graph.root
- if hasattr(node, 'name'):
+ if hasattr(node, "name"):
node_name = "'%s'" % node.name
else:
node_name = "This '%s'" % node.__class__.__name__.lower()
if complexity <= self.config.max_complexity:
continue
self.add_message(
- 'too-complex', node=node, confidence=HIGH,
- args=(node_name, complexity))
+ "too-complex", node=node, confidence=HIGH, args=(node_name, complexity)
+ )
def register(linter):
diff --git a/pylint/extensions/overlapping_exceptions.py b/pylint/extensions/overlapping_exceptions.py
index 7e7e8f462..3b1454cae 100644
--- a/pylint/extensions/overlapping_exceptions.py
+++ b/pylint/extensions/overlapping_exceptions.py
@@ -21,14 +21,18 @@ class OverlappingExceptionsChecker(checkers.BaseChecker):
__implements__ = interfaces.IAstroidChecker
- name = 'overlap-except'
- msgs = {'W0714': ('Overlapping exceptions (%s)',
- 'overlapping-except',
- 'Used when exceptions in handler overlap or are identical')}
+ name = "overlap-except"
+ msgs = {
+ "W0714": (
+ "Overlapping exceptions (%s)",
+ "overlapping-except",
+ "Used when exceptions in handler overlap or are identical",
+ )
+ }
priority = -2
options = ()
- @utils.check_messages('overlapping-except')
+ @utils.check_messages("overlapping-except")
def visit_tryexcept(self, node):
"""check for empty except"""
for handler in node.handlers:
@@ -45,34 +49,39 @@ class OverlappingExceptionsChecker(checkers.BaseChecker):
for part, exc in excs:
if exc is astroid.Uninferable:
continue
- if (isinstance(exc, astroid.Instance) and
- utils.inherit_from_std_ex(exc)):
+ if isinstance(exc, astroid.Instance) and utils.inherit_from_std_ex(exc):
# pylint: disable=protected-access
exc = exc._proxied
if not isinstance(exc, astroid.ClassDef):
continue
- exc_ancestors = [anc for anc in exc.ancestors()
- if isinstance(anc, astroid.ClassDef)]
+ exc_ancestors = [
+ anc for anc in exc.ancestors() if isinstance(anc, astroid.ClassDef)
+ ]
for prev_part, prev_exc in handled_in_clause:
- prev_exc_ancestors = [anc for anc in prev_exc.ancestors()
- if isinstance(anc, astroid.ClassDef)]
+ prev_exc_ancestors = [
+ anc
+ for anc in prev_exc.ancestors()
+ if isinstance(anc, astroid.ClassDef)
+ ]
if exc == prev_exc:
- self.add_message('overlapping-except',
- node=handler.type,
- args='%s and %s are the same' %
- (prev_part.as_string(),
- part.as_string()))
- elif (prev_exc in exc_ancestors or
- exc in prev_exc_ancestors):
+ self.add_message(
+ "overlapping-except",
+ node=handler.type,
+ args="%s and %s are the same"
+ % (prev_part.as_string(), part.as_string()),
+ )
+ elif prev_exc in exc_ancestors or exc in prev_exc_ancestors:
ancestor = part if exc in prev_exc_ancestors else prev_part
descendant = part if prev_exc in exc_ancestors else prev_part
- self.add_message('overlapping-except',
- node=handler.type,
- args='%s is an ancestor class of %s' %
- (ancestor.as_string(), descendant.as_string()))
+ self.add_message(
+ "overlapping-except",
+ node=handler.type,
+ args="%s is an ancestor class of %s"
+ % (ancestor.as_string(), descendant.as_string()),
+ )
handled_in_clause += [(part, exc)]
diff --git a/pylint/extensions/redefined_variable_type.py b/pylint/extensions/redefined_variable_type.py
index ab557eeee..a58f04e2d 100644
--- a/pylint/extensions/redefined_variable_type.py
+++ b/pylint/extensions/redefined_variable_type.py
@@ -13,7 +13,7 @@ from pylint.checkers.utils import check_messages, is_none, node_type
from pylint.interfaces import IAstroidChecker
-BUILTINS = 'builtins'
+BUILTINS = "builtins"
class MultipleTypesChecker(BaseChecker):
@@ -29,20 +29,23 @@ class MultipleTypesChecker(BaseChecker):
ifexpr, etc. Also it would be great to have support for inference on
str.split()
"""
+
__implements__ = IAstroidChecker
- name = 'multiple_types'
- msgs = {'R0204': ('Redefinition of %s type from %s to %s',
- 'redefined-variable-type',
- 'Used when the type of a variable changes inside a '
- 'method or a function.'
- ),
- }
+ name = "multiple_types"
+ msgs = {
+ "R0204": (
+ "Redefinition of %s type from %s to %s",
+ "redefined-variable-type",
+ "Used when the type of a variable changes inside a "
+ "method or a function.",
+ )
+ }
def visit_classdef(self, _):
self._assigns.append({})
- @check_messages('redefined-variable-type')
+ @check_messages("redefined-variable-type")
def leave_classdef(self, _):
self._check_and_add_messages()
@@ -69,18 +72,24 @@ class MultipleTypesChecker(BaseChecker):
redef_parent = redef_node.parent
if isinstance(orig_parent, astroid.If):
if orig_parent == redef_parent:
- if (redef_node in orig_parent.orelse and
- orig_node not in orig_parent.orelse):
+ if (
+ redef_node in orig_parent.orelse
+ and orig_node not in orig_parent.orelse
+ ):
orig_node, orig_type = redef_node, redef_type
continue
- elif (isinstance(redef_parent, astroid.If) and
- redef_parent in orig_parent.nodes_of_class(astroid.If)):
+ elif isinstance(
+ redef_parent, astroid.If
+ ) and redef_parent in orig_parent.nodes_of_class(astroid.If):
orig_node, orig_type = redef_node, redef_type
continue
- orig_type = orig_type.replace(BUILTINS + ".", '')
- redef_type = redef_type.replace(BUILTINS + ".", '')
- self.add_message('redefined-variable-type', node=redef_node,
- args=(name, orig_type, redef_type))
+ orig_type = orig_type.replace(BUILTINS + ".", "")
+ redef_type = redef_type.replace(BUILTINS + ".", "")
+ self.add_message(
+ "redefined-variable-type",
+ node=redef_node,
+ args=(name, orig_type, redef_type),
+ )
break
def visit_assign(self, node):
@@ -94,7 +103,8 @@ class MultipleTypesChecker(BaseChecker):
_type = node_type(node.value)
if _type:
self._assigns[-1].setdefault(target.as_string(), []).append(
- (node, _type.pytype()))
+ (node, _type.pytype())
+ )
def register(linter):
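
MultipleTypesChecker, reshaped above, records the inferred type of every assignment target per scope and emits R0204 when a later assignment changes that type. A rough sketch of the bookkeeping, with built-in type names standing in for pytype() and without the if/else branch handling shown in the diff (check_redefined_types and its sample input are invented for illustration):

    def check_redefined_types(assignments):
        """Warn when a name is rebound to a value of a different type.

        `assignments` is a list of (name, value) pairs in source order,
        a stand-in for the per-scope dict built in visit_assign().
        """
        first_type = {}  # name -> type name seen at the first assignment
        warnings = []
        for name, value in assignments:
            new_type = type(value).__name__
            old_type = first_type.setdefault(name, new_type)
            if old_type != new_type:
                warnings.append(
                    "Redefinition of %s type from %s to %s" % (name, old_type, new_type)
                )
        return warnings

    # x is bound to an int and later to a str, which is what R0204 reports.
    print(check_redefined_types([("x", 1), ("y", []), ("x", "one")]))
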
diff --git a/pylint/graph.py b/pylint/graph.py
index 2f8f9f4ed..f7790dae8 100644
--- a/pylint/graph.py
+++ b/pylint/graph.py
@@ -17,18 +17,28 @@ import sys
import tempfile
import codecs
+
def target_info_from_filename(filename):
"""Transforms /some/path/foo.png into ('/some/path', 'foo.png', 'png')."""
basename = osp.basename(filename)
storedir = osp.dirname(osp.abspath(filename))
- target = filename.split('.')[-1]
+ target = filename.split(".")[-1]
return storedir, basename, target
class DotBackend:
"""Dot File backend."""
- def __init__(self, graphname, rankdir=None, size=None, ratio=None,
- charset='utf-8', renderer='dot', additional_param=None):
+
+ def __init__(
+ self,
+ graphname,
+ rankdir=None,
+ size=None,
+ ratio=None,
+ charset="utf-8",
+ renderer="dot",
+ additional_param=None,
+ ):
if additional_param is None:
additional_param = {}
self.graphname = graphname
@@ -37,23 +47,24 @@ class DotBackend:
self._source = None
self.emit("digraph %s {" % normalize_node_id(graphname))
if rankdir:
- self.emit('rankdir=%s' % rankdir)
+ self.emit("rankdir=%s" % rankdir)
if ratio:
- self.emit('ratio=%s' % ratio)
+ self.emit("ratio=%s" % ratio)
if size:
self.emit('size="%s"' % size)
if charset:
- assert charset.lower() in ('utf-8', 'iso-8859-1', 'latin1'), \
- 'unsupported charset %s' % charset
+ assert charset.lower() in ("utf-8", "iso-8859-1", "latin1"), (
+ "unsupported charset %s" % charset
+ )
self.emit('charset="%s"' % charset)
for param in additional_param.items():
- self.emit('='.join(param))
+ self.emit("=".join(param))
def get_source(self):
"""returns self._source"""
if self._source is None:
self.emit("}\n")
- self._source = '\n'.join(self.lines)
+ self._source = "\n".join(self.lines)
del self.lines
return self._source
@@ -69,14 +80,15 @@ class DotBackend:
:rtype: str
:return: a path to the generated file
"""
- import subprocess # introduced in py 2.4
+ import subprocess # introduced in py 2.4
+
name = self.graphname
if not dotfile:
# if 'outputfile' is a dot file use it as 'dotfile'
if outputfile and outputfile.endswith(".dot"):
dotfile = outputfile
else:
- dotfile = '%s.dot' % name
+ dotfile = "%s.dot" % name
if outputfile is not None:
storedir, _, target = target_info_from_filename(outputfile)
if target != "dot":
@@ -85,25 +97,36 @@ class DotBackend:
else:
dot_sourcepath = osp.join(storedir, dotfile)
else:
- target = 'png'
+ target = "png"
pdot, dot_sourcepath = tempfile.mkstemp(".dot", name)
ppng, outputfile = tempfile.mkstemp(".png", name)
os.close(pdot)
os.close(ppng)
- pdot = codecs.open(dot_sourcepath, 'w', encoding='utf8')
+ pdot = codecs.open(dot_sourcepath, "w", encoding="utf8")
pdot.write(self.source)
pdot.close()
- if target != 'dot':
- use_shell = sys.platform == 'win32'
+ if target != "dot":
+ use_shell = sys.platform == "win32"
if mapfile:
- subprocess.call([self.renderer, '-Tcmapx', '-o',
- mapfile, '-T', target, dot_sourcepath,
- '-o', outputfile],
- shell=use_shell)
+ subprocess.call(
+ [
+ self.renderer,
+ "-Tcmapx",
+ "-o",
+ mapfile,
+ "-T",
+ target,
+ dot_sourcepath,
+ "-o",
+ outputfile,
+ ],
+ shell=use_shell,
+ )
else:
- subprocess.call([self.renderer, '-T', target,
- dot_sourcepath, '-o', outputfile],
- shell=use_shell)
+ subprocess.call(
+ [self.renderer, "-T", target, dot_sourcepath, "-o", outputfile],
+ shell=use_shell,
+ )
os.unlink(dot_sourcepath)
return outputfile
@@ -117,24 +140,26 @@ class DotBackend:
"""
attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
n_from, n_to = normalize_node_id(name1), normalize_node_id(name2)
- self.emit('%s -> %s [%s];' % (n_from, n_to, ', '.join(sorted(attrs))))
+ self.emit("%s -> %s [%s];" % (n_from, n_to, ", ".join(sorted(attrs))))
def emit_node(self, name, **props):
"""emit a node with given properties.
node properties: see http://www.graphviz.org/doc/info/attrs.html
"""
attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
- self.emit('%s [%s];' % (normalize_node_id(name), ', '.join(sorted(attrs))))
+ self.emit("%s [%s];" % (normalize_node_id(name), ", ".join(sorted(attrs))))
+
def normalize_node_id(nid):
"""Returns a suitable DOT node id for `nid`."""
return '"%s"' % nid
+
def get_cycles(graph_dict, vertices=None):
- '''given a dictionary representing an ordered graph (i.e. key are vertices
+ """given a dictionary representing an ordered graph (i.e. key are vertices
and values is a list of destination vertices representing edges), return a
list of detected cycles
- '''
+ """
if not graph_dict:
return ()
result = []
@@ -144,6 +169,7 @@ def get_cycles(graph_dict, vertices=None):
_get_cycles(graph_dict, [], set(), result, vertice)
return result
+
def _get_cycles(graph_dict, path, visited, result, vertice):
"""recursive function doing the real work for get_cycles"""
if vertice in path:
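
get_cycles(), whose docstring is converted above, walks an adjacency mapping depth-first and returns the cycles it finds. A small usage sketch of that documented interface (the example graph is invented, and the exact ordering of the returned cycle is not guaranteed here):

    from pylint.graph import get_cycles

    # Adjacency mapping in the format the docstring describes: each key is a
    # vertex, each value the list of vertices it has an edge to.
    graph = {
        "a": ["b"],
        "b": ["c"],
        "c": ["a"],  # closes the a -> b -> c -> a cycle
        "d": [],     # no outgoing edges, so never part of a cycle
    }
    print(get_cycles(graph))  # e.g. [['a', 'b', 'c']]
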
diff --git a/pylint/interfaces.py b/pylint/interfaces.py
index 8e6cda53e..378585c74 100644
--- a/pylint/interfaces.py
+++ b/pylint/interfaces.py
@@ -15,20 +15,21 @@
"""Interfaces for Pylint objects"""
from collections import namedtuple
-Confidence = namedtuple('Confidence', ['name', 'description'])
+Confidence = namedtuple("Confidence", ["name", "description"])
# Warning Certainties
-HIGH = Confidence('HIGH', 'No false positive possible.')
-INFERENCE = Confidence('INFERENCE', 'Warning based on inference result.')
-INFERENCE_FAILURE = Confidence('INFERENCE_FAILURE',
- 'Warning based on inference with failures.')
-UNDEFINED = Confidence('UNDEFINED',
- 'Warning without any associated confidence level.')
+HIGH = Confidence("HIGH", "No false positive possible.")
+INFERENCE = Confidence("INFERENCE", "Warning based on inference result.")
+INFERENCE_FAILURE = Confidence(
+ "INFERENCE_FAILURE", "Warning based on inference with failures."
+)
+UNDEFINED = Confidence("UNDEFINED", "Warning without any associated confidence level.")
CONFIDENCE_LEVELS = [HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED]
class Interface:
"""Base class for interfaces."""
+
@classmethod
def is_implemented_by(cls, instance):
return implements(instance, cls)
@@ -38,7 +39,7 @@ def implements(obj, interface):
"""Return true if the give object (maybe an instance or class) implements
the interface.
"""
- kimplements = getattr(obj, '__implements__', ())
+ kimplements = getattr(obj, "__implements__", ())
if not isinstance(kimplements, (list, tuple)):
kimplements = (kimplements,)
for implementedinterface in kimplements:
@@ -72,6 +73,7 @@ class IRawChecker(IChecker):
class ITokenChecker(IChecker):
"""Interface for checkers that need access to the token list."""
+
def process_tokens(self, tokens):
"""Process a module.
@@ -97,4 +99,4 @@ class IReporter(Interface):
"""
-__all__ = ('IRawChecker', 'IAstroidChecker', 'ITokenChecker', 'IReporter')
+__all__ = ("IRawChecker", "IAstroidChecker", "ITokenChecker", "IReporter")
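
implements(), touched above, just reads the __implements__ attribute of the object it is given and tests each declared interface with issubclass(). A short sketch of how a checker advertises an interface (DummyChecker is a made-up class, not one shipped with pylint):

    from pylint.interfaces import IRawChecker, ITokenChecker, implements

    class DummyChecker:
        # Checkers declare the interfaces they support via __implements__,
        # which implements() fetches with getattr(obj, "__implements__", ()).
        __implements__ = (IRawChecker,)

    print(implements(DummyChecker(), IRawChecker))    # True
    print(implements(DummyChecker(), ITokenChecker))  # False
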
diff --git a/pylint/lint.py b/pylint/lint.py
index 258f0ed91..6149ea001 100644
--- a/pylint/lint.py
+++ b/pylint/lint.py
@@ -64,6 +64,7 @@ import collections
import contextlib
import operator
import os
+
try:
import multiprocessing
except ImportError:
@@ -97,13 +98,8 @@ def _get_new_args(message):
message.line,
message.column,
)
- return (
- message.msg_id,
- message.symbol,
- location,
- message.msg,
- message.confidence,
- )
+ return (message.msg_id, message.symbol, location, message.msg, message.confidence)
+
def _get_python_path(filepath):
dirname = os.path.realpath(os.path.expanduser(filepath))
@@ -123,7 +119,7 @@ def _merge_stats(stats):
merged = {}
by_msg = collections.Counter()
for stat in stats:
- message_stats = stat.pop('by_msg', {})
+ message_stats = stat.pop("by_msg", {})
by_msg.update(message_stats)
for key, item in stat.items():
@@ -135,7 +131,7 @@ def _merge_stats(stats):
else:
merged[key] = merged[key] + item
- merged['by_msg'] = by_msg
+ merged["by_msg"] = by_msg
return merged
@@ -146,100 +142,119 @@ def _patch_sysmodules():
# For more details why this is needed,
# see Python issue http://bugs.python.org/issue10845.
- mock_main = __name__ != '__main__' # -m switch
+ mock_main = __name__ != "__main__" # -m switch
if mock_main:
- sys.modules['__main__'] = sys.modules[__name__]
+ sys.modules["__main__"] = sys.modules[__name__]
try:
yield
finally:
if mock_main:
- sys.modules.pop('__main__')
+ sys.modules.pop("__main__")
# Python Linter class #########################################################
MSGS = {
- 'F0001': ('%s',
- 'fatal',
- 'Used when an error occurred preventing the analysis of a \
- module (unable to find it for instance).'),
- 'F0002': ('%s: %s',
- 'astroid-error',
- 'Used when an unexpected error occurred while building the '
- 'Astroid representation. This is usually accompanied by a '
- 'traceback. Please report such errors !'),
- 'F0010': ('error while code parsing: %s',
- 'parse-error',
- 'Used when an exception occurred while building the Astroid '
- 'representation which could be handled by astroid.'),
-
- 'I0001': ('Unable to run raw checkers on built-in module %s',
- 'raw-checker-failed',
- 'Used to inform that a built-in module has not been checked '
- 'using the raw checkers.'),
-
- 'I0010': ('Unable to consider inline option %r',
- 'bad-inline-option',
- 'Used when an inline option is either badly formatted or can\'t '
- 'be used inside modules.'),
-
- 'I0011': ('Locally disabling %s (%s)',
- 'locally-disabled',
- 'Used when an inline option disables a message or a messages '
- 'category.'),
- 'I0013': ('Ignoring entire file',
- 'file-ignored',
- 'Used to inform that the file will not be checked'),
- 'I0020': ('Suppressed %s (from line %d)',
- 'suppressed-message',
- 'A message was triggered on a line, but suppressed explicitly '
- 'by a disable= comment in the file. This message is not '
- 'generated for messages that are ignored due to configuration '
- 'settings.'),
- 'I0021': ('Useless suppression of %s',
- 'useless-suppression',
- 'Reported when a message is explicitly disabled for a line or '
- 'a block of code, but never triggered.'),
- 'I0022': ('Pragma "%s" is deprecated, use "%s" instead',
- 'deprecated-pragma',
- 'Some inline pylint options have been renamed or reworked, '
- 'only the most recent form should be used. '
- 'NOTE:skip-all is only available with pylint >= 0.26',
- {'old_names': [('I0014', 'deprecated-disable-all')]}),
-
- 'E0001': ('%s',
- 'syntax-error',
- 'Used when a syntax error is raised for a module.'),
-
- 'E0011': ('Unrecognized file option %r',
- 'unrecognized-inline-option',
- 'Used when an unknown inline option is encountered.'),
- 'E0012': ('Bad option value %r',
- 'bad-option-value',
- 'Used when a bad value for an inline option is encountered.'),
- }
+ "F0001": (
+ "%s",
+ "fatal",
+ "Used when an error occurred preventing the analysis of a \
+ module (unable to find it for instance).",
+ ),
+ "F0002": (
+ "%s: %s",
+ "astroid-error",
+ "Used when an unexpected error occurred while building the "
+ "Astroid representation. This is usually accompanied by a "
+ "traceback. Please report such errors !",
+ ),
+ "F0010": (
+ "error while code parsing: %s",
+ "parse-error",
+ "Used when an exception occurred while building the Astroid "
+ "representation which could be handled by astroid.",
+ ),
+ "I0001": (
+ "Unable to run raw checkers on built-in module %s",
+ "raw-checker-failed",
+ "Used to inform that a built-in module has not been checked "
+ "using the raw checkers.",
+ ),
+ "I0010": (
+ "Unable to consider inline option %r",
+ "bad-inline-option",
+ "Used when an inline option is either badly formatted or can't "
+ "be used inside modules.",
+ ),
+ "I0011": (
+ "Locally disabling %s (%s)",
+ "locally-disabled",
+ "Used when an inline option disables a message or a messages " "category.",
+ ),
+ "I0013": (
+ "Ignoring entire file",
+ "file-ignored",
+ "Used to inform that the file will not be checked",
+ ),
+ "I0020": (
+ "Suppressed %s (from line %d)",
+ "suppressed-message",
+ "A message was triggered on a line, but suppressed explicitly "
+ "by a disable= comment in the file. This message is not "
+ "generated for messages that are ignored due to configuration "
+ "settings.",
+ ),
+ "I0021": (
+ "Useless suppression of %s",
+ "useless-suppression",
+ "Reported when a message is explicitly disabled for a line or "
+ "a block of code, but never triggered.",
+ ),
+ "I0022": (
+ 'Pragma "%s" is deprecated, use "%s" instead',
+ "deprecated-pragma",
+ "Some inline pylint options have been renamed or reworked, "
+ "only the most recent form should be used. "
+ "NOTE:skip-all is only available with pylint >= 0.26",
+ {"old_names": [("I0014", "deprecated-disable-all")]},
+ ),
+ "E0001": ("%s", "syntax-error", "Used when a syntax error is raised for a module."),
+ "E0011": (
+ "Unrecognized file option %r",
+ "unrecognized-inline-option",
+ "Used when an unknown inline option is encountered.",
+ ),
+ "E0012": (
+ "Bad option value %r",
+ "bad-option-value",
+ "Used when a bad value for an inline option is encountered.",
+ ),
+}
if multiprocessing is not None:
+
class ChildLinter(multiprocessing.Process):
def run(self):
# pylint: disable=no-member, unbalanced-tuple-unpacking
tasks_queue, results_queue, self._config = self._args
self._config["jobs"] = 1 # Child does not parallelize any further.
- self._python3_porting_mode = self._config.pop(
- 'python3_porting_mode', None)
- self._plugins = self._config.pop('plugins', None)
+ self._python3_porting_mode = self._config.pop("python3_porting_mode", None)
+ self._plugins = self._config.pop("plugins", None)
# Run linter for received files/modules.
- for file_or_module in iter(tasks_queue.get, 'STOP'):
+ for file_or_module in iter(tasks_queue.get, "STOP"):
try:
result = self._run_linter(file_or_module[0])
results_queue.put(result)
except Exception as ex:
- print("internal error with sending report for module %s" %
- file_or_module, file=sys.stderr)
+ print(
+ "internal error with sending report for module %s"
+ % file_or_module,
+ file=sys.stderr,
+ )
print(ex, file=sys.stderr)
results_queue.put({})
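
The worker loop above relies on the two-argument form of iter(): tasks_queue.get is called repeatedly until it returns the "STOP" sentinel. A tiny illustration of that idiom with a plain queue (the queued file names are invented):

    from queue import Queue

    tasks = Queue()
    for item in ["mod_a.py", "mod_b.py", "STOP"]:
        tasks.put(item)

    # iter(callable, sentinel) keeps calling tasks.get() and stops as soon as
    # it returns the sentinel, much like the child linter's task loop.
    for task in iter(tasks.get, "STOP"):
        print("linting", task)
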
@@ -266,15 +281,23 @@ if multiprocessing is not None:
linter.check(file_or_module)
msgs = [_get_new_args(m) for m in linter.reporter.messages]
- return (file_or_module, linter.file_state.base_name, linter.current_name,
- msgs, linter.stats, linter.msg_status)
+ return (
+ file_or_module,
+ linter.file_state.base_name,
+ linter.current_name,
+ msgs,
+ linter.stats,
+ linter.msg_status,
+ )
# pylint: disable=too-many-instance-attributes
-class PyLinter(config.OptionsManagerMixIn,
- utils.MessagesHandlerMixIn,
- utils.ReportsHandlerMixIn,
- checkers.BaseTokenChecker):
+class PyLinter(
+ config.OptionsManagerMixIn,
+ utils.MessagesHandlerMixIn,
+ utils.ReportsHandlerMixIn,
+ checkers.BaseTokenChecker,
+):
"""lint Python modules using external checkers.
This is the main checker controlling the other ones and the reports
@@ -288,165 +311,262 @@ class PyLinter(config.OptionsManagerMixIn,
to ensure the latest code version is actually checked.
"""
- __implements__ = (interfaces.ITokenChecker, )
+ __implements__ = (interfaces.ITokenChecker,)
- name = 'master'
+ name = "master"
priority = 0
level = 0
msgs = MSGS
@staticmethod
def make_options():
- return (('ignore',
- {'type' : 'csv', 'metavar' : '<file>[,<file>...]',
- 'dest' : 'black_list', 'default' : ('CVS',),
- 'help' : 'Add files or directories to the blacklist. '
- 'They should be base names, not paths.'}),
-
- ('ignore-patterns',
- {'type' : 'regexp_csv', 'metavar' : '<pattern>[,<pattern>...]',
- 'dest' : 'black_list_re', 'default' : (),
- 'help' : 'Add files or directories matching the regex patterns to the'
- ' blacklist. The regex matches against base names, not paths.'}),
-
- ('persistent',
- {'default': True, 'type' : 'yn', 'metavar' : '<y_or_n>',
- 'level': 1,
- 'help' : 'Pickle collected data for later comparisons.'}),
-
- ('load-plugins',
- {'type' : 'csv', 'metavar' : '<modules>', 'default' : (),
- 'level': 1,
- 'help' : 'List of plugins (as comma separated values of '
- 'python modules names) to load, usually to register '
- 'additional checkers.'}),
-
- ('output-format',
- {'default': 'text', 'type': 'string', 'metavar' : '<format>',
- 'short': 'f',
- 'group': 'Reports',
- 'help' : 'Set the output format. Available formats are text,'
- ' parseable, colorized, json and msvs (visual studio).'
- ' You can also give a reporter class, e.g. mypackage.mymodule.'
- 'MyReporterClass.'}),
-
- ('reports',
- {'default': False, 'type' : 'yn', 'metavar' : '<y_or_n>',
- 'short': 'r',
- 'group': 'Reports',
- 'help' : 'Tells whether to display a full report or only the '
- 'messages.'}),
-
- ('evaluation',
- {'type' : 'string', 'metavar' : '<python_expression>',
- 'group': 'Reports', 'level': 1,
- 'default': '10.0 - ((float(5 * error + warning + refactor + '
- 'convention) / statement) * 10)',
- 'help' : 'Python expression which should return a note less '
- 'than 10 (10 is the highest note). You have access '
- 'to the variables errors warning, statement which '
- 'respectively contain the number of errors / '
- 'warnings messages and the total number of '
- 'statements analyzed. This is used by the global '
- 'evaluation report (RP0004).'}),
- ('score',
- {'default': True, 'type': 'yn', 'metavar': '<y_or_n>',
- 'short': 's',
- 'group': 'Reports',
- 'help': 'Activate the evaluation score.'}),
-
- ('confidence',
- {'type' : 'multiple_choice', 'metavar': '<levels>',
- 'default': '',
- 'choices': [c.name for c in interfaces.CONFIDENCE_LEVELS],
- 'group': 'Messages control',
- 'help' : 'Only show warnings with the listed confidence levels.'
- ' Leave empty to show all. Valid levels: %s.' % (
- ', '.join(c.name for c in interfaces.CONFIDENCE_LEVELS),)}),
-
- ('enable',
- {'type' : 'csv', 'metavar': '<msg ids>',
- 'short': 'e',
- 'group': 'Messages control',
- 'help' : 'Enable the message, report, category or checker with the '
- 'given id(s). You can either give multiple identifier '
- 'separated by comma (,) or put this option multiple time '
- '(only on the command line, not in the configuration file '
- 'where it should appear only once). '
- 'See also the "--disable" option for examples.'}),
-
- ('disable',
- {'type' : 'csv', 'metavar': '<msg ids>',
- 'short': 'd',
- 'group': 'Messages control',
- 'help' : 'Disable the message, report, category or checker '
- 'with the given id(s). You can either give multiple identifiers '
- 'separated by comma (,) or put this option multiple times '
- '(only on the command line, not in the configuration file '
- 'where it should appear only once). '
- 'You can also use "--disable=all" to disable everything first '
- 'and then reenable specific checks. For example, if you want '
- 'to run only the similarities checker, you can use '
- '"--disable=all --enable=similarities". '
- 'If you want to run only the classes checker, but have no '
- 'Warning level messages displayed, use '
- '"--disable=all --enable=classes --disable=W".'}),
-
- ('msg-template',
- {'type' : 'string', 'metavar': '<template>',
- 'group': 'Reports',
- 'help' : ('Template used to display messages. '
- 'This is a python new-style format string '
- 'used to format the message information. '
- 'See doc for all details.')
- }),
-
- ('jobs',
- {'type' : 'int', 'metavar': '<n-processes>',
- 'short': 'j',
- 'default': 1,
- 'help' : 'Use multiple processes to speed up Pylint. Specifying 0 will '
- 'auto-detect the number of processors available to use.',
- }),
-
- ('unsafe-load-any-extension',
- {'type': 'yn', 'metavar': '<yn>', 'default': False, 'hide': True,
- 'help': ('Allow loading of arbitrary C extensions. Extensions'
- ' are imported into the active Python interpreter and'
- ' may run arbitrary code.')}),
-
- ('limit-inference-results',
- {'type': 'int', 'metavar': '<number-of-results>', 'default': 100,
- 'help': ('Control the amount of potential inferred values when inferring '
- 'a single object. This can help the performance when dealing with '
- 'large functions or complex, nested conditions. ')}),
-
- ('extension-pkg-whitelist',
- {'type': 'csv', 'metavar': '<pkg[,pkg]>', 'default': [],
- 'help': ('A comma-separated list of package or module names'
- ' from where C extensions may be loaded. Extensions are'
- ' loading into the active Python interpreter and may run'
- ' arbitrary code.')}),
- ('suggestion-mode',
- {'type': 'yn', 'metavar': '<yn>', 'default': True,
- 'help': ('When enabled, pylint would attempt to guess common '
- 'misconfiguration and emit user-friendly hints instead '
- 'of false-positive error messages.')}),
-
- ('exit-zero',
- {'action': 'store_true',
- 'help': ('Always return a 0 (non-error) status code, even if '
- 'lint errors are found. This is primarily useful in '
- 'continuous integration scripts.')}),
- )
+ return (
+ (
+ "ignore",
+ {
+ "type": "csv",
+ "metavar": "<file>[,<file>...]",
+ "dest": "black_list",
+ "default": ("CVS",),
+ "help": "Add files or directories to the blacklist. "
+ "They should be base names, not paths.",
+ },
+ ),
+ (
+ "ignore-patterns",
+ {
+ "type": "regexp_csv",
+ "metavar": "<pattern>[,<pattern>...]",
+ "dest": "black_list_re",
+ "default": (),
+ "help": "Add files or directories matching the regex patterns to the"
+ " blacklist. The regex matches against base names, not paths.",
+ },
+ ),
+ (
+ "persistent",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "level": 1,
+ "help": "Pickle collected data for later comparisons.",
+ },
+ ),
+ (
+ "load-plugins",
+ {
+ "type": "csv",
+ "metavar": "<modules>",
+ "default": (),
+ "level": 1,
+ "help": "List of plugins (as comma separated values of "
+ "python modules names) to load, usually to register "
+ "additional checkers.",
+ },
+ ),
+ (
+ "output-format",
+ {
+ "default": "text",
+ "type": "string",
+ "metavar": "<format>",
+ "short": "f",
+ "group": "Reports",
+ "help": "Set the output format. Available formats are text,"
+ " parseable, colorized, json and msvs (visual studio)."
+ " You can also give a reporter class, e.g. mypackage.mymodule."
+ "MyReporterClass.",
+ },
+ ),
+ (
+ "reports",
+ {
+ "default": False,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "short": "r",
+ "group": "Reports",
+ "help": "Tells whether to display a full report or only the "
+ "messages.",
+ },
+ ),
+ (
+ "evaluation",
+ {
+ "type": "string",
+ "metavar": "<python_expression>",
+ "group": "Reports",
+ "level": 1,
+ "default": "10.0 - ((float(5 * error + warning + refactor + "
+ "convention) / statement) * 10)",
+ "help": "Python expression which should return a note less "
+ "than 10 (10 is the highest note). You have access "
+ "to the variables errors warning, statement which "
+ "respectively contain the number of errors / "
+ "warnings messages and the total number of "
+ "statements analyzed. This is used by the global "
+ "evaluation report (RP0004).",
+ },
+ ),
+ (
+ "score",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "short": "s",
+ "group": "Reports",
+ "help": "Activate the evaluation score.",
+ },
+ ),
+ (
+ "confidence",
+ {
+ "type": "multiple_choice",
+ "metavar": "<levels>",
+ "default": "",
+ "choices": [c.name for c in interfaces.CONFIDENCE_LEVELS],
+ "group": "Messages control",
+ "help": "Only show warnings with the listed confidence levels."
+ " Leave empty to show all. Valid levels: %s."
+ % (", ".join(c.name for c in interfaces.CONFIDENCE_LEVELS),),
+ },
+ ),
+ (
+ "enable",
+ {
+ "type": "csv",
+ "metavar": "<msg ids>",
+ "short": "e",
+ "group": "Messages control",
+ "help": "Enable the message, report, category or checker with the "
+ "given id(s). You can either give multiple identifier "
+ "separated by comma (,) or put this option multiple time "
+ "(only on the command line, not in the configuration file "
+ "where it should appear only once). "
+ 'See also the "--disable" option for examples.',
+ },
+ ),
+ (
+ "disable",
+ {
+ "type": "csv",
+ "metavar": "<msg ids>",
+ "short": "d",
+ "group": "Messages control",
+ "help": "Disable the message, report, category or checker "
+ "with the given id(s). You can either give multiple identifiers "
+ "separated by comma (,) or put this option multiple times "
+ "(only on the command line, not in the configuration file "
+ "where it should appear only once). "
+ 'You can also use "--disable=all" to disable everything first '
+ "and then reenable specific checks. For example, if you want "
+ "to run only the similarities checker, you can use "
+ '"--disable=all --enable=similarities". '
+ "If you want to run only the classes checker, but have no "
+ "Warning level messages displayed, use "
+ '"--disable=all --enable=classes --disable=W".',
+ },
+ ),
+ (
+ "msg-template",
+ {
+ "type": "string",
+ "metavar": "<template>",
+ "group": "Reports",
+ "help": (
+ "Template used to display messages. "
+ "This is a python new-style format string "
+ "used to format the message information. "
+ "See doc for all details."
+ ),
+ },
+ ),
+ (
+ "jobs",
+ {
+ "type": "int",
+ "metavar": "<n-processes>",
+ "short": "j",
+ "default": 1,
+ "help": "Use multiple processes to speed up Pylint. Specifying 0 will "
+ "auto-detect the number of processors available to use.",
+ },
+ ),
+ (
+ "unsafe-load-any-extension",
+ {
+ "type": "yn",
+ "metavar": "<yn>",
+ "default": False,
+ "hide": True,
+ "help": (
+ "Allow loading of arbitrary C extensions. Extensions"
+ " are imported into the active Python interpreter and"
+ " may run arbitrary code."
+ ),
+ },
+ ),
+ (
+ "limit-inference-results",
+ {
+ "type": "int",
+ "metavar": "<number-of-results>",
+ "default": 100,
+ "help": (
+ "Control the amount of potential inferred values when inferring "
+ "a single object. This can help the performance when dealing with "
+ "large functions or complex, nested conditions. "
+ ),
+ },
+ ),
+ (
+ "extension-pkg-whitelist",
+ {
+ "type": "csv",
+ "metavar": "<pkg[,pkg]>",
+ "default": [],
+ "help": (
+ "A comma-separated list of package or module names"
+ " from where C extensions may be loaded. Extensions are"
+ " loading into the active Python interpreter and may run"
+ " arbitrary code."
+ ),
+ },
+ ),
+ (
+ "suggestion-mode",
+ {
+ "type": "yn",
+ "metavar": "<yn>",
+ "default": True,
+ "help": (
+ "When enabled, pylint would attempt to guess common "
+ "misconfiguration and emit user-friendly hints instead "
+ "of false-positive error messages."
+ ),
+ },
+ ),
+ (
+ "exit-zero",
+ {
+ "action": "store_true",
+ "help": (
+ "Always return a 0 (non-error) status code, even if "
+ "lint errors are found. This is primarily useful in "
+ "continuous integration scripts."
+ ),
+ },
+ ),
+ )
option_groups = (
- ('Messages control', 'Options controlling analysis messages'),
- ('Reports', 'Options related to output formatting and reporting'),
- )
+ ("Messages control", "Options controlling analysis messages"),
+ ("Reports", "Options related to output formatting and reporting"),
+ )
- def __init__(self, options=(), reporter=None, option_groups=(),
- pylintrc=None):
+ def __init__(self, options=(), reporter=None, option_groups=(), pylintrc=None):
# some stuff has to be done before ancestors initialization...
#
# messages store / checkers / reporter / astroid manager
@@ -466,28 +586,32 @@ class PyLinter(config.OptionsManagerMixIn,
self._external_opts = options
self.options = options + PyLinter.make_options()
self.option_groups = option_groups + PyLinter.option_groups
- self._options_methods = {
- 'enable': self.enable,
- 'disable': self.disable}
- self._bw_options_methods = {'disable-msg': self.disable,
- 'enable-msg': self.enable}
- full_version = '%%prog %s\nastroid %s\nPython %s' % (
- version, astroid_version, sys.version)
+ self._options_methods = {"enable": self.enable, "disable": self.disable}
+ self._bw_options_methods = {
+ "disable-msg": self.disable,
+ "enable-msg": self.enable,
+ }
+ full_version = "%%prog %s\nastroid %s\nPython %s" % (
+ version,
+ astroid_version,
+ sys.version,
+ )
utils.MessagesHandlerMixIn.__init__(self)
utils.ReportsHandlerMixIn.__init__(self)
super(PyLinter, self).__init__(
- usage=__doc__,
- version=full_version,
- config_file=pylintrc or config.PYLINTRC)
+ usage=__doc__, version=full_version, config_file=pylintrc or config.PYLINTRC
+ )
checkers.BaseTokenChecker.__init__(self)
# provided reports
- self.reports = (('RP0001', 'Messages by category',
- report_total_messages_stats),
- ('RP0002', '% errors / warnings by module',
- report_messages_by_module_stats),
- ('RP0003', 'Messages',
- report_messages_stats),
- )
+ self.reports = (
+ ("RP0001", "Messages by category", report_total_messages_stats),
+ (
+ "RP0002",
+ "% errors / warnings by module",
+ report_messages_by_module_stats,
+ ),
+ ("RP0003", "Messages", report_messages_stats),
+ )
self.register_checker(self)
self._dynamic_plugins = set()
self._python3_porting_mode = False
@@ -529,9 +653,8 @@ class PyLinter(config.OptionsManagerMixIn,
def _load_reporter_class(self):
qname = self._reporter_name
- module = modutils.load_module_from_name(
- modutils.get_module_part(qname))
- class_name = qname.split('.')[-1]
+ module = modutils.load_module_from_name(modutils.get_module_part(qname))
+ class_name = qname.split(".")[-1]
reporter_class = getattr(module, class_name)
return reporter_class
@@ -544,24 +667,25 @@ class PyLinter(config.OptionsManagerMixIn,
"""overridden from config.OptionsProviderMixin to handle some
special options
"""
- if optname in self._options_methods or \
- optname in self._bw_options_methods:
+ if optname in self._options_methods or optname in self._bw_options_methods:
if value:
try:
meth = self._options_methods[optname]
except KeyError:
meth = self._bw_options_methods[optname]
- warnings.warn('%s is deprecated, replace it by %s' % (optname,
- optname.split('-')[0]),
- DeprecationWarning)
+ warnings.warn(
+ "%s is deprecated, replace it by %s"
+ % (optname, optname.split("-")[0]),
+ DeprecationWarning,
+ )
value = utils._check_csv(value)
if isinstance(value, (list, tuple)):
for _id in value:
meth(_id, ignore_unknown=True)
else:
meth(value)
- return # no need to call set_option, disable/enable methods do it
- elif optname == 'output-format':
+ return # no need to call set_option, disable/enable methods do it
+ elif optname == "output-format":
self._reporter_name = value
# If the reporters are already available, load
# the reporter class.
@@ -569,17 +693,15 @@ class PyLinter(config.OptionsManagerMixIn,
self._load_reporter()
try:
- checkers.BaseTokenChecker.set_option(self, optname,
- value, action, optdict)
+ checkers.BaseTokenChecker.set_option(self, optname, value, action, optdict)
except config.UnsupportedAction:
- print('option %s can\'t be read from config file' % \
- optname, file=sys.stderr)
+ print("option %s can't be read from config file" % optname, file=sys.stderr)
def register_reporter(self, reporter_class):
self._reporters[reporter_class.name] = reporter_class
def report_order(self):
- reports = sorted(self._reports, key=lambda x: getattr(x, 'name', ''))
+ reports = sorted(self._reports, key=lambda x: getattr(x, "name", ""))
try:
# Remove the current reporter and add it
# at the end of the list.
@@ -597,24 +719,24 @@ class PyLinter(config.OptionsManagerMixIn,
checker is an object implementing IRawChecker or / and IAstroidChecker
"""
- assert checker.priority <= 0, 'checker priority can\'t be >= 0'
+ assert checker.priority <= 0, "checker priority can't be >= 0"
self._checkers[checker.name].append(checker)
for r_id, r_title, r_cb in checker.reports:
self.register_report(r_id, r_title, r_cb, checker)
self.register_options_provider(checker)
- if hasattr(checker, 'msgs'):
+ if hasattr(checker, "msgs"):
self.msgs_store.register_messages(checker)
checker.load_defaults()
# Register the checker, but disable all of its messages.
# TODO(cpopa): we should have a better API for this.
- if not getattr(checker, 'enabled', True):
+ if not getattr(checker, "enabled", True):
self.disable(checker.name)
def disable_noerror_messages(self):
for msgcat, msgids in self.msgs_store._msgs_by_category.items():
# enable only messages with 'error' severity and above ('fatal')
- if msgcat in ['E', 'F']:
+ if msgcat in ["E", "F"]:
for msgid in msgids:
self.enable(msgid)
else:
@@ -631,39 +753,39 @@ class PyLinter(config.OptionsManagerMixIn,
"""error mode: enable only errors; no reports, no persistent"""
self._error_mode = True
self.disable_noerror_messages()
- self.disable('miscellaneous')
+ self.disable("miscellaneous")
if self._python3_porting_mode:
- self.disable('all')
- for msg_id in self._checker_messages('python3'):
- if msg_id.startswith('E'):
+ self.disable("all")
+ for msg_id in self._checker_messages("python3"):
+ if msg_id.startswith("E"):
self.enable(msg_id)
config_parser = self.cfgfile_parser
- if config_parser.has_option('MESSAGES CONTROL', 'disable'):
- value = config_parser.get('MESSAGES CONTROL', 'disable')
- self.global_set_option('disable', value)
+ if config_parser.has_option("MESSAGES CONTROL", "disable"):
+ value = config_parser.get("MESSAGES CONTROL", "disable")
+ self.global_set_option("disable", value)
else:
- self.disable('python3')
- self.set_option('reports', False)
- self.set_option('persistent', False)
- self.set_option('score', False)
+ self.disable("python3")
+ self.set_option("reports", False)
+ self.set_option("persistent", False)
+ self.set_option("score", False)
def python3_porting_mode(self):
"""Disable all other checkers and enable Python 3 warnings."""
- self.disable('all')
- self.enable('python3')
+ self.disable("all")
+ self.enable("python3")
if self._error_mode:
# The error mode was activated, using the -E flag.
# So we'll need to enable only the errors from the
# Python 3 porting checker.
- for msg_id in self._checker_messages('python3'):
- if msg_id.startswith('E'):
+ for msg_id in self._checker_messages("python3"):
+ if msg_id.startswith("E"):
self.enable(msg_id)
else:
self.disable(msg_id)
config_parser = self.cfgfile_parser
- if config_parser.has_option('MESSAGES CONTROL', 'disable'):
- value = config_parser.get('MESSAGES CONTROL', 'disable')
- self.global_set_option('disable', value)
+ if config_parser.has_option("MESSAGES CONTROL", "disable"):
+ value = config_parser.get("MESSAGES CONTROL", "disable")
+ self.global_set_option("disable", value)
self._python3_porting_mode = True
# block level option handling #############################################
@@ -674,26 +796,32 @@ class PyLinter(config.OptionsManagerMixIn,
"""process tokens from the current module to search for module/block
level options
"""
- control_pragmas = {'disable', 'enable'}
+ control_pragmas = {"disable", "enable"}
for (tok_type, content, start, _, _) in tokens:
if tok_type != tokenize.COMMENT:
continue
match = utils.OPTION_RGX.search(content)
if match is None:
continue
- if match.group(1).strip() == "disable-all" or \
- match.group(1).strip() == 'skip-file':
+ if (
+ match.group(1).strip() == "disable-all"
+ or match.group(1).strip() == "skip-file"
+ ):
if match.group(1).strip() == "disable-all":
- self.add_message('deprecated-pragma', line=start[0],
- args=('disable-all', 'skip-file'))
- self.add_message('file-ignored', line=start[0])
+ self.add_message(
+ "deprecated-pragma",
+ line=start[0],
+ args=("disable-all", "skip-file"),
+ )
+ self.add_message("file-ignored", line=start[0])
self._ignore_file = True
return
try:
- opt, value = match.group(1).split('=', 1)
+ opt, value = match.group(1).split("=", 1)
except ValueError:
- self.add_message('bad-inline-option', args=match.group(1).strip(),
- line=start[0])
+ self.add_message(
+ "bad-inline-option", args=match.group(1).strip(), line=start[0]
+ )
continue
opt = opt.strip()
if opt in self._options_methods or opt in self._bw_options_methods:
@@ -702,41 +830,52 @@ class PyLinter(config.OptionsManagerMixIn,
except KeyError:
meth = self._bw_options_methods[opt]
# found a "(dis|en)able-msg" pragma deprecated suppression
- self.add_message('deprecated-pragma', line=start[0],
- args=(opt, opt.replace('-msg', '')))
+ self.add_message(
+ "deprecated-pragma",
+ line=start[0],
+ args=(opt, opt.replace("-msg", "")),
+ )
for msgid in utils._splitstrip(value):
# Add the line where a control pragma was encountered.
if opt in control_pragmas:
self._pragma_lineno[msgid] = start[0]
try:
- if (opt, msgid) == ('disable', 'all'):
- self.add_message('deprecated-pragma', line=start[0],
- args=('disable=all', 'skip-file'))
- self.add_message('file-ignored', line=start[0])
+ if (opt, msgid) == ("disable", "all"):
+ self.add_message(
+ "deprecated-pragma",
+ line=start[0],
+ args=("disable=all", "skip-file"),
+ )
+ self.add_message("file-ignored", line=start[0])
self._ignore_file = True
return
- comments_sharp_sep = content.split('#')[1:]
+ comments_sharp_sep = content.split("#")[1:]
first_comment = "#" + comments_sharp_sep[0]
- first_comment_match_disable = utils.OPTION_RGX.search(first_comment)
- # Deactivate msg emission for whole module only if
+ first_comment_match_disable = utils.OPTION_RGX.search(
+ first_comment
+ )
+ #  Deactivate msg emission for whole module only if
# we are sure the disable directive is the first comment.
# If not then it refers to the comment before
# and not to the module itself.
if first_comment_match_disable:
- meth(msgid, 'module', start[0])
+ meth(msgid, "module", start[0])
except exceptions.UnknownMessageError:
- self.add_message('bad-option-value', args=msgid, line=start[0])
+ self.add_message("bad-option-value", args=msgid, line=start[0])
else:
- self.add_message('unrecognized-inline-option', args=opt, line=start[0])
-
+ self.add_message("unrecognized-inline-option", args=opt, line=start[0])
# code checking methods ###################################################
def get_checkers(self):
"""return all available checkers as a list"""
- return [self] + [c for _checkers in self._checkers.values()
- for c in _checkers if c is not self]
+ return [self] + [
+ c
+ for _checkers in self._checkers.values()
+ for c in _checkers
+ if c is not self
+ ]
def prepare_checkers(self):
"""return checkers needed for activated messages and reports"""
@@ -745,15 +884,13 @@ class PyLinter(config.OptionsManagerMixIn,
# get needed checkers
neededcheckers = [self]
for checker in self.get_checkers()[1:]:
- messages = {msg for msg in checker.msgs
- if self.is_message_enabled(msg)}
- if (messages or
- any(self.report_is_enabled(r[0]) for r in checker.reports)):
+ messages = {msg for msg in checker.msgs if self.is_message_enabled(msg)}
+ if messages or any(self.report_is_enabled(r[0]) for r in checker.reports):
neededcheckers.append(checker)
# Sort checkers by priority
- neededcheckers = sorted(neededcheckers,
- key=operator.attrgetter('priority'),
- reverse=True)
+ neededcheckers = sorted(
+ neededcheckers, key=operator.attrgetter("priority"), reverse=True
+ )
return neededcheckers
# pylint: disable=unused-argument
@@ -777,7 +914,8 @@ class PyLinter(config.OptionsManagerMixIn,
"""
if is_argument:
return True
- return path.endswith('.py')
+ return path.endswith(".py")
+
# pylint: enable=unused-argument
def check(self, files_or_modules):
@@ -801,18 +939,17 @@ class PyLinter(config.OptionsManagerMixIn,
def _get_jobs_config(self):
child_config = collections.OrderedDict()
- filter_options = {'long-help'}
+ filter_options = {"long-help"}
filter_options.update((opt_name for opt_name, _ in self._external_opts))
for opt_providers in self._all_options.values():
for optname, optdict, val in opt_providers.options_and_values():
- if optdict.get('deprecated'):
+ if optdict.get("deprecated"):
continue
if optname not in filter_options:
- child_config[optname] = utils._format_option_value(
- optdict, val)
- child_config['python3_porting_mode'] = self._python3_porting_mode
- child_config['plugins'] = self._dynamic_plugins
+ child_config[optname] = utils._format_option_value(optdict, val)
+ child_config["python3_porting_mode"] = self._python3_porting_mode
+ child_config["plugins"] = self._dynamic_plugins
return child_config
def _parallel_task(self, files_or_modules):
@@ -827,19 +964,18 @@ class PyLinter(config.OptionsManagerMixIn,
# Send files to child linters.
expanded_files = []
for descr in self.expand_files(files_or_modules):
- modname, filepath, is_arg = descr['name'], descr['path'], descr['isarg']
+ modname, filepath, is_arg = descr["name"], descr["path"], descr["isarg"]
if self.should_analyze_file(modname, filepath, is_argument=is_arg):
expanded_files.append(descr)
# do not start more jobs than needed
for _ in range(min(self.config.jobs, len(expanded_files))):
- child_linter = ChildLinter(args=(tasks_queue, results_queue,
- child_config))
+ child_linter = ChildLinter(args=(tasks_queue, results_queue, child_config))
child_linter.start()
children.append(child_linter)
for files_or_module in expanded_files:
- path = files_or_module['path']
+ path = files_or_module["path"]
tasks_queue.put([path])
# collect results from child linters
@@ -848,8 +984,10 @@ class PyLinter(config.OptionsManagerMixIn,
try:
result = results_queue.get()
except Exception as ex:
- print("internal error while receiving results from child linter",
- file=sys.stderr)
+ print(
+ "internal error while receiving results from child linter",
+ file=sys.stderr,
+ )
print(ex, file=sys.stderr)
failed = True
break
@@ -857,7 +995,7 @@ class PyLinter(config.OptionsManagerMixIn,
# Stop child linters and wait for their completion.
for _ in range(self.config.jobs):
- tasks_queue.put('STOP')
+ tasks_queue.put("STOP")
for child in children:
child.join()
@@ -874,14 +1012,7 @@ class PyLinter(config.OptionsManagerMixIn,
for result in self._parallel_task(files_or_modules):
if not result:
continue
- (
- _,
- self.file_state.base_name,
- module,
- messages,
- stats,
- msg_status
- ) = result
+ (_, self.file_state.base_name, module, messages, stats, msg_status) = result
for msg in messages:
msg = utils.Message(*msg)
@@ -902,11 +1033,14 @@ class PyLinter(config.OptionsManagerMixIn,
def _do_check(self, files_or_modules):
walker = utils.PyLintASTWalker(self)
_checkers = self.prepare_checkers()
- tokencheckers = [c for c in _checkers
- if interfaces.implements(c, interfaces.ITokenChecker)
- and c is not self]
- rawcheckers = [c for c in _checkers
- if interfaces.implements(c, interfaces.IRawChecker)]
+ tokencheckers = [
+ c
+ for c in _checkers
+ if interfaces.implements(c, interfaces.ITokenChecker) and c is not self
+ ]
+ rawcheckers = [
+ c for c in _checkers if interfaces.implements(c, interfaces.IRawChecker)
+ ]
# notify global begin
for checker in _checkers:
checker.open()
@@ -914,7 +1048,7 @@ class PyLinter(config.OptionsManagerMixIn,
walker.add_checker(checker)
# build ast and check modules or packages
for descr in self.expand_files(files_or_modules):
- modname, filepath, is_arg = descr['name'], descr['path'], descr['isarg']
+ modname, filepath, is_arg = descr["name"], descr["path"], descr["isarg"]
if not self.should_analyze_file(modname, filepath, is_argument=is_arg):
continue
@@ -926,32 +1060,35 @@ class PyLinter(config.OptionsManagerMixIn,
# XXX to be correct we need to keep module_msgs_state for every
# analyzed module (the problem stands with localized messages which
# are only detected in the .close step)
- self.file_state = utils.FileState(descr['basename'])
+ self.file_state = utils.FileState(descr["basename"])
self._ignore_file = False
# fix the current file (if the source file was not available or
# if it's actually a c extension)
- self.current_file = ast_node.file # pylint: disable=maybe-no-member
+ self.current_file = ast_node.file # pylint: disable=maybe-no-member
self.check_astroid_module(ast_node, walker, rawcheckers, tokencheckers)
# warn about spurious inline messages handling
- spurious_messages = self.file_state.iter_spurious_suppression_messages(self.msgs_store)
+ spurious_messages = self.file_state.iter_spurious_suppression_messages(
+ self.msgs_store
+ )
for msgid, line, args in spurious_messages:
self.add_message(msgid, line, None, args)
# notify global end
- self.stats['statement'] = walker.nbstatements
+ self.stats["statement"] = walker.nbstatements
for checker in reversed(_checkers):
checker.close()
def expand_files(self, modules):
"""get modules and errors from a list of modules and handle errors
"""
- result, errors = utils.expand_modules(modules, self.config.black_list,
- self.config.black_list_re)
+ result, errors = utils.expand_modules(
+ modules, self.config.black_list, self.config.black_list_re
+ )
for error in errors:
message = modname = error["mod"]
key = error["key"]
self.set_current_module(modname)
if key == "fatal":
- message = str(error["ex"]).replace(os.getcwd() + os.sep, '')
+ message = str(error["ex"]).replace(os.getcwd() + os.sep, "")
self.add_message(key, args=message)
return result
@@ -964,10 +1101,10 @@ class PyLinter(config.OptionsManagerMixIn,
self.reporter.on_set_current_module(modname, filepath)
self.current_name = modname
self.current_file = filepath or modname
- self.stats['by_module'][modname] = {}
- self.stats['by_module'][modname]['statement'] = 0
+ self.stats["by_module"][modname] = {}
+ self.stats["by_module"][modname]["statement"] = 0
for msg_cat in utils.MSG_TYPES.values():
- self.stats['by_module'][modname][msg_cat] = 0
+ self.stats["by_module"][modname][msg_cat] = 0
def get_ast(self, filepath, modname):
"""return an ast(roid) representation for a module"""
@@ -975,29 +1112,29 @@ class PyLinter(config.OptionsManagerMixIn,
return MANAGER.ast_from_file(filepath, modname, source=True)
except astroid.AstroidSyntaxError as ex:
# pylint: disable=no-member
- self.add_message('syntax-error',
- line=getattr(ex.error, 'lineno', 0),
- args=str(ex.error))
+ self.add_message(
+ "syntax-error", line=getattr(ex.error, "lineno", 0), args=str(ex.error)
+ )
except astroid.AstroidBuildingException as ex:
- self.add_message('parse-error', args=ex)
+ self.add_message("parse-error", args=ex)
except Exception as ex:
import traceback
+
traceback.print_exc()
- self.add_message('astroid-error', args=(ex.__class__, ex))
+ self.add_message("astroid-error", args=(ex.__class__, ex))
- def check_astroid_module(self, ast_node, walker,
- rawcheckers, tokencheckers):
+ def check_astroid_module(self, ast_node, walker, rawcheckers, tokencheckers):
"""Check a module from its astroid representation."""
try:
tokens = utils.tokenize_module(ast_node)
except tokenize.TokenError as ex:
- self.add_message('syntax-error', line=ex.args[1][0], args=ex.args[0])
+ self.add_message("syntax-error", line=ex.args[1][0], args=ex.args[0])
return None
if not ast_node.pure_python:
- self.add_message('raw-checker-failed', args=ast_node.name)
+ self.add_message("raw-checker-failed", args=ast_node.name)
else:
- #assert astroid.file.endswith('.py')
+ # assert astroid.file.endswith('.py')
# invoke ITokenChecker interface on self to fetch module/block
# level options
self.process_tokens(tokens)
@@ -1018,13 +1155,10 @@ class PyLinter(config.OptionsManagerMixIn,
def open(self):
"""initialize counters"""
- self.stats = {'by_module' : {},
- 'by_msg' : {},
- }
+ self.stats = {"by_module": {}, "by_msg": {}}
MANAGER.always_load_extensions = self.config.unsafe_load_any_extension
MANAGER.max_inferable_values = self.config.limit_inference_results
- MANAGER.extension_package_whitelist.update(
- self.config.extension_pkg_whitelist)
+ MANAGER.extension_package_whitelist.update(self.config.extension_pkg_whitelist)
for msg_cat in utils.MSG_TYPES.values():
self.stats[msg_cat] = 0
@@ -1060,82 +1194,94 @@ class PyLinter(config.OptionsManagerMixIn,
# check with at least check 1 statements (usually 0 when there is a
# syntax error preventing pylint from further processing)
previous_stats = config.load_results(self.file_state.base_name)
- if self.stats['statement'] == 0:
+ if self.stats["statement"] == 0:
return
# get a global note for the code
evaluation = self.config.evaluation
try:
- note = eval(evaluation, {}, self.stats) # pylint: disable=eval-used
+ note = eval(evaluation, {}, self.stats) # pylint: disable=eval-used
except Exception as ex:
- msg = 'An exception occurred while rating: %s' % ex
+ msg = "An exception occurred while rating: %s" % ex
else:
- self.stats['global_note'] = note
- msg = 'Your code has been rated at %.2f/10' % note
- pnote = previous_stats.get('global_note')
+ self.stats["global_note"] = note
+ msg = "Your code has been rated at %.2f/10" % note
+ pnote = previous_stats.get("global_note")
if pnote is not None:
- msg += ' (previous run: %.2f/10, %+.2f)' % (pnote, note - pnote)
+ msg += " (previous run: %.2f/10, %+.2f)" % (pnote, note - pnote)
if self.config.score:
sect = report_nodes.EvaluationSection(msg)
self.reporter.display_reports(sect)
+
# some reporting functions ####################################################
+
def report_total_messages_stats(sect, stats, previous_stats):
"""make total errors / warnings report"""
- lines = ['type', 'number', 'previous', 'difference']
- lines += checkers.table_lines_from_stats(stats, previous_stats,
- ('convention', 'refactor',
- 'warning', 'error'))
+ lines = ["type", "number", "previous", "difference"]
+ lines += checkers.table_lines_from_stats(
+ stats, previous_stats, ("convention", "refactor", "warning", "error")
+ )
sect.append(report_nodes.Table(children=lines, cols=4, rheaders=1))
+
def report_messages_stats(sect, stats, _):
"""make messages type report"""
- if not stats['by_msg']:
+ if not stats["by_msg"]:
# don't print this report when we didn't detected any errors
raise exceptions.EmptyReportError()
- in_order = sorted([(value, msg_id)
- for msg_id, value in stats['by_msg'].items()
- if not msg_id.startswith('I')])
+ in_order = sorted(
+ [
+ (value, msg_id)
+ for msg_id, value in stats["by_msg"].items()
+ if not msg_id.startswith("I")
+ ]
+ )
in_order.reverse()
- lines = ('message id', 'occurrences')
+ lines = ("message id", "occurrences")
for value, msg_id in in_order:
lines += (msg_id, str(value))
sect.append(report_nodes.Table(children=lines, cols=2, rheaders=1))
+
def report_messages_by_module_stats(sect, stats, _):
"""make errors / warnings by modules report"""
- if len(stats['by_module']) == 1:
+ if len(stats["by_module"]) == 1:
# don't print this report when we are analysing a single module
raise exceptions.EmptyReportError()
by_mod = collections.defaultdict(dict)
- for m_type in ('fatal', 'error', 'warning', 'refactor', 'convention'):
+ for m_type in ("fatal", "error", "warning", "refactor", "convention"):
total = stats[m_type]
- for module in stats['by_module'].keys():
- mod_total = stats['by_module'][module][m_type]
+ for module in stats["by_module"].keys():
+ mod_total = stats["by_module"][module][m_type]
if total == 0:
percent = 0
else:
- percent = float((mod_total)*100) / total
+ percent = float((mod_total) * 100) / total
by_mod[module][m_type] = percent
sorted_result = []
for module, mod_info in by_mod.items():
- sorted_result.append((mod_info['error'],
- mod_info['warning'],
- mod_info['refactor'],
- mod_info['convention'],
- module))
+ sorted_result.append(
+ (
+ mod_info["error"],
+ mod_info["warning"],
+ mod_info["refactor"],
+ mod_info["convention"],
+ module,
+ )
+ )
sorted_result.sort()
sorted_result.reverse()
- lines = ['module', 'error', 'warning', 'refactor', 'convention']
+ lines = ["module", "error", "warning", "refactor", "convention"]
for line in sorted_result:
# Don't report clean modules.
if all(entry == 0 for entry in line[:-1]):
continue
lines.append(line[-1])
for val in line[:-1]:
- lines.append('%.2f' % val)
+ lines.append("%.2f" % val)
if len(lines) == 5:
raise exceptions.EmptyReportError()
sect.append(report_nodes.Table(children=lines, cols=5, rheaders=1))
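
The scoring code reformatted earlier in this hunk evaluates the configurable "evaluation" expression against the collected stats before printing the familiar rating line. A worked example with the default expression and invented message counts:

    # Default expression from the "evaluation" option, with made-up counts.
    error, warning, refactor, convention = 2, 5, 1, 0
    statement = 100

    note = 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
    print("Your code has been rated at %.2f/10" % note)  # 8.40/10
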
@@ -1158,9 +1304,9 @@ def preprocess_options(args, search_for):
i = 0
while i < len(args):
arg = args[i]
- if arg.startswith('--'):
+ if arg.startswith("--"):
try:
- option, val = arg[2:].split('=', 1)
+ option, val = arg[2:].split("=", 1)
except ValueError:
option, val = arg[2:], None
try:
@@ -1170,8 +1316,8 @@ def preprocess_options(args, search_for):
else:
del args[i]
if takearg and val is None:
- if i >= len(args) or args[i].startswith('-'):
- msg = 'Option %s expects a value' % option
+ if i >= len(args) or args[i].startswith("-"):
+ msg = "Option %s expects a value" % option
raise ArgumentPreprocessingError(msg)
val = args[i]
del args[i]
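
preprocess_options(), adjusted above, splits long options on the first "=" and otherwise consumes the next argument as the value. A minimal sketch of just that splitting step (split_long_option is an illustrative helper, not a function from pylint):

    def split_long_option(arg):
        """Split '--opt=value' into ('opt', 'value'), or ('opt', None) without '='."""
        try:
            option, val = arg[2:].split("=", 1)
        except ValueError:
            option, val = arg[2:], None
        return option, val

    print(split_long_option("--rcfile=setup.cfg"))  # ('rcfile', 'setup.cfg')
    print(split_long_option("--verbose"))           # ('verbose', None)
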
@@ -1212,146 +1358,210 @@ class Run:
run(*sys.argv[1:])
"""
+
LinterClass = PyLinter
option_groups = (
- ('Commands', 'Options which are actually commands. Options in this \
-group are mutually exclusive.'),
- )
+ (
+ "Commands",
+ "Options which are actually commands. Options in this \
+group are mutually exclusive.",
+ ),
+ )
def __init__(self, args, reporter=None, do_exit=True):
self._rcfile = None
self._plugins = []
self.verbose = None
try:
- preprocess_options(args, {
- # option: (callback, takearg)
- 'init-hook': (cb_init_hook, True),
- 'rcfile': (self.cb_set_rcfile, True),
- 'load-plugins': (self.cb_add_plugins, True),
- 'verbose': (self.cb_verbose_mode, False),
- })
+ preprocess_options(
+ args,
+ {
+ # option: (callback, takearg)
+ "init-hook": (cb_init_hook, True),
+ "rcfile": (self.cb_set_rcfile, True),
+ "load-plugins": (self.cb_add_plugins, True),
+ "verbose": (self.cb_verbose_mode, False),
+ },
+ )
except ArgumentPreprocessingError as ex:
print(ex, file=sys.stderr)
sys.exit(32)
- self.linter = linter = self.LinterClass((
- ('rcfile',
- {'action' : 'callback', 'callback' : lambda *args: 1,
- 'type': 'string', 'metavar': '<file>',
- 'help' : 'Specify a configuration file.'}),
-
- ('init-hook',
- {'action' : 'callback', 'callback' : lambda *args: 1,
- 'type' : 'string', 'metavar': '<code>',
- 'level': 1,
- 'help' : 'Python code to execute, usually for sys.path '
- 'manipulation such as pygtk.require().'}),
-
- ('help-msg',
- {'action' : 'callback', 'type' : 'string', 'metavar': '<msg-id>',
- 'callback' : self.cb_help_message,
- 'group': 'Commands',
- 'help' : 'Display a help message for the given message id and '
- 'exit. The value may be a comma separated list of message ids.'}),
-
- ('list-msgs',
- {'action' : 'callback', 'metavar': '<msg-id>',
- 'callback' : self.cb_list_messages,
- 'group': 'Commands', 'level': 1,
- 'help' : "Generate pylint's messages."}),
-
- ('list-conf-levels',
- {'action' : 'callback',
- 'callback' : cb_list_confidence_levels,
- 'group': 'Commands', 'level': 1,
- 'help' : "Generate pylint's confidence levels."}),
-
- ('full-documentation',
- {'action' : 'callback', 'metavar': '<msg-id>',
- 'callback' : self.cb_full_documentation,
- 'group': 'Commands', 'level': 1,
- 'help' : "Generate pylint's full documentation."}),
-
- ('generate-rcfile',
- {'action' : 'callback', 'callback' : self.cb_generate_config,
- 'group': 'Commands',
- 'help' : 'Generate a sample configuration file according to '
- 'the current configuration. You can put other options '
- 'before this one to get them in the generated '
- 'configuration.'}),
-
- ('generate-man',
- {'action' : 'callback', 'callback' : self.cb_generate_manpage,
- 'group': 'Commands',
- 'help' : "Generate pylint's man page.", 'hide': True}),
-
- ('errors-only',
- {'action' : 'callback', 'callback' : self.cb_error_mode,
- 'short': 'E',
- 'help' : 'In error mode, checkers without error messages are '
- 'disabled and for others, only the ERROR messages are '
- 'displayed, and no reports are done by default.'}),
-
- ('py3k',
- {'action' : 'callback', 'callback' : self.cb_python3_porting_mode,
- 'help' : 'In Python 3 porting mode, all checkers will be '
- 'disabled and only messages emitted by the porting '
- 'checker will be displayed.'}),
-
- ('verbose',
- {'action' : 'callback', 'callback' : self.cb_verbose_mode,
- 'short': 'v',
- 'help' : 'In verbose mode, extra non-checker-related info '
- 'will be displayed.'})
-
- ), option_groups=self.option_groups, pylintrc=self._rcfile)
+ self.linter = linter = self.LinterClass(
+ (
+ (
+ "rcfile",
+ {
+ "action": "callback",
+ "callback": lambda *args: 1,
+ "type": "string",
+ "metavar": "<file>",
+ "help": "Specify a configuration file.",
+ },
+ ),
+ (
+ "init-hook",
+ {
+ "action": "callback",
+ "callback": lambda *args: 1,
+ "type": "string",
+ "metavar": "<code>",
+ "level": 1,
+ "help": "Python code to execute, usually for sys.path "
+ "manipulation such as pygtk.require().",
+ },
+ ),
+ (
+ "help-msg",
+ {
+ "action": "callback",
+ "type": "string",
+ "metavar": "<msg-id>",
+ "callback": self.cb_help_message,
+ "group": "Commands",
+ "help": "Display a help message for the given message id and "
+ "exit. The value may be a comma separated list of message ids.",
+ },
+ ),
+ (
+ "list-msgs",
+ {
+ "action": "callback",
+ "metavar": "<msg-id>",
+ "callback": self.cb_list_messages,
+ "group": "Commands",
+ "level": 1,
+ "help": "Generate pylint's messages.",
+ },
+ ),
+ (
+ "list-conf-levels",
+ {
+ "action": "callback",
+ "callback": cb_list_confidence_levels,
+ "group": "Commands",
+ "level": 1,
+ "help": "Generate pylint's confidence levels.",
+ },
+ ),
+ (
+ "full-documentation",
+ {
+ "action": "callback",
+ "metavar": "<msg-id>",
+ "callback": self.cb_full_documentation,
+ "group": "Commands",
+ "level": 1,
+ "help": "Generate pylint's full documentation.",
+ },
+ ),
+ (
+ "generate-rcfile",
+ {
+ "action": "callback",
+ "callback": self.cb_generate_config,
+ "group": "Commands",
+ "help": "Generate a sample configuration file according to "
+ "the current configuration. You can put other options "
+ "before this one to get them in the generated "
+ "configuration.",
+ },
+ ),
+ (
+ "generate-man",
+ {
+ "action": "callback",
+ "callback": self.cb_generate_manpage,
+ "group": "Commands",
+ "help": "Generate pylint's man page.",
+ "hide": True,
+ },
+ ),
+ (
+ "errors-only",
+ {
+ "action": "callback",
+ "callback": self.cb_error_mode,
+ "short": "E",
+ "help": "In error mode, checkers without error messages are "
+ "disabled and for others, only the ERROR messages are "
+ "displayed, and no reports are done by default.",
+ },
+ ),
+ (
+ "py3k",
+ {
+ "action": "callback",
+ "callback": self.cb_python3_porting_mode,
+ "help": "In Python 3 porting mode, all checkers will be "
+ "disabled and only messages emitted by the porting "
+ "checker will be displayed.",
+ },
+ ),
+ (
+ "verbose",
+ {
+ "action": "callback",
+ "callback": self.cb_verbose_mode,
+ "short": "v",
+ "help": "In verbose mode, extra non-checker-related info "
+ "will be displayed.",
+ },
+ ),
+ ),
+ option_groups=self.option_groups,
+ pylintrc=self._rcfile,
+ )
# register standard checkers
linter.load_default_plugins()
# load command line plugins
linter.load_plugin_modules(self._plugins)
# add some help section
- linter.add_help_section('Environment variables', config.ENV_HELP, level=1)
+ linter.add_help_section("Environment variables", config.ENV_HELP, level=1)
# pylint: disable=bad-continuation
- linter.add_help_section('Output',
-'Using the default text output, the message format is : \n'
-' \n'
-' MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE \n'
-' \n'
-'There are 5 kind of message types : \n'
-' * (C) convention, for programming standard violation \n'
-' * (R) refactor, for bad code smell \n'
-' * (W) warning, for python specific problems \n'
-' * (E) error, for probable bugs in the code \n'
-' * (F) fatal, if an error occurred which prevented pylint from doing further\n'
-'processing.\n'
- , level=1)
- linter.add_help_section('Output status code',
-'Pylint should leave with following status code: \n'
-' * 0 if everything went fine \n'
-' * 1 if a fatal message was issued \n'
-' * 2 if an error message was issued \n'
-' * 4 if a warning message was issued \n'
-' * 8 if a refactor message was issued \n'
-' * 16 if a convention message was issued \n'
-' * 32 on usage error \n'
-' \n'
-'status 1 to 16 will be bit-ORed so you can know which different categories has\n'
-'been issued by analysing pylint output status code\n',
- level=1)
+ linter.add_help_section(
+ "Output",
+ "Using the default text output, the message format is : \n"
+ " \n"
+ " MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE \n"
+ " \n"
+ "There are 5 kind of message types : \n"
+ " * (C) convention, for programming standard violation \n"
+ " * (R) refactor, for bad code smell \n"
+ " * (W) warning, for python specific problems \n"
+ " * (E) error, for probable bugs in the code \n"
+ " * (F) fatal, if an error occurred which prevented pylint from doing further\n"
+ "processing.\n",
+ level=1,
+ )
+ linter.add_help_section(
+ "Output status code",
+ "Pylint should leave with following status code: \n"
+ " * 0 if everything went fine \n"
+ " * 1 if a fatal message was issued \n"
+ " * 2 if an error message was issued \n"
+ " * 4 if a warning message was issued \n"
+ " * 8 if a refactor message was issued \n"
+ " * 16 if a convention message was issued \n"
+ " * 32 on usage error \n"
+ " \n"
+ "status 1 to 16 will be bit-ORed so you can know which different categories has\n"
+ "been issued by analysing pylint output status code\n",
+ level=1,
+ )
# read configuration
- linter.disable('I')
- linter.enable('c-extension-no-member')
+ linter.disable("I")
+ linter.enable("c-extension-no-member")
linter.read_config_file(verbose=self.verbose)
config_parser = linter.cfgfile_parser
# run init hook, if present, before loading plugins
- if config_parser.has_option('MASTER', 'init-hook'):
- cb_init_hook('init-hook',
- utils._unquote(config_parser.get('MASTER',
- 'init-hook')))
+ if config_parser.has_option("MASTER", "init-hook"):
+ cb_init_hook(
+ "init-hook", utils._unquote(config_parser.get("MASTER", "init-hook"))
+ )
# is there some additional plugins in the file configuration, in
- if config_parser.has_option('MASTER', 'load-plugins'):
- plugins = utils._splitstrip(
- config_parser.get('MASTER', 'load-plugins'))
+ if config_parser.has_option("MASTER", "load-plugins"):
+ plugins = utils._splitstrip(config_parser.get("MASTER", "load-plugins"))
linter.load_plugin_modules(plugins)
# now we can load file config and command line, plugins (which can
# provide options) have been registered
@@ -1364,7 +1574,7 @@ group are mutually exclusive.'),
try:
args = linter.load_command_line_configuration(args)
except SystemExit as exc:
- if exc.code == 2: # bad options
+ if exc.code == 2: # bad options
exc.code = 32
raise
if not args:
@@ -1372,13 +1582,18 @@ group are mutually exclusive.'),
sys.exit(32)
if linter.config.jobs < 0:
- print("Jobs number (%d) should be greater than or equal to 0"
- % linter.config.jobs, file=sys.stderr)
+ print(
+ "Jobs number (%d) should be greater than or equal to 0"
+ % linter.config.jobs,
+ file=sys.stderr,
+ )
sys.exit(32)
if linter.config.jobs > 1 or linter.config.jobs == 0:
if multiprocessing is None:
- print("Multiprocessing library is missing, "
- "fallback to single process", file=sys.stderr)
+ print(
+ "Multiprocessing library is missing, " "fallback to single process",
+ file=sys.stderr,
+ )
linter.set_option("jobs", 1)
else:
if linter.config.jobs == 0:
@@ -1415,12 +1630,13 @@ group are mutually exclusive.'),
def cb_generate_config(self, *args, **kwargs):
"""optik callback for sample config file generation"""
- self.linter.generate_config(skipsections=('COMMANDS',))
+ self.linter.generate_config(skipsections=("COMMANDS",))
sys.exit(0)
def cb_generate_manpage(self, *args, **kwargs):
"""optik callback for sample config file generation"""
from pylint import __pkginfo__
+
self.linter.generate_manpage(__pkginfo__)
sys.exit(0)
@@ -1434,7 +1650,7 @@ group are mutually exclusive.'),
self.linter.print_full_documentation()
sys.exit(0)
- def cb_list_messages(self, option, optname, value, parser): # FIXME
+ def cb_list_messages(self, option, optname, value, parser): # FIXME
"""optik callback for printing available messages"""
self.linter.msgs_store.list_messages()
sys.exit(0)
@@ -1446,15 +1662,17 @@ group are mutually exclusive.'),
def cb_verbose_mode(self, *args, **kwargs):
self.verbose = True
+
def cb_list_confidence_levels(option, optname, value, parser):
for level in interfaces.CONFIDENCE_LEVELS:
- print('%-18s: %s' % level)
+ print("%-18s: %s" % level)
sys.exit(0)
+
def cb_init_hook(optname, value):
"""exec arbitrary code to set sys.path for instance"""
- exec(value) # pylint: disable=exec-used
+ exec(value) # pylint: disable=exec-used
-if __name__ == '__main__':
+if __name__ == "__main__":
Run(sys.argv[1:])
diff --git a/pylint/pyreverse/diadefslib.py b/pylint/pyreverse/diadefslib.py
index afd5312ea..e64614fc7 100644
--- a/pylint/pyreverse/diadefslib.py
+++ b/pylint/pyreverse/diadefslib.py
@@ -22,10 +22,11 @@ import astroid
from pylint.pyreverse.diagrams import PackageDiagram, ClassDiagram
from pylint.pyreverse.utils import LocalsVisitor
-BUILTINS_NAME = 'builtins'
+BUILTINS_NAME = "builtins"
# diagram generators ##########################################################
+
class DiaDefGenerator:
"""handle diagram generation options"""
@@ -34,13 +35,13 @@ class DiaDefGenerator:
self.config = handler.config
self._set_default_options()
self.linker = linker
- self.classdiagram = None # defined by subclasses
+ self.classdiagram = None # defined by subclasses
def get_title(self, node):
"""get title for objects"""
title = node.name
if self.module_names:
- title = '%s.%s' % (node.root().name, title)
+ title = "%s.%s" % (node.root().name, title)
return title
def _set_option(self, option):
@@ -57,7 +58,7 @@ class DiaDefGenerator:
all_ancestors = self._set_option(self.config.all_ancestors)
all_associated = self._set_option(self.config.all_associated)
anc_level, association_level = (0, 0)
- if all_ancestors:
+ if all_ancestors:
anc_level = -1
if all_associated:
association_level = -1
@@ -95,13 +96,13 @@ class DiaDefGenerator:
"""return associated nodes of a class node"""
if level == 0:
return
- for association_nodes in list(klass_node.instance_attrs_type.values()) + \
- list(klass_node.locals_type.values()):
+ for association_nodes in list(klass_node.instance_attrs_type.values()) + list(
+ klass_node.locals_type.values()
+ ):
for node in association_nodes:
if isinstance(node, astroid.Instance):
node = node._proxied
- if not (isinstance(node, astroid.ClassDef)
- and self.show_node(node)):
+ if not (isinstance(node, astroid.ClassDef) and self.show_node(node)):
continue
yield node
@@ -136,12 +137,12 @@ class DefaultDiadefGenerator(LocalsVisitor, DiaDefGenerator):
"""
mode = self.config.mode
if len(node.modules) > 1:
- self.pkgdiagram = PackageDiagram('packages %s' % node.name, mode)
+ self.pkgdiagram = PackageDiagram("packages %s" % node.name, mode)
else:
self.pkgdiagram = None
- self.classdiagram = ClassDiagram('classes %s' % node.name, mode)
+ self.classdiagram = ClassDiagram("classes %s" % node.name, mode)
- def leave_project(self, node): # pylint: disable=unused-argument
+ def leave_project(self, node): # pylint: disable=unused-argument
"""leave the pyreverse.utils.Project node
return the generated diagram definition
@@ -189,19 +190,21 @@ class ClassDiadefGenerator(DiaDefGenerator):
self.classdiagram = ClassDiagram(klass, self.config.mode)
if len(project.modules) > 1:
- module, klass = klass.rsplit('.', 1)
+ module, klass = klass.rsplit(".", 1)
module = project.get_module(module)
else:
module = project.modules[0]
- klass = klass.split('.')[-1]
+ klass = klass.split(".")[-1]
klass = next(module.ilookup(klass))
anc_level, association_level = self._get_levels()
self.extract_classes(klass, anc_level, association_level)
return self.classdiagram
+
# diagram handler #############################################################
+
class DiadefsHandler:
"""handle diagram definitions :
@@ -232,4 +235,4 @@ class DiadefsHandler:
diagrams = DefaultDiadefGenerator(linker, self).visit(project)
for diagram in diagrams:
diagram.extract_relationships()
- return diagrams
+ return diagrams
diff --git a/pylint/pyreverse/diagrams.py b/pylint/pyreverse/diagrams.py
index 27e1092e1..afd7ffee4 100644
--- a/pylint/pyreverse/diagrams.py
+++ b/pylint/pyreverse/diagrams.py
@@ -23,6 +23,7 @@ class Figure:
class Relationship(Figure):
"""a relation ship from an object in the diagram to another
"""
+
def __init__(self, from_object, to_object, relation_type, name=None):
Figure.__init__(self)
self.from_object = from_object
@@ -34,7 +35,8 @@ class Relationship(Figure):
class DiagramEntity(Figure):
"""a diagram object, i.e. a label associated to an astroid node
"""
- def __init__(self, title='No name', node=None):
+
+ def __init__(self, title="No name", node=None):
Figure.__init__(self)
self.title = title
self.node = node
@@ -43,7 +45,9 @@ class DiagramEntity(Figure):
class ClassDiagram(Figure, FilterMixIn):
"""main class diagram handling
"""
- TYPE = 'class'
+
+ TYPE = "class"
+
def __init__(self, title, mode):
FilterMixIn.__init__(self, mode)
Figure.__init__(self)
@@ -55,11 +59,12 @@ class ClassDiagram(Figure, FilterMixIn):
def get_relationships(self, role):
# sorted to get predictable (hence testable) results
- return sorted(self.relationships.get(role, ()),
- key=lambda x: (x.from_object.fig_id, x.to_object.fig_id))
+ return sorted(
+ self.relationships.get(role, ()),
+ key=lambda x: (x.from_object.fig_id, x.to_object.fig_id),
+ )
- def add_relationship(self, from_object, to_object,
- relation_type, name=None):
+ def add_relationship(self, from_object, to_object, relation_type, name=None):
"""create a relation ship
"""
rel = Relationship(from_object, to_object, relation_type, name)
@@ -77,12 +82,15 @@ class ClassDiagram(Figure, FilterMixIn):
"""return visible attributes, possibly with class name"""
attrs = []
properties = [
- (n, m) for n, m in node.items()
- if isinstance(m, astroid.FunctionDef)
- and decorated_with_property(m)
+ (n, m)
+ for n, m in node.items()
+ if isinstance(m, astroid.FunctionDef) and decorated_with_property(m)
]
- for node_name, associated_nodes in list(node.instance_attrs_type.items()) + \
- list(node.locals_type.items()) + properties:
+ for node_name, associated_nodes in (
+ list(node.instance_attrs_type.items())
+ + list(node.locals_type.items())
+ + properties
+ ):
if not self.show_attr(node_name):
continue
names = self.class_names(associated_nodes)
@@ -94,7 +102,8 @@ class ClassDiagram(Figure, FilterMixIn):
def get_methods(self, node):
"""return visible methods"""
methods = [
- m for m in node.values()
+ m
+ for m in node.values()
if isinstance(m, astroid.FunctionDef)
and not decorated_with_property(m)
and self.show_attr(m.name)
@@ -115,8 +124,11 @@ class ClassDiagram(Figure, FilterMixIn):
for node in nodes:
if isinstance(node, astroid.Instance):
node = node._proxied
- if isinstance(node, astroid.ClassDef) \
- and hasattr(node, "name") and not self.has_node(node):
+ if (
+ isinstance(node, astroid.ClassDef)
+ and hasattr(node, "name")
+ and not self.has_node(node)
+ ):
if node.name not in names:
node_name = node.name
names.append(node_name)
@@ -158,26 +170,27 @@ class ClassDiagram(Figure, FilterMixIn):
obj.methods = self.get_methods(node)
# shape
if is_interface(node):
- obj.shape = 'interface'
+ obj.shape = "interface"
else:
- obj.shape = 'class'
+ obj.shape = "class"
# inheritance link
for par_node in node.ancestors(recurs=False):
try:
par_obj = self.object_from_node(par_node)
- self.add_relationship(obj, par_obj, 'specialization')
+ self.add_relationship(obj, par_obj, "specialization")
except KeyError:
continue
# implements link
for impl_node in node.implements:
try:
impl_obj = self.object_from_node(impl_node)
- self.add_relationship(obj, impl_obj, 'implements')
+ self.add_relationship(obj, impl_obj, "implements")
except KeyError:
continue
# associations link
- for name, values in list(node.instance_attrs_type.items()) + \
- list(node.locals_type.items()):
+ for name, values in list(node.instance_attrs_type.items()) + list(
+ node.locals_type.items()
+ ):
for value in values:
if value is astroid.Uninferable:
continue
@@ -185,7 +198,7 @@ class ClassDiagram(Figure, FilterMixIn):
value = value._proxied
try:
associated_obj = self.object_from_node(value)
- self.add_relationship(associated_obj, obj, 'association', name)
+ self.add_relationship(associated_obj, obj, "association", name)
except KeyError:
continue
@@ -193,7 +206,8 @@ class ClassDiagram(Figure, FilterMixIn):
class PackageDiagram(ClassDiagram):
"""package diagram handling
"""
- TYPE = 'package'
+
+ TYPE = "package"
def modules(self):
"""return all module nodes in the diagram"""
@@ -215,11 +229,11 @@ class PackageDiagram(ClassDiagram):
mod_name = mod.node.name
if mod_name == name:
return mod
- #search for fullname of relative import modules
+ # search for fullname of relative import modules
package = node.root().name
if mod_name == "%s.%s" % (package, name):
return mod
- if mod_name == "%s.%s" % (package.rsplit('.', 1)[0], name):
+ if mod_name == "%s.%s" % (package.rsplit(".", 1)[0], name):
return mod
raise KeyError(name)
@@ -239,15 +253,15 @@ class PackageDiagram(ClassDiagram):
# ownership
try:
mod = self.object_from_node(obj.node.root())
- self.add_relationship(obj, mod, 'ownership')
+ self.add_relationship(obj, mod, "ownership")
except KeyError:
continue
for obj in self.modules():
- obj.shape = 'package'
+ obj.shape = "package"
# dependencies
for dep_name in obj.node.depends:
try:
dep = self.get_module(dep_name, obj.node)
except KeyError:
continue
- self.add_relationship(obj, dep, 'depends')
+ self.add_relationship(obj, dep, "depends")
diff --git a/pylint/pyreverse/inspector.py b/pylint/pyreverse/inspector.py
index e43cfda80..eca4b67aa 100644
--- a/pylint/pyreverse/inspector.py
+++ b/pylint/pyreverse/inspector.py
@@ -34,12 +34,12 @@ def _iface_hdlr(_):
def _astroid_wrapper(func, modname):
- print('parsing %s...' % modname)
+ print("parsing %s..." % modname)
try:
return func(modname)
except exceptions.AstroidBuildingException as exc:
print(exc)
- except Exception as exc: # pylint: disable=broad-except
+ except Exception as exc: # pylint: disable=broad-except
traceback.print_exc()
@@ -47,7 +47,7 @@ def interfaces(node, herited=True, handler_func=_iface_hdlr):
"""Return an iterator on interfaces implemented by the given class node."""
# FIXME: what if __implements__ = (MyIFace, MyParent.__implements__)...
try:
- implements = bases.Instance(node).getattr('__implements__')[0]
+ implements = bases.Instance(node).getattr("__implements__")[0]
except exceptions.NotFoundError:
return
if not herited and implements.frame() is not node:
@@ -143,7 +143,7 @@ class Linker(IdGeneratorMixIn, utils.LocalsVisitor):
* set the depends mapping
* optionally tag the node with a unique id
"""
- if hasattr(node, 'locals_type'):
+ if hasattr(node, "locals_type"):
return
node.locals_type = collections.defaultdict(list)
node.depends = []
@@ -157,14 +157,14 @@ class Linker(IdGeneratorMixIn, utils.LocalsVisitor):
* set the implements list and build it
* optionally tag the node with a unique id
"""
- if hasattr(node, 'locals_type'):
+ if hasattr(node, "locals_type"):
return
node.locals_type = collections.defaultdict(list)
if self.tag:
node.uid = self.generate_id()
# resolve ancestors
for baseobj in node.ancestors(recurs=False):
- specializations = getattr(baseobj, 'specializations', [])
+ specializations = getattr(baseobj, "specializations", [])
specializations.append(node)
baseobj.specializations = specializations
# resolve instance attributes
@@ -184,7 +184,7 @@ class Linker(IdGeneratorMixIn, utils.LocalsVisitor):
* set the locals_type mapping
* optionally tag the node with a unique id
"""
- if hasattr(node, 'locals_type'):
+ if hasattr(node, "locals_type"):
return
node.locals_type = collections.defaultdict(list)
if self.tag:
@@ -202,7 +202,7 @@ class Linker(IdGeneratorMixIn, utils.LocalsVisitor):
"""
# avoid double parsing done by different Linkers.visit
# running over the same project:
- if hasattr(node, '_handled'):
+ if hasattr(node, "_handled"):
return
node._handled = True
if node.name in node.frame():
@@ -212,7 +212,7 @@ class Linker(IdGeneratorMixIn, utils.LocalsVisitor):
# there.
frame = node.root()
try:
- if not hasattr(frame, 'locals_type'):
+ if not hasattr(frame, "locals_type"):
# If the frame doesn't have a locals_type yet,
# it means it wasn't yet visited. Visit it now
# to add what's missing from it.
@@ -264,16 +264,15 @@ class Linker(IdGeneratorMixIn, utils.LocalsVisitor):
else:
relative = False
for name in node.names:
- if name[0] == '*':
+ if name[0] == "*":
continue
# analyze dependencies
- fullname = '%s.%s' % (basename, name[0])
- if fullname.find('.') > -1:
+ fullname = "%s.%s" % (basename, name[0])
+ if fullname.find(".") > -1:
try:
# TODO: don't use get_module_part,
# missing package precedence
- fullname = modutils.get_module_part(fullname,
- context_file)
+ fullname = modutils.get_module_part(fullname, context_file)
except ImportError:
continue
if fullname != basename:
@@ -293,11 +292,10 @@ class Linker(IdGeneratorMixIn, utils.LocalsVisitor):
module = node.root()
context_name = module.name
if relative:
- mod_path = '%s.%s' % ('.'.join(context_name.split('.')[:-1]),
- mod_path)
+ mod_path = "%s.%s" % (".".join(context_name.split(".")[:-1]), mod_path)
if self.compute_module(context_name, mod_path):
# handle dependencies
- if not hasattr(module, 'depends'):
+ if not hasattr(module, "depends"):
module.depends = []
mod_paths = module.depends
if mod_path not in mod_paths:
@@ -306,7 +304,8 @@ class Linker(IdGeneratorMixIn, utils.LocalsVisitor):
class Project:
"""a project handle a set of modules / packages"""
- def __init__(self, name=''):
+
+ def __init__(self, name=""):
self.name = name
self.path = None
self.modules = []
@@ -328,22 +327,25 @@ class Project:
return self.modules
def __repr__(self):
- return '<Project %r at %s (%s modules)>' % (self.name, id(self),
- len(self.modules))
+ return "<Project %r at %s (%s modules)>" % (
+ self.name,
+ id(self),
+ len(self.modules),
+ )
-def project_from_files(files, func_wrapper=_astroid_wrapper,
- project_name="no name",
- black_list=('CVS',)):
+def project_from_files(
+ files, func_wrapper=_astroid_wrapper, project_name="no name", black_list=("CVS",)
+):
"""return a Project from a list of files or modules"""
# build the project representation
astroid_manager = manager.AstroidManager()
project = Project(project_name)
for something in files:
if not os.path.exists(something):
- fpath = modutils.file_from_modpath(something.split('.'))
+ fpath = modutils.file_from_modpath(something.split("."))
elif os.path.isdir(something):
- fpath = os.path.join(something, '__init__.py')
+ fpath = os.path.join(something, "__init__.py")
else:
fpath = something
ast = func_wrapper(astroid_manager.ast_from_file, fpath)
@@ -354,10 +356,11 @@ def project_from_files(files, func_wrapper=_astroid_wrapper,
project.add_module(ast)
base_name = ast.name
# recurse in package except if __init__ was explicitly given
- if ast.package and something.find('__init__') == -1:
+ if ast.package and something.find("__init__") == -1:
# recurse on others packages / modules if this is a package
- for fpath in modutils.get_module_files(os.path.dirname(ast.file),
- black_list):
+ for fpath in modutils.get_module_files(
+ os.path.dirname(ast.file), black_list
+ ):
ast = func_wrapper(astroid_manager.ast_from_file, fpath)
if ast is None or ast.name == base_name:
continue
diff --git a/pylint/pyreverse/main.py b/pylint/pyreverse/main.py
index bf1ea78ca..2be7c486d 100644
--- a/pylint/pyreverse/main.py
+++ b/pylint/pyreverse/main.py
@@ -26,10 +26,16 @@ from pylint.pyreverse import writer
from pylint.pyreverse.utils import insert_default_options
OPTIONS = (
- ("filter-mode",
- dict(short='f', default='PUB_ONLY', dest='mode', type='string',
- action='store', metavar='<mode>',
- help="""filter attributes and functions according to
+ (
+ "filter-mode",
+ dict(
+ short="f",
+ default="PUB_ONLY",
+ dest="mode",
+ type="string",
+ action="store",
+ metavar="<mode>",
+ help="""filter attributes and functions according to
<mode>. Correct modes are :
'PUB_ONLY' filter all non public attributes
[DEFAULT], equivalent to PRIVATE+SPECIAL_A
@@ -37,76 +43,149 @@ OPTIONS = (
'SPECIAL' filter Python special functions
except constructor
'OTHER' filter protected and private
- attributes""")),
-
- ("class",
- dict(short='c', action="append", metavar="<class>", dest="classes", default=[],
- help="create a class diagram with all classes related to <class>;\
- this uses by default the options -ASmy")),
-
- ("show-ancestors",
- dict(short="a", action="store", metavar='<ancestor>', type='int',
- help='show <ancestor> generations of ancestor classes not in <projects>')),
- ("all-ancestors",
- dict(short="A", default=None,
- help="show all ancestors off all classes in <projects>")),
- ("show-associated",
- dict(short='s', action="store", metavar='<association_level>', type='int',
- help='show <association_level> levels of associated classes not in <projects>')),
- ("all-associated",
- dict(short='S', default=None,
- help='show recursively all associated off all associated classes')),
- ("show-builtin",
- dict(short="b", action="store_true", default=False,
- help='include builtin objects in representation of classes')),
-
- ("module-names",
- dict(short="m", default=None, type='yn', metavar='[yn]',
- help='include module name in representation of classes')),
+ attributes""",
+ ),
+ ),
+ (
+ "class",
+ dict(
+ short="c",
+ action="append",
+ metavar="<class>",
+ dest="classes",
+ default=[],
+ help="create a class diagram with all classes related to <class>;\
+ this uses by default the options -ASmy",
+ ),
+ ),
+ (
+ "show-ancestors",
+ dict(
+ short="a",
+ action="store",
+ metavar="<ancestor>",
+ type="int",
+ help="show <ancestor> generations of ancestor classes not in <projects>",
+ ),
+ ),
+ (
+ "all-ancestors",
+ dict(
+ short="A",
+ default=None,
+ help="show all ancestors off all classes in <projects>",
+ ),
+ ),
+ (
+ "show-associated",
+ dict(
+ short="s",
+ action="store",
+ metavar="<association_level>",
+ type="int",
+ help="show <association_level> levels of associated classes not in <projects>",
+ ),
+ ),
+ (
+ "all-associated",
+ dict(
+ short="S",
+ default=None,
+ help="show recursively all associated off all associated classes",
+ ),
+ ),
+ (
+ "show-builtin",
+ dict(
+ short="b",
+ action="store_true",
+ default=False,
+ help="include builtin objects in representation of classes",
+ ),
+ ),
+ (
+ "module-names",
+ dict(
+ short="m",
+ default=None,
+ type="yn",
+ metavar="[yn]",
+ help="include module name in representation of classes",
+ ),
+ ),
# TODO : generate dependencies like in pylint
# ("package-dependencies",
# dict(short="M", action="store", metavar='<package_depth>', type='int',
# help='show <package_depth> module dependencies beyond modules in \
# <projects> (for the package diagram)')),
- ("only-classnames",
- dict(short='k', action="store_true", default=False,
- help="don't show attributes and methods in the class boxes; \
-this disables -f values")),
- ("output", dict(short="o", dest="output_format", action="store",
- default="dot", metavar="<format>",
- help="create a *.<format> output file if format available.")),
- ("ignore", {'type' : "csv", 'metavar' : "<file[,file...]>",
- 'dest' : "black_list", "default" : ('CVS',),
- 'help' : "Add files or directories to the blacklist. They "
- "should be base names, not paths."}),
- ("project", {'default': "", 'type' : 'string', 'short': 'p',
- 'metavar': '<project name>', 'help': 'set the project name.'}),
+ (
+ "only-classnames",
+ dict(
+ short="k",
+ action="store_true",
+ default=False,
+ help="don't show attributes and methods in the class boxes; \
+this disables -f values",
+ ),
+ ),
+ (
+ "output",
+ dict(
+ short="o",
+ dest="output_format",
+ action="store",
+ default="dot",
+ metavar="<format>",
+ help="create a *.<format> output file if format available.",
+ ),
+ ),
+ (
+ "ignore",
+ {
+ "type": "csv",
+ "metavar": "<file[,file...]>",
+ "dest": "black_list",
+ "default": ("CVS",),
+ "help": "Add files or directories to the blacklist. They "
+ "should be base names, not paths.",
+ },
+ ),
+ (
+ "project",
+ {
+ "default": "",
+ "type": "string",
+ "short": "p",
+ "metavar": "<project name>",
+ "help": "set the project name.",
+ },
+ ),
)
def _check_graphviz_available(output_format):
"""check if we need graphviz for different output format"""
try:
- subprocess.call(['dot', '-V'], stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
+ subprocess.call(["dot", "-V"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError:
- print("The output format '%s' is currently not available.\n"
- "Please install 'Graphviz' to have other output formats "
- "than 'dot' or 'vcg'." % output_format)
+ print(
+ "The output format '%s' is currently not available.\n"
+ "Please install 'Graphviz' to have other output formats "
+ "than 'dot' or 'vcg'." % output_format
+ )
sys.exit(32)
-
class Run(ConfigurationMixIn):
"""base class providing common behaviour for pyreverse commands"""
- options = OPTIONS # type: ignore
+ options = OPTIONS # type: ignore
def __init__(self, args):
ConfigurationMixIn.__init__(self, usage=__doc__)
insert_default_options()
args = self.load_command_line_configuration()
- if self.config.output_format not in ('dot', 'vcg'):
+ if self.config.output_format not in ("dot", "vcg"):
_check_graphviz_available(self.config.output_format)
sys.exit(self.run(args))
@@ -120,8 +199,11 @@ class Run(ConfigurationMixIn):
# dependencies to local modules even if cwd is not in the PYTHONPATH
sys.path.insert(0, os.getcwd())
try:
- project = project_from_files(args, project_name=self.config.project,
- black_list=self.config.black_list)
+ project = project_from_files(
+ args,
+ project_name=self.config.project,
+ black_list=self.config.black_list,
+ )
linker = Linker(project, tag=True)
handler = DiadefsHandler(self.config)
diadefs = handler.get_diadefs(project, linker)
@@ -135,5 +217,5 @@ class Run(ConfigurationMixIn):
return 0
-if __name__ == '__main__':
+if __name__ == "__main__":
Run(sys.argv[1:])
diff --git a/pylint/pyreverse/utils.py b/pylint/pyreverse/utils.py
index cc08bc062..b60f336ab 100644
--- a/pylint/pyreverse/utils.py
+++ b/pylint/pyreverse/utils.py
@@ -21,22 +21,24 @@ import sys
########### pyreverse option utils ##############################
-RCFILE = '.pyreverserc'
+RCFILE = ".pyreverserc"
+
def get_default_options():
"""
Read config file and return list of options
"""
options = []
- home = os.environ.get('HOME', '')
+ home = os.environ.get("HOME", "")
if home:
rcfile = os.path.join(home, RCFILE)
try:
options = open(rcfile).read().split()
except IOError:
- pass # ignore if no config file found
+ pass # ignore if no config file found
return options
+
def insert_default_options():
"""insert default options to sys.argv
"""
@@ -46,29 +48,31 @@ def insert_default_options():
sys.argv.insert(1, arg)
-
# astroid utilities ###########################################################
-SPECIAL = re.compile('^__[A-Za-z0-9]+[A-Za-z0-9_]*__$')
-PRIVATE = re.compile('^__[_A-Za-z0-9]*[A-Za-z0-9]+_?$')
-PROTECTED = re.compile('^_[_A-Za-z0-9]*$')
+SPECIAL = re.compile("^__[A-Za-z0-9]+[A-Za-z0-9_]*__$")
+PRIVATE = re.compile("^__[_A-Za-z0-9]*[A-Za-z0-9]+_?$")
+PROTECTED = re.compile("^_[_A-Za-z0-9]*$")
+
def get_visibility(name):
"""return the visibility from a name: public, protected, private or special
"""
if SPECIAL.match(name):
- visibility = 'special'
+ visibility = "special"
elif PRIVATE.match(name):
- visibility = 'private'
+ visibility = "private"
elif PROTECTED.match(name):
- visibility = 'protected'
+ visibility = "protected"
else:
- visibility = 'public'
+ visibility = "public"
return visibility
-ABSTRACT = re.compile('^.*Abstract.*')
-FINAL = re.compile('^[A-Z_]*$')
+
+ABSTRACT = re.compile("^.*Abstract.*")
+FINAL = re.compile("^[A-Z_]*$")
+
def is_abstract(node):
"""return true if the given class node correspond to an abstract class
@@ -76,19 +80,22 @@ def is_abstract(node):
"""
return ABSTRACT.match(node.name)
+
def is_final(node):
"""return true if the given class/function node correspond to final
definition
"""
return FINAL.match(node.name)
+
def is_interface(node):
# bw compat
- return node.type == 'interface'
+ return node.type == "interface"
+
def is_exception(node):
# bw compat
- return node.type == 'exception'
+ return node.type == "exception"
# Helpers #####################################################################
@@ -98,33 +105,37 @@ _SPECIAL = 2
_PROTECTED = 4
_PRIVATE = 8
MODES = {
- 'ALL' : 0,
- 'PUB_ONLY' : _SPECIAL + _PROTECTED + _PRIVATE,
- 'SPECIAL' : _SPECIAL,
- 'OTHER' : _PROTECTED + _PRIVATE,
+ "ALL": 0,
+ "PUB_ONLY": _SPECIAL + _PROTECTED + _PRIVATE,
+ "SPECIAL": _SPECIAL,
+ "OTHER": _PROTECTED + _PRIVATE,
+}
+VIS_MOD = {
+ "special": _SPECIAL,
+ "protected": _PROTECTED,
+ "private": _PRIVATE,
+ "public": 0,
}
-VIS_MOD = {'special': _SPECIAL, 'protected': _PROTECTED,
- 'private': _PRIVATE, 'public': 0}
class FilterMixIn:
"""filter nodes according to a mode and nodes' visibility
"""
+
def __init__(self, mode):
"init filter modes"
__mode = 0
- for nummod in mode.split('+'):
+ for nummod in mode.split("+"):
try:
__mode += MODES[nummod]
except KeyError as ex:
- print('Unknown filter mode %s' % ex, file=sys.stderr)
+ print("Unknown filter mode %s" % ex, file=sys.stderr)
self.__mode = __mode
-
def show_attr(self, node):
"""return true if the node should be treated
"""
- visibility = get_visibility(getattr(node, 'name', node))
+ visibility = get_visibility(getattr(node, "name", node))
return not self.__mode & VIS_MOD[visibility]
@@ -163,10 +174,12 @@ class ASTWalker:
if methods is None:
handler = self.handler
kid = klass.__name__.lower()
- e_method = getattr(handler, 'visit_%s' % kid,
- getattr(handler, 'visit_default', None))
- l_method = getattr(handler, 'leave_%s' % kid,
- getattr(handler, 'leave_default', None))
+ e_method = getattr(
+ handler, "visit_%s" % kid, getattr(handler, "visit_default", None)
+ )
+ l_method = getattr(
+ handler, "leave_%s" % kid, getattr(handler, "leave_default", None)
+ )
self._cache[klass] = (e_method, l_method)
else:
e_method, l_method = methods
@@ -187,6 +200,7 @@ class ASTWalker:
class LocalsVisitor(ASTWalker):
"""visit a project by traversing the locals dictionary"""
+
def __init__(self):
ASTWalker.__init__(self, self)
self._visited = {}
@@ -195,11 +209,11 @@ class LocalsVisitor(ASTWalker):
"""launch the visit starting from the given node"""
if node in self._visited:
return None
- self._visited[node] = 1 # FIXME: use set ?
+ self._visited[node] = 1 # FIXME: use set ?
methods = self.get_callbacks(node)
if methods[0] is not None:
methods[0](node)
- if hasattr(node, 'locals'): # skip Instance and other proxy
+ if hasattr(node, "locals"): # skip Instance and other proxy
for local_node in node.values():
self.visit(local_node)
if methods[1] is not None:
diff --git a/pylint/pyreverse/vcgutils.py b/pylint/pyreverse/vcgutils.py
index a400312ae..89c691139 100644
--- a/pylint/pyreverse/vcgutils.py
+++ b/pylint/pyreverse/vcgutils.py
@@ -17,113 +17,153 @@ maybe used for the functions parameters.
"""
ATTRS_VAL = {
- 'algos': ('dfs', 'tree', 'minbackward',
- 'left_to_right', 'right_to_left',
- 'top_to_bottom', 'bottom_to_top',
- 'maxdepth', 'maxdepthslow', 'mindepth', 'mindepthslow',
- 'mindegree', 'minindegree', 'minoutdegree',
- 'maxdegree', 'maxindegree', 'maxoutdegree'),
- 'booleans': ('yes', 'no'),
- 'colors': ('black', 'white', 'blue', 'red', 'green', 'yellow',
- 'magenta', 'lightgrey',
- 'cyan', 'darkgrey', 'darkblue', 'darkred', 'darkgreen',
- 'darkyellow', 'darkmagenta', 'darkcyan', 'gold',
- 'lightblue', 'lightred', 'lightgreen', 'lightyellow',
- 'lightmagenta', 'lightcyan', 'lilac', 'turquoise',
- 'aquamarine', 'khaki', 'purple', 'yellowgreen', 'pink',
- 'orange', 'orchid'),
- 'shapes': ('box', 'ellipse', 'rhomb', 'triangle'),
- 'textmodes': ('center', 'left_justify', 'right_justify'),
- 'arrowstyles': ('solid', 'line', 'none'),
- 'linestyles': ('continuous', 'dashed', 'dotted', 'invisible'),
- }
+ "algos": (
+ "dfs",
+ "tree",
+ "minbackward",
+ "left_to_right",
+ "right_to_left",
+ "top_to_bottom",
+ "bottom_to_top",
+ "maxdepth",
+ "maxdepthslow",
+ "mindepth",
+ "mindepthslow",
+ "mindegree",
+ "minindegree",
+ "minoutdegree",
+ "maxdegree",
+ "maxindegree",
+ "maxoutdegree",
+ ),
+ "booleans": ("yes", "no"),
+ "colors": (
+ "black",
+ "white",
+ "blue",
+ "red",
+ "green",
+ "yellow",
+ "magenta",
+ "lightgrey",
+ "cyan",
+ "darkgrey",
+ "darkblue",
+ "darkred",
+ "darkgreen",
+ "darkyellow",
+ "darkmagenta",
+ "darkcyan",
+ "gold",
+ "lightblue",
+ "lightred",
+ "lightgreen",
+ "lightyellow",
+ "lightmagenta",
+ "lightcyan",
+ "lilac",
+ "turquoise",
+ "aquamarine",
+ "khaki",
+ "purple",
+ "yellowgreen",
+ "pink",
+ "orange",
+ "orchid",
+ ),
+ "shapes": ("box", "ellipse", "rhomb", "triangle"),
+ "textmodes": ("center", "left_justify", "right_justify"),
+ "arrowstyles": ("solid", "line", "none"),
+ "linestyles": ("continuous", "dashed", "dotted", "invisible"),
+}
# meaning of possible values:
# O -> string
# 1 -> int
# list -> value in list
GRAPH_ATTRS = {
- 'title': 0,
- 'label': 0,
- 'color': ATTRS_VAL['colors'],
- 'textcolor': ATTRS_VAL['colors'],
- 'bordercolor': ATTRS_VAL['colors'],
- 'width': 1,
- 'height': 1,
- 'borderwidth': 1,
- 'textmode': ATTRS_VAL['textmodes'],
- 'shape': ATTRS_VAL['shapes'],
- 'shrink': 1,
- 'stretch': 1,
- 'orientation': ATTRS_VAL['algos'],
- 'vertical_order': 1,
- 'horizontal_order': 1,
- 'xspace': 1,
- 'yspace': 1,
- 'layoutalgorithm': ATTRS_VAL['algos'],
- 'late_edge_labels': ATTRS_VAL['booleans'],
- 'display_edge_labels': ATTRS_VAL['booleans'],
- 'dirty_edge_labels': ATTRS_VAL['booleans'],
- 'finetuning': ATTRS_VAL['booleans'],
- 'manhattan_edges': ATTRS_VAL['booleans'],
- 'smanhattan_edges': ATTRS_VAL['booleans'],
- 'port_sharing': ATTRS_VAL['booleans'],
- 'edges': ATTRS_VAL['booleans'],
- 'nodes': ATTRS_VAL['booleans'],
- 'splines': ATTRS_VAL['booleans'],
- }
+ "title": 0,
+ "label": 0,
+ "color": ATTRS_VAL["colors"],
+ "textcolor": ATTRS_VAL["colors"],
+ "bordercolor": ATTRS_VAL["colors"],
+ "width": 1,
+ "height": 1,
+ "borderwidth": 1,
+ "textmode": ATTRS_VAL["textmodes"],
+ "shape": ATTRS_VAL["shapes"],
+ "shrink": 1,
+ "stretch": 1,
+ "orientation": ATTRS_VAL["algos"],
+ "vertical_order": 1,
+ "horizontal_order": 1,
+ "xspace": 1,
+ "yspace": 1,
+ "layoutalgorithm": ATTRS_VAL["algos"],
+ "late_edge_labels": ATTRS_VAL["booleans"],
+ "display_edge_labels": ATTRS_VAL["booleans"],
+ "dirty_edge_labels": ATTRS_VAL["booleans"],
+ "finetuning": ATTRS_VAL["booleans"],
+ "manhattan_edges": ATTRS_VAL["booleans"],
+ "smanhattan_edges": ATTRS_VAL["booleans"],
+ "port_sharing": ATTRS_VAL["booleans"],
+ "edges": ATTRS_VAL["booleans"],
+ "nodes": ATTRS_VAL["booleans"],
+ "splines": ATTRS_VAL["booleans"],
+}
NODE_ATTRS = {
- 'title': 0,
- 'label': 0,
- 'color': ATTRS_VAL['colors'],
- 'textcolor': ATTRS_VAL['colors'],
- 'bordercolor': ATTRS_VAL['colors'],
- 'width': 1,
- 'height': 1,
- 'borderwidth': 1,
- 'textmode': ATTRS_VAL['textmodes'],
- 'shape': ATTRS_VAL['shapes'],
- 'shrink': 1,
- 'stretch': 1,
- 'vertical_order': 1,
- 'horizontal_order': 1,
- }
+ "title": 0,
+ "label": 0,
+ "color": ATTRS_VAL["colors"],
+ "textcolor": ATTRS_VAL["colors"],
+ "bordercolor": ATTRS_VAL["colors"],
+ "width": 1,
+ "height": 1,
+ "borderwidth": 1,
+ "textmode": ATTRS_VAL["textmodes"],
+ "shape": ATTRS_VAL["shapes"],
+ "shrink": 1,
+ "stretch": 1,
+ "vertical_order": 1,
+ "horizontal_order": 1,
+}
EDGE_ATTRS = {
- 'sourcename': 0,
- 'targetname': 0,
- 'label': 0,
- 'linestyle': ATTRS_VAL['linestyles'],
- 'class': 1,
- 'thickness': 0,
- 'color': ATTRS_VAL['colors'],
- 'textcolor': ATTRS_VAL['colors'],
- 'arrowcolor': ATTRS_VAL['colors'],
- 'backarrowcolor': ATTRS_VAL['colors'],
- 'arrowsize': 1,
- 'backarrowsize': 1,
- 'arrowstyle': ATTRS_VAL['arrowstyles'],
- 'backarrowstyle': ATTRS_VAL['arrowstyles'],
- 'textmode': ATTRS_VAL['textmodes'],
- 'priority': 1,
- 'anchor': 1,
- 'horizontal_order': 1,
- }
+ "sourcename": 0,
+ "targetname": 0,
+ "label": 0,
+ "linestyle": ATTRS_VAL["linestyles"],
+ "class": 1,
+ "thickness": 0,
+ "color": ATTRS_VAL["colors"],
+ "textcolor": ATTRS_VAL["colors"],
+ "arrowcolor": ATTRS_VAL["colors"],
+ "backarrowcolor": ATTRS_VAL["colors"],
+ "arrowsize": 1,
+ "backarrowsize": 1,
+ "arrowstyle": ATTRS_VAL["arrowstyles"],
+ "backarrowstyle": ATTRS_VAL["arrowstyles"],
+ "textmode": ATTRS_VAL["textmodes"],
+ "priority": 1,
+ "anchor": 1,
+ "horizontal_order": 1,
+}
# Misc utilities ###############################################################
+
class VCGPrinter:
"""A vcg graph writer.
"""
def __init__(self, output_stream):
self._stream = output_stream
- self._indent = ''
+ self._indent = ""
def open_graph(self, **args):
"""open a vcg graph
"""
- self._stream.write('%sgraph:{\n'%self._indent)
+ self._stream.write("%sgraph:{\n" % self._indent)
self._inc_indent()
self._write_attributes(GRAPH_ATTRS, **args)
@@ -131,26 +171,24 @@ class VCGPrinter:
"""close a vcg graph
"""
self._dec_indent()
- self._stream.write('%s}\n'%self._indent)
-
+ self._stream.write("%s}\n" % self._indent)
def node(self, title, **args):
"""draw a node
"""
self._stream.write('%snode: {title:"%s"' % (self._indent, title))
self._write_attributes(NODE_ATTRS, **args)
- self._stream.write('}\n')
+ self._stream.write("}\n")
-
- def edge(self, from_node, to_node, edge_type='', **args):
+ def edge(self, from_node, to_node, edge_type="", **args):
"""draw an edge from a node to another.
"""
self._stream.write(
- '%s%sedge: {sourcename:"%s" targetname:"%s"' % (
- self._indent, edge_type, from_node, to_node))
+ '%s%sedge: {sourcename:"%s" targetname:"%s"'
+ % (self._indent, edge_type, from_node, to_node)
+ )
self._write_attributes(EDGE_ATTRS, **args)
- self._stream.write('}\n')
-
+ self._stream.write("}\n")
# private ##################################################################
@@ -161,24 +199,29 @@ class VCGPrinter:
try:
_type = attributes_dict[key]
except KeyError:
- raise Exception('''no such attribute %s
-possible attributes are %s''' % (key, attributes_dict.keys()))
+ raise Exception(
+ """no such attribute %s
+possible attributes are %s"""
+ % (key, attributes_dict.keys())
+ )
if not _type:
self._stream.write('%s%s:"%s"\n' % (self._indent, key, value))
elif _type == 1:
- self._stream.write('%s%s:%s\n' % (self._indent, key,
- int(value)))
+ self._stream.write("%s%s:%s\n" % (self._indent, key, int(value)))
elif value in _type:
- self._stream.write('%s%s:%s\n' % (self._indent, key, value))
+ self._stream.write("%s%s:%s\n" % (self._indent, key, value))
else:
- raise Exception('''value %s isn\'t correct for attribute %s
-correct values are %s''' % (value, key, _type))
+ raise Exception(
+ """value %s isn\'t correct for attribute %s
+correct values are %s"""
+ % (value, key, _type)
+ )
def _inc_indent(self):
"""increment indentation
"""
- self._indent = ' %s' % self._indent
+ self._indent = " %s" % self._indent
def _dec_indent(self):
"""decrement indentation
diff --git a/pylint/pyreverse/writer.py b/pylint/pyreverse/writer.py
index f11cb7966..332c28838 100644
--- a/pylint/pyreverse/writer.py
+++ b/pylint/pyreverse/writer.py
@@ -17,22 +17,24 @@ from pylint.pyreverse.utils import is_exception
from pylint.pyreverse.vcgutils import VCGPrinter
from pylint.graph import DotBackend
+
class DiagramWriter:
"""base class for writing project diagrams
"""
+
def __init__(self, config, styles):
self.config = config
self.pkg_edges, self.inh_edges, self.imp_edges, self.association_edges = styles
- self.printer = None # defined in set_printer
+ self.printer = None # defined in set_printer
def write(self, diadefs):
"""write files for <project> according to <diadefs>
"""
for diagram in diadefs:
- basename = diagram.title.strip().replace(' ', '_')
- file_name = '%s.%s' % (basename, self.config.output_format)
+ basename = diagram.title.strip().replace(" ", "_")
+ file_name = "%s.%s" % (basename, self.config.output_format)
self.set_printer(file_name, basename)
- if diagram.TYPE == 'class':
+ if diagram.TYPE == "class":
self.write_classes(diagram)
else:
self.write_packages(diagram)
@@ -42,12 +44,13 @@ class DiagramWriter:
"""write a package diagram"""
# sorted to get predictable (hence testable) results
for i, obj in enumerate(sorted(diagram.modules(), key=lambda x: x.title)):
- self.printer.emit_node(i, label=self.get_title(obj), shape='box')
+ self.printer.emit_node(i, label=self.get_title(obj), shape="box")
obj.fig_id = i
# package dependencies
- for rel in diagram.get_relationships('depends'):
- self.printer.emit_edge(rel.from_object.fig_id, rel.to_object.fig_id,
- **self.pkg_edges)
+ for rel in diagram.get_relationships("depends"):
+ self.printer.emit_edge(
+ rel.from_object.fig_id, rel.to_object.fig_id, **self.pkg_edges
+ )
def write_classes(self, diagram):
"""write a class diagram"""
@@ -56,17 +59,23 @@ class DiagramWriter:
self.printer.emit_node(i, **self.get_values(obj))
obj.fig_id = i
# inheritance links
- for rel in diagram.get_relationships('specialization'):
- self.printer.emit_edge(rel.from_object.fig_id, rel.to_object.fig_id,
- **self.inh_edges)
+ for rel in diagram.get_relationships("specialization"):
+ self.printer.emit_edge(
+ rel.from_object.fig_id, rel.to_object.fig_id, **self.inh_edges
+ )
# implementation links
- for rel in diagram.get_relationships('implements'):
- self.printer.emit_edge(rel.from_object.fig_id, rel.to_object.fig_id,
- **self.imp_edges)
+ for rel in diagram.get_relationships("implements"):
+ self.printer.emit_edge(
+ rel.from_object.fig_id, rel.to_object.fig_id, **self.imp_edges
+ )
# generate associations
- for rel in diagram.get_relationships('association'):
- self.printer.emit_edge(rel.from_object.fig_id, rel.to_object.fig_id,
- label=rel.name, **self.association_edges)
+ for rel in diagram.get_relationships("association"):
+ self.printer.emit_edge(
+ rel.from_object.fig_id,
+ rel.to_object.fig_id,
+ label=rel.name,
+ **self.association_edges
+ )
def set_printer(self, file_name, basename):
"""set printer"""
@@ -90,12 +99,14 @@ class DotWriter(DiagramWriter):
"""
def __init__(self, config):
- styles = [dict(arrowtail='none', arrowhead="open"),
- dict(arrowtail='none', arrowhead='empty'),
- dict(arrowtail='node', arrowhead='empty', style='dashed'),
- dict(fontcolor='green', arrowtail='none',
- arrowhead='diamond', style='solid'),
- ]
+ styles = [
+ dict(arrowtail="none", arrowhead="open"),
+ dict(arrowtail="none", arrowhead="empty"),
+ dict(arrowtail="node", arrowhead="empty", style="dashed"),
+ dict(
+ fontcolor="green", arrowtail="none", arrowhead="diamond", style="solid"
+ ),
+ ]
DiagramWriter.__init__(self, config, styles)
def set_printer(self, file_name, basename):
@@ -115,16 +126,16 @@ class DotWriter(DiagramWriter):
The label contains all attributes and methods
"""
label = obj.title
- if obj.shape == 'interface':
- label = '«interface»\\n%s' % label
+ if obj.shape == "interface":
+ label = "«interface»\\n%s" % label
if not self.config.only_classnames:
- label = r'%s|%s\l|' % (label, r'\l'.join(obj.attrs))
+ label = r"%s|%s\l|" % (label, r"\l".join(obj.attrs))
for func in obj.methods:
- label = r'%s%s()\l' % (label, func.name)
- label = '{%s}' % label
+ label = r"%s%s()\l" % (label, func.name)
+ label = "{%s}" % label
if is_exception(obj.node):
- return dict(fontcolor='red', label=label, shape='record')
- return dict(label=label, shape='record')
+ return dict(fontcolor="red", label=label, shape="record")
+ return dict(label=label, shape="record")
def close_graph(self):
"""print the dot graph into <file_name>"""
@@ -134,31 +145,38 @@ class DotWriter(DiagramWriter):
class VCGWriter(DiagramWriter):
"""write vcg graphs from a diagram definition and a project
"""
+
def __init__(self, config):
- styles = [dict(arrowstyle='solid', backarrowstyle='none',
- backarrowsize=0),
- dict(arrowstyle='solid', backarrowstyle='none',
- backarrowsize=10),
- dict(arrowstyle='solid', backarrowstyle='none',
- linestyle='dotted', backarrowsize=10),
- dict(arrowstyle='solid', backarrowstyle='none',
- textcolor='green'),
- ]
+ styles = [
+ dict(arrowstyle="solid", backarrowstyle="none", backarrowsize=0),
+ dict(arrowstyle="solid", backarrowstyle="none", backarrowsize=10),
+ dict(
+ arrowstyle="solid",
+ backarrowstyle="none",
+ linestyle="dotted",
+ backarrowsize=10,
+ ),
+ dict(arrowstyle="solid", backarrowstyle="none", textcolor="green"),
+ ]
DiagramWriter.__init__(self, config, styles)
def set_printer(self, file_name, basename):
"""initialize VCGWriter for a UML graph"""
- self.graph_file = open(file_name, 'w+')
+ self.graph_file = open(file_name, "w+")
self.printer = VCGPrinter(self.graph_file)
- self.printer.open_graph(title=basename, layoutalgorithm='dfs',
- late_edge_labels='yes', port_sharing='no',
- manhattan_edges='yes')
+ self.printer.open_graph(
+ title=basename,
+ layoutalgorithm="dfs",
+ late_edge_labels="yes",
+ port_sharing="no",
+ manhattan_edges="yes",
+ )
self.printer.emit_node = self.printer.node
self.printer.emit_edge = self.printer.edge
def get_title(self, obj):
"""get project title in vcg format"""
- return r'\fb%s\fn' % obj.title
+ return r"\fb%s\fn" % obj.title
def get_values(self, obj):
"""get label and shape for classes.
@@ -166,26 +184,26 @@ class VCGWriter(DiagramWriter):
The label contains all attributes and methods
"""
if is_exception(obj.node):
- label = r'\fb\f09%s\fn' % obj.title
+ label = r"\fb\f09%s\fn" % obj.title
else:
- label = r'\fb%s\fn' % obj.title
- if obj.shape == 'interface':
- shape = 'ellipse'
+ label = r"\fb%s\fn" % obj.title
+ if obj.shape == "interface":
+ shape = "ellipse"
else:
- shape = 'box'
+ shape = "box"
if not self.config.only_classnames:
attrs = obj.attrs
methods = [func.name for func in obj.methods]
# box width for UML like diagram
maxlen = max(len(name) for name in [obj.title] + methods + attrs)
- line = '_' * (maxlen + 2)
- label = r'%s\n\f%s' % (label, line)
+ line = "_" * (maxlen + 2)
+ label = r"%s\n\f%s" % (label, line)
for attr in attrs:
- label = r'%s\n\f08%s' % (label, attr)
+ label = r"%s\n\f08%s" % (label, attr)
if attrs:
- label = r'%s\n\f%s' % (label, line)
+ label = r"%s\n\f%s" % (label, line)
for func in methods:
- label = r'%s\n\f10%s()' % (label, func)
+ label = r"%s\n\f10%s()" % (label, func)
return dict(label=label, shape=shape)
def close_graph(self):
diff --git a/pylint/reporters/__init__.py b/pylint/reporters/__init__.py
index 340b072b8..c09ebdd0c 100644
--- a/pylint/reporters/__init__.py
+++ b/pylint/reporters/__init__.py
@@ -25,19 +25,21 @@ import os
import warnings
-CMPS = ['=', '-', '+']
+CMPS = ["=", "-", "+"]
# py3k has no more cmp builtin
if sys.version_info >= (3, 0):
- def cmp(a, b): # pylint: disable=redefined-builtin
+
+ def cmp(a, b): # pylint: disable=redefined-builtin
return (a > b) - (a < b)
+
def diff_string(old, new):
"""given an old and new int value, return a string representing the
difference
"""
diff = abs(old - new)
- diff_str = "%s%s" % (CMPS[cmp(old, new)], diff and ('%.2f' % diff) or '')
+ diff_str = "%s%s" % (CMPS[cmp(old, new)], diff and ("%.2f" % diff) or "")
return diff_str
@@ -47,7 +49,7 @@ class BaseReporter:
symbols: show short symbolic names for messages.
"""
- extension = ''
+ extension = ""
def __init__(self, output=None):
self.linter = None
@@ -65,15 +67,15 @@ class BaseReporter:
"""set output stream"""
self.out = output or sys.stdout
- def writeln(self, string=''):
+ def writeln(self, string=""):
"""write a line in the output buffer"""
print(string, file=self.out)
def display_reports(self, layout):
"""display results encapsulated in the layout tree"""
self.section = 0
- if hasattr(layout, 'report_id'):
- layout.children[0].children[0].data += ' (%s)' % layout.report_id
+ if hasattr(layout, "report_id"):
+ layout.children[0].children[0].data += " (%s)" % layout.report_id
self._display(layout)
def _display(self, layout):
@@ -103,7 +105,7 @@ class BaseReporter:
class CollectingReporter(BaseReporter):
"""collects messages"""
- name = 'collector'
+ name = "collector"
def __init__(self):
BaseReporter.__init__(self)
@@ -118,4 +120,5 @@ class CollectingReporter(BaseReporter):
def initialize(linter):
"""initialize linter with reporters in this package """
from pylint import utils
+
utils.register_plugins(linter, __path__[0])
diff --git a/pylint/reporters/json.py b/pylint/reporters/json.py
index 7f465ca61..bb3ca0153 100644
--- a/pylint/reporters/json.py
+++ b/pylint/reporters/json.py
@@ -21,8 +21,8 @@ class JSONReporter(BaseReporter):
"""Report messages and layouts in JSON."""
__implements__ = IReporter
- name = 'json'
- extension = 'json'
+ name = "json"
+ extension = "json"
def __init__(self, output=sys.stdout):
BaseReporter.__init__(self, output)
@@ -30,24 +30,26 @@ class JSONReporter(BaseReporter):
def handle_message(self, msg):
"""Manage message of different type and in the context of path."""
- self.messages.append({
- 'type': msg.category,
- 'module': msg.module,
- 'obj': msg.obj,
- 'line': msg.line,
- 'column': msg.column,
- 'path': msg.path,
- 'symbol': msg.symbol,
- # pylint: disable=deprecated-method; deprecated since 3.2.
- 'message': cgi.escape(msg.msg or ''),
- 'message-id': msg.msg_id,
- })
+ self.messages.append(
+ {
+ "type": msg.category,
+ "module": msg.module,
+ "obj": msg.obj,
+ "line": msg.line,
+ "column": msg.column,
+ "path": msg.path,
+ "symbol": msg.symbol,
+ # pylint: disable=deprecated-method; deprecated since 3.2.
+ "message": cgi.escape(msg.msg or ""),
+ "message-id": msg.msg_id,
+ }
+ )
def display_messages(self, layout):
"""Launch layouts display"""
print(json.dumps(self.messages, indent=4), file=self.out)
- def display_reports(self, layout): # pylint: disable=arguments-differ
+ def display_reports(self, layout): # pylint: disable=arguments-differ
"""Don't do nothing in this reporter."""
def _display(self, layout):
diff --git a/pylint/reporters/text.py b/pylint/reporters/text.py
index 517c4484a..4c682ae05 100644
--- a/pylint/reporters/text.py
+++ b/pylint/reporters/text.py
@@ -30,32 +30,33 @@ from pylint import utils
from pylint.reporters.ureports.text_writer import TextWriter
-TITLE_UNDERLINES = ['', '=', '-', '.']
+TITLE_UNDERLINES = ["", "=", "-", "."]
-ANSI_PREFIX = '\033['
-ANSI_END = 'm'
-ANSI_RESET = '\033[0m'
+ANSI_PREFIX = "\033["
+ANSI_END = "m"
+ANSI_RESET = "\033[0m"
ANSI_STYLES = {
- 'reset': "0",
- 'bold': "1",
- 'italic': "3",
- 'underline': "4",
- 'blink': "5",
- 'inverse': "7",
- 'strike': "9",
+ "reset": "0",
+ "bold": "1",
+ "italic": "3",
+ "underline": "4",
+ "blink": "5",
+ "inverse": "7",
+ "strike": "9",
}
ANSI_COLORS = {
- 'reset': "0",
- 'black': "30",
- 'red': "31",
- 'green': "32",
- 'yellow': "33",
- 'blue': "34",
- 'magenta': "35",
- 'cyan': "36",
- 'white': "37",
+ "reset": "0",
+ "black": "30",
+ "red": "31",
+ "green": "32",
+ "yellow": "33",
+ "blue": "34",
+ "magenta": "35",
+ "cyan": "36",
+ "white": "37",
}
+
def _get_ansi_code(color=None, style=None):
"""return ansi escape code corresponding to color and style
@@ -81,13 +82,14 @@ def _get_ansi_code(color=None, style=None):
ansi_code.append(ANSI_STYLES[effect])
if color:
if color.isdigit():
- ansi_code.extend(['38', '5'])
+ ansi_code.extend(["38", "5"])
ansi_code.append(color)
else:
ansi_code.append(ANSI_COLORS[color])
if ansi_code:
- return ANSI_PREFIX + ';'.join(ansi_code) + ANSI_END
- return ''
+ return ANSI_PREFIX + ";".join(ansi_code) + ANSI_END
+ return ""
+
def colorize_ansi(msg, color=None, style=None):
"""colorize message by wrapping it with ansi escape codes
@@ -115,7 +117,7 @@ def colorize_ansi(msg, color=None, style=None):
escape_code = _get_ansi_code(color, style)
# If invalid (or unknown) color, don't wrap msg with ansi codes
if escape_code:
- return '%s%s%s' % (escape_code, msg, ANSI_RESET)
+ return "%s%s%s" % (escape_code, msg, ANSI_RESET)
return msg
@@ -123,9 +125,9 @@ class TextReporter(BaseReporter):
"""reports messages and layouts in plain text"""
__implements__ = IReporter
- name = 'text'
- extension = 'txt'
- line_format = '{path}:{line}:{column}: {msg_id}: {msg} ({symbol})'
+ name = "text"
+ extension = "txt"
+ line_format = "{path}:{line}:{column}: {msg_id}: {msg} ({symbol})"
def __init__(self, output=None):
BaseReporter.__init__(self, output)
@@ -143,10 +145,10 @@ class TextReporter(BaseReporter):
"""manage message of different type and in the context of path"""
if msg.module not in self._modules:
if msg.module:
- self.writeln('************* Module %s' % msg.module)
+ self.writeln("************* Module %s" % msg.module)
self._modules.add(msg.module)
else:
- self.writeln('************* ')
+ self.writeln("************* ")
self.write_message(msg)
def _display(self, layout):
@@ -161,45 +163,49 @@ class ParseableTextReporter(TextReporter):
<filename>:<linenum>:<msg>
"""
- name = 'parseable'
- line_format = '{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}'
+
+ name = "parseable"
+ line_format = "{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}"
def __init__(self, output=None):
- warnings.warn('%s output format is deprecated. This is equivalent '
- 'to --msg-template=%s' % (self.name, self.line_format),
- DeprecationWarning)
+ warnings.warn(
+ "%s output format is deprecated. This is equivalent "
+ "to --msg-template=%s" % (self.name, self.line_format),
+ DeprecationWarning,
+ )
TextReporter.__init__(self, output)
class VSTextReporter(ParseableTextReporter):
"""Visual studio text reporter"""
- name = 'msvs'
- line_format = '{path}({line}): [{msg_id}({symbol}){obj}] {msg}'
+
+ name = "msvs"
+ line_format = "{path}({line}): [{msg_id}({symbol}){obj}] {msg}"
class ColorizedTextReporter(TextReporter):
"""Simple TextReporter that colorizes text output"""
- name = 'colorized'
+ name = "colorized"
COLOR_MAPPING = {
- "I" : ("green", None),
- 'C' : (None, "bold"),
- 'R' : ("magenta", "bold, italic"),
- 'W' : ("magenta", None),
- 'E' : ("red", "bold"),
- 'F' : ("red", "bold, underline"),
- 'S' : ("yellow", "inverse"), # S stands for module Separator
+ "I": ("green", None),
+ "C": (None, "bold"),
+ "R": ("magenta", "bold, italic"),
+ "W": ("magenta", None),
+ "E": ("red", "bold"),
+ "F": ("red", "bold, underline"),
+ "S": ("yellow", "inverse"), # S stands for module Separator
}
def __init__(self, output=None, color_mapping=None):
TextReporter.__init__(self, output)
- self.color_mapping = color_mapping or \
- dict(ColorizedTextReporter.COLOR_MAPPING)
- ansi_terms = ['xterm-16color', 'xterm-256color']
- if os.environ.get('TERM') not in ansi_terms:
- if sys.platform == 'win32':
+ self.color_mapping = color_mapping or dict(ColorizedTextReporter.COLOR_MAPPING)
+ ansi_terms = ["xterm-16color", "xterm-256color"]
+ if os.environ.get("TERM") not in ansi_terms:
+ if sys.platform == "win32":
# pylint: disable=import-error
import colorama
+
self.out = colorama.AnsiToWin32(self.out)
def _get_decoration(self, msg_id):
@@ -216,20 +222,23 @@ class ColorizedTextReporter(TextReporter):
using ansi escape codes
"""
if msg.module not in self._modules:
- color, style = self._get_decoration('S')
+ color, style = self._get_decoration("S")
if msg.module:
- modsep = colorize_ansi('************* Module %s' % msg.module,
- color, style)
+ modsep = colorize_ansi(
+ "************* Module %s" % msg.module, color, style
+ )
else:
- modsep = colorize_ansi('************* %s' % msg.module,
- color, style)
+ modsep = colorize_ansi("************* %s" % msg.module, color, style)
self.writeln(modsep)
self._modules.add(msg.module)
color, style = self._get_decoration(msg.C)
msg = msg._replace(
- **{attr: colorize_ansi(getattr(msg, attr), color, style)
- for attr in ('msg', 'symbol', 'category', 'C')})
+ **{
+ attr: colorize_ansi(getattr(msg, attr), color, style)
+ for attr in ("msg", "symbol", "category", "C")
+ }
+ )
self.write_message(msg)
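The reformatting above does not change behaviour: colorize_ansi keeps its (msg, color, style) signature and still falls back to the plain message when no escape code applies. A quick sanity sketch, assuming a terminal that honours ANSI escapes:

    from pylint.reporters.text import colorize_ansi

    # wraps the message in the bold + red escape sequence and appends the reset code
    print(colorize_ansi("fatal error", color="red", style="bold"))

    # with no color or style the message is returned untouched
    print(colorize_ansi("plain text"))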
diff --git a/pylint/reporters/ureports/__init__.py b/pylint/reporters/ureports/__init__.py
index 2eec5e9ae..56a829fac 100644
--- a/pylint/reporters/ureports/__init__.py
+++ b/pylint/reporters/ureports/__init__.py
@@ -30,8 +30,8 @@ class BaseWriter:
if stream is None:
stream = sys.stdout
if not encoding:
- encoding = getattr(stream, 'encoding', 'UTF-8')
- self.encoding = encoding or 'UTF-8'
+ encoding = getattr(stream, "encoding", "UTF-8")
+ self.encoding = encoding or "UTF-8"
self.out = stream
self.begin_format()
layout.accept(self)
@@ -41,10 +41,10 @@ class BaseWriter:
"""recurse on the layout children and call their accept method
(see the Visitor pattern)
"""
- for child in getattr(layout, 'children', ()):
+ for child in getattr(layout, "children", ()):
child.accept(self)
- def writeln(self, string=''):
+ def writeln(self, string=""):
"""write a line in the output buffer"""
self.write(string + os.linesep)
@@ -74,7 +74,7 @@ class BaseWriter:
result[-1].append(cell)
# fill missing cells
while len(result[-1]) < cols:
- result[-1].append('')
+ result[-1].append("")
return result
def compute_content(self, layout):
diff --git a/pylint/reporters/ureports/nodes.py b/pylint/reporters/ureports/nodes.py
index 8b502fb62..8fafb206a 100644
--- a/pylint/reporters/ureports/nodes.py
+++ b/pylint/reporters/ureports/nodes.py
@@ -13,7 +13,6 @@ A micro report is a tree of layout and content objects.
class VNode:
-
def __init__(self, nid=None):
self.id = nid
# navigation
@@ -41,17 +40,17 @@ class VNode:
"""
try:
# pylint: disable=no-member
- return self.TYPE.replace('-', '_')
+ return self.TYPE.replace("-", "_")
# pylint: disable=broad-except
except Exception:
return self.__class__.__name__.lower()
def accept(self, visitor, *args, **kwargs):
- func = getattr(visitor, 'visit_%s' % self._get_visit_name())
+ func = getattr(visitor, "visit_%s" % self._get_visit_name())
return func(self, *args, **kwargs)
def leave(self, visitor, *args, **kwargs):
- func = getattr(visitor, 'leave_%s' % self._get_visit_name())
+ func = getattr(visitor, "leave_%s" % self._get_visit_name())
return func(self, *args, **kwargs)
@@ -61,6 +60,7 @@ class BaseLayout(VNode):
attributes
* children : components in this table (i.e. the table's cells)
"""
+
def __init__(self, children=(), **kwargs):
super(BaseLayout, self).__init__(**kwargs)
for child in children:
@@ -88,15 +88,17 @@ class BaseLayout(VNode):
# non container nodes #########################################################
+
class Text(VNode):
"""a text portion
attributes :
* data : the text value as an encoded or unicode string
"""
+
def __init__(self, data, escaped=True, **kwargs):
super(Text, self).__init__(**kwargs)
- #if isinstance(data, unicode):
+ # if isinstance(data, unicode):
# data = data.encode('ascii')
assert isinstance(data, str), data.__class__
self.escaped = escaped
@@ -110,8 +112,10 @@ class VerbatimText(Text):
* data : the text value as an encoded or unicode string
"""
+
# container nodes #############################################################
+
class Section(BaseLayout):
"""a section
@@ -123,6 +127,7 @@ class Section(BaseLayout):
a description may also be given to the constructor, it'll be added
as a first paragraph
"""
+
def __init__(self, title=None, description=None, **kwargs):
super(Section, self).__init__(**kwargs)
if description:
@@ -132,7 +137,6 @@ class Section(BaseLayout):
class EvaluationSection(Section):
-
def __init__(self, message, **kwargs):
super(EvaluationSection, self).__init__(**kwargs)
title = Paragraph()
@@ -174,9 +178,8 @@ class Table(BaseLayout):
* cheaders : the first col's elements are table's header
* title : the table's optional title
"""
- def __init__(self, cols, title=None,
- rheaders=0, cheaders=0,
- **kwargs):
+
+ def __init__(self, cols, title=None, rheaders=0, cheaders=0, **kwargs):
super(Table, self).__init__(**kwargs)
assert isinstance(cols, int)
self.cols = cols
diff --git a/pylint/reporters/ureports/text_writer.py b/pylint/reporters/ureports/text_writer.py
index 7dfe743bc..9d9e1ad34 100644
--- a/pylint/reporters/ureports/text_writer.py
+++ b/pylint/reporters/ureports/text_writer.py
@@ -11,13 +11,15 @@ from __future__ import print_function
from pylint.reporters.ureports import BaseWriter
-TITLE_UNDERLINES = ['', '=', '-', '`', '.', '~', '^']
-BULLETS = ['*', '-']
+TITLE_UNDERLINES = ["", "=", "-", "`", ".", "~", "^"]
+BULLETS = ["*", "-"]
+
class TextWriter(BaseWriter):
"""format layouts as text
(ReStructured inspiration but not totally handled yet)
"""
+
def begin_format(self):
super(TextWriter, self).begin_format()
self.list_level = 0
@@ -39,7 +41,7 @@ class TextWriter(BaseWriter):
self.writeln()
def visit_title(self, layout):
- title = ''.join(list(self.compute_content(layout)))
+ title = "".join(list(self.compute_content(layout)))
self.writeln(title)
try:
self.writeln(TITLE_UNDERLINES[self.section] * len(title))
@@ -55,7 +57,7 @@ class TextWriter(BaseWriter):
"""display a table as text"""
table_content = self.get_table_content(layout)
# get columns width
- cols_width = [0]*len(table_content[0])
+ cols_width = [0] * len(table_content[0])
for row in table_content:
for index, col in enumerate(row):
cols_width[index] = max(cols_width[index], len(col))
@@ -64,19 +66,19 @@ class TextWriter(BaseWriter):
def default_table(self, layout, table_content, cols_width):
"""format a table"""
- cols_width = [size+1 for size in cols_width]
- format_strings = ' '.join(['%%-%ss'] * len(cols_width))
+ cols_width = [size + 1 for size in cols_width]
+ format_strings = " ".join(["%%-%ss"] * len(cols_width))
format_strings = format_strings % tuple(cols_width)
- format_strings = format_strings.split(' ')
- table_linesep = '\n+' + '+'.join(['-'*w for w in cols_width]) + '+\n'
- headsep = '\n+' + '+'.join(['='*w for w in cols_width]) + '+\n'
+ format_strings = format_strings.split(" ")
+ table_linesep = "\n+" + "+".join(["-" * w for w in cols_width]) + "+\n"
+ headsep = "\n+" + "+".join(["=" * w for w in cols_width]) + "+\n"
# FIXME: layout.cheaders
self.write(table_linesep)
for index, line in enumerate(table_content):
- self.write('|')
+ self.write("|")
for line_index, at_index in enumerate(line):
self.write(format_strings[line_index] % at_index)
- self.write('|')
+ self.write("|")
if index == 0 and layout.rheaders:
self.write(headsep)
else:
@@ -85,11 +87,11 @@ class TextWriter(BaseWriter):
def visit_verbatimtext(self, layout):
"""display a verbatim layout as text (so difficult ;)
"""
- self.writeln('::\n')
+ self.writeln("::\n")
for line in layout.data.splitlines():
- self.writeln(' ' + line)
+ self.writeln(" " + line)
self.writeln()
def visit_text(self, layout):
"""add some text"""
- self.write('%s' % layout.data)
+ self.write("%s" % layout.data)
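The ureports nodes and TextWriter reformatted above are used together to build and render report layouts. A minimal sketch (assuming BaseWriter.format defaults its stream to sys.stdout and BaseLayout wraps plain strings in Text nodes, as the original code does); the section title and table values are illustrative:

    from pylint.reporters.ureports.nodes import Section, Table
    from pylint.reporters.ureports.text_writer import TextWriter

    # a titled section with a one-line description and a 2-column table
    layout = Section("Summary", "two modules were checked")
    layout.append(Table(cols=2, rheaders=1,
                        children=["module", "errors", "a", "0", "b", "3"]))

    # renders the title, its underline, the description and the table as plain text
    TextWriter().format(layout)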
diff --git a/pylint/test/a.py b/pylint/test/a.py
index d280e7a84..6ca3434f4 100644
--- a/pylint/test/a.py
+++ b/pylint/test/a.py
@@ -1 +1 @@
-"{a[0]}".format(a=object) # [invalid-format-index]
+"{a[0]}".format(a=object) # [invalid-format-index]
diff --git a/pylint/test/acceptance/test_stdlib.py b/pylint/test/acceptance/test_stdlib.py
index 96d247a4c..1b97fd2ea 100644
--- a/pylint/test/acceptance/test_stdlib.py
+++ b/pylint/test/acceptance/test_stdlib.py
@@ -16,7 +16,7 @@ def is_module(filename):
def is_package(filename, location):
- return os.path.exists(os.path.join(location, filename, '__init__.py'))
+ return os.path.exists(os.path.join(location, filename, "__init__.py"))
@contextlib.contextmanager
@@ -28,22 +28,25 @@ def _patch_stdout(out):
sys.stdout = sys.__stdout__
-LIB_DIRS = [
- os.path.dirname(os.__file__),
+LIB_DIRS = [os.path.dirname(os.__file__)]
+MODULES_TO_CHECK = [
+ (location, module)
+ for location in LIB_DIRS
+ for module in os.listdir(location)
+ if is_module(module) or is_package(module, location)
]
-MODULES_TO_CHECK = [(location, module) for location in LIB_DIRS for module in os.listdir(location)
- if is_module(module) or is_package(module, location)]
MODULES_NAMES = [m[1] for m in MODULES_TO_CHECK]
@pytest.mark.acceptance
-@pytest.mark.parametrize(("test_module_location", "test_module_name"),
- MODULES_TO_CHECK, ids=MODULES_NAMES)
+@pytest.mark.parametrize(
+ ("test_module_location", "test_module_name"), MODULES_TO_CHECK, ids=MODULES_NAMES
+)
def test_libmodule(test_module_location, test_module_name):
os.chdir(test_module_location)
with _patch_stdout(io.StringIO()):
try:
- pylint.lint.Run([test_module_name, '--enable=all', '--ignore=test'])
+ pylint.lint.Run([test_module_name, "--enable=all", "--ignore=test"])
except SystemExit as ex:
assert ex.code != 32
return
diff --git a/pylint/test/conftest.py b/pylint/test/conftest.py
index 2a8df8c93..d75522bed 100644
--- a/pylint/test/conftest.py
+++ b/pylint/test/conftest.py
@@ -4,6 +4,7 @@ import pytest
from pylint import checkers
from pylint.lint import PyLinter
+
# pylint: disable=no-name-in-module
from pylint.testutils import MinimalTestReporter
@@ -23,30 +24,30 @@ def linter(checker, register, enable, disable, reporter):
if enable:
for msg in enable:
_linter.enable(msg)
- os.environ.pop('PYLINTRC', None)
+ os.environ.pop("PYLINTRC", None)
return _linter
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def checker():
return None
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def register():
return None
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def enable():
return None
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def disable():
return None
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def reporter():
return MinimalTestReporter
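These conftest.py fixtures are module-scoped precisely so individual test modules can override them, as test_regr.py does further down for reporter and disable. A sketch of the pattern; the test body and module name are illustrative:

    import pytest

    @pytest.fixture(scope="module")
    def disable(disable):
        # keep informational (I) messages switched off for every test in this module
        return ["I"]

    def test_clean_module(linter):
        linter.check("pylint.checkers.__init__")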
diff --git a/pylint/test/test_func.py b/pylint/test/test_func.py
index d82b8613b..091a6d71d 100644
--- a/pylint/test/test_func.py
+++ b/pylint/test/test_func.py
@@ -22,56 +22,58 @@ from os.path import abspath, dirname, join
from pylint.testutils import _get_tests_info, linter
PY3K = sys.version_info >= (3, 0)
-SYS_VERS_STR = '%d%d%d' % sys.version_info[:3]
+SYS_VERS_STR = "%d%d%d" % sys.version_info[:3]
# Configure paths
-INPUT_DIR = join(dirname(abspath(__file__)), 'input')
-MSG_DIR = join(dirname(abspath(__file__)), 'messages')
+INPUT_DIR = join(dirname(abspath(__file__)), "input")
+MSG_DIR = join(dirname(abspath(__file__)), "messages")
FILTER_RGX = None
UPDATE = False
-INFO_TEST_RGX = re.compile(r'^func_i\d\d\d\d$')
+INFO_TEST_RGX = re.compile(r"^func_i\d\d\d\d$")
# Classes
-quote = "'" if sys.version_info >= (3, 3) else ''
+quote = "'" if sys.version_info >= (3, 3) else ""
-def exception_str(self, ex): # pylint: disable=unused-argument
+def exception_str(self, ex): # pylint: disable=unused-argument
"""function used to replace default __str__ method of exception instances"""
- return 'in %s\n:: %s' % (ex.file, ', '.join(ex.args))
+ return "in %s\n:: %s" % (ex.file, ", ".join(ex.args))
class LintTestUsingModule(object):
INPUT_DIR = None
- DEFAULT_PACKAGE = 'input'
+ DEFAULT_PACKAGE = "input"
package = DEFAULT_PACKAGE
linter = linter
module = None
depends = None
output = None
- _TEST_TYPE = 'module'
+ _TEST_TYPE = "module"
# def runTest(self):
# # This is a hack to make ./test/test_func.py work under pytest.
# pass
def _test_functionality(self):
- tocheck = [self.package+'.'+self.module]
+ tocheck = [self.package + "." + self.module]
# pylint: disable=not-an-iterable; can't handle boolean checks for now
if self.depends:
- tocheck += [self.package+'.%s' % name.replace('.py', '')
- for name, _ in self.depends]
+ tocheck += [
+ self.package + ".%s" % name.replace(".py", "")
+ for name, _ in self.depends
+ ]
self._test(tocheck)
def _check_result(self, got):
- assert self._get_expected().strip()+'\n' == got.strip()+'\n'
+ assert self._get_expected().strip() + "\n" == got.strip() + "\n"
def _test(self, tocheck):
if INFO_TEST_RGX.match(self.module):
- self.linter.enable('I')
+ self.linter.enable("I")
else:
- self.linter.disable('I')
+ self.linter.disable("I")
try:
self.linter.check(tocheck)
except Exception as ex:
@@ -84,28 +86,28 @@ class LintTestUsingModule(object):
self._check_result(self.linter.reporter.finalize())
def _has_output(self):
- return not self.module.startswith('func_noerror_')
+ return not self.module.startswith("func_noerror_")
def _get_expected(self):
if self._has_output() and self.output:
- with open(self.output, 'U') as fobj:
- return fobj.read().strip() + '\n'
+ with open(self.output, "U") as fobj:
+ return fobj.read().strip() + "\n"
else:
- return ''
+ return ""
class LintTestUpdate(LintTestUsingModule):
- _TEST_TYPE = 'update'
+ _TEST_TYPE = "update"
def _check_result(self, got):
if self._has_output():
try:
expected = self._get_expected()
except IOError:
- expected = ''
+ expected = ""
if got != expected:
- with open(self.output, 'w') as fobj:
+ with open(self.output, "w") as fobj:
fobj.write(got)
@@ -115,13 +117,11 @@ def gen_tests(filter_rgx):
else:
is_to_run = lambda x: 1
tests = []
- for module_file, messages_file in (
- _get_tests_info(INPUT_DIR, MSG_DIR, 'func_', '')
- ):
- if not is_to_run(module_file) or module_file.endswith(('.pyc', "$py.class")):
+ for module_file, messages_file in _get_tests_info(INPUT_DIR, MSG_DIR, "func_", ""):
+ if not is_to_run(module_file) or module_file.endswith((".pyc", "$py.class")):
continue
- base = module_file.replace('.py', '').split('_')[1]
- dependencies = _get_tests_info(INPUT_DIR, MSG_DIR, base, '.py')
+ base = module_file.replace(".py", "").split("_")[1]
+ dependencies = _get_tests_info(INPUT_DIR, MSG_DIR, base, ".py")
tests.append((module_file, messages_file, dependencies))
if UPDATE:
@@ -131,22 +131,26 @@ def gen_tests(filter_rgx):
return tests
-@pytest.mark.parametrize("module_file,messages_file,dependencies", gen_tests(FILTER_RGX),
- ids=[o[0] for o in gen_tests(FILTER_RGX)])
-def test_functionality(module_file, messages_file, dependencies,):
+@pytest.mark.parametrize(
+ "module_file,messages_file,dependencies",
+ gen_tests(FILTER_RGX),
+ ids=[o[0] for o in gen_tests(FILTER_RGX)],
+)
+def test_functionality(module_file, messages_file, dependencies):
LT = LintTestUpdate() if UPDATE else LintTestUsingModule()
- LT.module = module_file.replace('.py', '')
+ LT.module = module_file.replace(".py", "")
LT.output = messages_file
LT.depends = dependencies or None
LT.INPUT_DIR = INPUT_DIR
LT._test_functionality()
-if __name__ == '__main__':
- if '-u' in sys.argv:
+
+if __name__ == "__main__":
+ if "-u" in sys.argv:
UPDATE = True
- sys.argv.remove('-u')
+ sys.argv.remove("-u")
if len(sys.argv) > 1:
FILTER_RGX = sys.argv[1]
diff --git a/pylint/test/test_import_graph.py b/pylint/test/test_import_graph.py
index 36f57339e..218164565 100644
--- a/pylint/test/test_import_graph.py
+++ b/pylint/test/test_import_graph.py
@@ -22,16 +22,17 @@ import pylint.testutils as testutils
@pytest.fixture
def dest():
- dest = 'dependencies_graph.dot'
+ dest = "dependencies_graph.dot"
yield dest
os.remove(dest)
def test_dependencies_graph(dest):
- imports._dependencies_graph(dest, {'labas': ['hoho', 'yep'],
- 'hoho': ['yep']})
+ imports._dependencies_graph(dest, {"labas": ["hoho", "yep"], "hoho": ["yep"]})
with open(dest) as stream:
- assert stream.read().strip() == '''
+ assert (
+ stream.read().strip()
+ == """
digraph "dependencies_graph" {
rankdir=LR
charset="utf-8"
@@ -43,7 +44,8 @@ URL="." node[shape="box"]
"hoho" -> "labas" [];
"yep" -> "labas" [];
}
-'''.strip()
+""".strip()
+ )
@pytest.fixture
@@ -56,7 +58,7 @@ def linter():
@pytest.fixture
def remove_files():
yield
- for fname in ('import.dot', 'ext_import.dot', 'int_import.dot'):
+ for fname in ("import.dot", "ext_import.dot", "int_import.dot"):
try:
os.remove(fname)
except:
@@ -66,17 +68,17 @@ def remove_files():
@pytest.mark.usefixtures("remove_files")
def test_checker_dep_graphs(linter):
l = linter
- l.global_set_option('persistent', False)
- l.global_set_option('reports', True)
- l.global_set_option('enable', 'imports')
- l.global_set_option('import-graph', 'import.dot')
- l.global_set_option('ext-import-graph', 'ext_import.dot')
- l.global_set_option('int-import-graph', 'int_import.dot')
- l.global_set_option('int-import-graph', 'int_import.dot')
+ l.global_set_option("persistent", False)
+ l.global_set_option("reports", True)
+ l.global_set_option("enable", "imports")
+ l.global_set_option("import-graph", "import.dot")
+ l.global_set_option("ext-import-graph", "ext_import.dot")
+ l.global_set_option("int-import-graph", "int_import.dot")
+ l.global_set_option("int-import-graph", "int_import.dot")
# ignore this file causing spurious MemoryError w/ some python version (>=2.3?)
- l.global_set_option('ignore', ('func_unknown_encoding.py',))
- l.check('input')
+ l.global_set_option("ignore", ("func_unknown_encoding.py",))
+ l.check("input")
l.generate_reports()
- assert exists('import.dot')
- assert exists('ext_import.dot')
- assert exists('int_import.dot')
+ assert exists("import.dot")
+ assert exists("ext_import.dot")
+ assert exists("int_import.dot")
diff --git a/pylint/test/test_regr.py b/pylint/test/test_regr.py
index c96cca0e4..c3e026015 100644
--- a/pylint/test/test_regr.py
+++ b/pylint/test/test_regr.py
@@ -25,7 +25,7 @@ import pylint.testutils as testutils
from pylint import epylint
-REGR_DATA = join(dirname(abspath(__file__)), 'regrtest_data')
+REGR_DATA = join(dirname(abspath(__file__)), "regrtest_data")
sys.path.insert(1, REGR_DATA)
try:
@@ -41,7 +41,7 @@ def reporter(reporter):
@pytest.fixture(scope="module")
def disable(disable):
- return ['I']
+ return ["I"]
@pytest.fixture
@@ -57,36 +57,41 @@ def Equals(expected):
return lambda got: got == expected
-@pytest.mark.parametrize("file_name, check", [
- ("package.__init__", Equals("")),
- ("precedence_test", Equals("")),
- ("import_package_subpackage_module", Equals("")),
- ("pylint.checkers.__init__", lambda x: '__path__' not in x),
- (join(REGR_DATA, "classdoc_usage.py"), Equals("")),
- (join(REGR_DATA, "module_global.py"), Equals("")),
- (join(REGR_DATA, "decimal_inference.py"), Equals("")),
- (join(REGR_DATA, 'absimp', 'string.py'), Equals("")),
- (join(REGR_DATA, 'bad_package'),
- lambda x: "Unused import missing" in x),
-
-])
+@pytest.mark.parametrize(
+ "file_name, check",
+ [
+ ("package.__init__", Equals("")),
+ ("precedence_test", Equals("")),
+ ("import_package_subpackage_module", Equals("")),
+ ("pylint.checkers.__init__", lambda x: "__path__" not in x),
+ (join(REGR_DATA, "classdoc_usage.py"), Equals("")),
+ (join(REGR_DATA, "module_global.py"), Equals("")),
+ (join(REGR_DATA, "decimal_inference.py"), Equals("")),
+ (join(REGR_DATA, "absimp", "string.py"), Equals("")),
+ (join(REGR_DATA, "bad_package"), lambda x: "Unused import missing" in x),
+ ],
+)
def test_package(finalize_linter, file_name, check):
finalize_linter.check(file_name)
got = finalize_linter.reporter.finalize().strip()
assert check(got)
-@pytest.mark.parametrize("file_name", [
- join(REGR_DATA, 'import_assign.py'),
- join(REGR_DATA, 'special_attr_scope_lookup_crash.py'),
- join(REGR_DATA, 'try_finally_disable_msg_crash'),
-])
+@pytest.mark.parametrize(
+ "file_name",
+ [
+ join(REGR_DATA, "import_assign.py"),
+ join(REGR_DATA, "special_attr_scope_lookup_crash.py"),
+ join(REGR_DATA, "try_finally_disable_msg_crash"),
+ ],
+)
def test_crash(finalize_linter, file_name):
finalize_linter.check(file_name)
-@pytest.mark.parametrize("fname", [x for x in os.listdir(REGR_DATA)
- if x.endswith('_crash.py')])
+@pytest.mark.parametrize(
+ "fname", [x for x in os.listdir(REGR_DATA) if x.endswith("_crash.py")]
+)
def test_descriptor_crash(fname, finalize_linter):
finalize_linter.check(join(REGR_DATA, fname))
finalize_linter.reporter.finalize().strip()
@@ -95,7 +100,7 @@ def test_descriptor_crash(fname, finalize_linter):
@pytest.fixture
def modify_path():
cwd = os.getcwd()
- sys.path.insert(0, '')
+ sys.path.insert(0, "")
yield
sys.path.pop(0)
os.chdir(cwd)
@@ -103,26 +108,32 @@ def modify_path():
@pytest.mark.usefixtures("modify_path")
def test_check_package___init__(finalize_linter):
- filename = 'package.__init__'
+ filename = "package.__init__"
finalize_linter.check(filename)
- checked = list(finalize_linter.stats['by_module'].keys())
+ checked = list(finalize_linter.stats["by_module"].keys())
assert checked == [filename]
- os.chdir(join(REGR_DATA, 'package'))
- finalize_linter.check('__init__')
- checked = list(finalize_linter.stats['by_module'].keys())
- assert checked == ['__init__']
+ os.chdir(join(REGR_DATA, "package"))
+ finalize_linter.check("__init__")
+ checked = list(finalize_linter.stats["by_module"].keys())
+ assert checked == ["__init__"]
def test_pylint_config_attr():
- mod = astroid.MANAGER.ast_from_module_name('pylint.lint')
- pylinter = mod['PyLinter']
- expect = ['OptionsManagerMixIn', 'object', 'MessagesHandlerMixIn',
- 'ReportsHandlerMixIn', 'BaseTokenChecker', 'BaseChecker',
- 'OptionsProviderMixIn']
+ mod = astroid.MANAGER.ast_from_module_name("pylint.lint")
+ pylinter = mod["PyLinter"]
+ expect = [
+ "OptionsManagerMixIn",
+ "object",
+ "MessagesHandlerMixIn",
+ "ReportsHandlerMixIn",
+ "BaseTokenChecker",
+ "BaseChecker",
+ "OptionsProviderMixIn",
+ ]
assert [c.name for c in pylinter.ancestors()] == expect
- assert list(astroid.Instance(pylinter).getattr('config'))
- inferred = list(astroid.Instance(pylinter).igetattr('config'))
+ assert list(astroid.Instance(pylinter).getattr("config"))
+ inferred = list(astroid.Instance(pylinter).igetattr("config"))
assert len(inferred) == 1
- assert inferred[0].root().name == 'optparse'
- assert inferred[0].name == 'Values'
+ assert inferred[0].root().name == "optparse"
+ assert inferred[0].name == "Values"
diff --git a/pylint/test/test_self.py b/pylint/test/test_self.py
index bcee0eb01..8d019878f 100644
--- a/pylint/test/test_self.py
+++ b/pylint/test/test_self.py
@@ -41,7 +41,6 @@ from pylint import utils
HERE = abspath(dirname(__file__))
-
@contextlib.contextmanager
def _patch_streams(out):
sys.stderr = sys.stdout = out
@@ -54,7 +53,7 @@ def _patch_streams(out):
@contextlib.contextmanager
def _configure_lc_ctype(lc_ctype):
- lc_ctype_env = 'LC_CTYPE'
+ lc_ctype_env = "LC_CTYPE"
original_lctype = os.environ.get(lc_ctype_env)
os.environ[lc_ctype_env] = lc_ctype
try:
@@ -97,24 +96,23 @@ class MultiReporter(BaseReporter):
class TestRunTC(object):
-
def _runtest(self, args, reporter=None, out=None, code=None):
if out is None:
out = StringIO()
pylint_code = self._run_pylint(args, reporter=reporter, out=out)
if reporter:
output = reporter.out.getvalue()
- elif hasattr(out, 'getvalue'):
+ elif hasattr(out, "getvalue"):
output = out.getvalue()
else:
output = None
- msg = 'expected output status %s, got %s' % (code, pylint_code)
+ msg = "expected output status %s, got %s" % (code, pylint_code)
if output is not None:
- msg = '%s. Below pylint output: \n%s' % (msg, output)
+ msg = "%s. Below pylint output: \n%s" % (msg, output)
assert pylint_code == code, msg
def _run_pylint(self, args, out, reporter=None):
- args = args + ['--persistent=no']
+ args = args + ["--persistent=no"]
with _patch_streams(out):
with pytest.raises(SystemExit) as cm:
with warnings.catch_warnings():
@@ -124,7 +122,9 @@ class TestRunTC(object):
def _clean_paths(self, output):
"""Remove version-specific tox parent directories from paths."""
- return re.sub('^py.+/site-packages/', '', output.replace('\\', '/'), flags=re.MULTILINE)
+ return re.sub(
+ "^py.+/site-packages/", "", output.replace("\\", "/"), flags=re.MULTILINE
+ )
def _test_output(self, args, expected_output):
out = StringIO()
@@ -134,39 +134,40 @@ class TestRunTC(object):
def test_pkginfo(self):
"""Make pylint check itself."""
- self._runtest(['pylint.__pkginfo__'], reporter=TextReporter(StringIO()),
- code=0)
+ self._runtest(["pylint.__pkginfo__"], reporter=TextReporter(StringIO()), code=0)
def test_all(self):
"""Make pylint check itself."""
reporters = [
TextReporter(StringIO()),
ColorizedTextReporter(StringIO()),
- JSONReporter(StringIO())
+ JSONReporter(StringIO()),
]
- self._runtest([join(HERE, 'functional/arguments.py')],
- reporter=MultiReporter(reporters), code=2)
+ self._runtest(
+ [join(HERE, "functional/arguments.py")],
+ reporter=MultiReporter(reporters),
+ code=2,
+ )
def test_no_ext_file(self):
- self._runtest([join(HERE, 'input', 'noext')], code=0)
+ self._runtest([join(HERE, "input", "noext")], code=0)
def test_w0704_ignored(self):
- self._runtest([join(HERE, 'input', 'ignore_except_pass_by_default.py')], code=0)
+ self._runtest([join(HERE, "input", "ignore_except_pass_by_default.py")], code=0)
def test_exit_zero(self):
- self._runtest([
- '--exit-zero',
- join(HERE, 'regrtest_data', 'syntax_error.py')
- ], code=0)
+ self._runtest(
+ ["--exit-zero", join(HERE, "regrtest_data", "syntax_error.py")], code=0
+ )
def test_generate_config_option(self):
- self._runtest(['--generate-rcfile'], code=0)
+ self._runtest(["--generate-rcfile"], code=0)
def test_generate_config_option_order(self):
out1 = StringIO()
out2 = StringIO()
- self._runtest(['--generate-rcfile'], code=0, out=out1)
- self._runtest(['--generate-rcfile'], code=0, out=out2)
+ self._runtest(["--generate-rcfile"], code=0, out=out1)
+ self._runtest(["--generate-rcfile"], code=0, out=out2)
output1 = out1.getvalue()
output2 = out2.getvalue()
assert output1 == output2
@@ -182,11 +183,11 @@ class TestRunTC(object):
# Get rid of the pesky messages that pylint emits if the
# configuration file is not found.
master = re.search(r"\[MASTER", output)
- out = StringIO(output[master.start():])
+ out = StringIO(output[master.start() :])
parser = configparser.RawConfigParser()
parser.readfp(out)
- messages = utils._splitstrip(parser.get('MESSAGES CONTROL', 'disable'))
- assert 'suppressed-message' in messages
+ messages = utils._splitstrip(parser.get("MESSAGES CONTROL", "disable"))
+ assert "suppressed-message" in messages
def test_generate_rcfile_no_obsolete_methods(self):
out = StringIO()
@@ -201,10 +202,10 @@ class TestRunTC(object):
assert "The config file /tmp/norcfile.txt doesn't exist!" == str(excinfo.value)
def test_help_message_option(self):
- self._runtest(['--help-msg', 'W0101'], code=0)
+ self._runtest(["--help-msg", "W0101"], code=0)
def test_error_help_message_option(self):
- self._runtest(['--help-msg', 'WX101'], code=0)
+ self._runtest(["--help-msg", "WX101"], code=0)
def test_error_missing_arguments(self):
self._runtest([], code=32)
@@ -212,7 +213,7 @@ class TestRunTC(object):
def test_no_out_encoding(self):
"""test redirection of stdout with non ascii caracters
"""
- #This test reproduces bug #48066 ; it happens when stdout is redirected
+ # This test reproduces bug #48066 ; it happens when stdout is redirected
# through '>' : the sys.stdout.encoding becomes then None, and if the
# output contains non ascii, pylint will crash
if sys.version_info < (3, 0):
@@ -220,143 +221,177 @@ class TestRunTC(object):
else:
strio = StringIO()
assert strio.encoding is None
- self._runtest([join(HERE, 'regrtest_data/no_stdout_encoding.py'),
- '--enable=all'],
- out=strio, code=28)
+ self._runtest(
+ [join(HERE, "regrtest_data/no_stdout_encoding.py"), "--enable=all"],
+ out=strio,
+ code=28,
+ )
def test_parallel_execution(self):
- self._runtest(['-j 2',
- join(HERE, 'functional/arguments.py'),
- join(HERE, 'functional/bad_continuation.py')], code=18)
+ self._runtest(
+ [
+ "-j 2",
+ join(HERE, "functional/arguments.py"),
+ join(HERE, "functional/bad_continuation.py"),
+ ],
+ code=18,
+ )
def test_parallel_execution_missing_arguments(self):
- self._runtest(['-j 2', 'not_here', 'not_here_too'], code=1)
+ self._runtest(["-j 2", "not_here", "not_here_too"], code=1)
def test_py3k_option(self):
# Test that --py3k flag works.
rc_code = 0
- self._runtest([join(HERE, 'functional', 'unpacked_exceptions.py'),
- '--py3k'],
- code=rc_code)
+ self._runtest(
+ [join(HERE, "functional", "unpacked_exceptions.py"), "--py3k"], code=rc_code
+ )
def test_py3k_jobs_option(self):
rc_code = 0
- self._runtest([join(HERE, 'functional', 'unpacked_exceptions.py'),
- '--py3k', '-j 2'],
- code=rc_code)
+ self._runtest(
+ [join(HERE, "functional", "unpacked_exceptions.py"), "--py3k", "-j 2"],
+ code=rc_code,
+ )
@pytest.mark.skipif(sys.version_info[0] > 2, reason="Requires the --py3k flag.")
def test_py3k_commutative_with_errors_only(self):
# Test what gets emitted with -E only
- module = join(HERE, 'regrtest_data', 'py3k_error_flag.py')
- expected = textwrap.dedent("""
+ module = join(HERE, "regrtest_data", "py3k_error_flag.py")
+ expected = textwrap.dedent(
+ """
************* Module py3k_error_flag
Explicit return in __init__
- """)
- self._test_output([module, "-E", "--msg-template='{msg}'"],
- expected_output=expected)
+ """
+ )
+ self._test_output(
+ [module, "-E", "--msg-template='{msg}'"], expected_output=expected
+ )
# Test what gets emitted with -E --py3k
- expected = textwrap.dedent("""
+ expected = textwrap.dedent(
+ """
************* Module py3k_error_flag
Use raise ErrorClass(args) instead of raise ErrorClass, args.
- """)
- self._test_output([module, "-E", "--py3k", "--msg-template='{msg}'"],
- expected_output=expected)
+ """
+ )
+ self._test_output(
+ [module, "-E", "--py3k", "--msg-template='{msg}'"], expected_output=expected
+ )
# Test what gets emitted with --py3k -E
- self._test_output([module, "--py3k", "-E", "--msg-template='{msg}'"],
- expected_output=expected)
+ self._test_output(
+ [module, "--py3k", "-E", "--msg-template='{msg}'"], expected_output=expected
+ )
@pytest.mark.skipif(sys.version_info[0] > 2, reason="Requires the --py3k flag.")
def test_py3k_commutative_with_config_disable(self):
- module = join(HERE, 'regrtest_data', 'py3k_errors_and_warnings.py')
- rcfile = join(HERE, 'regrtest_data', 'py3k-disabled.rc')
+ module = join(HERE, "regrtest_data", "py3k_errors_and_warnings.py")
+ rcfile = join(HERE, "regrtest_data", "py3k-disabled.rc")
cmd = [module, "--msg-template='{msg}'", "--reports=n"]
- expected = textwrap.dedent("""
+ expected = textwrap.dedent(
+ """
************* Module py3k_errors_and_warnings
import missing `from __future__ import absolute_import`
Use raise ErrorClass(args) instead of raise ErrorClass, args.
Calling a dict.iter*() method
print statement used
- """)
+ """
+ )
self._test_output(cmd + ["--py3k"], expected_output=expected)
- expected = textwrap.dedent("""
+ expected = textwrap.dedent(
+ """
************* Module py3k_errors_and_warnings
Use raise ErrorClass(args) instead of raise ErrorClass, args.
Calling a dict.iter*() method
print statement used
- """)
- self._test_output(cmd + ["--py3k", "--rcfile", rcfile],
- expected_output=expected)
+ """
+ )
+ self._test_output(
+ cmd + ["--py3k", "--rcfile", rcfile], expected_output=expected
+ )
- expected = textwrap.dedent("""
+ expected = textwrap.dedent(
+ """
************* Module py3k_errors_and_warnings
Use raise ErrorClass(args) instead of raise ErrorClass, args.
print statement used
- """)
- self._test_output(cmd + ["--py3k", "-E", "--rcfile", rcfile],
- expected_output=expected)
+ """
+ )
+ self._test_output(
+ cmd + ["--py3k", "-E", "--rcfile", rcfile], expected_output=expected
+ )
- self._test_output(cmd + ["-E", "--py3k", "--rcfile", rcfile],
- expected_output=expected)
+ self._test_output(
+ cmd + ["-E", "--py3k", "--rcfile", rcfile], expected_output=expected
+ )
def test_abbreviations_are_not_supported(self):
expected = "no such option: --load-plugin"
self._test_output([".", "--load-plugin"], expected_output=expected)
def test_enable_all_works(self):
- module = join(HERE, 'data', 'clientmodule_test.py')
- expected = textwrap.dedent("""
+ module = join(HERE, "data", "clientmodule_test.py")
+ expected = textwrap.dedent(
+ """
************* Module data.clientmodule_test
pylint/test/data/clientmodule_test.py:10:8: W0612: Unused variable 'local_variable' (unused-variable)
pylint/test/data/clientmodule_test.py:18:4: C0111: Missing method docstring (missing-docstring)
pylint/test/data/clientmodule_test.py:22:0: C0111: Missing class docstring (missing-docstring)
- """)
- self._test_output([module, "--disable=all", "--enable=all", "-rn"],
- expected_output=expected)
+ """
+ )
+ self._test_output(
+ [module, "--disable=all", "--enable=all", "-rn"], expected_output=expected
+ )
def test_wrong_import_position_when_others_disabled(self):
- expected_output = textwrap.dedent('''
+ expected_output = textwrap.dedent(
+ """
************* Module wrong_import_position
pylint/test/regrtest_data/wrong_import_position.py:11:0: C0413: Import "import os" should be placed at the top of the module (wrong-import-position)
- ''')
- module1 = join(HERE, 'regrtest_data', 'import_something.py')
- module2 = join(HERE, 'regrtest_data', 'wrong_import_position.py')
- args = [module2, module1,
- "--disable=all", "--enable=wrong-import-position",
- "-rn", "-sn"]
+ """
+ )
+ module1 = join(HERE, "regrtest_data", "import_something.py")
+ module2 = join(HERE, "regrtest_data", "wrong_import_position.py")
+ args = [
+ module2,
+ module1,
+ "--disable=all",
+ "--enable=wrong-import-position",
+ "-rn",
+ "-sn",
+ ]
out = StringIO()
self._run_pylint(args, out=out)
actual_output = self._clean_paths(out.getvalue().strip())
to_remove = "No config file found, using default configuration"
if to_remove in actual_output:
- actual_output = actual_output[len(to_remove):]
+ actual_output = actual_output[len(to_remove) :]
if actual_output.startswith("Using config file "):
# If ~/.pylintrc is present remove the
# Using config file... line
- actual_output = actual_output[actual_output.find("\n"):]
+ actual_output = actual_output[actual_output.find("\n") :]
assert expected_output.strip() == actual_output.strip()
def test_import_itself_not_accounted_for_relative_imports(self):
- expected = 'Your code has been rated at 10.00/10'
- package = join(HERE, 'regrtest_data', 'dummy')
- self._test_output([package, '--disable=locally-disabled', '-rn'],
- expected_output=expected)
+ expected = "Your code has been rated at 10.00/10"
+ package = join(HERE, "regrtest_data", "dummy")
+ self._test_output(
+ [package, "--disable=locally-disabled", "-rn"], expected_output=expected
+ )
def test_reject_empty_indent_strings(self):
expected = "indent string can't be empty"
- module = join(HERE, 'data', 'clientmodule_test.py')
- self._test_output([module, '--indent-string='],
- expected_output=expected)
+ module = join(HERE, "data", "clientmodule_test.py")
+ self._test_output([module, "--indent-string="], expected_output=expected)
def test_json_report_when_file_has_syntax_error(self):
out = StringIO()
- module = join(HERE, 'regrtest_data', 'syntax_error.py')
+ module = join(HERE, "regrtest_data", "syntax_error.py")
self._runtest([module], code=2, reporter=JSONReporter(out))
output = json.loads(out.getvalue())
assert isinstance(output, list)
@@ -368,17 +403,17 @@ class TestRunTC(object):
"line": 1,
"type": "error",
"symbol": "syntax-error",
- "module": "syntax_error"
+ "module": "syntax_error",
}
message = output[0]
for key, value in expected.items():
assert key in message
assert message[key] == value
- assert 'invalid syntax' in message['message'].lower()
+ assert "invalid syntax" in message["message"].lower()
def test_json_report_when_file_is_missing(self):
out = StringIO()
- module = join(HERE, 'regrtest_data', 'totally_missing.py')
+ module = join(HERE, "regrtest_data", "totally_missing.py")
self._runtest([module], code=1, reporter=JSONReporter(out))
output = json.loads(out.getvalue())
assert isinstance(output, list)
@@ -390,93 +425,110 @@ class TestRunTC(object):
"line": 1,
"type": "fatal",
"symbol": "fatal",
- "module": module
+ "module": module,
}
message = output[0]
for key, value in expected.items():
assert key in message
assert message[key] == value
- assert message['message'].startswith("No module named")
+ assert message["message"].startswith("No module named")
def test_information_category_disabled_by_default(self):
- expected = 'Your code has been rated at 10.00/10'
- path = join(HERE, 'regrtest_data', 'meta.py')
+ expected = "Your code has been rated at 10.00/10"
+ path = join(HERE, "regrtest_data", "meta.py")
self._test_output([path], expected_output=expected)
def test_error_mode_shows_no_score(self):
- expected_output = textwrap.dedent('''
+ expected_output = textwrap.dedent(
+ """
************* Module application_crash
pylint/test/regrtest_data/application_crash.py:1:6: E0602: Undefined variable 'something_undefined' (undefined-variable)
- ''')
- module = join(HERE, 'regrtest_data', 'application_crash.py')
+ """
+ )
+ module = join(HERE, "regrtest_data", "application_crash.py")
self._test_output([module, "-E"], expected_output=expected_output)
def test_evaluation_score_shown_by_default(self):
- expected_output = 'Your code has been rated at '
- module = join(HERE, 'regrtest_data', 'application_crash.py')
+ expected_output = "Your code has been rated at "
+ module = join(HERE, "regrtest_data", "application_crash.py")
self._test_output([module], expected_output=expected_output)
def test_confidence_levels(self):
- expected = 'Your code has been rated at'
- path = join(HERE, 'regrtest_data', 'meta.py')
- self._test_output([path, "--confidence=HIGH,INFERENCE"],
- expected_output=expected)
+ expected = "Your code has been rated at"
+ path = join(HERE, "regrtest_data", "meta.py")
+ self._test_output(
+ [path, "--confidence=HIGH,INFERENCE"], expected_output=expected
+ )
def test_bom_marker(self):
- path = join(HERE, 'regrtest_data', 'meta.py')
- config_path = join(HERE, 'regrtest_data', '.pylintrc')
- expected = 'Your code has been rated at 10.00/10'
- self._test_output([path, "--rcfile=%s" % config_path, "-rn"],
- expected_output=expected)
+ path = join(HERE, "regrtest_data", "meta.py")
+ config_path = join(HERE, "regrtest_data", ".pylintrc")
+ expected = "Your code has been rated at 10.00/10"
+ self._test_output(
+ [path, "--rcfile=%s" % config_path, "-rn"], expected_output=expected
+ )
def test_pylintrc_plugin_duplicate_options(self):
- dummy_plugin_path = join(HERE, 'regrtest_data', 'dummy_plugin')
+ dummy_plugin_path = join(HERE, "regrtest_data", "dummy_plugin")
# Enable --load-plugins=dummy_plugin
sys.path.append(dummy_plugin_path)
- config_path = join(HERE, 'regrtest_data', 'dummy_plugin.rc')
+ config_path = join(HERE, "regrtest_data", "dummy_plugin.rc")
expected = (
":dummy-message-01 (I9061): *Dummy short desc 01*\n"
" Dummy long desc This message belongs to the dummy_plugin checker.\n\n"
":dummy-message-02 (I9060): *Dummy short desc 02*\n"
- " Dummy long desc This message belongs to the dummy_plugin checker.")
- self._test_output(["--rcfile=%s" % config_path,
- "--help-msg=dummy-message-01,dummy-message-02"],
- expected_output=expected)
+ " Dummy long desc This message belongs to the dummy_plugin checker."
+ )
+ self._test_output(
+ [
+ "--rcfile=%s" % config_path,
+ "--help-msg=dummy-message-01,dummy-message-02",
+ ],
+ expected_output=expected,
+ )
expected = (
"[DUMMY_PLUGIN]\n\n# Dummy option 1\ndummy_option_1=dummy value 1\n\n"
- "# Dummy option 2\ndummy_option_2=dummy value 2")
- self._test_output(["--rcfile=%s" % config_path, "--generate-rcfile"],
- expected_output=expected)
+ "# Dummy option 2\ndummy_option_2=dummy value 2"
+ )
+ self._test_output(
+ ["--rcfile=%s" % config_path, "--generate-rcfile"], expected_output=expected
+ )
sys.path.remove(dummy_plugin_path)
def test_pylintrc_comments_in_values(self):
- path = join(HERE, 'regrtest_data', 'test_pylintrc_comments.py')
- config_path = join(HERE, 'regrtest_data', 'comments_pylintrc')
- expected = textwrap.dedent('''
+ path = join(HERE, "regrtest_data", "test_pylintrc_comments.py")
+ config_path = join(HERE, "regrtest_data", "comments_pylintrc")
+ expected = textwrap.dedent(
+ """
************* Module test_pylintrc_comments
pylint/test/regrtest_data/test_pylintrc_comments.py:2:0: W0311: Bad indentation. Found 1 spaces, expected 4 (bad-indentation)
pylint/test/regrtest_data/test_pylintrc_comments.py:1:0: C0111: Missing module docstring (missing-docstring)
pylint/test/regrtest_data/test_pylintrc_comments.py:1:0: C0111: Missing function docstring (missing-docstring)
- ''')
- self._test_output([path, "--rcfile=%s" % config_path, "-rn"],
- expected_output=expected)
+ """
+ )
+ self._test_output(
+ [path, "--rcfile=%s" % config_path, "-rn"], expected_output=expected
+ )
def test_no_crash_with_formatting_regex_defaults(self):
- self._runtest(["--ignore-patterns=a"], reporter=TextReporter(StringIO()),
- code=32)
+ self._runtest(
+ ["--ignore-patterns=a"], reporter=TextReporter(StringIO()), code=32
+ )
def test_getdefaultencoding_crashes_with_lc_ctype_utf8(self):
- expected_output = textwrap.dedent('''
+ expected_output = textwrap.dedent(
+ """
************* Module application_crash
pylint/test/regrtest_data/application_crash.py:1:6: E0602: Undefined variable 'something_undefined' (undefined-variable)
- ''')
- module = join(HERE, 'regrtest_data', 'application_crash.py')
- with _configure_lc_ctype('UTF-8'):
- self._test_output([module, '-E'], expected_output=expected_output)
+ """
+ )
+ module = join(HERE, "regrtest_data", "application_crash.py")
+ with _configure_lc_ctype("UTF-8"):
+ self._test_output([module, "-E"], expected_output=expected_output)
- @pytest.mark.skipif(sys.platform == 'win32', reason='only occurs on *nix')
+ @pytest.mark.skipif(sys.platform == "win32", reason="only occurs on *nix")
def test_parseable_file_path(self):
- file_name = 'test_target.py'
+ file_name = "test_target.py"
fake_path = HERE + os.getcwd()
module = join(fake_path, file_name)
@@ -484,12 +536,13 @@ class TestRunTC(object):
# create module under directories which have the same name as reporter.path_strip_prefix
# e.g. /src/some/path/src/test_target.py when reporter.path_strip_prefix = /src/
os.makedirs(fake_path)
- with open(module, 'w') as test_target:
- test_target.write('a = object()')
+ with open(module, "w") as test_target:
+ test_target.write("a = object()")
self._test_output(
- [module, '--output-format=parseable'],
- expected_output=join(os.getcwd(), file_name))
+ [module, "--output-format=parseable"],
+ expected_output=join(os.getcwd(), file_name),
+ )
finally:
os.remove(module)
os.removedirs(fake_path)
diff --git a/pylint/test/unittest_checker_base.py b/pylint/test/unittest_checker_base.py
index 85d27ae95..8337c9c60 100644
--- a/pylint/test/unittest_checker_base.py
+++ b/pylint/test/unittest_checker_base.py
@@ -31,7 +31,7 @@ class TestDocstring(CheckerTestCase):
def test_missing_docstring_module(self):
module = astroid.parse("something")
- message = Message('missing-docstring', node=module, args=('module',))
+ message = Message("missing-docstring", node=module, args=("module",))
with self.assertAddsMessages(message):
self.checker.visit_module(module)
@@ -42,91 +42,106 @@ class TestDocstring(CheckerTestCase):
def test_empty_docstring_module(self):
module = astroid.parse("''''''")
- message = Message('empty-docstring', node=module, args=('module',))
+ message = Message("empty-docstring", node=module, args=("module",))
with self.assertAddsMessages(message):
self.checker.visit_module(module)
def test_empty_docstring_function(self):
- func = astroid.extract_node("""
+ func = astroid.extract_node(
+ """
def func(tion):
- pass""")
- message = Message('missing-docstring', node=func, args=('function',))
+ pass"""
+ )
+ message = Message("missing-docstring", node=func, args=("function",))
with self.assertAddsMessages(message):
self.checker.visit_functiondef(func)
@set_config(docstring_min_length=2)
def test_short_function_no_docstring(self):
- func = astroid.extract_node("""
+ func = astroid.extract_node(
+ """
def func(tion):
- pass""")
+ pass"""
+ )
with self.assertNoMessages():
self.checker.visit_functiondef(func)
@set_config(docstring_min_length=2)
def test_long_function_no_docstring(self):
- func = astroid.extract_node("""
+ func = astroid.extract_node(
+ """
def func(tion):
pass
pass
- """)
- message = Message('missing-docstring', node=func, args=('function',))
+ """
+ )
+ message = Message("missing-docstring", node=func, args=("function",))
with self.assertAddsMessages(message):
self.checker.visit_functiondef(func)
@set_config(docstring_min_length=2)
def test_long_function_nested_statements_no_docstring(self):
- func = astroid.extract_node("""
+ func = astroid.extract_node(
+ """
def func(tion):
try:
pass
except:
pass
- """)
- message = Message('missing-docstring', node=func, args=('function',))
+ """
+ )
+ message = Message("missing-docstring", node=func, args=("function",))
with self.assertAddsMessages(message):
self.checker.visit_functiondef(func)
@set_config(docstring_min_length=2)
def test_function_no_docstring_by_name(self):
- func = astroid.extract_node("""
+ func = astroid.extract_node(
+ """
def __fun__(tion):
- pass""")
+ pass"""
+ )
with self.assertNoMessages():
self.checker.visit_functiondef(func)
def test_class_no_docstring(self):
- klass = astroid.extract_node("""
+ klass = astroid.extract_node(
+ """
class Klass(object):
- pass""")
- message = Message('missing-docstring', node=klass, args=('class',))
+ pass"""
+ )
+ message = Message("missing-docstring", node=klass, args=("class",))
with self.assertAddsMessages(message):
self.checker.visit_classdef(klass)
def test_inner_function_no_docstring(self):
- func = astroid.extract_node("""
+ func = astroid.extract_node(
+ """
def func(tion):
\"""Documented\"""
def inner(fun):
# Not documented
pass
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_functiondef(func)
class TestNameChecker(CheckerTestCase):
CHECKER_CLASS = base.NameChecker
- CONFIG = {
- 'bad_names': set(),
- }
+ CONFIG = {"bad_names": set()}
- @set_config(attr_rgx=re.compile('[A-Z]+'),
- property_classes=('abc.abstractproperty', '.custom_prop'))
+ @set_config(
+ attr_rgx=re.compile("[A-Z]+"),
+ property_classes=("abc.abstractproperty", ".custom_prop"),
+ )
def test_property_names(self):
# If a method is annotated with @property, its name should
# match the attr regex. Since by default the attribute regex is the same
# as the method regex, we override it here.
- methods = astroid.extract_node("""
+ methods = astroid.extract_node(
+ """
import abc
def custom_prop(f):
@@ -148,19 +163,25 @@ class TestNameChecker(CheckerTestCase):
@custom_prop
def QUX(self): #@
pass
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_functiondef(methods[0])
self.checker.visit_functiondef(methods[2])
self.checker.visit_functiondef(methods[3])
- with self.assertAddsMessages(Message('invalid-name', node=methods[1],
- args=('Attribute', 'bar',
- "'[A-Z]+' pattern"))):
+ with self.assertAddsMessages(
+ Message(
+ "invalid-name",
+ node=methods[1],
+ args=("Attribute", "bar", "'[A-Z]+' pattern"),
+ )
+ ):
self.checker.visit_functiondef(methods[1])
- @set_config(attr_rgx=re.compile('[A-Z]+'))
+ @set_config(attr_rgx=re.compile("[A-Z]+"))
def test_property_setters(self):
- method = astroid.extract_node("""
+ method = astroid.extract_node(
+ """
class FooClass(object):
@property
def foo(self): pass
@@ -168,37 +189,46 @@ class TestNameChecker(CheckerTestCase):
@foo.setter
def FOOSETTER(self): #@
pass
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_functiondef(method)
def test_module_level_names(self):
- assign = astroid.extract_node("""
+ assign = astroid.extract_node(
+ """
import collections
Class = collections.namedtuple("a", ("b", "c")) #@
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_assignname(assign.targets[0])
- assign = astroid.extract_node("""
+ assign = astroid.extract_node(
+ """
class ClassA(object):
pass
ClassB = ClassA
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_assignname(assign.targets[0])
- module = astroid.parse("""
+ module = astroid.parse(
+ """
def A():
return 1, 2, 3
CONSTA, CONSTB, CONSTC = A()
- CONSTD = A()""")
+ CONSTD = A()"""
+ )
with self.assertNoMessages():
self.checker.visit_assignname(module.body[1].targets[0].elts[0])
self.checker.visit_assignname(module.body[2].targets[0])
- assign = astroid.extract_node("""
- CONST = "12 34 ".rstrip().split()""")
+ assign = astroid.extract_node(
+ """
+ CONST = "12 34 ".rstrip().split()"""
+ )
with self.assertNoMessages():
self.checker.visit_assignname(assign.targets[0])
@@ -207,28 +237,38 @@ class TestNameChecker(CheckerTestCase):
@set_config(function_rgx=re.compile(".+"))
@set_config(class_rgx=re.compile(".+"))
def test_assign_to_new_keyword_py3(self):
- ast = astroid.extract_node("""
+ ast = astroid.extract_node(
+ """
async = "foo" #@
await = "bar" #@
def async(): #@
pass
class async: #@
pass
- """)
+ """
+ )
with self.assertAddsMessages(
- Message(msg_id='assign-to-new-keyword', node=ast[0].targets[0], args=('async', '3.7'))
+ Message(
+ msg_id="assign-to-new-keyword",
+ node=ast[0].targets[0],
+ args=("async", "3.7"),
+ )
):
self.checker.visit_assignname(ast[0].targets[0])
with self.assertAddsMessages(
- Message(msg_id='assign-to-new-keyword', node=ast[1].targets[0], args=('await', '3.7'))
+ Message(
+ msg_id="assign-to-new-keyword",
+ node=ast[1].targets[0],
+ args=("await", "3.7"),
+ )
):
self.checker.visit_assignname(ast[1].targets[0])
with self.assertAddsMessages(
- Message(msg_id='assign-to-new-keyword', node=ast[2], args=('async', '3.7'))
+ Message(msg_id="assign-to-new-keyword", node=ast[2], args=("async", "3.7"))
):
self.checker.visit_functiondef(ast[2])
with self.assertAddsMessages(
- Message(msg_id='assign-to-new-keyword', node=ast[3], args=('async', '3.7'))
+ Message(msg_id="assign-to-new-keyword", node=ast[3], args=("async", "3.7"))
):
self.checker.visit_classdef(ast[3])
@@ -236,22 +276,25 @@ class TestNameChecker(CheckerTestCase):
class TestMultiNamingStyle(CheckerTestCase):
CHECKER_CLASS = base.NameChecker
- MULTI_STYLE_RE = re.compile('(?:(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$')
+ MULTI_STYLE_RE = re.compile("(?:(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$")
@set_config(class_rgx=MULTI_STYLE_RE)
def test_multi_name_detection_majority(self):
- classes = astroid.extract_node("""
+ classes = astroid.extract_node(
+ """
class classb(object): #@
pass
class CLASSA(object): #@
pass
class CLASSC(object): #@
pass
- """)
- message = Message('invalid-name',
- node=classes[0],
- args=('Class', 'classb',
- "'(?:(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$' pattern"))
+ """
+ )
+ message = Message(
+ "invalid-name",
+ node=classes[0],
+ args=("Class", "classb", "'(?:(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$' pattern"),
+ )
with self.assertAddsMessages(message):
for cls in classes:
self.checker.visit_classdef(cls)
@@ -259,48 +302,74 @@ class TestMultiNamingStyle(CheckerTestCase):
@set_config(class_rgx=MULTI_STYLE_RE)
def test_multi_name_detection_first_invalid(self):
- classes = astroid.extract_node("""
+ classes = astroid.extract_node(
+ """
class class_a(object): #@
pass
class classb(object): #@
pass
class CLASSC(object): #@
pass
- """)
+ """
+ )
messages = [
- Message('invalid-name', node=classes[0],
- args=('Class', 'class_a', "'(?:(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$' pattern")),
- Message('invalid-name', node=classes[2],
- args=('Class', 'CLASSC', "'(?:(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$' pattern"))
+ Message(
+ "invalid-name",
+ node=classes[0],
+ args=(
+ "Class",
+ "class_a",
+ "'(?:(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$' pattern",
+ ),
+ ),
+ Message(
+ "invalid-name",
+ node=classes[2],
+ args=(
+ "Class",
+ "CLASSC",
+ "'(?:(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$' pattern",
+ ),
+ ),
]
with self.assertAddsMessages(*messages):
for cls in classes:
self.checker.visit_classdef(cls)
self.checker.leave_module(cls.root)
- @set_config(method_rgx=MULTI_STYLE_RE,
- function_rgx=MULTI_STYLE_RE,
- name_group=('function:method',))
+ @set_config(
+ method_rgx=MULTI_STYLE_RE,
+ function_rgx=MULTI_STYLE_RE,
+ name_group=("function:method",),
+ )
def test_multi_name_detection_group(self):
- function_defs = astroid.extract_node("""
+ function_defs = astroid.extract_node(
+ """
class First(object):
def func(self): #@
pass
def FUNC(): #@
pass
- """, module_name='test')
- message = Message('invalid-name', node=function_defs[1],
- args=('Function', 'FUNC',
- "'(?:(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$' pattern"))
+ """,
+ module_name="test",
+ )
+ message = Message(
+ "invalid-name",
+ node=function_defs[1],
+ args=("Function", "FUNC", "'(?:(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$' pattern"),
+ )
with self.assertAddsMessages(message):
for func in function_defs:
self.checker.visit_functiondef(func)
self.checker.leave_module(func.root)
- @set_config(function_rgx=re.compile('(?:(?P<ignore>FOO)|(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$'))
+ @set_config(
+ function_rgx=re.compile("(?:(?P<ignore>FOO)|(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$")
+ )
def test_multi_name_detection_exempt(self):
- function_defs = astroid.extract_node("""
+ function_defs = astroid.extract_node(
+ """
def FOO(): #@
pass
def lower(): #@
@@ -309,77 +378,93 @@ class TestMultiNamingStyle(CheckerTestCase):
pass
def UPPER(): #@
pass
- """)
- message = Message('invalid-name', node=function_defs[3],
- args=('Function', 'UPPER',
- "'(?:(?P<ignore>FOO)|(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$' pattern"))
+ """
+ )
+ message = Message(
+ "invalid-name",
+ node=function_defs[3],
+ args=(
+ "Function",
+ "UPPER",
+ "'(?:(?P<ignore>FOO)|(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$' pattern",
+ ),
+ )
with self.assertAddsMessages(message):
for func in function_defs:
self.checker.visit_functiondef(func)
self.checker.leave_module(func.root)
+
class TestComparison(CheckerTestCase):
CHECKER_CLASS = base.ComparisonChecker
def test_comparison(self):
node = astroid.extract_node("foo == True")
- message = Message('singleton-comparison',
- node=node,
- args=(True, "just 'expr' or 'expr is True'"))
+ message = Message(
+ "singleton-comparison",
+ node=node,
+ args=(True, "just 'expr' or 'expr is True'"),
+ )
with self.assertAddsMessages(message):
self.checker.visit_compare(node)
node = astroid.extract_node("foo == False")
- message = Message('singleton-comparison',
- node=node,
- args=(False, "'not expr' or 'expr is False'"))
+ message = Message(
+ "singleton-comparison",
+ node=node,
+ args=(False, "'not expr' or 'expr is False'"),
+ )
with self.assertAddsMessages(message):
self.checker.visit_compare(node)
node = astroid.extract_node("foo == None")
- message = Message('singleton-comparison',
- node=node,
- args=(None, "'expr is None'"))
+ message = Message(
+ "singleton-comparison", node=node, args=(None, "'expr is None'")
+ )
with self.assertAddsMessages(message):
self.checker.visit_compare(node)
node = astroid.extract_node("True == foo")
- messages = (Message('misplaced-comparison-constant',
- node=node,
- args=('foo == True',)),
- Message('singleton-comparison',
- node=node,
- args=(True, "just 'expr' or 'expr is True'")))
+ messages = (
+ Message("misplaced-comparison-constant", node=node, args=("foo == True",)),
+ Message(
+ "singleton-comparison",
+ node=node,
+ args=(True, "just 'expr' or 'expr is True'"),
+ ),
+ )
with self.assertAddsMessages(*messages):
self.checker.visit_compare(node)
node = astroid.extract_node("False == foo")
- messages = (Message('misplaced-comparison-constant',
- node=node,
- args=('foo == False',)),
- Message('singleton-comparison',
- node=node,
- args=(False, "'not expr' or 'expr is False'")))
+ messages = (
+ Message("misplaced-comparison-constant", node=node, args=("foo == False",)),
+ Message(
+ "singleton-comparison",
+ node=node,
+ args=(False, "'not expr' or 'expr is False'"),
+ ),
+ )
with self.assertAddsMessages(*messages):
self.checker.visit_compare(node)
node = astroid.extract_node("None == foo")
- messages = (Message('misplaced-comparison-constant',
- node=node,
- args=('foo == None',)),
- Message('singleton-comparison',
- node=node,
- args=(None, "'expr is None'")))
+ messages = (
+ Message("misplaced-comparison-constant", node=node, args=("foo == None",)),
+ Message("singleton-comparison", node=node, args=(None, "'expr is None'")),
+ )
with self.assertAddsMessages(*messages):
self.checker.visit_compare(node)
class TestNamePresets(unittest.TestCase):
- SNAKE_CASE_NAMES = {'test_snake_case', 'test_snake_case11', 'test_https_200'}
- CAMEL_CASE_NAMES = {'testCamelCase', 'testCamelCase11', 'testHTTP200'}
- UPPER_CASE_NAMES = {'TEST_UPPER_CASE', 'TEST_UPPER_CASE11', 'TEST_HTTP_200'}
- PASCAL_CASE_NAMES = {'TestPascalCase', 'TestPascalCase11', 'TestHTTP200'}
- ALL_NAMES = SNAKE_CASE_NAMES | CAMEL_CASE_NAMES | UPPER_CASE_NAMES | PASCAL_CASE_NAMES
+ SNAKE_CASE_NAMES = {"test_snake_case", "test_snake_case11", "test_https_200"}
+ CAMEL_CASE_NAMES = {"testCamelCase", "testCamelCase11", "testHTTP200"}
+ UPPER_CASE_NAMES = {"TEST_UPPER_CASE", "TEST_UPPER_CASE11", "TEST_HTTP_200"}
+ PASCAL_CASE_NAMES = {"TestPascalCase", "TestPascalCase11", "TestHTTP200"}
+ ALL_NAMES = (
+ SNAKE_CASE_NAMES | CAMEL_CASE_NAMES | UPPER_CASE_NAMES | PASCAL_CASE_NAMES
+ )
def _test_name_is_correct_for_all_name_types(self, naming_style, name):
for name_type in base.KNOWN_NAME_TYPES:
@@ -391,25 +476,31 @@ class TestNamePresets(unittest.TestCase):
def _test_should_always_pass(self, naming_style):
always_pass_data = [
- ('__add__', 'method'),
- ('__set_name__', 'method'),
- ('__version__', 'const'),
- ('__author__', 'const')
+ ("__add__", "method"),
+ ("__set_name__", "method"),
+ ("__version__", "const"),
+ ("__author__", "const"),
]
for name, name_type in always_pass_data:
self._test_is_correct(naming_style, name, name_type)
def _test_is_correct(self, naming_style, name, name_type):
rgx = naming_style.get_regex(name_type)
- self.assertTrue(rgx.match(name),
- "{!r} does not match pattern {!r} (style: {}, type: {})".
- format(name, rgx, naming_style, name_type))
+ self.assertTrue(
+ rgx.match(name),
+ "{!r} does not match pattern {!r} (style: {}, type: {})".format(
+ name, rgx, naming_style, name_type
+ ),
+ )
def _test_is_incorrect(self, naming_style, name, name_type):
rgx = naming_style.get_regex(name_type)
- self.assertFalse(rgx.match(name),
- "{!r} match pattern {!r} but shouldn't (style: {}, type: {})".
- format(name, rgx, naming_style, name_type))
+ self.assertFalse(
+ rgx.match(name),
+ "{!r} match pattern {!r} but shouldn't (style: {}, type: {})".format(
+ name, rgx, naming_style, name_type
+ ),
+ )
def test_snake_case(self):
naming_style = base.SnakeCaseStyle
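
Background on the MULTI_STYLE_RE pattern exercised above: it uses one named group per naming convention, so a checker can tell which convention each name follows and flag the minority style once a dominant one emerges. A minimal, illustrative sketch of that mechanism using only the standard library (the tallying below is a simplification for illustration, not pylint's actual bookkeeping):

# Illustrative only: classify names by which named group of the pattern they
# match, then tally the groups to find the dominant convention in a scope.
import re
from collections import Counter

MULTI_STYLE_RE = re.compile(r"(?:(?P<UP>[A-Z]+)|(?P<down>[a-z]+))$")

def classify(name):
    """Return the named group that matched ('UP' or 'down'), or None."""
    match = MULTI_STYLE_RE.match(name)
    return match.lastgroup if match else None

names = ["func", "other", "third", "FUNC"]
tally = Counter(classify(name) for name in names)
print(tally)                       # Counter({'down': 3, 'UP': 1})
print(tally.most_common(1)[0][0])  # 'down' -- the dominant convention
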
diff --git a/pylint/test/unittest_checker_classes.py b/pylint/test/unittest_checker_classes.py
index cd8981717..1bf929b71 100644
--- a/pylint/test/unittest_checker_classes.py
+++ b/pylint/test/unittest_checker_classes.py
@@ -13,30 +13,35 @@ import astroid
from pylint.checkers import classes
from pylint.testutils import CheckerTestCase, Message, set_config
+
class TestVariablesChecker(CheckerTestCase):
CHECKER_CLASS = classes.ClassChecker
def test_bitbucket_issue_164(self):
"""Issue 164 report a false negative for access-member-before-definition"""
- n1, n2 = astroid.extract_node("""
+ n1, n2 = astroid.extract_node(
+ """
class MyClass1:
def __init__(self):
self.first += 5 #@
self.first = 0 #@
- """)
- message = Message('access-member-before-definition',
- node=n1.target, args=('first', n2.lineno))
+ """
+ )
+ message = Message(
+ "access-member-before-definition", node=n1.target, args=("first", n2.lineno)
+ )
with self.assertAddsMessages(message):
self.walk(n1.root())
- @set_config(exclude_protected=('_meta', '_manager'))
+ @set_config(exclude_protected=("_meta", "_manager"))
def test_exclude_protected(self):
"""Test that exclude-protected can be used to
exclude names from protected-access warning.
"""
- node = astroid.parse("""
+ node = astroid.parse(
+ """
class Protected:
'''empty'''
def __init__(self):
@@ -47,23 +52,25 @@ class TestVariablesChecker(CheckerTestCase):
OBJ._meta
OBJ._manager
OBJ._teta
- """)
+ """
+ )
with self.assertAddsMessages(
- Message('protected-access',
- node=node.body[-1].value,
- args='_teta')):
+ Message("protected-access", node=node.body[-1].value, args="_teta")
+ ):
self.walk(node.root())
def test_regression_non_parent_init_called_tracemalloc(self):
# This used to raise a non-parent-init-called on Pylint 1.3
# See issue https://bitbucket.org/logilab/pylint/issue/308/
# for reference.
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
from tracemalloc import Sequence
class _Traces(Sequence):
def __init__(self, traces): #@
Sequence.__init__(self)
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_functiondef(node)
@@ -73,13 +80,15 @@ class TestVariablesChecker(CheckerTestCase):
# ``next(node.infer())`` was used in that checker's
# logic and the first inferred node was an Uninferable object,
# leading to this false positive.
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
import ctypes
class Foo(ctypes.BigEndianStructure):
def __init__(self): #@
ctypes.BigEndianStructure.__init__(self)
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_functiondef(node)
@@ -87,7 +96,8 @@ class TestVariablesChecker(CheckerTestCase):
"""Make sure protect-access doesn't raise
an exception Uninferable attributes"""
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class MC():
@property
def nargs(self):
@@ -96,9 +106,9 @@ class TestVariablesChecker(CheckerTestCase):
class Application(metaclass=MC):
def __new__(cls):
nargs = obj._nargs #@
- """)
+ """
+ )
with self.assertAddsMessages(
- Message('protected-access',
- node=node.value,
- args='_nargs')):
+ Message("protected-access", node=node.value, args="_nargs")
+ ):
self.checker.visit_attribute(node.value)
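
For readers unfamiliar with the helper used throughout these tests: astroid.extract_node parses a snippet and returns the node(s) marked with a trailing "#@" comment, which is what lets test_bitbucket_issue_164 above unpack n1 and n2 from a two-marker snippet. A small standalone sketch (only astroid is assumed):

import astroid

# The two "#@" markers select the augmented assignment and the plain
# assignment, in source order, so extract_node returns a pair of nodes.
first, second = astroid.extract_node(
    """
class MyClass1:
    def __init__(self):
        self.first += 5  #@
        self.first = 0   #@
"""
)
print(type(first).__name__, first.lineno)    # AugAssign, line of the first marker
print(type(second).__name__, second.lineno)  # Assign, line of the second marker
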
diff --git a/pylint/test/unittest_checker_exceptions.py b/pylint/test/unittest_checker_exceptions.py
index 84f475c1c..e1596afa2 100644
--- a/pylint/test/unittest_checker_exceptions.py
+++ b/pylint/test/unittest_checker_exceptions.py
@@ -27,13 +27,14 @@ class TestExceptionsChecker(CheckerTestCase):
# `raise Error(...)`, so it beats the purpose of the test.
def test_raising_bad_type_python3(self):
- node = astroid.extract_node('raise (ZeroDivisionError, None) #@')
- message = Message('raising-bad-type', node=node, args='tuple')
+ node = astroid.extract_node("raise (ZeroDivisionError, None) #@")
+ message = Message("raising-bad-type", node=node, args="tuple")
with self.assertAddsMessages(message):
self.checker.visit_raise(node)
def test_bad_exception_context_function(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
def function():
pass
@@ -41,7 +42,8 @@ class TestExceptionsChecker(CheckerTestCase):
pass
except function as exc:
raise Exception from exc #@
- """)
- message = Message('bad-exception-context', node=node)
+ """
+ )
+ message = Message("bad-exception-context", node=node)
with self.assertAddsMessages(message):
self.checker.visit_raise(node)
diff --git a/pylint/test/unittest_checker_format.py b/pylint/test/unittest_checker_format.py
index dfa8ed19d..8811b0822 100644
--- a/pylint/test/unittest_checker_format.py
+++ b/pylint/test/unittest_checker_format.py
@@ -35,71 +35,81 @@ from pylint import reporters
from pylint import lint
-from pylint.testutils import (
- CheckerTestCase, Message, set_config, _tokenize_str,
-)
+from pylint.testutils import CheckerTestCase, Message, set_config, _tokenize_str
class TestMultiStatementLine(CheckerTestCase):
CHECKER_CLASS = FormatChecker
def testSingleLineIfStmts(self):
- stmt = astroid.extract_node("""
+ stmt = astroid.extract_node(
+ """
if True: pass #@
- """)
+ """
+ )
self.checker.config.single_line_if_stmt = False
- with self.assertAddsMessages(Message('multiple-statements', node=stmt.body[0])):
+ with self.assertAddsMessages(Message("multiple-statements", node=stmt.body[0])):
self.visitFirst(stmt)
self.checker.config.single_line_if_stmt = True
with self.assertNoMessages():
self.visitFirst(stmt)
- stmt = astroid.extract_node("""
+ stmt = astroid.extract_node(
+ """
if True: pass #@
else:
pass
- """)
- with self.assertAddsMessages(Message('multiple-statements', node=stmt.body[0])):
+ """
+ )
+ with self.assertAddsMessages(Message("multiple-statements", node=stmt.body[0])):
self.visitFirst(stmt)
def testSingleLineClassStmts(self):
- stmt = astroid.extract_node("""
+ stmt = astroid.extract_node(
+ """
class MyError(Exception): pass #@
- """)
+ """
+ )
self.checker.config.single_line_class_stmt = False
- with self.assertAddsMessages(Message('multiple-statements', node=stmt.body[0])):
+ with self.assertAddsMessages(Message("multiple-statements", node=stmt.body[0])):
self.visitFirst(stmt)
self.checker.config.single_line_class_stmt = True
with self.assertNoMessages():
self.visitFirst(stmt)
- stmt = astroid.extract_node("""
+ stmt = astroid.extract_node(
+ """
class MyError(Exception): a='a' #@
- """)
+ """
+ )
self.checker.config.single_line_class_stmt = False
- with self.assertAddsMessages(Message('multiple-statements', node=stmt.body[0])):
+ with self.assertAddsMessages(Message("multiple-statements", node=stmt.body[0])):
self.visitFirst(stmt)
self.checker.config.single_line_class_stmt = True
with self.assertNoMessages():
self.visitFirst(stmt)
- stmt = astroid.extract_node("""
+ stmt = astroid.extract_node(
+ """
class MyError(Exception): a='a'; b='b' #@
- """)
+ """
+ )
self.checker.config.single_line_class_stmt = False
- with self.assertAddsMessages(Message('multiple-statements', node=stmt.body[0])):
+ with self.assertAddsMessages(Message("multiple-statements", node=stmt.body[0])):
self.visitFirst(stmt)
self.checker.config.single_line_class_stmt = True
- with self.assertAddsMessages(Message('multiple-statements', node=stmt.body[0])):
+ with self.assertAddsMessages(Message("multiple-statements", node=stmt.body[0])):
self.visitFirst(stmt)
def testTryExceptFinallyNoMultipleStatement(self):
- tree = astroid.extract_node("""
+ tree = astroid.extract_node(
+ """
try: #@
pass
except:
pass
finally:
- pass""")
+ pass"""
+ )
with self.assertNoMessages():
self.visitFirst(tree)
@@ -114,19 +124,19 @@ class TestSuperfluousParentheses(CheckerTestCase):
def testCheckKeywordParensHandlesValidCases(self):
self.checker._keywords_with_parens = set()
cases = [
- 'if foo:',
- 'if foo():',
- 'if (x and y) or z:',
- 'assert foo()',
- 'assert ()',
- 'if (1, 2) in (3, 4):',
- 'if (a or b) in c:',
- 'return (x for x in x)',
- 'if (x for x in x):',
- 'for x in (x for x in x):',
- 'not (foo or bar)',
- 'not (foo or bar) and baz',
- ]
+ "if foo:",
+ "if foo():",
+ "if (x and y) or z:",
+ "assert foo()",
+ "assert ()",
+ "if (1, 2) in (3, 4):",
+ "if (a or b) in c:",
+ "return (x for x in x)",
+ "if (x for x in x):",
+ "for x in (x for x in x):",
+ "not (foo or bar)",
+ "not (foo or bar) and baz",
+ ]
with self.assertNoMessages():
for code in cases:
self.checker._check_keyword_parentheses(_tokenize_str(code), 0)
@@ -134,41 +144,42 @@ class TestSuperfluousParentheses(CheckerTestCase):
def testCheckKeywordParensHandlesUnnecessaryParens(self):
self.checker._keywords_with_parens = set()
cases = [
- (Message('superfluous-parens', line=1, args='if'),
- 'if (foo):', 0),
- (Message('superfluous-parens', line=1, args='if'),
- 'if ((foo, bar)):', 0),
- (Message('superfluous-parens', line=1, args='if'),
- 'if (foo(bar)):', 0),
- (Message('superfluous-parens', line=1, args='return'),
- 'return ((x for x in x))', 0),
- (Message('superfluous-parens', line=1, args='not'),
- 'not (foo)', 0),
- (Message('superfluous-parens', line=1, args='not'),
- 'if not (foo):', 1),
- (Message('superfluous-parens', line=1, args='if'),
- 'if (not (foo)):', 0),
- (Message('superfluous-parens', line=1, args='not'),
- 'if (not (foo)):', 2),
- (Message('superfluous-parens', line=1, args='for'),
- 'for (x) in (1, 2, 3):', 0),
- (Message('superfluous-parens', line=1, args='if'),
- 'if (1) in (1, 2, 3):', 0),
- ]
+ (Message("superfluous-parens", line=1, args="if"), "if (foo):", 0),
+ (Message("superfluous-parens", line=1, args="if"), "if ((foo, bar)):", 0),
+ (Message("superfluous-parens", line=1, args="if"), "if (foo(bar)):", 0),
+ (
+ Message("superfluous-parens", line=1, args="return"),
+ "return ((x for x in x))",
+ 0,
+ ),
+ (Message("superfluous-parens", line=1, args="not"), "not (foo)", 0),
+ (Message("superfluous-parens", line=1, args="not"), "if not (foo):", 1),
+ (Message("superfluous-parens", line=1, args="if"), "if (not (foo)):", 0),
+ (Message("superfluous-parens", line=1, args="not"), "if (not (foo)):", 2),
+ (
+ Message("superfluous-parens", line=1, args="for"),
+ "for (x) in (1, 2, 3):",
+ 0,
+ ),
+ (
+ Message("superfluous-parens", line=1, args="if"),
+ "if (1) in (1, 2, 3):",
+ 0,
+ ),
+ ]
for msg, code, offset in cases:
with self.assertAddsMessages(msg):
self.checker._check_keyword_parentheses(_tokenize_str(code), offset)
def testCheckIfArgsAreNotUnicode(self):
self.checker._keywords_with_parens = set()
- cases = [('if (foo):', 0), ('assert (1 == 1)', 0)]
+ cases = [("if (foo):", 0), ("assert (1 == 1)", 0)]
for code, offset in cases:
self.checker._check_keyword_parentheses(_tokenize_str(code), offset)
got = self.linter.release_messages()
assert isinstance(got[-1].args, str)
-
def testFuturePrintStatementWithoutParensWarning(self):
code = """from __future__ import print_function
print('Hello world!')
@@ -189,192 +200,285 @@ class TestCheckSpace(CheckerTestCase):
CHECKER_CLASS = FormatChecker
def testParenthesesGood(self):
- good_cases = [
- '(a)\n',
- '(a * (b + c))\n',
- '(#\n a)\n',
- ]
+ good_cases = ["(a)\n", "(a * (b + c))\n", "(#\n a)\n"]
with self.assertNoMessages():
for code in good_cases:
self.checker.process_tokens(_tokenize_str(code))
def testParenthesesBad(self):
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('No', 'allowed', 'after', 'bracket', '( a)\n^'))):
- self.checker.process_tokens(_tokenize_str('( a)\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=("No", "allowed", "after", "bracket", "( a)\n^"),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("( a)\n"))
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('No', 'allowed', 'before', 'bracket', '(a )\n ^'))):
- self.checker.process_tokens(_tokenize_str('(a )\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=("No", "allowed", "before", "bracket", "(a )\n ^"),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("(a )\n"))
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('No', 'allowed', 'before', 'bracket', 'foo (a)\n ^'))):
- self.checker.process_tokens(_tokenize_str('foo (a)\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=("No", "allowed", "before", "bracket", "foo (a)\n ^"),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("foo (a)\n"))
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('No', 'allowed', 'before', 'bracket', '{1: 2} [1]\n ^'))):
- self.checker.process_tokens(_tokenize_str('{1: 2} [1]\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=("No", "allowed", "before", "bracket", "{1: 2} [1]\n ^"),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("{1: 2} [1]\n"))
def testTrailingCommaGood(self):
with self.assertNoMessages():
- self.checker.process_tokens(_tokenize_str('(a, )\n'))
- self.checker.process_tokens(_tokenize_str('(a,)\n'))
+ self.checker.process_tokens(_tokenize_str("(a, )\n"))
+ self.checker.process_tokens(_tokenize_str("(a,)\n"))
self.checker.config.no_space_check = []
with self.assertNoMessages():
- self.checker.process_tokens(_tokenize_str('(a,)\n'))
+ self.checker.process_tokens(_tokenize_str("(a,)\n"))
@set_config(no_space_check=[])
def testTrailingCommaBad(self):
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('No', 'allowed', 'before', 'bracket', '(a, )\n ^'))):
- self.checker.process_tokens(_tokenize_str('(a, )\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=("No", "allowed", "before", "bracket", "(a, )\n ^"),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("(a, )\n"))
def testComma(self):
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('No', 'allowed', 'before', 'comma', '(a , b)\n ^'))):
- self.checker.process_tokens(_tokenize_str('(a , b)\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=("No", "allowed", "before", "comma", "(a , b)\n ^"),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("(a , b)\n"))
def testSpacesAllowedInsideSlices(self):
- good_cases = [
- '[a:b]\n',
- '[a : b]\n',
- '[a : ]\n',
- '[:a]\n',
- '[:]\n',
- '[::]\n',
- ]
+ good_cases = ["[a:b]\n", "[a : b]\n", "[a : ]\n", "[:a]\n", "[:]\n", "[::]\n"]
with self.assertNoMessages():
for code in good_cases:
self.checker.process_tokens(_tokenize_str(code))
def testKeywordSpacingGood(self):
with self.assertNoMessages():
- self.checker.process_tokens(_tokenize_str('foo(foo=bar)\n'))
- self.checker.process_tokens(_tokenize_str('foo(foo: int = bar)\n'))
- self.checker.process_tokens(_tokenize_str('foo(foo: module.classname = bar)\n'))
- self.checker.process_tokens(_tokenize_str('foo(foo: Dict[int, str] = bar)\n'))
- self.checker.process_tokens(_tokenize_str('foo(foo: \'int\' = bar)\n'))
- self.checker.process_tokens(_tokenize_str('foo(foo: Dict[int, \'str\'] = bar)\n'))
- self.checker.process_tokens(_tokenize_str('lambda x=1: x\n'))
+ self.checker.process_tokens(_tokenize_str("foo(foo=bar)\n"))
+ self.checker.process_tokens(_tokenize_str("foo(foo: int = bar)\n"))
+ self.checker.process_tokens(
+ _tokenize_str("foo(foo: module.classname = bar)\n")
+ )
+ self.checker.process_tokens(
+ _tokenize_str("foo(foo: Dict[int, str] = bar)\n")
+ )
+ self.checker.process_tokens(_tokenize_str("foo(foo: 'int' = bar)\n"))
+ self.checker.process_tokens(
+ _tokenize_str("foo(foo: Dict[int, 'str'] = bar)\n")
+ )
+ self.checker.process_tokens(_tokenize_str("lambda x=1: x\n"))
def testKeywordSpacingBad(self):
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('No', 'allowed', 'before', 'keyword argument assignment',
- '(foo =bar)\n ^'))):
- self.checker.process_tokens(_tokenize_str('(foo =bar)\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=(
+ "No",
+ "allowed",
+ "before",
+ "keyword argument assignment",
+ "(foo =bar)\n ^",
+ ),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("(foo =bar)\n"))
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('No', 'allowed', 'after', 'keyword argument assignment',
- '(foo= bar)\n ^'))):
- self.checker.process_tokens(_tokenize_str('(foo= bar)\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=(
+ "No",
+ "allowed",
+ "after",
+ "keyword argument assignment",
+ "(foo= bar)\n ^",
+ ),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("(foo= bar)\n"))
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('No', 'allowed', 'around', 'keyword argument assignment',
- '(foo = bar)\n ^'))):
- self.checker.process_tokens(_tokenize_str('(foo = bar)\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=(
+ "No",
+ "allowed",
+ "around",
+ "keyword argument assignment",
+ "(foo = bar)\n ^",
+ ),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("(foo = bar)\n"))
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('Exactly one', 'required', 'before', 'keyword argument assignment',
- '(foo: int= bar)\n ^'))):
- self.checker.process_tokens(_tokenize_str('(foo: int= bar)\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=(
+ "Exactly one",
+ "required",
+ "before",
+ "keyword argument assignment",
+ "(foo: int= bar)\n ^",
+ ),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("(foo: int= bar)\n"))
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('Exactly one', 'required', 'after', 'keyword argument assignment',
- '(foo: int =bar)\n ^'))):
- self.checker.process_tokens(_tokenize_str('(foo: int =bar)\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=(
+ "Exactly one",
+ "required",
+ "after",
+ "keyword argument assignment",
+ "(foo: int =bar)\n ^",
+ ),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("(foo: int =bar)\n"))
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('Exactly one', 'required', 'around', 'keyword argument assignment',
- '(foo: int=bar)\n ^'))):
- self.checker.process_tokens(_tokenize_str('(foo: int=bar)\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=(
+ "Exactly one",
+ "required",
+ "around",
+ "keyword argument assignment",
+ "(foo: int=bar)\n ^",
+ ),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("(foo: int=bar)\n"))
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('Exactly one', 'required', 'around', 'keyword argument assignment',
- '(foo: List[int]=bar)\n ^'))):
- self.checker.process_tokens(_tokenize_str('(foo: List[int]=bar)\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=(
+ "Exactly one",
+ "required",
+ "around",
+ "keyword argument assignment",
+ "(foo: List[int]=bar)\n ^",
+ ),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("(foo: List[int]=bar)\n"))
# Regression test for #1831
with self.assertNoMessages():
- self.checker.process_tokens(_tokenize_str("(arg: Tuple[\n int, str] = None):\n"))
+ self.checker.process_tokens(
+ _tokenize_str("(arg: Tuple[\n int, str] = None):\n")
+ )
def testOperatorSpacingGood(self):
- good_cases = [
- 'a = b\n'
- 'a < b\n'
- 'a\n< b\n',
- ]
+ good_cases = ["a = b\n" "a < b\n" "a\n< b\n"]
with self.assertNoMessages():
for code in good_cases:
self.checker.process_tokens(_tokenize_str(code))
def testOperatorSpacingBad(self):
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('Exactly one', 'required', 'before', 'comparison', 'a< b\n ^'))):
- self.checker.process_tokens(_tokenize_str('a< b\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=("Exactly one", "required", "before", "comparison", "a< b\n ^"),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("a< b\n"))
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('Exactly one', 'required', 'after', 'comparison', 'a <b\n ^'))):
- self.checker.process_tokens(_tokenize_str('a <b\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=("Exactly one", "required", "after", "comparison", "a <b\n ^"),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("a <b\n"))
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('Exactly one', 'required', 'around', 'comparison', 'a<b\n ^'))):
- self.checker.process_tokens(_tokenize_str('a<b\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=("Exactly one", "required", "around", "comparison", "a<b\n ^"),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("a<b\n"))
with self.assertAddsMessages(
- Message('bad-whitespace', line=1,
- args=('Exactly one', 'required', 'around', 'comparison', 'a< b\n ^'))):
- self.checker.process_tokens(_tokenize_str('a< b\n'))
+ Message(
+ "bad-whitespace",
+ line=1,
+ args=("Exactly one", "required", "around", "comparison", "a< b\n ^"),
+ )
+ ):
+ self.checker.process_tokens(_tokenize_str("a< b\n"))
def testValidTypingAnnotationEllipses(self):
"""Make sure ellipses in function typing annotation
doesn't cause a false positive bad-whitespace message"""
with self.assertNoMessages():
self.checker.process_tokens(
- _tokenize_str('def foo(t: Tuple[str, ...] = None):\n'))
+ _tokenize_str("def foo(t: Tuple[str, ...] = None):\n")
+ )
def testEmptyLines(self):
self.checker.config.no_space_check = []
- with self.assertAddsMessages(
- Message('trailing-whitespace', line=2)):
- self.checker.process_tokens(_tokenize_str('a = 1\n \nb = 2\n'))
+ with self.assertAddsMessages(Message("trailing-whitespace", line=2)):
+ self.checker.process_tokens(_tokenize_str("a = 1\n \nb = 2\n"))
- with self.assertAddsMessages(
- Message('trailing-whitespace', line=2)):
- self.checker.process_tokens(_tokenize_str('a = 1\n\t\nb = 2\n'))
+ with self.assertAddsMessages(Message("trailing-whitespace", line=2)):
+ self.checker.process_tokens(_tokenize_str("a = 1\n\t\nb = 2\n"))
- with self.assertAddsMessages(
- Message('trailing-whitespace', line=2)):
- self.checker.process_tokens(_tokenize_str('a = 1\n\v\nb = 2\n'))
+ with self.assertAddsMessages(Message("trailing-whitespace", line=2)):
+ self.checker.process_tokens(_tokenize_str("a = 1\n\v\nb = 2\n"))
with self.assertNoMessages():
- self.checker.process_tokens(_tokenize_str('a = 1\n\f\nb = 2\n'))
+ self.checker.process_tokens(_tokenize_str("a = 1\n\f\nb = 2\n"))
- self.checker.config.no_space_check = ['empty-line']
+ self.checker.config.no_space_check = ["empty-line"]
with self.assertNoMessages():
- self.checker.process_tokens(_tokenize_str('a = 1\n \nb = 2\n'))
+ self.checker.process_tokens(_tokenize_str("a = 1\n \nb = 2\n"))
with self.assertNoMessages():
- self.checker.process_tokens(_tokenize_str('a = 1\n\t\nb = 2\n'))
+ self.checker.process_tokens(_tokenize_str("a = 1\n\t\nb = 2\n"))
with self.assertNoMessages():
- self.checker.process_tokens(_tokenize_str('a = 1\n\v\nb = 2\n'))
-
+ self.checker.process_tokens(_tokenize_str("a = 1\n\v\nb = 2\n"))
def test_encoding_token(self):
"""Make sure the encoding token doesn't change the checker's behavior
@@ -383,8 +487,12 @@ class TestCheckSpace(CheckerTestCase):
reading a file does
"""
with self.assertNoMessages():
- encoding_token = tokenize.TokenInfo(tokenize.ENCODING, "utf-8", (0, 0), (0, 0), '')
- tokens = [encoding_token] + _tokenize_str('if (\n None):\n pass\n')
+ encoding_token = tokenize.TokenInfo(
+ tokenize.ENCODING, "utf-8", (0, 0), (0, 0), ""
+ )
+ tokens = [encoding_token] + _tokenize_str(
+ "if (\n None):\n pass\n"
+ )
self.checker.process_tokens(tokens)
@@ -393,19 +501,22 @@ def test_disable_global_option_end_of_line():
Test for issue with disabling tokenizer messages
that extend beyond the scope of the ast tokens
"""
- file_ = tempfile.NamedTemporaryFile('w', delete=False)
+ file_ = tempfile.NamedTemporaryFile("w", delete=False)
with file_:
- file_.write("""
+ file_.write(
+ """
mylist = [
None
]
- """)
+ """
+ )
try:
linter = lint.PyLinter()
checker = FormatChecker(linter)
linter.register_checker(checker)
args = linter.load_command_line_configuration(
- [file_.name, '-d' ,'bad-continuation'])
+ [file_.name, "-d", "bad-continuation"]
+ )
myreporter = reporters.CollectingReporter()
linter.set_reporter(myreporter)
linter.check(args)
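
The format-checker tests above feed token streams into process_tokens via the _tokenize_str test helper. A hedged sketch of how an equivalent token list can be produced with nothing but the standard library (this mirrors, but is not guaranteed to match, the helper's exact implementation); note that generate_tokens emits no ENCODING token, which is why test_encoding_token above constructs one by hand:

import io
import tokenize

def tokenize_str(code):
    """Tokenize a source string into a list of tokenize.TokenInfo tuples."""
    return list(tokenize.generate_tokens(io.StringIO(code).readline))

for tok in tokenize_str("( a)\n"):
    # Print the token type name and its exact source text.
    print(tokenize.tok_name[tok.type], repr(tok.string))
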
diff --git a/pylint/test/unittest_checker_imports.py b/pylint/test/unittest_checker_imports.py
index fd8d7417a..b5961b26d 100644
--- a/pylint/test/unittest_checker_imports.py
+++ b/pylint/test/unittest_checker_imports.py
@@ -18,61 +18,75 @@ from pylint.checkers import imports
from pylint.testutils import CheckerTestCase, Message, set_config
from pylint.interfaces import UNDEFINED
-REGR_DATA = os.path.join(os.path.dirname(__file__), 'regrtest_data', '')
+REGR_DATA = os.path.join(os.path.dirname(__file__), "regrtest_data", "")
+
class TestImportsChecker(CheckerTestCase):
CHECKER_CLASS = imports.ImportsChecker
- @set_config(ignored_modules=('external_module',
- 'fake_module.submodule',
- 'foo',
- 'bar'))
+ @set_config(
+ ignored_modules=("external_module", "fake_module.submodule", "foo", "bar")
+ )
def test_import_error_skipped(self):
"""Make sure that imports do not emit an 'import-error' when the
module is configured to be ignored."""
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
from external_module import anything
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_importfrom(node)
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
from external_module.another_module import anything
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_importfrom(node)
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
import external_module
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_import(node)
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
from fake_module.submodule import anything
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_importfrom(node)
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
from fake_module.submodule.deeper import anything
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_importfrom(node)
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
import foo, bar
- """)
- msg = Message('multiple-imports', node=node, args='foo, bar')
+ """
+ )
+ msg = Message("multiple-imports", node=node, args="foo, bar")
with self.assertAddsMessages(msg):
self.checker.visit_import(node)
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
import foo
import bar
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_import(node)
@@ -80,17 +94,16 @@ class TestImportsChecker(CheckerTestCase):
"""
Test that duplicate imports on single line raise 'reimported'.
"""
- node = astroid.extract_node('from time import sleep, sleep, time')
- msg = Message(msg_id='reimported', node=node, args=('sleep', 1))
+ node = astroid.extract_node("from time import sleep, sleep, time")
+ msg = Message(msg_id="reimported", node=node, args=("sleep", 1))
with self.assertAddsMessages(msg):
self.checker.visit_importfrom(node)
def test_relative_beyond_top_level(self):
- module = astroid.MANAGER.ast_from_module_name('beyond_top', REGR_DATA)
+ module = astroid.MANAGER.ast_from_module_name("beyond_top", REGR_DATA)
import_from = module.body[0]
- msg = Message(msg_id='relative-beyond-top-level',
- node=import_from)
+ msg = Message(msg_id="relative-beyond-top-level", node=import_from)
with self.assertAddsMessages(msg):
self.checker.visit_importfrom(import_from)
with self.assertNoMessages():
@@ -99,19 +112,21 @@ class TestImportsChecker(CheckerTestCase):
self.checker.visit_importfrom(module.body[2].body[0])
def test_wildcard_import_init(self):
- module = astroid.MANAGER.ast_from_module_name(
- 'init_wildcard', REGR_DATA)
+ module = astroid.MANAGER.ast_from_module_name("init_wildcard", REGR_DATA)
import_from = module.body[0]
with self.assertNoMessages():
self.checker.visit_importfrom(import_from)
def test_wildcard_import_non_init(self):
- module = astroid.MANAGER.ast_from_module_name('wildcard', REGR_DATA)
+ module = astroid.MANAGER.ast_from_module_name("wildcard", REGR_DATA)
import_from = module.body[0]
msg = Message(
- msg_id='wildcard-import', node=import_from,
- args='empty', confidence=UNDEFINED)
+ msg_id="wildcard-import",
+ node=import_from,
+ args="empty",
+ confidence=UNDEFINED,
+ )
with self.assertAddsMessages(msg):
self.checker.visit_importfrom(import_from)
diff --git a/pylint/test/unittest_checker_logging.py b/pylint/test/unittest_checker_logging.py
index af96d9268..ef2b40b0f 100644
--- a/pylint/test/unittest_checker_logging.py
+++ b/pylint/test/unittest_checker_logging.py
@@ -18,39 +18,47 @@ class TestLoggingModuleDetection(CheckerTestCase):
CHECKER_CLASS = logging.LoggingChecker
def test_detects_standard_logging_module(self):
- stmts = astroid.extract_node("""
+ stmts = astroid.extract_node(
+ """
import logging #@
logging.warn('%s' % '%s') #@
- """)
+ """
+ )
self.checker.visit_module(None)
self.checker.visit_import(stmts[0])
- with self.assertAddsMessages(Message('logging-not-lazy', node=stmts[1])):
+ with self.assertAddsMessages(Message("logging-not-lazy", node=stmts[1])):
self.checker.visit_call(stmts[1])
def test_dont_crash_on_invalid_format_string(self):
- node = astroid.parse('''
+ node = astroid.parse(
+ """
import logging
logging.error('0} - {1}'.format(1, 2))
- ''')
+ """
+ )
self.walk(node)
def test_detects_renamed_standard_logging_module(self):
- stmts = astroid.extract_node("""
+ stmts = astroid.extract_node(
+ """
import logging as blogging #@
blogging.warn('%s' % '%s') #@
- """)
+ """
+ )
self.checker.visit_module(None)
self.checker.visit_import(stmts[0])
- with self.assertAddsMessages(Message('logging-not-lazy', node=stmts[1])):
+ with self.assertAddsMessages(Message("logging-not-lazy", node=stmts[1])):
self.checker.visit_call(stmts[1])
- @set_config(logging_modules=['logging', 'my.logging'])
+ @set_config(logging_modules=["logging", "my.logging"])
def test_nonstandard_logging_module(self):
- stmts = astroid.extract_node("""
+ stmts = astroid.extract_node(
+ """
from my import logging as blogging #@
blogging.warn('%s' % '%s') #@
- """)
+ """
+ )
self.checker.visit_module(None)
self.checker.visit_import(stmts[0])
- with self.assertAddsMessages(Message('logging-not-lazy', node=stmts[1])):
+ with self.assertAddsMessages(Message("logging-not-lazy", node=stmts[1])):
self.checker.visit_call(stmts[1])
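
As background on the message these tests exercise: logging-not-lazy flags calls that format the message eagerly with %, because the string is built even when the logger would discard the record. A minimal illustration of the two call shapes (standard library only; the snippet is illustrative, not taken from pylint):

import logging

value = "details"
logging.warning("eager: %s" % value)  # flagged: interpolation happens up front
logging.warning("lazy: %s", value)    # preferred: formatted only if the record is emitted
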
diff --git a/pylint/test/unittest_checker_misc.py b/pylint/test/unittest_checker_misc.py
index c102338eb..5143698a0 100644
--- a/pylint/test/unittest_checker_misc.py
+++ b/pylint/test/unittest_checker_misc.py
@@ -14,8 +14,10 @@
from pylint.checkers import misc
from pylint.testutils import (
- CheckerTestCase, Message,
- set_config, _create_file_backed_module,
+ CheckerTestCase,
+ Message,
+ set_config,
+ _create_file_backed_module,
)
@@ -24,66 +26,72 @@ class TestFixme(CheckerTestCase):
def test_fixme_with_message(self):
with _create_file_backed_module(
- """a = 1
+ """a = 1
# FIXME message
- """) as module:
+ """
+ ) as module:
with self.assertAddsMessages(
- Message(msg_id='fixme', line=2, args='FIXME message')):
+ Message(msg_id="fixme", line=2, args="FIXME message")
+ ):
self.checker.process_module(module)
def test_todo_without_message(self):
with _create_file_backed_module(
- """a = 1
+ """a = 1
# TODO
- """) as module:
- with self.assertAddsMessages(
- Message(msg_id='fixme', line=2, args='TODO')):
+ """
+ ) as module:
+ with self.assertAddsMessages(Message(msg_id="fixme", line=2, args="TODO")):
self.checker.process_module(module)
def test_xxx_without_space(self):
with _create_file_backed_module(
- """a = 1
+ """a = 1
#XXX
- """) as module:
- with self.assertAddsMessages(
- Message(msg_id='fixme', line=2, args='XXX')):
+ """
+ ) as module:
+ with self.assertAddsMessages(Message(msg_id="fixme", line=2, args="XXX")):
self.checker.process_module(module)
def test_xxx_middle(self):
with _create_file_backed_module(
- """a = 1
+ """a = 1
# midle XXX
- """) as module:
+ """
+ ) as module:
with self.assertNoMessages():
self.checker.process_module(module)
def test_without_space_fixme(self):
with _create_file_backed_module(
- """a = 1
+ """a = 1
#FIXME
- """) as module:
- with self.assertAddsMessages(
- Message(msg_id='fixme', line=2, args='FIXME')):
+ """
+ ) as module:
+ with self.assertAddsMessages(Message(msg_id="fixme", line=2, args="FIXME")):
self.checker.process_module(module)
@set_config(notes=[])
def test_absent_codetag(self):
with _create_file_backed_module(
- """a = 1
+ """a = 1
# FIXME
# TODO
# XXX
- """) as module:
+ """
+ ) as module:
with self.assertNoMessages():
self.checker.process_module(module)
- @set_config(notes=['CODETAG'])
+ @set_config(notes=["CODETAG"])
def test_other_present_codetag(self):
with _create_file_backed_module(
- """a = 1
+ """a = 1
# CODETAG
# FIXME
- """) as module:
+ """
+ ) as module:
with self.assertAddsMessages(
- Message(msg_id='fixme', line=2, args='CODETAG')):
+ Message(msg_id="fixme", line=2, args="CODETAG")
+ ):
self.checker.process_module(module)
diff --git a/pylint/test/unittest_checker_python3.py b/pylint/test/unittest_checker_python3.py
index bdca85a69..184fe053f 100644
--- a/pylint/test/unittest_checker_python3.py
+++ b/pylint/test/unittest_checker_python3.py
@@ -30,40 +30,41 @@ from pylint.interfaces import INFERENCE_FAILURE, INFERENCE
# Decorator for any tests that will fail under Python 3
-python2_only = pytest.mark.skipif(sys.version_info[0] > 2, reason='Python 2 only')
+python2_only = pytest.mark.skipif(sys.version_info[0] > 2, reason="Python 2 only")
# TODO(cpopa): Port these to the functional test framework instead.
+
class TestPython3Checker(testutils.CheckerTestCase):
CHECKER_CLASS = checker.Python3Checker
def check_bad_builtin(self, builtin_name):
- node = astroid.extract_node(builtin_name + ' #@')
- message = builtin_name.lower() + '-builtin'
+ node = astroid.extract_node(builtin_name + " #@")
+ message = builtin_name.lower() + "-builtin"
with self.assertAddsMessages(testutils.Message(message, node=node)):
self.checker.visit_name(node)
@python2_only
def test_bad_builtins(self):
builtins = [
- 'apply',
- 'buffer',
- 'cmp',
- 'coerce',
- 'execfile',
- 'file',
- 'input',
- 'intern',
- 'long',
- 'raw_input',
- 'round',
- 'reduce',
- 'StandardError',
- 'unichr',
- 'unicode',
- 'xrange',
- 'reload',
+ "apply",
+ "buffer",
+ "cmp",
+ "coerce",
+ "execfile",
+ "file",
+ "input",
+ "intern",
+ "long",
+ "raw_input",
+ "round",
+ "reduce",
+ "StandardError",
+ "unichr",
+ "unicode",
+ "xrange",
+ "reload",
]
for builtin in builtins:
self.check_bad_builtin(builtin)
@@ -75,13 +76,17 @@ class TestPython3Checker(testutils.CheckerTestCase):
self.walk(module)
def as_used_by_iterable_in_for_loop_test(self, fxn):
- checker = '{}-builtin-not-iterating'.format(fxn)
- node = astroid.extract_node("""
+ checker = "{}-builtin-not-iterating".format(fxn)
+ node = astroid.extract_node(
+ """
for x in (whatever(
{}() #@
)):
pass
- """.format(fxn))
+ """.format(
+ fxn
+ )
+ )
message = testutils.Message(checker, node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
@@ -99,24 +104,32 @@ class TestPython3Checker(testutils.CheckerTestCase):
self.walk(module)
def as_used_in_variant_in_genexp_test(self, fxn):
- checker = '{}-builtin-not-iterating'.format(fxn)
- node = astroid.extract_node("""
+ checker = "{}-builtin-not-iterating".format(fxn)
+ node = astroid.extract_node(
+ """
list(
__({}(x))
for x in [1]
)
- """.format(fxn))
+ """.format(
+ fxn
+ )
+ )
message = testutils.Message(checker, node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
def as_used_in_variant_in_listcomp_test(self, fxn):
- checker = '{}-builtin-not-iterating'.format(fxn)
- node = astroid.extract_node("""
+ checker = "{}-builtin-not-iterating".format(fxn)
+ node = astroid.extract_node(
+ """
[
__({}(None, x))
for x in [[1]]]
- """.format(fxn))
+ """.format(
+ fxn
+ )
+ )
message = testutils.Message(checker, node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
@@ -132,12 +145,16 @@ class TestPython3Checker(testutils.CheckerTestCase):
self.walk(module)
def as_argument_to_random_fxn_test(self, fxn):
- checker = '{}-builtin-not-iterating'.format(fxn)
- node = astroid.extract_node("""
+ checker = "{}-builtin-not-iterating".format(fxn)
+ node = astroid.extract_node(
+ """
y(
{}() #@
)
- """.format(fxn))
+ """.format(
+ fxn
+ )
+ )
message = testutils.Message(checker, node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
@@ -149,17 +166,25 @@ class TestPython3Checker(testutils.CheckerTestCase):
self.walk(module)
def as_iterable_in_unpacking(self, fxn):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
a, b = __({}())
- """.format(fxn))
+ """.format(
+ fxn
+ )
+ )
with self.assertNoMessages():
self.checker.visit_call(node)
def as_assignment(self, fxn):
- checker = '{}-builtin-not-iterating'.format(fxn)
- node = astroid.extract_node("""
+ checker = "{}-builtin-not-iterating".format(fxn)
+ node = astroid.extract_node(
+ """
a = __({}())
- """.format(fxn))
+ """.format(
+ fxn
+ )
+ )
message = testutils.Message(checker, node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
@@ -178,53 +203,61 @@ class TestPython3Checker(testutils.CheckerTestCase):
self.as_assignment(fxn)
self.as_argument_to_materialized_filter(fxn)
- for func in ('iter', 'list', 'tuple', 'sorted',
- 'set', 'sum', 'any', 'all',
- 'enumerate', 'dict'):
+ for func in (
+ "iter",
+ "list",
+ "tuple",
+ "sorted",
+ "set",
+ "sum",
+ "any",
+ "all",
+ "enumerate",
+ "dict",
+ ):
self.as_argument_to_callable_constructor_test(fxn, func)
def test_dict_subclasses_methods_in_iterating_context(self):
- iterating, not_iterating = astroid.extract_node('''
+ iterating, not_iterating = astroid.extract_node(
+ """
from __future__ import absolute_import
from collections import defaultdict
d = defaultdict(list)
a, b = d.keys() #@
x = d.keys() #@
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_call(iterating.value)
- message = testutils.Message('dict-keys-not-iterating', node=not_iterating.value)
+ message = testutils.Message("dict-keys-not-iterating", node=not_iterating.value)
with self.assertAddsMessages(message):
self.checker.visit_call(not_iterating.value)
def test_dict_methods_in_iterating_context(self):
iterating_code = [
- 'for x in {}(): pass',
- '(x for x in {}())',
- '[x for x in {}()]',
- 'iter({}())',
- 'a, b = {}()',
- 'max({}())',
- 'min({}())',
- '3 in {}()',
- 'set().update({}())',
- '[].extend({}())',
- '{{}}.update({}())',
- '''
+ "for x in {}(): pass",
+ "(x for x in {}())",
+ "[x for x in {}()]",
+ "iter({}())",
+ "a, b = {}()",
+ "max({}())",
+ "min({}())",
+ "3 in {}()",
+ "set().update({}())",
+ "[].extend({}())",
+ "{{}}.update({}())",
+ """
from __future__ import absolute_import
from itertools import chain
chain.from_iterable({}())
- ''',
- ]
- non_iterating_code = [
- 'x = __({}())',
- '__({}())[0]',
+ """,
]
+ non_iterating_code = ["x = __({}())", "__({}())[0]"]
- for method in ('keys', 'items', 'values'):
- dict_method = '{{}}.{}'.format(method)
+ for method in ("keys", "items", "values"):
+ dict_method = "{{}}.{}".format(method)
for code in iterating_code:
with_value = code.format(dict_method)
@@ -236,114 +269,124 @@ class TestPython3Checker(testutils.CheckerTestCase):
with_value = code.format(dict_method)
node = astroid.extract_node(with_value)
- checker = 'dict-{}-not-iterating'.format(method)
+ checker = "dict-{}-not-iterating".format(method)
message = testutils.Message(checker, node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
def test_map_in_iterating_context(self):
- self.iterating_context_tests('map')
+ self.iterating_context_tests("map")
def test_zip_in_iterating_context(self):
- self.iterating_context_tests('zip')
+ self.iterating_context_tests("zip")
def test_range_in_iterating_context(self):
- self.iterating_context_tests('range')
+ self.iterating_context_tests("range")
def test_filter_in_iterating_context(self):
- self.iterating_context_tests('filter')
+ self.iterating_context_tests("filter")
def defined_method_test(self, method, warning):
"""Helper for verifying that a certain method is not defined."""
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class Foo(object):
def __{}__(self, other): #@
- pass""".format(method))
+ pass""".format(
+ method
+ )
+ )
message = testutils.Message(warning, node=node)
with self.assertAddsMessages(message):
self.checker.visit_functiondef(node)
def test_delslice_method(self):
- self.defined_method_test('delslice', 'delslice-method')
+ self.defined_method_test("delslice", "delslice-method")
def test_getslice_method(self):
- self.defined_method_test('getslice', 'getslice-method')
+ self.defined_method_test("getslice", "getslice-method")
def test_setslice_method(self):
- self.defined_method_test('setslice', 'setslice-method')
+ self.defined_method_test("setslice", "setslice-method")
def test_coerce_method(self):
- self.defined_method_test('coerce', 'coerce-method')
+ self.defined_method_test("coerce", "coerce-method")
def test_oct_method(self):
- self.defined_method_test('oct', 'oct-method')
+ self.defined_method_test("oct", "oct-method")
def test_hex_method(self):
- self.defined_method_test('hex', 'hex-method')
+ self.defined_method_test("hex", "hex-method")
def test_nonzero_method(self):
- self.defined_method_test('nonzero', 'nonzero-method')
+ self.defined_method_test("nonzero", "nonzero-method")
def test_cmp_method(self):
- self.defined_method_test('cmp', 'cmp-method')
+ self.defined_method_test("cmp", "cmp-method")
def test_div_method(self):
- self.defined_method_test('div', 'div-method')
+ self.defined_method_test("div", "div-method")
def test_idiv_method(self):
- self.defined_method_test('idiv', 'idiv-method')
+ self.defined_method_test("idiv", "idiv-method")
def test_rdiv_method(self):
- self.defined_method_test('rdiv', 'rdiv-method')
+ self.defined_method_test("rdiv", "rdiv-method")
def test_eq_and_hash_method(self):
"""Helper for verifying that a certain method is not defined."""
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class Foo(object): #@
def __eq__(self, other):
pass
def __hash__(self):
- pass""")
+ pass"""
+ )
with self.assertNoMessages():
self.checker.visit_classdef(node)
def test_eq_and_hash_is_none(self):
"""Helper for verifying that a certain method is not defined."""
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class Foo(object): #@
def __eq__(self, other):
pass
- __hash__ = None""")
+ __hash__ = None"""
+ )
with self.assertNoMessages():
self.checker.visit_classdef(node)
def test_eq_without_hash_method(self):
"""Helper for verifying that a certain method is not defined."""
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class Foo(object): #@
def __eq__(self, other):
- pass""")
- message = testutils.Message('eq-without-hash', node=node)
+ pass"""
+ )
+ message = testutils.Message("eq-without-hash", node=node)
with self.assertAddsMessages(message):
self.checker.visit_classdef(node)
@python2_only
def test_print_statement(self):
node = astroid.extract_node('print "Hello, World!" #@')
- message = testutils.Message('print-statement', node=node)
+ message = testutils.Message("print-statement", node=node)
with self.assertAddsMessages(message):
self.checker.visit_print(node)
@python2_only
def test_backtick(self):
- node = astroid.extract_node('`test`')
- message = testutils.Message('backtick', node=node)
+ node = astroid.extract_node("`test`")
+ message = testutils.Message("backtick", node=node)
with self.assertAddsMessages(message):
self.checker.visit_repr(node)
def test_relative_import(self):
- node = astroid.extract_node('import string #@')
- message = testutils.Message('no-absolute-import', node=node)
+ node = astroid.extract_node("import string #@")
+ message = testutils.Message("no-absolute-import", node=node)
with self.assertAddsMessages(message):
self.checker.visit_import(node)
with self.assertNoMessages():
@@ -351,8 +394,8 @@ class TestPython3Checker(testutils.CheckerTestCase):
self.checker.visit_import(node)
def test_relative_from_import(self):
- node = astroid.extract_node('from os import path #@')
- message = testutils.Message('no-absolute-import', node=node)
+ node = astroid.extract_node("from os import path #@")
+ message = testutils.Message("no-absolute-import", node=node)
with self.assertAddsMessages(message):
self.checker.visit_importfrom(node)
with self.assertNoMessages():
@@ -361,176 +404,194 @@ class TestPython3Checker(testutils.CheckerTestCase):
def test_absolute_import(self):
module_import = astroid.parse(
- 'from __future__ import absolute_import; import os')
+ "from __future__ import absolute_import; import os"
+ )
module_from = astroid.parse(
- 'from __future__ import absolute_import; from os import path')
+ "from __future__ import absolute_import; from os import path"
+ )
with self.assertNoMessages():
for module in (module_import, module_from):
self.walk(module)
def test_import_star_module_level(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
def test():
from lala import * #@
- ''')
- absolute = testutils.Message('no-absolute-import', node=node)
- star = testutils.Message('import-star-module-level', node=node)
+ """
+ )
+ absolute = testutils.Message("no-absolute-import", node=node)
+ star = testutils.Message("import-star-module-level", node=node)
with self.assertAddsMessages(absolute, star):
self.checker.visit_importfrom(node)
def test_division(self):
- node = astroid.extract_node('3 / 2 #@')
- message = testutils.Message('old-division', node=node)
+ node = astroid.extract_node("3 / 2 #@")
+ message = testutils.Message("old-division", node=node)
with self.assertAddsMessages(message):
self.checker.visit_binop(node)
def test_division_with_future_statement(self):
- module = astroid.parse('from __future__ import division; 3 / 2')
+ module = astroid.parse("from __future__ import division; 3 / 2")
with self.assertNoMessages():
self.walk(module)
def test_floor_division(self):
- node = astroid.extract_node(' 3 // 2 #@')
+ node = astroid.extract_node(" 3 // 2 #@")
with self.assertNoMessages():
self.checker.visit_binop(node)
def test_division_by_float(self):
- left_node = astroid.extract_node('3.0 / 2 #@')
- right_node = astroid.extract_node(' 3 / 2.0 #@')
+ left_node = astroid.extract_node("3.0 / 2 #@")
+ right_node = astroid.extract_node(" 3 / 2.0 #@")
with self.assertNoMessages():
for node in (left_node, right_node):
self.checker.visit_binop(node)
def test_dict_iter_method(self):
- for meth in ('keys', 'values', 'items'):
- node = astroid.extract_node('x.iter%s() #@' % meth)
- message = testutils.Message('dict-iter-method', node=node)
+ for meth in ("keys", "values", "items"):
+ node = astroid.extract_node("x.iter%s() #@" % meth)
+ message = testutils.Message("dict-iter-method", node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
def test_dict_iter_method_on_dict(self):
- nodes = astroid.extract_node('''
+ nodes = astroid.extract_node(
+ """
from collections import defaultdict
{}.iterkeys() #@
defaultdict(list).iterkeys() #@
class Someclass(dict):
pass
Someclass().iterkeys() #@
- ''')
+ """
+ )
for node in nodes:
- message = testutils.Message('dict-iter-method', node=node)
+ message = testutils.Message("dict-iter-method", node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
def test_dict_not_iter_method(self):
- arg_node = astroid.extract_node('x.iterkeys(x) #@')
- stararg_node = astroid.extract_node('x.iterkeys(*x) #@')
- kwarg_node = astroid.extract_node('x.iterkeys(y=x) #@')
- non_dict_node = astroid.extract_node('x=[]\nx.iterkeys() #@')
+ arg_node = astroid.extract_node("x.iterkeys(x) #@")
+ stararg_node = astroid.extract_node("x.iterkeys(*x) #@")
+ kwarg_node = astroid.extract_node("x.iterkeys(y=x) #@")
+ non_dict_node = astroid.extract_node("x=[]\nx.iterkeys() #@")
with self.assertNoMessages():
for node in (arg_node, stararg_node, kwarg_node, non_dict_node):
self.checker.visit_call(node)
def test_dict_view_method(self):
- for meth in ('keys', 'values', 'items'):
- node = astroid.extract_node('x.view%s() #@' % meth)
- message = testutils.Message('dict-view-method', node=node)
+ for meth in ("keys", "values", "items"):
+ node = astroid.extract_node("x.view%s() #@" % meth)
+ message = testutils.Message("dict-view-method", node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
def test_dict_view_method_on_dict(self):
- nodes = astroid.extract_node('''
+ nodes = astroid.extract_node(
+ """
from collections import defaultdict
{}.viewkeys() #@
defaultdict(list).viewkeys() #@
class Someclass(dict):
pass
Someclass().viewkeys() #@
- ''')
+ """
+ )
for node in nodes:
- message = testutils.Message('dict-view-method', node=node)
+ message = testutils.Message("dict-view-method", node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
def test_dict_not_view_method(self):
- arg_node = astroid.extract_node('x.viewkeys(x) #@')
- stararg_node = astroid.extract_node('x.viewkeys(*x) #@')
- kwarg_node = astroid.extract_node('x.viewkeys(y=x) #@')
- non_dict_node = astroid.extract_node('x=[]\nx.viewkeys() #@')
+ arg_node = astroid.extract_node("x.viewkeys(x) #@")
+ stararg_node = astroid.extract_node("x.viewkeys(*x) #@")
+ kwarg_node = astroid.extract_node("x.viewkeys(y=x) #@")
+ non_dict_node = astroid.extract_node("x=[]\nx.viewkeys() #@")
with self.assertNoMessages():
for node in (arg_node, stararg_node, kwarg_node, non_dict_node):
self.checker.visit_call(node)
def test_next_method(self):
- node = astroid.extract_node('x.next() #@')
- message = testutils.Message('next-method-called', node=node)
+ node = astroid.extract_node("x.next() #@")
+ message = testutils.Message("next-method-called", node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
def test_not_next_method(self):
- arg_node = astroid.extract_node('x.next(x) #@')
- stararg_node = astroid.extract_node('x.next(*x) #@')
- kwarg_node = astroid.extract_node('x.next(y=x) #@')
+ arg_node = astroid.extract_node("x.next(x) #@")
+ stararg_node = astroid.extract_node("x.next(*x) #@")
+ kwarg_node = astroid.extract_node("x.next(y=x) #@")
with self.assertNoMessages():
for node in (arg_node, stararg_node, kwarg_node):
self.checker.visit_call(node)
def test_metaclass_assignment(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class Foo(object): #@
- __metaclass__ = type""")
- message = testutils.Message('metaclass-assignment', node=node)
+ __metaclass__ = type"""
+ )
+ message = testutils.Message("metaclass-assignment", node=node)
with self.assertAddsMessages(message):
self.checker.visit_classdef(node)
def test_metaclass_global_assignment(self):
- module = astroid.parse('__metaclass__ = type')
+ module = astroid.parse("__metaclass__ = type")
with self.assertNoMessages():
self.walk(module)
@python2_only
def test_parameter_unpacking(self):
- node = astroid.extract_node('def func((a, b)):#@\n pass')
+ node = astroid.extract_node("def func((a, b)):#@\n pass")
arg = node.args.args[0]
- with self.assertAddsMessages(testutils.Message('parameter-unpacking', node=arg)):
+ with self.assertAddsMessages(
+ testutils.Message("parameter-unpacking", node=arg)
+ ):
self.checker.visit_arguments(node.args)
@python2_only
def test_old_raise_syntax(self):
node = astroid.extract_node('raise Exception, "test"')
- message = testutils.Message('old-raise-syntax', node=node)
+ message = testutils.Message("old-raise-syntax", node=node)
with self.assertAddsMessages(message):
self.checker.visit_raise(node)
def test_xreadlines_attribute(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
f.xreadlines #@
- """)
- message = testutils.Message('xreadlines-attribute', node=node)
+ """
+ )
+ message = testutils.Message("xreadlines-attribute", node=node)
with self.assertAddsMessages(message):
self.checker.visit_attribute(node)
def test_exception_message_attribute(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
try:
raise Exception("test")
except Exception as e:
e.message #@
- """)
- message = testutils.Message('exception-message-attribute', node=node)
+ """
+ )
+ message = testutils.Message("exception-message-attribute", node=node)
with self.assertAddsMessages(message):
self.checker.visit_attribute(node)
def test_normal_message_attribute(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
e.message #@
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_attribute(node)
def test_invalid_codec(self):
node = astroid.extract_node('foobar.encode("hex") #@')
- message = testutils.Message('invalid-str-codec', node=node)
+ message = testutils.Message("invalid-str-codec", node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
@@ -546,7 +607,7 @@ class TestPython3Checker(testutils.CheckerTestCase):
def test_invalid_open_codec(self):
node = astroid.extract_node('open(foobar, encoding="hex") #@')
- message = testutils.Message('invalid-str-codec', node=node)
+ message = testutils.Message("invalid-str-codec", node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
@@ -558,203 +619,240 @@ class TestPython3Checker(testutils.CheckerTestCase):
@python2_only
def test_raising_string(self):
node = astroid.extract_node('raise "Test"')
- message = testutils.Message('raising-string', node=node)
+ message = testutils.Message("raising-string", node=node)
with self.assertAddsMessages(message):
self.checker.visit_raise(node)
@python2_only
def test_checker_disabled_by_default(self):
- node = astroid.parse(textwrap.dedent("""
+ node = astroid.parse(
+ textwrap.dedent(
+ """
abc = 1l
raise Exception, "test"
raise "test"
`abc`
- """))
+ """
+ )
+ )
with self.assertNoMessages():
self.walk(node)
def test_using_cmp_argument(self):
- nodes = astroid.extract_node("""
+ nodes = astroid.extract_node(
+ """
[].sort(cmp=lambda x: x) #@
a = list(range(x))
a.sort(cmp=lambda x: x) #@
sorted([], cmp=lambda x: x) #@
- """)
+ """
+ )
for node in nodes:
- message = testutils.Message('using-cmp-argument', node=node)
+ message = testutils.Message("using-cmp-argument", node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
def test_sys_maxint(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import sys
sys.maxint #@
- ''')
- message = testutils.Message('sys-max-int', node=node)
+ """
+ )
+ message = testutils.Message("sys-max-int", node=node)
with self.assertAddsMessages(message):
self.checker.visit_attribute(node)
def test_itertools_izip(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
from itertools import izip #@
- ''')
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
- message = testutils.Message('deprecated-itertools-function', node=node)
+ """
+ )
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
+ message = testutils.Message("deprecated-itertools-function", node=node)
with self.assertAddsMessages(absolute_import_message, message):
self.checker.visit_importfrom(node)
def test_deprecated_types_fields(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
from types import StringType #@
- ''')
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
- message = testutils.Message('deprecated-types-field', node=node)
+ """
+ )
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
+ message = testutils.Message("deprecated-types-field", node=node)
with self.assertAddsMessages(absolute_import_message, message):
self.checker.visit_importfrom(node)
def test_sys_maxint_imort_from(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
from sys import maxint #@
- ''')
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
- message = testutils.Message('sys-max-int', node=node)
+ """
+ )
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
+ message = testutils.Message("sys-max-int", node=node)
with self.assertAddsMessages(absolute_import_message, message):
self.checker.visit_importfrom(node)
def test_object_maxint(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
sys = object()
sys.maxint #@
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_attribute(node)
def test_bad_import(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import urllib2, sys #@
- ''')
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
- message = testutils.Message('bad-python3-import', node=node)
+ """
+ )
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
+ message = testutils.Message("bad-python3-import", node=node)
with self.assertAddsMessages(absolute_import_message, message):
self.checker.visit_import(node)
def test_bad_import_turtle(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import turtle #@
turtle.Turtle()
- ''')
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
+ """
+ )
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
with self.assertAddsMessages(absolute_import_message):
self.checker.visit_import(node)
def test_bad_import_dbm(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
from dbm import open as open_ #@
open_("dummy.db")
- ''')
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
+ """
+ )
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
with self.assertAddsMessages(absolute_import_message):
self.checker.visit_importfrom(node)
@python2_only
def test_bad_import_not_on_relative(self):
- samples = [
- 'from .commands import titi',
- 'from . import commands',
- ]
+ samples = ["from .commands import titi", "from . import commands"]
for code in samples:
node = astroid.extract_node(code)
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
with self.assertAddsMessages(absolute_import_message):
self.checker.visit_importfrom(node)
self.checker._future_absolute_import = False
def test_bad_import_conditional(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import six
if six.PY2:
import urllib2 #@
- ''')
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
+ """
+ )
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
with self.assertAddsMessages(absolute_import_message):
self.checker.visit_import(node)
def test_bad_import_try_except_handler(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
try:
from hashlib import sha
except:
import sha #@
- ''')
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
+ """
+ )
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
with self.assertAddsMessages(absolute_import_message):
self.checker.visit_import(node)
def test_bad_import_try(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
try:
import md5 #@
except:
from hashlib import md5
finally:
pass
- ''')
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
+ """
+ )
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
with self.assertAddsMessages(absolute_import_message):
self.checker.visit_import(node)
def test_bad_import_try_finally(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
try:
import Queue #@
finally:
import queue
- ''')
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
- message = testutils.Message('bad-python3-import', node=node)
+ """
+ )
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
+ message = testutils.Message("bad-python3-import", node=node)
with self.assertAddsMessages(absolute_import_message, message):
self.checker.visit_import(node)
def test_bad_import_from(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
from cStringIO import StringIO #@
- ''')
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
- message = testutils.Message('bad-python3-import', node=node)
+ """
+ )
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
+ message = testutils.Message("bad-python3-import", node=node)
with self.assertAddsMessages(absolute_import_message, message):
self.checker.visit_importfrom(node)
def test_bad_string_attribute(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import string
string.maketrans #@
- ''')
- message = testutils.Message('deprecated-string-function', node=node)
+ """
+ )
+ message = testutils.Message("deprecated-string-function", node=node)
with self.assertAddsMessages(message):
self.checker.visit_attribute(node)
def test_bad_operator_attribute(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import operator
operator.div #@
- ''')
- message = testutils.Message('deprecated-operator-function', node=node)
+ """
+ )
+ message = testutils.Message("deprecated-operator-function", node=node)
with self.assertAddsMessages(message):
self.checker.visit_attribute(node)
def test_comprehension_escape(self):
- assign, escaped_node = astroid.extract_node('''
+ assign, escaped_node = astroid.extract_node(
+ """
a = [i for i in range(10)] #@
i #@
- ''')
- good_module = astroid.parse('''
+ """
+ )
+ good_module = astroid.parse(
+ """
{c for c in range(10)} #@
{j:j for j in range(10)} #@
[image_child] = [x for x in range(10)]
thumbnail = func(__(image_child))
- ''')
- message = testutils.Message('comprehension-escape', node=escaped_node)
+ """
+ )
+ message = testutils.Message("comprehension-escape", node=escaped_node)
with self.assertAddsMessages(message):
self.checker.visit_listcomp(assign.value)
@@ -762,16 +860,19 @@ class TestPython3Checker(testutils.CheckerTestCase):
self.walk(good_module)
def test_comprehension_escape_newly_introduced(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
[i for i in range(3)]
for i in range(3):
i
- ''')
+ """
+ )
with self.assertNoMessages():
self.walk(node)
def test_exception_escape(self):
- module = astroid.parse('''
+ module = astroid.parse(
+ """
try: 1/0
except ValueError as exc:
pass
@@ -785,8 +886,9 @@ class TestPython3Checker(testutils.CheckerTestCase):
2/0
except (ValueError, TypeError): #@
exc = 2
- ''')
- message = testutils.Message('exception-escape', node=module.body[1].value)
+ """
+ )
+ message = testutils.Message("exception-escape", node=module.body[1].value)
with self.assertAddsMessages(message):
self.checker.visit_excepthandler(module.body[0].handlers[0])
with self.assertNoMessages():
@@ -794,233 +896,274 @@ class TestPython3Checker(testutils.CheckerTestCase):
self.checker.visit_excepthandler(module.body[4].handlers[0])
def test_bad_sys_attribute(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import sys
sys.exc_clear #@
- ''')
- message = testutils.Message('deprecated-sys-function', node=node)
+ """
+ )
+ message = testutils.Message("deprecated-sys-function", node=node)
with self.assertAddsMessages(message):
self.checker.visit_attribute(node)
def test_bad_urllib_attribute(self):
- nodes = astroid.extract_node('''
+ nodes = astroid.extract_node(
+ """
import urllib
urllib.addbase #@
urllib.splithost #@
urllib.urlretrieve #@
urllib.urlopen #@
urllib.urlencode #@
- ''')
+ """
+ )
for node in nodes:
- message = testutils.Message('deprecated-urllib-function', node=node)
+ message = testutils.Message("deprecated-urllib-function", node=node)
with self.assertAddsMessages(message):
self.checker.visit_attribute(node)
def test_ok_string_attribute(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import string
string.ascii_letters #@
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_attribute(node)
def test_bad_string_call(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import string
string.upper("hello world") #@
- ''')
- message = testutils.Message('deprecated-string-function', node=node)
+ """
+ )
+ message = testutils.Message("deprecated-string-function", node=node)
with self.assertAddsMessages(message):
self.checker.visit_call(node)
def test_ok_shadowed_call(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import six.moves.configparser
six.moves.configparser.ConfigParser() #@
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_call(node)
def test_ok_string_call(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import string
string.Foramtter() #@
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_call(node)
def test_bad_string_import_from(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
from string import atoi #@
- ''')
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
- message = testutils.Message('deprecated-string-function', node=node)
+ """
+ )
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
+ message = testutils.Message("deprecated-string-function", node=node)
with self.assertAddsMessages(absolute_import_message, message):
self.checker.visit_importfrom(node)
def test_ok_string_import_from(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
from string import digits #@
- ''')
- absolute_import_message = testutils.Message('no-absolute-import', node=node)
+ """
+ )
+ absolute_import_message = testutils.Message("no-absolute-import", node=node)
with self.assertAddsMessages(absolute_import_message):
self.checker.visit_importfrom(node)
def test_bad_str_translate_call_string_literal(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
foobar.translate(None, 'abc123') #@
- ''')
- message = testutils.Message('deprecated-str-translate-call', node=node,
- confidence=INFERENCE_FAILURE)
+ """
+ )
+ message = testutils.Message(
+ "deprecated-str-translate-call", node=node, confidence=INFERENCE_FAILURE
+ )
with self.assertAddsMessages(message):
self.checker.visit_call(node)
def test_bad_str_translate_call_variable(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
def raz(foobar):
foobar.translate(None, 'hello') #@
- ''')
- message = testutils.Message('deprecated-str-translate-call', node=node,
- confidence=INFERENCE_FAILURE)
+ """
+ )
+ message = testutils.Message(
+ "deprecated-str-translate-call", node=node, confidence=INFERENCE_FAILURE
+ )
with self.assertAddsMessages(message):
self.checker.visit_call(node)
def test_bad_str_translate_call_infer_str(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
foobar = "hello world"
foobar.translate(None, foobar) #@
- ''')
- message = testutils.Message('deprecated-str-translate-call', node=node,
- confidence=INFERENCE)
+ """
+ )
+ message = testutils.Message(
+ "deprecated-str-translate-call", node=node, confidence=INFERENCE
+ )
with self.assertAddsMessages(message):
self.checker.visit_call(node)
def test_ok_str_translate_call_integer(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
foobar.translate(None, 33) #@
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_call(node)
def test_ok_str_translate_call_keyword(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
foobar.translate(None, 'foobar', raz=33) #@
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_call(node)
def test_ok_str_translate_call_not_str(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
foobar = {}
foobar.translate(None, 'foobar') #@
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_call(node)
def test_non_py2_conditional(self):
- code = '''
+ code = """
from __future__ import absolute_import
import sys
x = {}
if sys.maxsize:
x.iterkeys() #@
- '''
+ """
node = astroid.extract_node(code)
module = node.parent.parent
- message = testutils.Message('dict-iter-method', node=node)
+ message = testutils.Message("dict-iter-method", node=node)
with self.assertAddsMessages(message):
self.walk(module)
def test_six_conditional(self):
- code = '''
+ code = """
from __future__ import absolute_import
import six
x = {}
if six.PY2:
x.iterkeys()
- '''
+ """
module = astroid.parse(code)
with self.assertNoMessages():
self.walk(module)
def test_versioninfo_conditional(self):
- code = '''
+ code = """
from __future__ import absolute_import
import sys
x = {}
if sys.version_info[0] == 2:
x.iterkeys()
- '''
+ """
module = astroid.parse(code)
with self.assertNoMessages():
self.walk(module)
def test_versioninfo_tuple_conditional(self):
- code = '''
+ code = """
from __future__ import absolute_import
import sys
x = {}
if sys.version_info == (2, 7):
x.iterkeys()
- '''
+ """
module = astroid.parse(code)
with self.assertNoMessages():
self.walk(module)
def test_six_ifexp_conditional(self):
- code = '''
+ code = """
from __future__ import absolute_import
import six
import string
string.translate if six.PY2 else None
- '''
+ """
module = astroid.parse(code)
with self.assertNoMessages():
self.walk(module)
def test_next_defined(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class Foo(object):
def next(self): #@
- pass""")
- message = testutils.Message('next-method-defined', node=node)
+ pass"""
+ )
+ message = testutils.Message("next-method-defined", node=node)
with self.assertAddsMessages(message):
self.checker.visit_functiondef(node)
def test_next_defined_too_many_args(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class Foo(object):
def next(self, foo=None): #@
- pass""")
+ pass"""
+ )
with self.assertNoMessages():
self.checker.visit_functiondef(node)
def test_next_defined_static_method_too_many_args(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class Foo(object):
@staticmethod
def next(self): #@
- pass""")
+ pass"""
+ )
with self.assertNoMessages():
self.checker.visit_functiondef(node)
def test_next_defined_static_method(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class Foo(object):
@staticmethod
def next(): #@
- pass""")
- message = testutils.Message('next-method-defined', node=node)
+ pass"""
+ )
+ message = testutils.Message("next-method-defined", node=node)
with self.assertAddsMessages(message):
self.checker.visit_functiondef(node)
def test_next_defined_class_method(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class Foo(object):
@classmethod
def next(cls): #@
- pass""")
- message = testutils.Message('next-method-defined', node=node)
+ pass"""
+ )
+ message = testutils.Message("next-method-defined", node=node)
with self.assertAddsMessages(message):
self.checker.visit_functiondef(node)
@@ -1038,7 +1181,7 @@ class TestPython3TokenChecker(testutils.CheckerTestCase):
def test_long_suffix(self):
for code in ("1l", "1L"):
- self._test_token_message(code, 'long-suffix')
+ self._test_token_message(code, "long-suffix")
def test_old_ne_operator(self):
self._test_token_message("1 <> 2", "old-ne-operator")
@@ -1055,8 +1198,8 @@ class TestPython3TokenChecker(testutils.CheckerTestCase):
def test_non_ascii_bytes_literal(self):
code = 'b"测试"'
- self._test_token_message(code, 'non-ascii-bytes-literal')
- for code in ("测试", "测试", 'abcdef', b'\x80'):
+ self._test_token_message(code, "non-ascii-bytes-literal")
+ for code in ("测试", "测试", "abcdef", b"\x80"):
tokens = testutils._tokenize_str(code)
with self.assertNoMessages():
self.checker.process_tokens(tokens)
diff --git a/pylint/test/unittest_checker_similar.py b/pylint/test/unittest_checker_similar.py
index 96590b6ae..bf4842cea 100644
--- a/pylint/test/unittest_checker_similar.py
+++ b/pylint/test/unittest_checker_similar.py
@@ -18,16 +18,19 @@ import pytest
from pylint.checkers import similar
-SIMILAR1 = join(dirname(abspath(__file__)), 'input', 'similar1')
-SIMILAR2 = join(dirname(abspath(__file__)), 'input', 'similar2')
+SIMILAR1 = join(dirname(abspath(__file__)), "input", "similar1")
+SIMILAR2 = join(dirname(abspath(__file__)), "input", "similar2")
def test_ignore_comments():
output = StringIO()
with redirect_stdout(output), pytest.raises(SystemExit) as ex:
- similar.Run(['--ignore-comments', SIMILAR1, SIMILAR2])
+ similar.Run(["--ignore-comments", SIMILAR1, SIMILAR2])
assert ex.value.code == 0
- assert output.getvalue().strip() == ("""
+ assert (
+ output.getvalue().strip()
+ == (
+ """
10 similar lines in 2 files
==%s:0
==%s:0
@@ -42,15 +45,21 @@ def test_ignore_comments():
nine
''' ten
TOTAL lines=44 duplicates=10 percent=22.73
-""" % (SIMILAR1, SIMILAR2)).strip()
+"""
+ % (SIMILAR1, SIMILAR2)
+ ).strip()
+ )
def test_ignore_docsrings():
output = StringIO()
with redirect_stdout(output), pytest.raises(SystemExit) as ex:
- similar.Run(['--ignore-docstrings', SIMILAR1, SIMILAR2])
+ similar.Run(["--ignore-docstrings", SIMILAR1, SIMILAR2])
assert ex.value.code == 0
- assert output.getvalue().strip() == ("""
+ assert (
+ output.getvalue().strip()
+ == (
+ """
8 similar lines in 2 files
==%s:6
==%s:6
@@ -72,17 +81,23 @@ def test_ignore_docsrings():
four
five
TOTAL lines=44 duplicates=13 percent=29.55
-""" % ((SIMILAR1, SIMILAR2) * 2)).strip()
+"""
+ % ((SIMILAR1, SIMILAR2) * 2)
+ ).strip()
+ )
def test_ignore_imports():
output = StringIO()
with redirect_stdout(output), pytest.raises(SystemExit) as ex:
- similar.Run(['--ignore-imports', SIMILAR1, SIMILAR2])
+ similar.Run(["--ignore-imports", SIMILAR1, SIMILAR2])
assert ex.value.code == 0
- assert output.getvalue().strip() == """
+ assert (
+ output.getvalue().strip()
+ == """
TOTAL lines=44 duplicates=0 percent=0.00
""".strip()
+ )
def test_ignore_nothing():
@@ -90,7 +105,10 @@ def test_ignore_nothing():
with redirect_stdout(output), pytest.raises(SystemExit) as ex:
similar.Run([SIMILAR1, SIMILAR2])
assert ex.value.code == 0
- assert output.getvalue().strip() == ("""
+ assert (
+ output.getvalue().strip()
+ == (
+ """
5 similar lines in 2 files
==%s:0
==%s:0
@@ -100,18 +118,21 @@ def test_ignore_nothing():
four
five
TOTAL lines=44 duplicates=5 percent=11.36
-""" % (SIMILAR1, SIMILAR2)).strip()
+"""
+ % (SIMILAR1, SIMILAR2)
+ ).strip()
+ )
def test_help():
output = StringIO()
with redirect_stdout(output):
try:
- similar.Run(['--help'])
+ similar.Run(["--help"])
except SystemExit as ex:
assert ex.code == 0
else:
- pytest.fail('not system exit')
+ pytest.fail("not system exit")
def test_no_args():
@@ -122,4 +143,4 @@ def test_no_args():
except SystemExit as ex:
assert ex.code == 1
else:
- pytest.fail('not system exit')
+ pytest.fail("not system exit")
diff --git a/pylint/test/unittest_checker_spelling.py b/pylint/test/unittest_checker_spelling.py
index 749e42377..f6b5ec2ed 100644
--- a/pylint/test/unittest_checker_spelling.py
+++ b/pylint/test/unittest_checker_spelling.py
@@ -39,19 +39,29 @@ class TestSpellingChecker(CheckerTestCase):
skip_on_missing_package_or_dict = pytest.mark.skipif(
spell_dict is None,
- reason="missing python-enchant package or missing spelling dictionaries")
+ reason="missing python-enchant package or missing spelling dictionaries",
+ )
def _get_msg_suggestions(self, word, count=4):
- return "'{}'".format("' or '".join(self.checker.spelling_dict.suggest(word)[:count]))
+ return "'{}'".format(
+ "' or '".join(self.checker.spelling_dict.suggest(word)[:count])
+ )
@skip_on_missing_package_or_dict
@set_config(spelling_dict=spell_dict)
def test_check_bad_coment(self):
with self.assertAddsMessages(
- Message('wrong-spelling-in-comment', line=1,
- args=('coment', '# bad coment',
- ' ^^^^^^',
- self._get_msg_suggestions('coment')))):
+ Message(
+ "wrong-spelling-in-comment",
+ line=1,
+ args=(
+ "coment",
+ "# bad coment",
+ " ^^^^^^",
+ self._get_msg_suggestions("coment"),
+ ),
+ )
+ ):
self.checker.process_tokens(_tokenize_str("# bad coment"))
@skip_on_missing_package_or_dict
@@ -59,123 +69,174 @@ class TestSpellingChecker(CheckerTestCase):
@set_config(max_spelling_suggestions=2)
def test_check_bad_coment_custom_suggestion_count(self):
with self.assertAddsMessages(
- Message('wrong-spelling-in-comment', line=1,
- args=('coment', '# bad coment',
- ' ^^^^^^',
- self._get_msg_suggestions('coment', count=2)))):
+ Message(
+ "wrong-spelling-in-comment",
+ line=1,
+ args=(
+ "coment",
+ "# bad coment",
+ " ^^^^^^",
+ self._get_msg_suggestions("coment", count=2),
+ ),
+ )
+ ):
self.checker.process_tokens(_tokenize_str("# bad coment"))
@skip_on_missing_package_or_dict
@set_config(spelling_dict=spell_dict)
def test_check_bad_docstring(self):
- stmt = astroid.extract_node(
- 'def fff():\n """bad coment"""\n pass')
+ stmt = astroid.extract_node('def fff():\n """bad coment"""\n pass')
with self.assertAddsMessages(
- Message('wrong-spelling-in-docstring', line=2,
- args=('coment', 'bad coment',
- ' ^^^^^^',
- self._get_msg_suggestions('coment')))):
+ Message(
+ "wrong-spelling-in-docstring",
+ line=2,
+ args=(
+ "coment",
+ "bad coment",
+ " ^^^^^^",
+ self._get_msg_suggestions("coment"),
+ ),
+ )
+ ):
self.checker.visit_functiondef(stmt)
- stmt = astroid.extract_node(
- 'class Abc(object):\n """bad coment"""\n pass')
+ stmt = astroid.extract_node('class Abc(object):\n """bad coment"""\n pass')
with self.assertAddsMessages(
- Message('wrong-spelling-in-docstring', line=2,
- args=('coment', 'bad coment',
- ' ^^^^^^',
- self._get_msg_suggestions('coment')))):
+ Message(
+ "wrong-spelling-in-docstring",
+ line=2,
+ args=(
+ "coment",
+ "bad coment",
+ " ^^^^^^",
+ self._get_msg_suggestions("coment"),
+ ),
+ )
+ ):
self.checker.visit_classdef(stmt)
- @pytest.mark.skipif(True, reason='pyenchant\'s tokenizer strips these')
+ @pytest.mark.skipif(True, reason="pyenchant's tokenizer strips these")
@skip_on_missing_package_or_dict
@set_config(spelling_dict=spell_dict)
def test_invalid_docstring_characters(self):
- stmt = astroid.extract_node(
- 'def fff():\n """test\\x00"""\n pass')
+ stmt = astroid.extract_node('def fff():\n """test\\x00"""\n pass')
with self.assertAddsMessages(
- Message('invalid-characters-in-docstring', line=2,
- args=('test\x00',))):
+ Message("invalid-characters-in-docstring", line=2, args=("test\x00",))
+ ):
self.checker.visit_functiondef(stmt)
@skip_on_missing_package_or_dict
@set_config(spelling_dict=spell_dict)
def test_skip_shebangs(self):
- self.checker.process_tokens(_tokenize_str('#!/usr/bin/env python'))
+ self.checker.process_tokens(_tokenize_str("#!/usr/bin/env python"))
assert self.linter.release_messages() == []
@skip_on_missing_package_or_dict
@set_config(spelling_dict=spell_dict)
def test_skip_python_coding_comments(self):
- self.checker.process_tokens(_tokenize_str(
- '# -*- coding: utf-8 -*-'))
+ self.checker.process_tokens(_tokenize_str("# -*- coding: utf-8 -*-"))
assert self.linter.release_messages() == []
- self.checker.process_tokens(_tokenize_str(
- '# coding=utf-8'))
+ self.checker.process_tokens(_tokenize_str("# coding=utf-8"))
assert self.linter.release_messages() == []
- self.checker.process_tokens(_tokenize_str(
- '# vim: set fileencoding=utf-8 :'))
+ self.checker.process_tokens(_tokenize_str("# vim: set fileencoding=utf-8 :"))
assert self.linter.release_messages() == []
# Now with a shebang first
- self.checker.process_tokens(_tokenize_str(
- '#!/usr/bin/env python\n# -*- coding: utf-8 -*-'))
+ self.checker.process_tokens(
+ _tokenize_str("#!/usr/bin/env python\n# -*- coding: utf-8 -*-")
+ )
assert self.linter.release_messages() == []
- self.checker.process_tokens(_tokenize_str(
- '#!/usr/bin/env python\n# coding=utf-8'))
+ self.checker.process_tokens(
+ _tokenize_str("#!/usr/bin/env python\n# coding=utf-8")
+ )
assert self.linter.release_messages() == []
- self.checker.process_tokens(_tokenize_str(
- '#!/usr/bin/env python\n# vim: set fileencoding=utf-8 :'))
+ self.checker.process_tokens(
+ _tokenize_str("#!/usr/bin/env python\n# vim: set fileencoding=utf-8 :")
+ )
assert self.linter.release_messages() == []
@skip_on_missing_package_or_dict
@set_config(spelling_dict=spell_dict)
def test_skip_top_level_pylint_enable_disable_comments(self):
- self.checker.process_tokens(_tokenize_str('# Line 1\n Line 2\n# pylint: disable=ungrouped-imports'))
+ self.checker.process_tokens(
+ _tokenize_str("# Line 1\n Line 2\n# pylint: disable=ungrouped-imports")
+ )
assert self.linter.release_messages() == []
@skip_on_missing_package_or_dict
@set_config(spelling_dict=spell_dict)
def test_skip_words_with_numbers(self):
- self.checker.process_tokens(_tokenize_str('\n# 0ne\n# Thr33\n# Sh3ll'))
+ self.checker.process_tokens(_tokenize_str("\n# 0ne\n# Thr33\n# Sh3ll"))
assert self.linter.release_messages() == []
@skip_on_missing_package_or_dict
@set_config(spelling_dict=spell_dict)
def test_skip_wiki_words(self):
stmt = astroid.extract_node(
- 'class ComentAbc(object):\n """ComentAbc with a bad coment"""\n pass')
+ 'class ComentAbc(object):\n """ComentAbc with a bad coment"""\n pass'
+ )
with self.assertAddsMessages(
- Message('wrong-spelling-in-docstring', line=2,
- args=('coment', 'ComentAbc with a bad coment',
- ' ^^^^^^',
- self._get_msg_suggestions('coment')))):
+ Message(
+ "wrong-spelling-in-docstring",
+ line=2,
+ args=(
+ "coment",
+ "ComentAbc with a bad coment",
+ " ^^^^^^",
+ self._get_msg_suggestions("coment"),
+ ),
+ )
+ ):
self.checker.visit_classdef(stmt)
@skip_on_missing_package_or_dict
@set_config(spelling_dict=spell_dict)
def test_skip_camel_cased_words(self):
stmt = astroid.extract_node(
- 'class ComentAbc(object):\n """comentAbc with a bad coment"""\n pass')
+ 'class ComentAbc(object):\n """comentAbc with a bad coment"""\n pass'
+ )
with self.assertAddsMessages(
- Message('wrong-spelling-in-docstring', line=2,
- args=('coment', 'comentAbc with a bad coment',
- ' ^^^^^^',
- self._get_msg_suggestions('coment')))):
+ Message(
+ "wrong-spelling-in-docstring",
+ line=2,
+ args=(
+ "coment",
+ "comentAbc with a bad coment",
+ " ^^^^^^",
+ self._get_msg_suggestions("coment"),
+ ),
+ )
+ ):
self.checker.visit_classdef(stmt)
# With just a single upper case letter in the end
stmt = astroid.extract_node(
- 'class ComentAbc(object):\n """argumentN with a bad coment"""\n pass')
+ 'class ComentAbc(object):\n """argumentN with a bad coment"""\n pass'
+ )
with self.assertAddsMessages(
- Message('wrong-spelling-in-docstring', line=2,
- args=('coment', 'argumentN with a bad coment',
- ' ^^^^^^',
- self._get_msg_suggestions('coment')))):
+ Message(
+ "wrong-spelling-in-docstring",
+ line=2,
+ args=(
+ "coment",
+ "argumentN with a bad coment",
+ " ^^^^^^",
+ self._get_msg_suggestions("coment"),
+ ),
+ )
+ ):
self.checker.visit_classdef(stmt)
- for ccn in ('xmlHttpRequest', 'newCustomer', 'newCustomerId',
- 'innerStopwatch', 'supportsIpv6OnIos', 'affine3D'):
+ for ccn in (
+ "xmlHttpRequest",
+ "newCustomer",
+ "newCustomerId",
+ "innerStopwatch",
+ "supportsIpv6OnIos",
+ "affine3D",
+ ):
stmt = astroid.extract_node(
- 'class TestClass(object):\n """{} comment"""\n pass'.format(ccn))
+ 'class TestClass(object):\n """{} comment"""\n pass'.format(ccn)
+ )
self.checker.visit_classdef(stmt)
assert self.linter.release_messages() == []
@@ -183,45 +244,63 @@ class TestSpellingChecker(CheckerTestCase):
@set_config(spelling_dict=spell_dict)
def test_skip_words_with_underscores(self):
stmt = astroid.extract_node(
- 'def fff(param_name):\n """test param_name"""\n pass')
+ 'def fff(param_name):\n """test param_name"""\n pass'
+ )
self.checker.visit_functiondef(stmt)
assert self.linter.release_messages() == []
@skip_on_missing_package_or_dict
@set_config(spelling_dict=spell_dict)
def test_skip_email_address(self):
- self.checker.process_tokens(_tokenize_str('# uname@domain.tld'))
+ self.checker.process_tokens(_tokenize_str("# uname@domain.tld"))
assert self.linter.release_messages() == []
@skip_on_missing_package_or_dict
@set_config(spelling_dict=spell_dict)
def test_skip_urls(self):
- self.checker.process_tokens(_tokenize_str('# https://github.com/rfk/pyenchant'))
+ self.checker.process_tokens(_tokenize_str("# https://github.com/rfk/pyenchant"))
assert self.linter.release_messages() == []
@skip_on_missing_package_or_dict
@set_config(spelling_dict=spell_dict)
def test_skip_sphinx_directives(self):
stmt = astroid.extract_node(
- 'class ComentAbc(object):\n """This is :class:`ComentAbc` with a bad coment"""\n pass')
+ 'class ComentAbc(object):\n """This is :class:`ComentAbc` with a bad coment"""\n pass'
+ )
with self.assertAddsMessages(
- Message('wrong-spelling-in-docstring', line=2,
- args=('coment', 'This is :class:`ComentAbc` with a bad coment',
- ' ^^^^^^',
- self._get_msg_suggestions('coment')))):
+ Message(
+ "wrong-spelling-in-docstring",
+ line=2,
+ args=(
+ "coment",
+ "This is :class:`ComentAbc` with a bad coment",
+ " ^^^^^^",
+ self._get_msg_suggestions("coment"),
+ ),
+ )
+ ):
self.checker.visit_classdef(stmt)
@skip_on_missing_package_or_dict
@set_config(spelling_dict=spell_dict)
def test_handle_words_joined_by_forward_slash(self):
- stmt = astroid.extract_node('''
+ stmt = astroid.extract_node(
+ '''
class ComentAbc(object):
"""This is Comment/Abcz with a bad comment"""
pass
- ''')
+ '''
+ )
with self.assertAddsMessages(
- Message('wrong-spelling-in-docstring', line=3,
- args=('Abcz', 'This is Comment/Abcz with a bad comment',
- ' ^^^^',
- self._get_msg_suggestions('Abcz')))):
+ Message(
+ "wrong-spelling-in-docstring",
+ line=3,
+ args=(
+ "Abcz",
+ "This is Comment/Abcz with a bad comment",
+ " ^^^^",
+ self._get_msg_suggestions("Abcz"),
+ ),
+ )
+ ):
self.checker.visit_classdef(stmt)
diff --git a/pylint/test/unittest_checker_stdlib.py b/pylint/test/unittest_checker_stdlib.py
index 5c570041c..f425b0f99 100644
--- a/pylint/test/unittest_checker_stdlib.py
+++ b/pylint/test/unittest_checker_stdlib.py
@@ -44,64 +44,74 @@ class TestStdlibChecker(CheckerTestCase):
manager = astroid.MANAGER
transform = astroid.inference_tip(infer_func)
with _add_transform(manager, astroid.Name, transform):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
call_something()
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_call(node)
def test_copy_environ(self):
# shallow copy of os.environ should be reported
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
import copy, os
copy.copy(os.environ)
- """)
+ """
+ )
with self.assertAddsMessages(
- Message(
- msg_id='shallow-copy-environ', node=node, confidence=UNDEFINED)
+ Message(msg_id="shallow-copy-environ", node=node, confidence=UNDEFINED)
):
self.checker.visit_call(node)
def test_copy_environ_hidden(self):
# shallow copy of os.environ should be reported
# hide function names to be sure that checker is not just matching text
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
from copy import copy as test_cp
import os as o
test_cp(o.environ)
- """)
+ """
+ )
with self.assertAddsMessages(
- Message(
- msg_id='shallow-copy-environ', node=node, confidence=UNDEFINED)
+ Message(msg_id="shallow-copy-environ", node=node, confidence=UNDEFINED)
):
self.checker.visit_call(node)
def test_copy_dict(self):
# copy of dict is OK
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
import copy
test_dict = {}
copy.copy(test_dict)
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_call(node)
def test_copy_uninferable(self):
# copy of uninferable object should not raise exception, nor make
# the checker crash
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
import copy
from missing_library import MissingObject
copy.copy(MissingObject)
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_call(node)
def test_deepcopy_environ(self):
# deepcopy of os.environ is OK
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
import copy, os
copy.deepcopy(os.environ)
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_call(node)
diff --git a/pylint/test/unittest_checker_strings.py b/pylint/test/unittest_checker_strings.py
index 64eedbeba..57dc0af8d 100644
--- a/pylint/test/unittest_checker_strings.py
+++ b/pylint/test/unittest_checker_strings.py
@@ -25,31 +25,36 @@ class TestStringChecker(CheckerTestCase):
node = astroid.extract_node(code)
self.checker.visit_binop(node)
- for code in ("'%s' % 1",
- "'%(key)s' % {'key' : 1}",
- "'%d' % 1",
- "'%(key)d' % {'key' : 1}",
- "'%f' % 1",
- "'%(key)f' % {'key' : 1}",
- "'%d' % 1.1",
- "'%(key)d' % {'key' : 1.1}",
- "'%s' % []",
- "'%(key)s' % {'key' : []}",
- "'%s' % None",
- "'%(key)s' % {'key' : None}"):
+ for code in (
+ "'%s' % 1",
+ "'%(key)s' % {'key' : 1}",
+ "'%d' % 1",
+ "'%(key)d' % {'key' : 1}",
+ "'%f' % 1",
+ "'%(key)f' % {'key' : 1}",
+ "'%d' % 1.1",
+ "'%(key)d' % {'key' : 1.1}",
+ "'%s' % []",
+ "'%(key)s' % {'key' : []}",
+ "'%s' % None",
+ "'%(key)s' % {'key' : None}",
+ ):
with self.assertNoMessages():
node = astroid.extract_node(code)
self.checker.visit_binop(node)
- for code, arg_type, format_type in [("'%d' % '1'", 'builtins.str', 'd'),
- ("'%(key)d' % {'key' : '1'}", 'builtins.str', 'd'),
- ("'%x' % 1.1", 'builtins.float', 'x'),
- ("'%(key)x' % {'key' : 1.1}", 'builtins.float', 'x'),
- ("'%d' % []", 'builtins.list', 'd'),
- ("'%(key)d' % {'key' : []}", 'builtins.list', 'd')]:
+ for code, arg_type, format_type in [
+ ("'%d' % '1'", "builtins.str", "d"),
+ ("'%(key)d' % {'key' : '1'}", "builtins.str", "d"),
+ ("'%x' % 1.1", "builtins.float", "x"),
+ ("'%(key)x' % {'key' : 1.1}", "builtins.float", "x"),
+ ("'%d' % []", "builtins.list", "d"),
+ ("'%(key)d' % {'key' : []}", "builtins.list", "d"),
+ ]:
node = astroid.extract_node(code)
with self.assertAddsMessages(
- Message('bad-string-format-type',
- node=node,
- args=(arg_type, format_type))):
+ Message(
+ "bad-string-format-type", node=node, args=(arg_type, format_type)
+ )
+ ):
self.checker.visit_binop(node)
diff --git a/pylint/test/unittest_checker_typecheck.py b/pylint/test/unittest_checker_typecheck.py
index cd1dd2301..173210cf1 100644
--- a/pylint/test/unittest_checker_typecheck.py
+++ b/pylint/test/unittest_checker_typecheck.py
@@ -28,14 +28,16 @@ def c_extension_missing():
"""Coverage module has C-extension, which we can reuse for test"""
try:
import coverage.tracer as _
+
return False
except ImportError:
_ = None
return True
-needs_c_extension = pytest.mark.skipif(c_extension_missing(),
- reason='Requires coverage (source of C-extension)')
+needs_c_extension = pytest.mark.skipif(
+ c_extension_missing(), reason="Requires coverage (source of C-extension)"
+)
class TestTypeChecker(CheckerTestCase):
@@ -46,110 +48,141 @@ class TestTypeChecker(CheckerTestCase):
"""Make sure that a module attribute access is checked by pylint.
"""
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
import optparse
optparse.THIS_does_not_EXIST
- """)
+ """
+ )
with self.assertAddsMessages(
- Message(
- 'no-member',
- node=node,
- args=('Module', 'optparse', 'THIS_does_not_EXIST', ''))):
+ Message(
+ "no-member",
+ node=node,
+ args=("Module", "optparse", "THIS_does_not_EXIST", ""),
+ )
+ ):
self.checker.visit_attribute(node)
- @set_config(ignored_modules=('argparse',))
+ @set_config(ignored_modules=("argparse",))
def test_no_member_in_getattr_ignored(self):
"""Make sure that a module attribute access check is omitted with a
module that is configured to be ignored.
"""
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
import argparse
argparse.THIS_does_not_EXIST
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_attribute(node)
- @set_config(ignored_classes=('xml.etree.', ))
+ @set_config(ignored_classes=("xml.etree.",))
def test_ignored_modules_invalid_pattern(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import xml
xml.etree.Lala
- ''')
- message = Message('no-member', node=node,
- args=('Module', 'xml.etree', 'Lala', ''))
+ """
+ )
+ message = Message(
+ "no-member", node=node, args=("Module", "xml.etree", "Lala", "")
+ )
with self.assertAddsMessages(message):
self.checker.visit_attribute(node)
- @set_config(ignored_modules=('xml.etree*', ))
+ @set_config(ignored_modules=("xml.etree*",))
def test_ignored_modules_patterns(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import xml
xml.etree.portocola #@
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_attribute(node)
- @set_config(ignored_classes=('xml.*', ))
+ @set_config(ignored_classes=("xml.*",))
def test_ignored_classes_no_recursive_pattern(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import xml
xml.etree.ElementTree.Test
- ''')
- message = Message('no-member', node=node,
- args=('Module', 'xml.etree.ElementTree', 'Test', ''))
+ """
+ )
+ message = Message(
+ "no-member", node=node, args=("Module", "xml.etree.ElementTree", "Test", "")
+ )
with self.assertAddsMessages(message):
self.checker.visit_attribute(node)
- @set_config(ignored_classes=('optparse.Values', ))
+ @set_config(ignored_classes=("optparse.Values",))
def test_ignored_classes_qualified_name(self):
"""Test that ignored-classes supports qualified name for ignoring."""
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import optparse
optparse.Values.lala
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_attribute(node)
- @set_config(ignored_classes=('Values', ))
+ @set_config(ignored_classes=("Values",))
def test_ignored_classes_only_name(self):
"""Test that ignored_classes works with the name only."""
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
import optparse
optparse.Values.lala
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_attribute(node)
@set_config(suggestion_mode=False)
@needs_c_extension
def test_nomember_on_c_extension_error_msg(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
from coverage import tracer
tracer.CTracer #@
- ''')
- message = Message('no-member', node=node,
- args=('Module', 'coverage.tracer', 'CTracer', ''))
+ """
+ )
+ message = Message(
+ "no-member", node=node, args=("Module", "coverage.tracer", "CTracer", "")
+ )
with self.assertAddsMessages(message):
self.checker.visit_attribute(node)
@set_config(suggestion_mode=True)
@needs_c_extension
def test_nomember_on_c_extension_info_msg(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
from coverage import tracer
tracer.CTracer #@
- ''')
- message = Message('c-extension-no-member', node=node,
- args=('Module', 'coverage.tracer', 'CTracer', ''))
+ """
+ )
+ message = Message(
+ "c-extension-no-member",
+ node=node,
+ args=("Module", "coverage.tracer", "CTracer", ""),
+ )
with self.assertAddsMessages(message):
self.checker.visit_attribute(node)
- @set_config(contextmanager_decorators=('contextlib.contextmanager',
- '.custom_contextmanager'))
+ @set_config(
+ contextmanager_decorators=(
+ "contextlib.contextmanager",
+ ".custom_contextmanager",
+ )
+ )
def test_custom_context_manager(self):
"""Test that @custom_contextmanager is recognized as configured."""
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
from contextlib import contextmanager
def custom_contextmanager(f):
return contextmanager(f)
@@ -158,12 +191,14 @@ class TestTypeChecker(CheckerTestCase):
yield
with dec():
pass
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_with(node)
def test_invalid_metaclass(self):
- module = astroid.parse('''
+ module = astroid.parse(
+ """
import six
class InvalidAsMetaclass(object):
@@ -180,18 +215,24 @@ class TestTypeChecker(CheckerTestCase):
@six.add_metaclass(2)
class ThirdInvalid(object):
pass
- ''')
- for class_obj, metaclass_name in (('ThirdInvalid', '2'),
- ('SecondInvalid', 'InvalidAsMetaclass'),
- ('FirstInvalid', 'int')):
+ """
+ )
+ for class_obj, metaclass_name in (
+ ("ThirdInvalid", "2"),
+ ("SecondInvalid", "InvalidAsMetaclass"),
+ ("FirstInvalid", "int"),
+ ):
classdef = module[class_obj]
- message = Message('invalid-metaclass', node=classdef, args=(metaclass_name, ))
+ message = Message(
+ "invalid-metaclass", node=classdef, args=(metaclass_name,)
+ )
with self.assertAddsMessages(message):
self.checker.visit_classdef(classdef)
- @pytest.mark.skipif(sys.version_info[0] < 3, reason='Needs Python 3.')
+ @pytest.mark.skipif(sys.version_info[0] < 3, reason="Needs Python 3.")
def test_invalid_metaclass_function_metaclasses(self):
- module = astroid.parse('''
+ module = astroid.parse(
+ """
def invalid_metaclass_1(name, bases, attrs):
return int
def invalid_metaclass_2(name, bases, attrs):
@@ -200,20 +241,25 @@ class TestTypeChecker(CheckerTestCase):
pass
class InvalidSecond(metaclass=invalid_metaclass_2):
pass
- ''')
- for class_obj, metaclass_name in (('Invalid', 'int'), ('InvalidSecond', '1')):
+ """
+ )
+ for class_obj, metaclass_name in (("Invalid", "int"), ("InvalidSecond", "1")):
classdef = module[class_obj]
- message = Message('invalid-metaclass', node=classdef, args=(metaclass_name, ))
+ message = Message(
+ "invalid-metaclass", node=classdef, args=(metaclass_name,)
+ )
with self.assertAddsMessages(message):
self.checker.visit_classdef(classdef)
- @pytest.mark.skipif(sys.version_info < (3, 5), reason='Needs Python 3.5.')
+ @pytest.mark.skipif(sys.version_info < (3, 5), reason="Needs Python 3.5.")
def test_typing_namedtuple_not_callable_issue1295(self):
- module = astroid.parse("""
+ module = astroid.parse(
+ """
import typing
Named = typing.NamedTuple('Named', [('foo', int), ('bar', int)])
named = Named(1, 2)
- """)
+ """
+ )
call = module.body[-1].value
callables = call.func.inferred()
assert len(callables) == 1
@@ -221,12 +267,14 @@ class TestTypeChecker(CheckerTestCase):
with self.assertNoMessages():
self.checker.visit_call(call)
- @pytest.mark.skipif(sys.version_info < (3, 5), reason='Needs Python 3.5.')
+ @pytest.mark.skipif(sys.version_info < (3, 5), reason="Needs Python 3.5.")
def test_typing_namedtuple_unsubscriptable_object_issue1295(self):
- module = astroid.parse("""
+ module = astroid.parse(
+ """
import typing
MyType = typing.Tuple[str, str]
- """)
+ """
+ )
subscript = module.body[-1].value
with self.assertNoMessages():
self.checker.visit_subscript(subscript)
@@ -240,15 +288,18 @@ class TestTypeChecker(CheckerTestCase):
Test for https://github.com/PyCQA/pylint/issues/1699
"""
- call = astroid.extract_node("""
+ call = astroid.extract_node(
+ """
import multiprocessing
multiprocessing.current_process() #@
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_call(call)
def test_descriptor_call(self):
- call = astroid.extract_node("""
+ call = astroid.extract_node(
+ """
def func():
pass
@@ -260,6 +311,7 @@ class TestTypeChecker(CheckerTestCase):
a = ADescriptor()
AggregateCls().a() #@
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_call(call)
diff --git a/pylint/test/unittest_checker_variables.py b/pylint/test/unittest_checker_variables.py
index 1f891bfd1..a9b590a0a 100644
--- a/pylint/test/unittest_checker_variables.py
+++ b/pylint/test/unittest_checker_variables.py
@@ -23,84 +23,100 @@ from pylint.checkers import variables
from pylint.testutils import CheckerTestCase, linter, set_config, Message
from pylint.interfaces import UNDEFINED
+
class TestVariablesChecker(CheckerTestCase):
CHECKER_CLASS = variables.VariablesChecker
def test_bitbucket_issue_78(self):
""" Issue 78 report a false positive for unused-module """
- module = astroid.parse("""
+ module = astroid.parse(
+ """
from sys import path
path += ['stuff']
def func():
other = 1
return len(other)
- """)
+ """
+ )
with self.assertNoMessages():
self.walk(module)
- @set_config(ignored_modules=('argparse',))
+ @set_config(ignored_modules=("argparse",))
def test_no_name_in_module_skipped(self):
"""Make sure that 'from ... import ...' does not emit a
'no-name-in-module' with a module that is configured
to be ignored.
"""
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
from argparse import THIS_does_not_EXIST
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_importfrom(node)
def test_all_elements_without_parent(self):
- node = astroid.extract_node('__all__ = []')
- node.value.elts.append(astroid.Const('test'))
+ node = astroid.extract_node("__all__ = []")
+ node.value.elts.append(astroid.Const("test"))
root = node.root()
with self.assertNoMessages():
self.checker.visit_module(root)
self.checker.leave_module(root)
def test_redefined_builtin_ignored(self):
- node = astroid.parse('''
+ node = astroid.parse(
+ """
from future.builtins import open
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_module(node)
- @set_config(redefining_builtins_modules=('os',))
+ @set_config(redefining_builtins_modules=("os",))
def test_redefined_builtin_custom_modules(self):
- node = astroid.parse('''
+ node = astroid.parse(
+ """
from os import open
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_module(node)
- @set_config(redefining_builtins_modules=('os',))
+ @set_config(redefining_builtins_modules=("os",))
def test_redefined_builtin_modname_not_ignored(self):
- node = astroid.parse('''
+ node = astroid.parse(
+ """
from future.builtins import open
- ''')
+ """
+ )
with self.assertAddsMessages(
- Message('redefined-builtin', node=node.body[0], args='open')):
+ Message("redefined-builtin", node=node.body[0], args="open")
+ ):
self.checker.visit_module(node)
- @set_config(redefining_builtins_modules=('os',))
+ @set_config(redefining_builtins_modules=("os",))
def test_redefined_builtin_in_function(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
def test():
from os import open
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_module(node.root())
self.checker.visit_functiondef(node)
def test_unassigned_global(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
def func():
global sys #@
import sys, lala
- ''')
- msg = Message('global-statement', node=node, confidence=UNDEFINED)
+ """
+ )
+ msg = Message("global-statement", node=node, confidence=UNDEFINED)
with self.assertAddsMessages(msg):
self.checker.visit_global(node)
@@ -117,66 +133,83 @@ class TestVariablesCheckerWithTearDown(CheckerTestCase):
def teardown_method(self, method):
self.checker._to_consume = self._to_consume_backup
- @set_config(callbacks=('callback_', '_callback'))
+ @set_config(callbacks=("callback_", "_callback"))
def test_custom_callback_string(self):
""" Test the --calbacks option works. """
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
def callback_one(abc):
''' should not emit unused-argument. '''
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_functiondef(node)
self.checker.leave_functiondef(node)
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
def two_callback(abc, defg):
''' should not emit unused-argument. '''
- """)
+ """
+ )
with self.assertNoMessages():
self.checker.visit_functiondef(node)
self.checker.leave_functiondef(node)
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
def normal_func(abc):
''' should emit unused-argument. '''
- """)
+ """
+ )
with self.assertAddsMessages(
- Message('unused-argument', node=node['abc'], args='abc')):
+ Message("unused-argument", node=node["abc"], args="abc")
+ ):
self.checker.visit_functiondef(node)
self.checker.leave_functiondef(node)
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
def cb_func(abc):
''' Previous callbacks are overridden. '''
- """)
+ """
+ )
with self.assertAddsMessages(
- Message('unused-argument', node=node['abc'], args='abc')):
+ Message("unused-argument", node=node["abc"], args="abc")
+ ):
self.checker.visit_functiondef(node)
self.checker.leave_functiondef(node)
- @set_config(redefining_builtins_modules=('os',))
+ @set_config(redefining_builtins_modules=("os",))
def test_redefined_builtin_modname_not_ignored(self):
- node = astroid.parse('''
+ node = astroid.parse(
+ """
from future.builtins import open
- ''')
+ """
+ )
with self.assertAddsMessages(
- Message('redefined-builtin', node=node.body[0], args='open')):
+ Message("redefined-builtin", node=node.body[0], args="open")
+ ):
self.checker.visit_module(node)
- @set_config(redefining_builtins_modules=('os',))
+ @set_config(redefining_builtins_modules=("os",))
def test_redefined_builtin_in_function(self):
- node = astroid.extract_node('''
+ node = astroid.extract_node(
+ """
def test():
from os import open
- ''')
+ """
+ )
with self.assertNoMessages():
self.checker.visit_module(node.root())
self.checker.visit_functiondef(node)
def test_import_as_underscore(self):
- node = astroid.parse('''
+ node = astroid.parse(
+ """
import math as _
- ''')
+ """
+ )
with self.assertNoMessages():
self.walk(node)
@@ -186,11 +219,13 @@ class TestVariablesCheckerWithTearDown(CheckerTestCase):
# Issue 1824
# https://github.com/PyCQA/pylint/issues/1824
- node = astroid.parse('''
+ node = astroid.parse(
+ """
class MyObject(object):
method1 = lambda func: func()
method2 = lambda function: function()
- ''')
+ """
+ )
with self.assertNoMessages():
self.walk(node)
@@ -200,30 +235,35 @@ class TestVariablesCheckerWithTearDown(CheckerTestCase):
https://github.com/PyCQA/pylint/issues/760
"""
- node = astroid.parse('''
+ node = astroid.parse(
+ """
lambda x: lambda: x + 1
- ''')
+ """
+ )
with self.assertNoMessages():
self.walk(node)
-
@set_config(ignored_argument_names=re.compile("arg"))
def test_ignored_argument_names_no_message(self):
"""Make sure is_ignored_argument_names properly ignores
function arguments"""
- node = astroid.parse('''
+ node = astroid.parse(
+ """
def fooby(arg):
pass
- ''')
+ """
+ )
with self.assertNoMessages():
self.walk(node)
@set_config(ignored_argument_names=re.compile("args|kwargs"))
def test_ignored_argument_names_starred_args(self):
- node = astroid.parse('''
+ node = astroid.parse(
+ """
def fooby(*args, **kwargs):
pass
- ''')
+ """
+ )
with self.assertNoMessages():
self.walk(node)
@@ -232,11 +272,12 @@ class TestMissingSubmodule(CheckerTestCase):
CHECKER_CLASS = variables.VariablesChecker
def test_package_all(self):
- regr_data = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- 'regrtest_data')
+ regr_data = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), "regrtest_data"
+ )
sys.path.insert(0, regr_data)
try:
- linter.check(os.path.join(regr_data, 'package_all'))
+ linter.check(os.path.join(regr_data, "package_all"))
got = linter.reporter.finalize().strip()
assert got == "E: 3: Undefined variable name 'missing' in __all__"
finally:
diff --git a/pylint/test/unittest_checkers_utils.py b/pylint/test/unittest_checkers_utils.py
index 78555bbcb..1fb4ce368 100644
--- a/pylint/test/unittest_checkers_utils.py
+++ b/pylint/test/unittest_checkers_utils.py
@@ -17,53 +17,56 @@ from pylint.checkers import utils
import pytest
-@pytest.mark.parametrize("name,expected", [
- ('min', True),
- ('__builtins__', True),
- ('__path__', False),
- ('__file__', False),
- ('whatever', False),
- ('mybuiltin', False),
-])
+@pytest.mark.parametrize(
+ "name,expected",
+ [
+ ("min", True),
+ ("__builtins__", True),
+ ("__path__", False),
+ ("__file__", False),
+ ("whatever", False),
+ ("mybuiltin", False),
+ ],
+)
def testIsBuiltin(name, expected):
assert utils.is_builtin(name) == expected
-@pytest.mark.parametrize("fn,kw", [
- ('foo(3)', {'keyword': 'bar'}),
- ('foo(one=a, two=b, three=c)', {'position': 1}),
-])
+@pytest.mark.parametrize(
+ "fn,kw",
+ [("foo(3)", {"keyword": "bar"}), ("foo(one=a, two=b, three=c)", {"position": 1})],
+)
def testGetArgumentFromCallError(fn, kw):
with pytest.raises(utils.NoSuchArgumentError):
node = astroid.extract_node(fn)
utils.get_argument_from_call(node, **kw)
-@pytest.mark.parametrize("fn,kw", [
- ('foo(bar=3)', {'keyword': 'bar'}),
- ('foo(a, b, c)', {'position': 1}),
-])
+@pytest.mark.parametrize(
+ "fn,kw", [("foo(bar=3)", {"keyword": "bar"}), ("foo(a, b, c)", {"position": 1})]
+)
def testGetArgumentFromCallExists(fn, kw):
node = astroid.extract_node(fn)
assert utils.get_argument_from_call(node, **kw) is not None
def testGetArgumentFromCall():
- node = astroid.extract_node('foo(a, not_this_one=1, this_one=2)')
- arg = utils.get_argument_from_call(node, position=2, keyword='this_one')
+ node = astroid.extract_node("foo(a, not_this_one=1, this_one=2)")
+ arg = utils.get_argument_from_call(node, position=2, keyword="this_one")
assert 2 == arg.value
- node = astroid.extract_node('foo(a)')
+ node = astroid.extract_node("foo(a)")
with pytest.raises(utils.NoSuchArgumentError):
utils.get_argument_from_call(node, position=1)
with pytest.raises(ValueError):
utils.get_argument_from_call(node, None, None)
name = utils.get_argument_from_call(node, position=0)
- assert name.name == 'a'
+ assert name.name == "a"
def test_error_of_type():
- nodes = astroid.extract_node("""
+ nodes = astroid.extract_node(
+ """
try: pass
except AttributeError: #@
pass
@@ -72,16 +75,18 @@ def test_error_of_type():
pass
except: #@
pass
- """)
+ """
+ )
assert utils.error_of_type(nodes[0], AttributeError)
- assert utils.error_of_type(nodes[0], (AttributeError, ))
+ assert utils.error_of_type(nodes[0], (AttributeError,))
assert not utils.error_of_type(nodes[0], Exception)
assert utils.error_of_type(nodes[1], Exception)
assert utils.error_of_type(nodes[2], ImportError)
def test_node_ignores_exception():
- nodes = astroid.extract_node("""
+ nodes = astroid.extract_node(
+ """
try:
1/0 #@
except ZeroDivisionError:
@@ -98,7 +103,8 @@ def test_node_ignores_exception():
1/0 #@
except ValueError:
pass
- """)
+ """
+ )
assert utils.node_ignores_exception(nodes[0], ZeroDivisionError)
assert not utils.node_ignores_exception(nodes[1], ZeroDivisionError)
assert utils.node_ignores_exception(nodes[2], ZeroDivisionError)
@@ -106,32 +112,38 @@ def test_node_ignores_exception():
def test_is_subclass_of_node_b_derived_from_node_a():
- nodes = astroid.extract_node("""
+ nodes = astroid.extract_node(
+ """
class Superclass: #@
pass
class Subclass(Superclass): #@
pass
- """)
+ """
+ )
assert utils.is_subclass_of(nodes[1], nodes[0])
def test_is_subclass_of_node_b_not_derived_from_node_a():
- nodes = astroid.extract_node("""
+ nodes = astroid.extract_node(
+ """
class OneClass: #@
pass
class AnotherClass: #@
pass
- """)
+ """
+ )
assert not utils.is_subclass_of(nodes[1], nodes[0])
def test_is_subclass_of_not_classdefs():
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class OneClass: #@
pass
- """)
+ """
+ )
assert not utils.is_subclass_of(None, node)
assert not utils.is_subclass_of(node, None)
assert not utils.is_subclass_of(None, None)
diff --git a/pylint/test/unittest_config.py b/pylint/test/unittest_config.py
index 86ce1c260..17e48d5a8 100644
--- a/pylint/test/unittest_config.py
+++ b/pylint/test/unittest_config.py
@@ -16,7 +16,7 @@ from pylint import config
import pytest
-RE_PATTERN_TYPE = getattr(re, 'Pattern', getattr(re, '_pattern_type', None))
+RE_PATTERN_TYPE = getattr(re, "Pattern", getattr(re, "_pattern_type", None))
def test__regexp_validator_valid():
@@ -24,10 +24,12 @@ def test__regexp_validator_valid():
assert isinstance(result, RE_PATTERN_TYPE)
assert result.pattern == "test_.*"
+
def test__regexp_validator_invalid():
with pytest.raises(sre_constants.error):
config._regexp_validator(None, None, "test_)")
+
def test__csv_validator_no_spaces():
values = ["One", "Two", "Three"]
result = config._csv_validator(None, None, ",".join(values))
@@ -36,6 +38,7 @@ def test__csv_validator_no_spaces():
for i, value in enumerate(values):
assert result[i] == value
+
def test__csv_validator_spaces():
values = ["One", "Two", "Three"]
result = config._csv_validator(None, None, ", ".join(values))
@@ -44,6 +47,7 @@ def test__csv_validator_spaces():
for i, value in enumerate(values):
assert result[i] == value
+
def test__regexp_csv_validator_valid():
pattern_strings = ["test_.*", "foo\\.bar", "^baz$"]
result = config._regexp_csv_validator(None, None, ",".join(pattern_strings))
@@ -51,6 +55,7 @@ def test__regexp_csv_validator_valid():
assert isinstance(regex, RE_PATTERN_TYPE)
assert regex.pattern == pattern_strings[i]
+
def test__regexp_csv_validator_invalid():
pattern_strings = ["test_.*", "foo\\.bar", "^baz)$"]
with pytest.raises(sre_constants.error):
diff --git a/pylint/test/unittest_lint.py b/pylint/test/unittest_lint.py
index ceccbf65b..5421d6713 100644
--- a/pylint/test/unittest_lint.py
+++ b/pylint/test/unittest_lint.py
@@ -39,8 +39,15 @@ from io import StringIO
from pylint import config, lint
from pylint.lint import PyLinter, Run, preprocess_options, ArgumentPreprocessingError
-from pylint.utils import MSG_STATE_SCOPE_CONFIG, MSG_STATE_SCOPE_MODULE, MSG_STATE_CONFIDENCE, \
- MessagesStore, MessageDefinition, FileState, tokenize_module
+from pylint.utils import (
+ MSG_STATE_SCOPE_CONFIG,
+ MSG_STATE_SCOPE_MODULE,
+ MSG_STATE_CONFIDENCE,
+ MessagesStore,
+ MessageDefinition,
+ FileState,
+ tokenize_module,
+)
from pylint.exceptions import InvalidMessageError, UnknownMessageError
import pylint.testutils as testutils
from pylint.reporters import text
@@ -50,16 +57,16 @@ from pylint import exceptions
from pylint import interfaces
import pytest
-if os.name == 'java':
- if os._name == 'nt':
- HOME = 'USERPROFILE'
+if os.name == "java":
+ if os._name == "nt":
+ HOME = "USERPROFILE"
else:
- HOME = 'HOME'
+ HOME = "HOME"
else:
- if sys.platform == 'win32':
- HOME = 'USERPROFILE'
+ if sys.platform == "win32":
+ HOME = "USERPROFILE"
else:
- HOME = 'HOME'
+ HOME = "HOME"
try:
PYPY_VERSION_INFO = sys.pypy_version_info
@@ -69,13 +76,13 @@ except AttributeError:
@contextmanager
def fake_home():
- folder = tempfile.mkdtemp('fake-home')
+ folder = tempfile.mkdtemp("fake-home")
old_home = os.environ.get(HOME)
try:
os.environ[HOME] = folder
yield
finally:
- os.environ.pop('PYLINTRC', '')
+ os.environ.pop("PYLINTRC", "")
if old_home is None:
del os.environ[HOME]
else:
@@ -89,8 +96,9 @@ def remove(file):
except OSError:
pass
+
HERE = abspath(dirname(__file__))
-INPUTDIR = join(HERE, 'input')
+INPUTDIR = join(HERE, "input")
@contextmanager
@@ -104,7 +112,7 @@ def tempdir():
     # Get real path of tempfile, otherwise tests fail on Mac OS X
current_dir = getcwd()
chdir(tmp)
- abs_tmp = abspath('.')
+ abs_tmp = abspath(".")
try:
yield abs_tmp
@@ -113,7 +121,7 @@ def tempdir():
rmtree(abs_tmp)
-def create_files(paths, chroot='.'):
+def create_files(paths, chroot="."):
"""Creates directories and files found in <path>.
:param paths: list of relative paths to files or directories
@@ -137,7 +145,7 @@ def create_files(paths, chroot='.'):
path = join(chroot, path)
filename = basename(path)
# path is a directory path
- if filename == '':
+ if filename == "":
dirs.add(path)
# path is a filename path
else:
@@ -147,7 +155,7 @@ def create_files(paths, chroot='.'):
if not isdir(dirpath):
os.makedirs(dirpath)
for filepath in files:
- open(filepath, 'w').close()
+ open(filepath, "w").close()
@pytest.fixture
@@ -165,17 +173,13 @@ def test_no_args(fake_path):
assert sys.path == fake_path
-@pytest.mark.parametrize("case", [
- ['a/b/'],
- ['a/b'],
- ['a/b/__init__.py'],
- ['a/'],
- ['a'],
-])
+@pytest.mark.parametrize(
+ "case", [["a/b/"], ["a/b"], ["a/b/__init__.py"], ["a/"], ["a"]]
+)
def test_one_arg(fake_path, case):
with tempdir() as chroot:
- create_files(['a/b/__init__.py'])
- expected = [join(chroot, 'a')] + ["."] + fake_path
+ create_files(["a/b/__init__.py"])
+ expected = [join(chroot, "a")] + ["."] + fake_path
assert sys.path == fake_path
with lint.fix_import_path(case):
@@ -183,16 +187,19 @@ def test_one_arg(fake_path, case):
assert sys.path == fake_path
-@pytest.mark.parametrize("case", [
- ['a/b', 'a/c'],
- ['a/c/', 'a/b/'],
- ['a/b/__init__.py', 'a/c/__init__.py'],
- ['a', 'a/c/__init__.py'],
-])
+@pytest.mark.parametrize(
+ "case",
+ [
+ ["a/b", "a/c"],
+ ["a/c/", "a/b/"],
+ ["a/b/__init__.py", "a/c/__init__.py"],
+ ["a", "a/c/__init__.py"],
+ ],
+)
def test_two_similar_args(fake_path, case):
with tempdir() as chroot:
- create_files(['a/b/__init__.py', 'a/c/__init__.py'])
- expected = [join(chroot, 'a')] + ["."] + fake_path
+ create_files(["a/b/__init__.py", "a/c/__init__.py"])
+ expected = [join(chroot, "a")] + ["."] + fake_path
assert sys.path == fake_path
with lint.fix_import_path(case):
@@ -200,18 +207,25 @@ def test_two_similar_args(fake_path, case):
assert sys.path == fake_path
-@pytest.mark.parametrize("case", [
- ['a/b/c/__init__.py', 'a/d/__init__.py', 'a/e/f.py'],
- ['a/b/c', 'a', 'a/e'],
- ['a/b/c', 'a', 'a/b/c', 'a/e', 'a'],
-])
+@pytest.mark.parametrize(
+ "case",
+ [
+ ["a/b/c/__init__.py", "a/d/__init__.py", "a/e/f.py"],
+ ["a/b/c", "a", "a/e"],
+ ["a/b/c", "a", "a/b/c", "a/e", "a"],
+ ],
+)
def test_more_args(fake_path, case):
with tempdir() as chroot:
- create_files(['a/b/c/__init__.py', 'a/d/__init__.py', 'a/e/f.py'])
- expected = [
- join(chroot, suffix)
- for suffix in [sep.join(('a', 'b')), 'a', sep.join(('a', 'e'))]
- ] + ["."] + fake_path
+ create_files(["a/b/c/__init__.py", "a/d/__init__.py", "a/e/f.py"])
+ expected = (
+ [
+ join(chroot, suffix)
+ for suffix in [sep.join(("a", "b")), "a", sep.join(("a", "e"))]
+ ]
+ + ["."]
+ + fake_path
+ )
assert sys.path == fake_path
with lint.fix_import_path(case):
@@ -219,12 +233,12 @@ def test_more_args(fake_path, case):
assert sys.path == fake_path
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def disable(disable):
- return ['I']
+ return ["I"]
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def reporter(reporter):
return testutils.TestReporter
@@ -232,18 +246,18 @@ def reporter(reporter):
@pytest.fixture
def init_linter(linter):
linter.open()
- linter.set_current_module('toto')
- linter.file_state = FileState('toto')
+ linter.set_current_module("toto")
+ linter.file_state = FileState("toto")
return linter
def test_pylint_visit_method_taken_in_account(linter):
class CustomChecker(checkers.BaseChecker):
__implements__ = interfaces.IAstroidChecker
- name = 'custom'
- msgs = {'W9999': ('', 'custom', '')}
+ name = "custom"
+ msgs = {"W9999": ("", "custom", "")}
- @check_messages('custom')
+ @check_messages("custom")
def visit_class(self, _):
pass
@@ -251,119 +265,119 @@ def test_pylint_visit_method_taken_in_account(linter):
linter.open()
out = StringIO()
linter.set_reporter(text.TextReporter(out))
- linter.check('abc')
+ linter.check("abc")
def test_enable_message(init_linter):
linter = init_linter
- assert linter.is_message_enabled('W0101')
- assert linter.is_message_enabled('W0102')
- linter.disable('W0101', scope='package')
- linter.disable('W0102', scope='module', line=1)
- assert not linter.is_message_enabled('W0101')
- assert not linter.is_message_enabled('W0102', 1)
- linter.set_current_module('tutu')
- assert not linter.is_message_enabled('W0101')
- assert linter.is_message_enabled('W0102')
- linter.enable('W0101', scope='package')
- linter.enable('W0102', scope='module', line=1)
- assert linter.is_message_enabled('W0101')
- assert linter.is_message_enabled('W0102', 1)
+ assert linter.is_message_enabled("W0101")
+ assert linter.is_message_enabled("W0102")
+ linter.disable("W0101", scope="package")
+ linter.disable("W0102", scope="module", line=1)
+ assert not linter.is_message_enabled("W0101")
+ assert not linter.is_message_enabled("W0102", 1)
+ linter.set_current_module("tutu")
+ assert not linter.is_message_enabled("W0101")
+ assert linter.is_message_enabled("W0102")
+ linter.enable("W0101", scope="package")
+ linter.enable("W0102", scope="module", line=1)
+ assert linter.is_message_enabled("W0101")
+ assert linter.is_message_enabled("W0102", 1)
def test_enable_message_category(init_linter):
linter = init_linter
- assert linter.is_message_enabled('W0101')
- assert linter.is_message_enabled('C0202')
- linter.disable('W', scope='package')
- linter.disable('C', scope='module', line=1)
- assert not linter.is_message_enabled('W0101')
- assert linter.is_message_enabled('C0202')
- assert not linter.is_message_enabled('C0202', line=1)
- linter.set_current_module('tutu')
- assert not linter.is_message_enabled('W0101')
- assert linter.is_message_enabled('C0202')
- linter.enable('W', scope='package')
- linter.enable('C', scope='module', line=1)
- assert linter.is_message_enabled('W0101')
- assert linter.is_message_enabled('C0202')
- assert linter.is_message_enabled('C0202', line=1)
+ assert linter.is_message_enabled("W0101")
+ assert linter.is_message_enabled("C0202")
+ linter.disable("W", scope="package")
+ linter.disable("C", scope="module", line=1)
+ assert not linter.is_message_enabled("W0101")
+ assert linter.is_message_enabled("C0202")
+ assert not linter.is_message_enabled("C0202", line=1)
+ linter.set_current_module("tutu")
+ assert not linter.is_message_enabled("W0101")
+ assert linter.is_message_enabled("C0202")
+ linter.enable("W", scope="package")
+ linter.enable("C", scope="module", line=1)
+ assert linter.is_message_enabled("W0101")
+ assert linter.is_message_enabled("C0202")
+ assert linter.is_message_enabled("C0202", line=1)
def test_message_state_scope(init_linter):
class FakeConfig(object):
- confidence = ['HIGH']
+ confidence = ["HIGH"]
linter = init_linter
- linter.disable('C0202')
- assert MSG_STATE_SCOPE_CONFIG == linter.get_message_state_scope('C0202')
- linter.disable('W0101', scope='module', line=3)
- assert MSG_STATE_SCOPE_CONFIG == linter.get_message_state_scope('C0202')
- assert MSG_STATE_SCOPE_MODULE == linter.get_message_state_scope('W0101', 3)
- linter.enable('W0102', scope='module', line=3)
- assert MSG_STATE_SCOPE_MODULE == linter.get_message_state_scope('W0102', 3)
+ linter.disable("C0202")
+ assert MSG_STATE_SCOPE_CONFIG == linter.get_message_state_scope("C0202")
+ linter.disable("W0101", scope="module", line=3)
+ assert MSG_STATE_SCOPE_CONFIG == linter.get_message_state_scope("C0202")
+ assert MSG_STATE_SCOPE_MODULE == linter.get_message_state_scope("W0101", 3)
+ linter.enable("W0102", scope="module", line=3)
+ assert MSG_STATE_SCOPE_MODULE == linter.get_message_state_scope("W0102", 3)
linter.config = FakeConfig()
- assert MSG_STATE_CONFIDENCE == \
- linter.get_message_state_scope('this-is-bad',
- confidence=interfaces.INFERENCE)
+ assert MSG_STATE_CONFIDENCE == linter.get_message_state_scope(
+ "this-is-bad", confidence=interfaces.INFERENCE
+ )
def test_enable_message_block(init_linter):
linter = init_linter
linter.open()
- filepath = join(INPUTDIR, 'func_block_disable_msg.py')
- linter.set_current_module('func_block_disable_msg')
- astroid = linter.get_ast(filepath, 'func_block_disable_msg')
+ filepath = join(INPUTDIR, "func_block_disable_msg.py")
+ linter.set_current_module("func_block_disable_msg")
+ astroid = linter.get_ast(filepath, "func_block_disable_msg")
linter.process_tokens(tokenize_module(astroid))
fs = linter.file_state
fs.collect_block_lines(linter.msgs_store, astroid)
# global (module level)
- assert linter.is_message_enabled('W0613')
- assert linter.is_message_enabled('E1101')
+ assert linter.is_message_enabled("W0613")
+ assert linter.is_message_enabled("E1101")
# meth1
- assert linter.is_message_enabled('W0613', 13)
+ assert linter.is_message_enabled("W0613", 13)
# meth2
- assert not linter.is_message_enabled('W0613', 18)
+ assert not linter.is_message_enabled("W0613", 18)
# meth3
- assert not linter.is_message_enabled('E1101', 24)
- assert linter.is_message_enabled('E1101', 26)
+ assert not linter.is_message_enabled("E1101", 24)
+ assert linter.is_message_enabled("E1101", 26)
# meth4
- assert not linter.is_message_enabled('E1101', 32)
- assert linter.is_message_enabled('E1101', 36)
+ assert not linter.is_message_enabled("E1101", 32)
+ assert linter.is_message_enabled("E1101", 36)
# meth5
- assert not linter.is_message_enabled('E1101', 42)
- assert not linter.is_message_enabled('E1101', 43)
- assert linter.is_message_enabled('E1101', 46)
- assert not linter.is_message_enabled('E1101', 49)
- assert not linter.is_message_enabled('E1101', 51)
+ assert not linter.is_message_enabled("E1101", 42)
+ assert not linter.is_message_enabled("E1101", 43)
+ assert linter.is_message_enabled("E1101", 46)
+ assert not linter.is_message_enabled("E1101", 49)
+ assert not linter.is_message_enabled("E1101", 51)
# meth6
- assert not linter.is_message_enabled('E1101', 57)
- assert linter.is_message_enabled('E1101', 61)
- assert not linter.is_message_enabled('E1101', 64)
- assert not linter.is_message_enabled('E1101', 66)
-
- assert linter.is_message_enabled('E0602', 57)
- assert linter.is_message_enabled('E0602', 61)
- assert not linter.is_message_enabled('E0602', 62)
- assert linter.is_message_enabled('E0602', 64)
- assert linter.is_message_enabled('E0602', 66)
+ assert not linter.is_message_enabled("E1101", 57)
+ assert linter.is_message_enabled("E1101", 61)
+ assert not linter.is_message_enabled("E1101", 64)
+ assert not linter.is_message_enabled("E1101", 66)
+
+ assert linter.is_message_enabled("E0602", 57)
+ assert linter.is_message_enabled("E0602", 61)
+ assert not linter.is_message_enabled("E0602", 62)
+ assert linter.is_message_enabled("E0602", 64)
+ assert linter.is_message_enabled("E0602", 66)
# meth7
- assert not linter.is_message_enabled('E1101', 70)
- assert linter.is_message_enabled('E1101', 72)
- assert linter.is_message_enabled('E1101', 75)
- assert linter.is_message_enabled('E1101', 77)
+ assert not linter.is_message_enabled("E1101", 70)
+ assert linter.is_message_enabled("E1101", 72)
+ assert linter.is_message_enabled("E1101", 75)
+ assert linter.is_message_enabled("E1101", 77)
fs = linter.file_state
- assert 17 == fs._suppression_mapping['W0613', 18]
- assert 30 == fs._suppression_mapping['E1101', 33]
- assert ('E1101', 46) not in fs._suppression_mapping
- assert 1 == fs._suppression_mapping['C0302', 18]
- assert 1 == fs._suppression_mapping['C0302', 50]
+ assert 17 == fs._suppression_mapping["W0613", 18]
+ assert 30 == fs._suppression_mapping["E1101", 33]
+ assert ("E1101", 46) not in fs._suppression_mapping
+ assert 1 == fs._suppression_mapping["C0302", 18]
+ assert 1 == fs._suppression_mapping["C0302", 50]
# This is tricky. While the disable in line 106 is disabling
# both 108 and 110, this is usually not what the user wanted.
# Therefore, we report the closest previous disable comment.
- assert 106 == fs._suppression_mapping['E1101', 108]
- assert 109 == fs._suppression_mapping['E1101', 110]
+ assert 106 == fs._suppression_mapping["E1101", 108]
+ assert 109 == fs._suppression_mapping["E1101", 110]
def test_enable_by_symbol(init_linter):
@@ -372,146 +386,150 @@ def test_enable_by_symbol(init_linter):
The state is consistent across symbols and numbers.
"""
linter = init_linter
- assert linter.is_message_enabled('W0101')
- assert linter.is_message_enabled('unreachable')
- assert linter.is_message_enabled('W0102')
- assert linter.is_message_enabled('dangerous-default-value')
- linter.disable('unreachable', scope='package')
- linter.disable('dangerous-default-value', scope='module', line=1)
- assert not linter.is_message_enabled('W0101')
- assert not linter.is_message_enabled('unreachable')
- assert not linter.is_message_enabled('W0102', 1)
- assert not linter.is_message_enabled('dangerous-default-value', 1)
- linter.set_current_module('tutu')
- assert not linter.is_message_enabled('W0101')
- assert not linter.is_message_enabled('unreachable')
- assert linter.is_message_enabled('W0102')
- assert linter.is_message_enabled('dangerous-default-value')
- linter.enable('unreachable', scope='package')
- linter.enable('dangerous-default-value', scope='module', line=1)
- assert linter.is_message_enabled('W0101')
- assert linter.is_message_enabled('unreachable')
- assert linter.is_message_enabled('W0102', 1)
- assert linter.is_message_enabled('dangerous-default-value', 1)
+ assert linter.is_message_enabled("W0101")
+ assert linter.is_message_enabled("unreachable")
+ assert linter.is_message_enabled("W0102")
+ assert linter.is_message_enabled("dangerous-default-value")
+ linter.disable("unreachable", scope="package")
+ linter.disable("dangerous-default-value", scope="module", line=1)
+ assert not linter.is_message_enabled("W0101")
+ assert not linter.is_message_enabled("unreachable")
+ assert not linter.is_message_enabled("W0102", 1)
+ assert not linter.is_message_enabled("dangerous-default-value", 1)
+ linter.set_current_module("tutu")
+ assert not linter.is_message_enabled("W0101")
+ assert not linter.is_message_enabled("unreachable")
+ assert linter.is_message_enabled("W0102")
+ assert linter.is_message_enabled("dangerous-default-value")
+ linter.enable("unreachable", scope="package")
+ linter.enable("dangerous-default-value", scope="module", line=1)
+ assert linter.is_message_enabled("W0101")
+ assert linter.is_message_enabled("unreachable")
+ assert linter.is_message_enabled("W0102", 1)
+ assert linter.is_message_enabled("dangerous-default-value", 1)
def test_enable_report(linter):
- assert linter.report_is_enabled('RP0001')
- linter.disable('RP0001')
- assert not linter.report_is_enabled('RP0001')
- linter.enable('RP0001')
- assert linter.report_is_enabled('RP0001')
+ assert linter.report_is_enabled("RP0001")
+ linter.disable("RP0001")
+ assert not linter.report_is_enabled("RP0001")
+ linter.enable("RP0001")
+ assert linter.report_is_enabled("RP0001")
def test_report_output_format_aliased(linter):
text.register(linter)
- linter.set_option('output-format', 'text')
- assert linter.reporter.__class__.__name__ == 'TextReporter'
+ linter.set_option("output-format", "text")
+ assert linter.reporter.__class__.__name__ == "TextReporter"
def test_set_unsupported_reporter(linter):
text.register(linter)
with pytest.raises(exceptions.InvalidReporterError):
- linter.set_option('output-format', 'missing.module.Class')
+ linter.set_option("output-format", "missing.module.Class")
def test_set_option_1(linter):
- linter.set_option('disable', 'C0111,W0234')
- assert not linter.is_message_enabled('C0111')
- assert not linter.is_message_enabled('W0234')
- assert linter.is_message_enabled('W0113')
- assert not linter.is_message_enabled('missing-docstring')
- assert not linter.is_message_enabled('non-iterator-returned')
+ linter.set_option("disable", "C0111,W0234")
+ assert not linter.is_message_enabled("C0111")
+ assert not linter.is_message_enabled("W0234")
+ assert linter.is_message_enabled("W0113")
+ assert not linter.is_message_enabled("missing-docstring")
+ assert not linter.is_message_enabled("non-iterator-returned")
def test_set_option_2(linter):
- linter.set_option('disable', ('C0111', 'W0234'))
- assert not linter.is_message_enabled('C0111')
- assert not linter.is_message_enabled('W0234')
- assert linter.is_message_enabled('W0113')
- assert not linter.is_message_enabled('missing-docstring')
- assert not linter.is_message_enabled('non-iterator-returned')
+ linter.set_option("disable", ("C0111", "W0234"))
+ assert not linter.is_message_enabled("C0111")
+ assert not linter.is_message_enabled("W0234")
+ assert linter.is_message_enabled("W0113")
+ assert not linter.is_message_enabled("missing-docstring")
+ assert not linter.is_message_enabled("non-iterator-returned")
def test_enable_checkers(linter):
- linter.disable('design')
- assert not ('design' in [c.name for c in linter.prepare_checkers()])
- linter.enable('design')
- assert 'design' in [c.name for c in linter.prepare_checkers()]
+ linter.disable("design")
+ assert not ("design" in [c.name for c in linter.prepare_checkers()])
+ linter.enable("design")
+ assert "design" in [c.name for c in linter.prepare_checkers()]
def test_errors_only(linter):
linter.error_mode()
checkers = linter.prepare_checkers()
checker_names = {c.name for c in checkers}
- should_not = {'design', 'format', 'metrics',
- 'miscellaneous', 'similarities'}
+ should_not = {"design", "format", "metrics", "miscellaneous", "similarities"}
assert set() == should_not & checker_names
def test_disable_similar(linter):
- linter.set_option('disable', 'RP0801')
- linter.set_option('disable', 'R0801')
- assert not ('similarities' in [c.name for c in linter.prepare_checkers()])
+ linter.set_option("disable", "RP0801")
+ linter.set_option("disable", "R0801")
+ assert not ("similarities" in [c.name for c in linter.prepare_checkers()])
def test_disable_alot(linter):
"""check that we disabled a lot of checkers"""
- linter.set_option('reports', False)
- linter.set_option('disable', 'R,C,W')
+ linter.set_option("reports", False)
+ linter.set_option("disable", "R,C,W")
checker_names = [c.name for c in linter.prepare_checkers()]
- for cname in ('design', 'metrics', 'similarities'):
+ for cname in ("design", "metrics", "similarities"):
assert not (cname in checker_names), cname
def test_addmessage(linter):
linter.set_reporter(testutils.TestReporter())
linter.open()
- linter.set_current_module('0123')
- linter.add_message('C0301', line=1, args=(1, 2))
- linter.add_message('line-too-long', line=2, args=(3, 4))
- assert ['C: 1: Line too long (1/2)', 'C: 2: Line too long (3/4)'] == \
- linter.reporter.messages
+ linter.set_current_module("0123")
+ linter.add_message("C0301", line=1, args=(1, 2))
+ linter.add_message("line-too-long", line=2, args=(3, 4))
+ assert [
+ "C: 1: Line too long (1/2)",
+ "C: 2: Line too long (3/4)",
+ ] == linter.reporter.messages
def test_addmessage_invalid(linter):
linter.set_reporter(testutils.TestReporter())
linter.open()
- linter.set_current_module('0123')
+ linter.set_current_module("0123")
with pytest.raises(InvalidMessageError) as cm:
- linter.add_message('line-too-long', args=(1, 2))
+ linter.add_message("line-too-long", args=(1, 2))
assert str(cm.value) == "Message C0301 must provide line, got None"
with pytest.raises(InvalidMessageError) as cm:
- linter.add_message('line-too-long', line=2, node='fake_node', args=(1, 2))
- assert str(cm.value) == "Message C0301 must only provide line, got line=2, node=fake_node"
+ linter.add_message("line-too-long", line=2, node="fake_node", args=(1, 2))
+ assert (
+ str(cm.value)
+ == "Message C0301 must only provide line, got line=2, node=fake_node"
+ )
with pytest.raises(InvalidMessageError) as cm:
- linter.add_message('C0321')
+ linter.add_message("C0321")
assert str(cm.value) == "Message C0321 must provide Node, got None"
def test_init_hooks_called_before_load_plugins():
with pytest.raises(RuntimeError):
- Run(['--load-plugins', 'unexistant', '--init-hook', 'raise RuntimeError'])
+ Run(["--load-plugins", "unexistant", "--init-hook", "raise RuntimeError"])
with pytest.raises(RuntimeError):
- Run(['--init-hook', 'raise RuntimeError', '--load-plugins', 'unexistant'])
+ Run(["--init-hook", "raise RuntimeError", "--load-plugins", "unexistant"])
def test_analyze_explicit_script(linter):
linter.set_reporter(testutils.TestReporter())
- linter.check(os.path.join(os.path.dirname(__file__), 'data', 'ascript'))
- assert ['C: 2: Line too long (175/100)'] == linter.reporter.messages
+ linter.check(os.path.join(os.path.dirname(__file__), "data", "ascript"))
+ assert ["C: 2: Line too long (175/100)"] == linter.reporter.messages
def test_python3_checker_disabled(linter):
checker_names = [c.name for c in linter.prepare_checkers()]
- assert 'python3' not in checker_names
+ assert "python3" not in checker_names
- linter.set_option('enable', 'python3')
+ linter.set_option("enable", "python3")
checker_names = [c.name for c in linter.prepare_checkers()]
- assert 'python3' in checker_names
+ assert "python3" in checker_names
def test_full_documentation(linter):
@@ -534,21 +552,21 @@ def test_full_documentation(linter):
@pytest.fixture
def pop_pylintrc():
- os.environ.pop('PYLINTRC', None)
+ os.environ.pop("PYLINTRC", None)
@pytest.mark.usefixtures("pop_pylintrc")
def test_pylint_home():
- uhome = os.path.expanduser('~')
- if uhome == '~':
- expected = '.pylint.d'
+ uhome = os.path.expanduser("~")
+ if uhome == "~":
+ expected = ".pylint.d"
else:
- expected = os.path.join(uhome, '.pylint.d')
+ expected = os.path.join(uhome, ".pylint.d")
assert config.PYLINT_HOME == expected
try:
- pylintd = join(tempfile.gettempdir(), '.pylint.d')
- os.environ['PYLINTHOME'] = pylintd
+ pylintd = join(tempfile.gettempdir(), ".pylint.d")
+ os.environ["PYLINTHOME"] = pylintd
try:
reload(config)
assert config.PYLINT_HOME == pylintd
@@ -558,12 +576,14 @@ def test_pylint_home():
except:
pass
finally:
- del os.environ['PYLINTHOME']
+ del os.environ["PYLINTHOME"]
-@pytest.mark.skipif(PYPY_VERSION_INFO,
- reason="TOX runs this test from within the repo and finds "
- "the project's pylintrc.")
+@pytest.mark.skipif(
+ PYPY_VERSION_INFO,
+ reason="TOX runs this test from within the repo and finds "
+ "the project's pylintrc.",
+)
@pytest.mark.usefixtures("pop_pylintrc")
def test_pylintrc():
with fake_home():
@@ -571,10 +591,9 @@ def test_pylintrc():
chdir(os.path.dirname(os.path.abspath(sys.executable)))
try:
assert config.find_pylintrc() is None
- os.environ['PYLINTRC'] = join(tempfile.gettempdir(),
- '.pylintrc')
+ os.environ["PYLINTRC"] = join(tempfile.gettempdir(), ".pylintrc")
assert config.find_pylintrc() is None
- os.environ['PYLINTRC'] = '.'
+ os.environ["PYLINTRC"] = "."
assert config.find_pylintrc() is None
finally:
chdir(current_dir)
@@ -585,17 +604,25 @@ def test_pylintrc():
def test_pylintrc_parentdir():
with tempdir() as chroot:
- create_files(['a/pylintrc', 'a/b/__init__.py', 'a/b/pylintrc',
- 'a/b/c/__init__.py', 'a/b/c/d/__init__.py',
- 'a/b/c/d/e/.pylintrc'])
+ create_files(
+ [
+ "a/pylintrc",
+ "a/b/__init__.py",
+ "a/b/pylintrc",
+ "a/b/c/__init__.py",
+ "a/b/c/d/__init__.py",
+ "a/b/c/d/e/.pylintrc",
+ ]
+ )
with fake_home():
assert config.find_pylintrc() is None
- results = {'a' : join(chroot, 'a', 'pylintrc'),
- 'a/b' : join(chroot, 'a', 'b', 'pylintrc'),
- 'a/b/c' : join(chroot, 'a', 'b', 'pylintrc'),
- 'a/b/c/d' : join(chroot, 'a', 'b', 'pylintrc'),
- 'a/b/c/d/e' : join(chroot, 'a', 'b', 'c', 'd', 'e', '.pylintrc'),
- }
+ results = {
+ "a": join(chroot, "a", "pylintrc"),
+ "a/b": join(chroot, "a", "b", "pylintrc"),
+ "a/b/c": join(chroot, "a", "b", "pylintrc"),
+ "a/b/c/d": join(chroot, "a", "b", "pylintrc"),
+ "a/b/c/d/e": join(chroot, "a", "b", "c", "d", "e", ".pylintrc"),
+ }
for basedir, expected in results.items():
os.chdir(join(chroot, basedir))
assert config.find_pylintrc() == expected
@@ -605,13 +632,14 @@ def test_pylintrc_parentdir():
def test_pylintrc_parentdir_no_package():
with tempdir() as chroot:
with fake_home():
- create_files(['a/pylintrc', 'a/b/pylintrc', 'a/b/c/d/__init__.py'])
+ create_files(["a/pylintrc", "a/b/pylintrc", "a/b/c/d/__init__.py"])
assert config.find_pylintrc() is None
- results = {'a' : join(chroot, 'a', 'pylintrc'),
- 'a/b' : join(chroot, 'a', 'b', 'pylintrc'),
- 'a/b/c' : None,
- 'a/b/c/d' : None,
- }
+ results = {
+ "a": join(chroot, "a", "pylintrc"),
+ "a/b": join(chroot, "a", "b", "pylintrc"),
+ "a/b/c": None,
+ "a/b/c/d": None,
+ }
for basedir, expected in results.items():
os.chdir(join(chroot, basedir))
assert config.find_pylintrc() == expected
@@ -623,29 +651,28 @@ class TestPreprocessOptions(object):
def test_value_equal(self):
self.args = []
- preprocess_options(['--foo', '--bar=baz', '--qu=ux'],
- {'foo': (self._callback, False),
- 'qu': (self._callback, True)})
- assert [('foo', None), ('qu', 'ux')] == self.args
+ preprocess_options(
+ ["--foo", "--bar=baz", "--qu=ux"],
+ {"foo": (self._callback, False), "qu": (self._callback, True)},
+ )
+ assert [("foo", None), ("qu", "ux")] == self.args
def test_value_space(self):
self.args = []
- preprocess_options(['--qu', 'ux'],
- {'qu': (self._callback, True)})
- assert [('qu', 'ux')] == self.args
+ preprocess_options(["--qu", "ux"], {"qu": (self._callback, True)})
+ assert [("qu", "ux")] == self.args
def test_error_missing_expected_value(self):
with pytest.raises(ArgumentPreprocessingError):
- preprocess_options(['--foo', '--bar', '--qu=ux'],
- {'bar': (None, True)})
+ preprocess_options(["--foo", "--bar", "--qu=ux"], {"bar": (None, True)})
with pytest.raises(ArgumentPreprocessingError):
- preprocess_options(['--foo', '--bar'],
- {'bar': (None, True)})
+ preprocess_options(["--foo", "--bar"], {"bar": (None, True)})
def test_error_unexpected_value(self):
with pytest.raises(ArgumentPreprocessingError):
- preprocess_options(['--foo', '--bar=spam', '--qu=ux'],
- {'bar': (None, False)})
+ preprocess_options(
+ ["--foo", "--bar=spam", "--qu=ux"], {"bar": (None, False)}
+ )
@pytest.fixture
@@ -653,15 +680,22 @@ def store():
store = MessagesStore()
class Checker(object):
- name = 'achecker'
+ name = "achecker"
msgs = {
- 'W1234': ('message', 'msg-symbol', 'msg description.',
- {'old_names': [('W0001', 'old-symbol')]}),
- 'E1234': ('Duplicate keyword argument %r in %s call',
- 'duplicate-keyword-arg',
- 'Used when a function call passes the same keyword argument multiple times.',
- {'maxversion': (2, 6)}),
- }
+ "W1234": (
+ "message",
+ "msg-symbol",
+ "msg description.",
+ {"old_names": [("W0001", "old-symbol")]},
+ ),
+ "E1234": (
+ "Duplicate keyword argument %r in %s call",
+ "duplicate-keyword-arg",
+ "Used when a function call passes the same keyword argument multiple times.",
+ {"maxversion": (2, 6)},
+ ),
+ }
+
store.register_messages(Checker())
return store
@@ -671,53 +705,62 @@ class TestMessagesStore(object):
assert desc == msg.format_help(checkerref=checkerref)
def test_check_message_id(self, store):
- assert isinstance(store.get_message_definition('W1234'), MessageDefinition)
+ assert isinstance(store.get_message_definition("W1234"), MessageDefinition)
with pytest.raises(UnknownMessageError):
- store.get_message_definition('YB12')
+ store.get_message_definition("YB12")
def test_message_help(self, store):
- msg = store.get_message_definition('W1234')
+ msg = store.get_message_definition("W1234")
self._compare_messages(
- ''':msg-symbol (W1234): *message*
- msg description. This message belongs to the achecker checker.''',
- msg, checkerref=True)
+ """:msg-symbol (W1234): *message*
+ msg description. This message belongs to the achecker checker.""",
+ msg,
+ checkerref=True,
+ )
self._compare_messages(
- ''':msg-symbol (W1234): *message*
- msg description.''',
- msg, checkerref=False)
+ """:msg-symbol (W1234): *message*
+ msg description.""",
+ msg,
+ checkerref=False,
+ )
def test_message_help_minmax(self, store):
# build the message manually to be python version independent
- msg = store.get_message_definition('E1234')
+ msg = store.get_message_definition("E1234")
self._compare_messages(
- ''':duplicate-keyword-arg (E1234): *Duplicate keyword argument %r in %s call*
+ """:duplicate-keyword-arg (E1234): *Duplicate keyword argument %r in %s call*
Used when a function call passes the same keyword argument multiple times.
This message belongs to the achecker checker. It can't be emitted when using
- Python >= 2.6.''',
- msg, checkerref=True)
+ Python >= 2.6.""",
+ msg,
+ checkerref=True,
+ )
self._compare_messages(
- ''':duplicate-keyword-arg (E1234): *Duplicate keyword argument %r in %s call*
+ """:duplicate-keyword-arg (E1234): *Duplicate keyword argument %r in %s call*
Used when a function call passes the same keyword argument multiple times.
- This message can't be emitted when using Python >= 2.6.''',
- msg, checkerref=False)
+ This message can't be emitted when using Python >= 2.6.""",
+ msg,
+ checkerref=False,
+ )
def test_list_messages(self, store):
output = StringIO()
with redirect_stdout(output):
store.list_messages()
# cursory examination of the output: we're mostly testing it completes
- assert ':msg-symbol (W1234): *message*' in output.getvalue()
+ assert ":msg-symbol (W1234): *message*" in output.getvalue()
def test_add_renamed_message(self, store):
- store.add_renamed_message('W1234', 'old-bad-name', 'msg-symbol')
- assert 'msg-symbol' == store.get_message_definition('W1234').symbol
- assert 'msg-symbol' == store.get_message_definition('old-bad-name').symbol
+ store.add_renamed_message("W1234", "old-bad-name", "msg-symbol")
+ assert "msg-symbol" == store.get_message_definition("W1234").symbol
+ assert "msg-symbol" == store.get_message_definition("old-bad-name").symbol
def test_add_renamed_message_invalid(self, store):
# conflicting message ID
with pytest.raises(InvalidMessageError) as cm:
store.add_renamed_message(
- 'W1234', 'old-msg-symbol', 'duplicate-keyword-arg')
+ "W1234", "old-msg-symbol", "duplicate-keyword-arg"
+ )
expected = (
"Message id 'W1234' cannot have both 'msg-symbol' and 'old-msg-symbol' "
"as symbolic name."
@@ -725,25 +768,26 @@ class TestMessagesStore(object):
assert str(cm.value) == expected
def test_renamed_message_register(self, store):
- assert 'msg-symbol' == store.get_message_definition('W0001').symbol
- assert 'msg-symbol' == store.get_message_definition('old-symbol').symbol
+ assert "msg-symbol" == store.get_message_definition("W0001").symbol
+ assert "msg-symbol" == store.get_message_definition("old-symbol").symbol
def test_custom_should_analyze_file():
-    '''Check that we can write custom should_analyze_file that works
+    """Check that we can write custom should_analyze_file that works
even for arguments.
- '''
+ """
class CustomPyLinter(PyLinter):
def should_analyze_file(self, modname, path, is_argument=False):
- if os.path.basename(path) == 'wrong.py':
+ if os.path.basename(path) == "wrong.py":
return False
return super(CustomPyLinter, self).should_analyze_file(
- modname, path, is_argument=is_argument)
+ modname, path, is_argument=is_argument
+ )
- package_dir = os.path.join(HERE, 'regrtest_data', 'bad_package')
- wrong_file = os.path.join(package_dir, 'wrong.py')
+ package_dir = os.path.join(HERE, "regrtest_data", "bad_package")
+ wrong_file = os.path.join(package_dir, "wrong.py")
for jobs in [1, 2]:
reporter = testutils.TestReporter()
@@ -761,7 +805,7 @@ def test_custom_should_analyze_file():
messages = reporter.messages
assert len(messages) == 1
- assert 'invalid syntax' in messages[0]
+ assert "invalid syntax" in messages[0]
def test_filename_with__init__(init_linter):
@@ -772,7 +816,7 @@ def test_filename_with__init__(init_linter):
linter = init_linter
linter.open()
linter.set_reporter(reporter)
- filepath = join(INPUTDIR, 'not__init__.py')
+ filepath = join(INPUTDIR, "not__init__.py")
linter.check([filepath])
messages = reporter.messages
assert len(messages) == 0
diff --git a/pylint/test/unittest_pyreverse_diadefs.py b/pylint/test/unittest_pyreverse_diadefs.py
index dcc1c09e7..641497905 100644
--- a/pylint/test/unittest_pyreverse_diadefs.py
+++ b/pylint/test/unittest_pyreverse_diadefs.py
@@ -27,13 +27,13 @@ def _process_classes(classes):
"""extract class names of a list"""
return sorted([(isinstance(c.node, astroid.ClassDef), c.title) for c in classes])
+
def _process_relations(relations):
"""extract relation indices from a relation list"""
result = []
for rel_type, rels in relations.items():
for rel in rels:
- result.append( (rel_type, rel.from_object.title,
- rel.to_object.title) )
+ result.append((rel_type, rel.from_object.title, rel.to_object.title))
result.sort()
return result
@@ -43,17 +43,17 @@ def HANDLER():
return DiadefsHandler(Config())
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def PROJECT():
- return get_project('data')
+ return get_project("data")
def test_option_values(HANDLER, PROJECT):
"""test for ancestor, associated and module options"""
df_h = DiaDefGenerator(Linker(PROJECT), HANDLER)
cl_config = Config()
- cl_config.classes = ['Specialization']
- cl_h = DiaDefGenerator(Linker(PROJECT), DiadefsHandler(cl_config) )
+ cl_config.classes = ["Specialization"]
+ cl_h = DiaDefGenerator(Linker(PROJECT), DiadefsHandler(cl_config))
assert (0, 0) == df_h._get_levels()
assert False == df_h.module_names
assert (-1, -1) == cl_h._get_levels()
@@ -65,11 +65,11 @@ def test_option_values(HANDLER, PROJECT):
hndl._set_default_options()
assert (-1, -1) == hndl._get_levels()
assert True == hndl.module_names
- handler = DiadefsHandler( Config())
+ handler = DiadefsHandler(Config())
df_h = DiaDefGenerator(Linker(PROJECT), handler)
cl_config = Config()
- cl_config.classes = ['Specialization']
- cl_h = DiaDefGenerator(Linker(PROJECT), DiadefsHandler(cl_config) )
+ cl_config.classes = ["Specialization"]
+ cl_h = DiaDefGenerator(Linker(PROJECT), DiadefsHandler(cl_config))
for hndl in [df_h, cl_h]:
hndl.config.show_ancestors = 2
hndl.config.show_associated = 1
@@ -78,36 +78,45 @@ def test_option_values(HANDLER, PROJECT):
assert (2, 1) == hndl._get_levels()
assert False == hndl.module_names
-#def test_default_values():
+ # def test_default_values():
"""test efault values for package or class diagrams"""
# TODO : should test difference between default values for package
# or class diagrams
+
class TestDefaultDiadefGenerator(object):
def test_known_values1(self, HANDLER, PROJECT):
dd = DefaultDiadefGenerator(Linker(PROJECT), HANDLER).visit(PROJECT)
assert len(dd) == 2
keys = [d.TYPE for d in dd]
- assert keys == ['package', 'class']
+ assert keys == ["package", "class"]
pd = dd[0]
- assert pd.title == 'packages No Name'
- modules = sorted([(isinstance(m.node, astroid.Module), m.title)
- for m in pd.objects])
- assert modules == [(True, 'data'),
- (True, 'data.clientmodule_test'),
- (True, 'data.suppliermodule_test')]
+ assert pd.title == "packages No Name"
+ modules = sorted(
+ [(isinstance(m.node, astroid.Module), m.title) for m in pd.objects]
+ )
+ assert modules == [
+ (True, "data"),
+ (True, "data.clientmodule_test"),
+ (True, "data.suppliermodule_test"),
+ ]
cd = dd[1]
- assert cd.title == 'classes No Name'
+ assert cd.title == "classes No Name"
classes = _process_classes(cd.objects)
- assert classes == [(True, 'Ancestor'),
- (True, 'DoNothing'),
- (True, 'Interface'),
- (True, 'Specialization')]
-
- _should_rels = [('association', 'DoNothing', 'Ancestor'),
- ('association', 'DoNothing', 'Specialization'),
- ('implements', 'Ancestor', 'Interface'),
- ('specialization', 'Specialization', 'Ancestor')]
+ assert classes == [
+ (True, "Ancestor"),
+ (True, "DoNothing"),
+ (True, "Interface"),
+ (True, "Specialization"),
+ ]
+
+ _should_rels = [
+ ("association", "DoNothing", "Ancestor"),
+ ("association", "DoNothing", "Specialization"),
+ ("implements", "Ancestor", "Interface"),
+ ("specialization", "Specialization", "Ancestor"),
+ ]
+
def test_exctract_relations(self, HANDLER, PROJECT):
"""test extract_relations between classes"""
cd = DefaultDiadefGenerator(Linker(PROJECT), HANDLER).visit(PROJECT)[1]
@@ -120,44 +129,49 @@ class TestDefaultDiadefGenerator(object):
different classes possibly in different modules"""
         # XXX should be catching pyreverse environment problem but doesn't
         # pyreverse doesn't extract the relations but this test is ok
- project = get_project('data')
+ project = get_project("data")
handler = DiadefsHandler(Config())
- diadefs = handler.get_diadefs(project, Linker(project, tag=True) )
+ diadefs = handler.get_diadefs(project, Linker(project, tag=True))
cd = diadefs[1]
relations = _process_relations(cd.relationships)
assert relations == self._should_rels
def test_known_values2(self, HANDLER):
- project = get_project('data.clientmodule_test')
+ project = get_project("data.clientmodule_test")
dd = DefaultDiadefGenerator(Linker(project), HANDLER).visit(project)
assert len(dd) == 1
keys = [d.TYPE for d in dd]
- assert keys == ['class']
+ assert keys == ["class"]
cd = dd[0]
- assert cd.title == 'classes No Name'
+ assert cd.title == "classes No Name"
classes = _process_classes(cd.objects)
- assert classes == [(True, 'Ancestor'),
- (True, 'Specialization')]
+ assert classes == [(True, "Ancestor"), (True, "Specialization")]
def test_known_values1(HANDLER, PROJECT):
- HANDLER.config.classes = ['Specialization']
+ HANDLER.config.classes = ["Specialization"]
cdg = ClassDiadefGenerator(Linker(PROJECT), HANDLER)
- special = 'data.clientmodule_test.Specialization'
+ special = "data.clientmodule_test.Specialization"
cd = cdg.class_diagram(PROJECT, special)
assert cd.title == special
classes = _process_classes(cd.objects)
- assert classes == [(True, 'data.clientmodule_test.Ancestor'),
- (True, special),
- (True, 'data.suppliermodule_test.DoNothing')]
+ assert classes == [
+ (True, "data.clientmodule_test.Ancestor"),
+ (True, special),
+ (True, "data.suppliermodule_test.DoNothing"),
+ ]
def test_known_values2(HANDLER, PROJECT):
- HANDLER.config.classes = ['Specialization']
+ HANDLER.config.classes = ["Specialization"]
HANDLER.config.module_names = False
- cd = ClassDiadefGenerator(Linker(PROJECT), HANDLER).class_diagram(PROJECT, 'data.clientmodule_test.Specialization')
- assert cd.title == 'data.clientmodule_test.Specialization'
+ cd = ClassDiadefGenerator(Linker(PROJECT), HANDLER).class_diagram(
+ PROJECT, "data.clientmodule_test.Specialization"
+ )
+ assert cd.title == "data.clientmodule_test.Specialization"
classes = _process_classes(cd.objects)
- assert classes == [(True, 'Ancestor'),
- (True, 'DoNothing'),
- (True, 'Specialization')]
+ assert classes == [
+ (True, "Ancestor"),
+ (True, "DoNothing"),
+ (True, "Specialization"),
+ ]
diff --git a/pylint/test/unittest_pyreverse_inspector.py b/pylint/test/unittest_pyreverse_inspector.py
index 82bae03c4..f87bddbfd 100644
--- a/pylint/test/unittest_pyreverse_inspector.py
+++ b/pylint/test/unittest_pyreverse_inspector.py
@@ -21,54 +21,54 @@ from unittest_pyreverse_writer import get_project
@pytest.fixture
def project():
- project = get_project('data', 'data')
+ project = get_project("data", "data")
linker = inspector.Linker(project)
linker.visit(project)
return project
def test_class_implements(project):
- klass = project.get_module('data.clientmodule_test')['Ancestor']
- assert hasattr(klass, 'implements')
+ klass = project.get_module("data.clientmodule_test")["Ancestor"]
+ assert hasattr(klass, "implements")
assert len(klass.implements) == 1
assert isinstance(klass.implements[0], nodes.ClassDef)
assert klass.implements[0].name == "Interface"
def test_class_implements_specialization(project):
- klass = project.get_module('data.clientmodule_test')['Specialization']
- assert hasattr(klass, 'implements')
+ klass = project.get_module("data.clientmodule_test")["Specialization"]
+ assert hasattr(klass, "implements")
assert len(klass.implements) == 0
def test_locals_assignment_resolution(project):
- klass = project.get_module('data.clientmodule_test')['Specialization']
- assert hasattr(klass, 'locals_type')
+ klass = project.get_module("data.clientmodule_test")["Specialization"]
+ assert hasattr(klass, "locals_type")
type_dict = klass.locals_type
assert len(type_dict) == 2
keys = sorted(type_dict.keys())
- assert keys == ['TYPE', 'top']
- assert len(type_dict['TYPE']) == 1
- assert type_dict['TYPE'][0].value == 'final class'
- assert len(type_dict['top']) == 1
- assert type_dict['top'][0].value == 'class'
+ assert keys == ["TYPE", "top"]
+ assert len(type_dict["TYPE"]) == 1
+ assert type_dict["TYPE"][0].value == "final class"
+ assert len(type_dict["top"]) == 1
+ assert type_dict["top"][0].value == "class"
def test_instance_attrs_resolution(project):
- klass = project.get_module('data.clientmodule_test')['Specialization']
- assert hasattr(klass, 'instance_attrs_type')
+ klass = project.get_module("data.clientmodule_test")["Specialization"]
+ assert hasattr(klass, "instance_attrs_type")
type_dict = klass.instance_attrs_type
assert len(type_dict) == 2
keys = sorted(type_dict.keys())
- assert keys == ['_id', 'relation']
- assert isinstance(type_dict['relation'][0], bases.Instance), \
- type_dict['relation']
- assert type_dict['relation'][0].name == 'DoNothing'
- assert type_dict['_id'][0] is astroid.Uninferable
+ assert keys == ["_id", "relation"]
+ assert isinstance(type_dict["relation"][0], bases.Instance), type_dict["relation"]
+ assert type_dict["relation"][0].name == "DoNothing"
+ assert type_dict["_id"][0] is astroid.Uninferable
def test_concat_interfaces():
- cls = astroid.extract_node('''
+ cls = astroid.extract_node(
+ '''
class IMachin: pass
class Correct2:
@@ -82,13 +82,15 @@ def test_concat_interfaces():
class InterfaceCanNowBeFound: #@
"""docstring"""
__implements__ = BadArgument.__implements__ + Correct2.__implements__
- ''')
+ '''
+ )
interfaces = inspector.interfaces(cls)
- assert [i.name for i in interfaces] == ['IMachin']
+ assert [i.name for i in interfaces] == ["IMachin"]
def test_interfaces():
- module = astroid.parse('''
+ module = astroid.parse(
+ """
class Interface(object): pass
class MyIFace(Interface): pass
class AnotherIFace(Interface): pass
@@ -99,25 +101,25 @@ def test_interfaces():
class Concrete2:
__implements__ = (MyIFace, AnotherIFace)
class Concrete23(Concrete1): pass
- ''')
-
- for klass, interfaces in (('Concrete0', ['MyIFace']),
- ('Concrete1', ['MyIFace', 'AnotherIFace']),
- ('Concrete2', ['MyIFace', 'AnotherIFace']),
- ('Concrete23', ['MyIFace', 'AnotherIFace'])):
+ """
+ )
+
+ for klass, interfaces in (
+ ("Concrete0", ["MyIFace"]),
+ ("Concrete1", ["MyIFace", "AnotherIFace"]),
+ ("Concrete2", ["MyIFace", "AnotherIFace"]),
+ ("Concrete23", ["MyIFace", "AnotherIFace"]),
+ ):
klass = module[klass]
assert [i.name for i in inspector.interfaces(klass)] == interfaces
def test_from_directory(project):
- expected = os.path.join('pylint', 'test', 'data', '__init__.py')
- assert project.name == 'data'
+ expected = os.path.join("pylint", "test", "data", "__init__.py")
+ assert project.name == "data"
assert project.path.endswith(expected)
def test_project_node(project):
- expected = [
- 'data', 'data.clientmodule_test',
- 'data.suppliermodule_test',
- ]
+ expected = ["data", "data.clientmodule_test", "data.suppliermodule_test"]
assert sorted(project.keys()) == expected
diff --git a/pylint/test/unittest_pyreverse_writer.py b/pylint/test/unittest_pyreverse_writer.py
index 922ec10c4..cf2e915b0 100644
--- a/pylint/test/unittest_pyreverse_writer.py
+++ b/pylint/test/unittest_pyreverse_writer.py
@@ -28,16 +28,24 @@ from pylint.pyreverse.utils import get_visibility
_DEFAULTS = {
- 'all_ancestors': None, 'show_associated': None,
- 'module_names': None,
- 'output_format': 'dot', 'diadefs_file': None, 'quiet': 0,
- 'show_ancestors': None, 'classes': (), 'all_associated': None,
- 'mode': 'PUB_ONLY', 'show_builtin': False, 'only_classnames': False
- }
+ "all_ancestors": None,
+ "show_associated": None,
+ "module_names": None,
+ "output_format": "dot",
+ "diadefs_file": None,
+ "quiet": 0,
+ "show_ancestors": None,
+ "classes": (),
+ "all_associated": None,
+ "mode": "PUB_ONLY",
+ "show_builtin": False,
+ "only_classnames": False,
+}
class Config(object):
"""config object for tests"""
+
def __init__(self):
for attr, value in _DEFAULTS.items():
setattr(self, attr, value)
@@ -45,27 +53,33 @@ class Config(object):
def _file_lines(path):
# we don't care about the actual encoding, but python3 forces us to pick one
- with codecs.open(path, encoding='latin1') as stream:
- lines = [line.strip() for line in stream.readlines()
- if (line.find('squeleton generated by ') == -1 and
- not line.startswith('__revision__ = "$Id:'))]
+ with codecs.open(path, encoding="latin1") as stream:
+ lines = [
+ line.strip()
+ for line in stream.readlines()
+ if (
+ line.find("squeleton generated by ") == -1
+ and not line.startswith('__revision__ = "$Id:')
+ )
+ ]
return [line for line in lines if line]
def get_project(module, name="No Name"):
"""return an astroid project representation"""
+
def _astroid_wrapper(func, modname):
return func(modname)
- return project_from_files([module], _astroid_wrapper,
- project_name=name)
+
+ return project_from_files([module], _astroid_wrapper, project_name=name)
-DOT_FILES = ['packages_No_Name.dot', 'classes_No_Name.dot']
+DOT_FILES = ["packages_No_Name.dot", "classes_No_Name.dot"]
@pytest.fixture(scope="module")
def setup():
- project = get_project(os.path.join(os.path.dirname(__file__), 'data'))
+ project = get_project(os.path.join(os.path.dirname(__file__), "data"))
linker = Linker(project)
CONFIG = Config()
handler = DiadefsHandler(CONFIG)
@@ -85,27 +99,38 @@ def setup():
@pytest.mark.usefixtures("setup")
@pytest.mark.parametrize("generated_file", DOT_FILES)
def test_dot_files(generated_file):
- expected_file = os.path.join(os.path.dirname(__file__), 'data', generated_file)
+ expected_file = os.path.join(os.path.dirname(__file__), "data", generated_file)
generated = _file_lines(generated_file)
expected = _file_lines(expected_file)
- generated = '\n'.join(generated)
- expected = '\n'.join(expected)
- files = "\n *** expected : %s, generated : %s \n" % (
- expected_file, generated_file)
- assert expected == generated, '%s%s' % (
- files, '\n'.join(line for line in unified_diff(
- expected.splitlines(), generated.splitlines())))
+ generated = "\n".join(generated)
+ expected = "\n".join(expected)
+ files = "\n *** expected : %s, generated : %s \n" % (expected_file, generated_file)
+ assert expected == generated, "%s%s" % (
+ files,
+ "\n".join(
+ line for line in unified_diff(expected.splitlines(), generated.splitlines())
+ ),
+ )
os.remove(generated_file)
-@pytest.mark.parametrize("names, expected",
- [(["__reduce_ex__", "__setattr__"], "special"),
+@pytest.mark.parametrize(
+ "names, expected",
+ [
+ (["__reduce_ex__", "__setattr__"], "special"),
(["__g_", "____dsf", "__23_9"], "private"),
(["simple"], "public"),
- (["_", "__", "___", "____", "_____", "___e__", "_nextsimple",
- "_filter_it_"], "protected")])
+ (
+ ["_", "__", "___", "____", "_____", "___e__", "_nextsimple", "_filter_it_"],
+ "protected",
+ ),
+ ],
+)
def test_get_visibility(names, expected):
for name in names:
got = get_visibility(name)
- assert got == expected, \
- 'got %s instead of %s for value %s' % (got, expected, name)
+ assert got == expected, "got %s instead of %s for value %s" % (
+ got,
+ expected,
+ name,
+ )
diff --git a/pylint/test/unittest_reporters_json.py b/pylint/test/unittest_reporters_json.py
index d8e622036..ba7853887 100644
--- a/pylint/test/unittest_reporters_json.py
+++ b/pylint/test/unittest_reporters_json.py
@@ -29,24 +29,25 @@ def test_simple_json_output():
linter.config.persistent = 0
linter.reporter.set_output(output)
linter.open()
- linter.set_current_module('0123')
- linter.add_message('line-too-long', line=1, args=(1, 2))
+ linter.set_current_module("0123")
+ linter.add_message("line-too-long", line=1, args=(1, 2))
# we call this method because we didn't actually run the checkers
reporter.display_messages(None)
- expected_result = [[
- ("column", 0),
- ("line", 1),
- ("message", "Line too long (1/2)"),
- ("message-id", "C0301"),
- ("module", "0123"),
- ("obj", ""),
- ("path", "0123"),
- ("symbol", "line-too-long"),
- ("type", "convention"),
- ]]
+ expected_result = [
+ [
+ ("column", 0),
+ ("line", 1),
+ ("message", "Line too long (1/2)"),
+ ("message-id", "C0301"),
+ ("module", "0123"),
+ ("obj", ""),
+ ("path", "0123"),
+ ("symbol", "line-too-long"),
+ ("type", "convention"),
+ ]
+ ]
report_result = json.loads(output.getvalue())
- report_result = [sorted(report_result[0].items(),
- key=lambda item: item[0])]
+ report_result = [sorted(report_result[0].items(), key=lambda item: item[0])]
assert report_result == expected_result
diff --git a/pylint/test/unittest_reporting.py b/pylint/test/unittest_reporting.py
index b7b974d42..88f2ce4d5 100644
--- a/pylint/test/unittest_reporting.py
+++ b/pylint/test/unittest_reporting.py
@@ -20,28 +20,27 @@ from pylint.reporters.text import TextReporter, ParseableTextReporter
import pytest
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def reporter(reporter):
return TextReporter
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def disable(disable):
- return ['I']
+ return ["I"]
def test_template_option(linter):
output = StringIO()
linter.reporter.set_output(output)
- linter.set_option('msg-template', '{msg_id}:{line:03d}')
+ linter.set_option("msg-template", "{msg_id}:{line:03d}")
linter.open()
- linter.set_current_module('0123')
- linter.add_message('C0301', line=1, args=(1, 2))
- linter.add_message('line-too-long', line=2, args=(3, 4))
- assert output.getvalue() == \
- '************* Module 0123\n' \
- 'C0301:001\n' \
- 'C0301:002\n'
+ linter.set_current_module("0123")
+ linter.add_message("C0301", line=1, args=(1, 2))
+ linter.add_message("line-too-long", line=2, args=(3, 4))
+ assert (
+ output.getvalue() == "************* Module 0123\n" "C0301:001\n" "C0301:002\n"
+ )
def test_parseable_output_deprecated():
@@ -61,14 +60,15 @@ def test_parseable_output_regression():
checkers.initialize(linter)
linter.config.persistent = 0
linter.reporter.set_output(output)
- linter.set_option('output-format', 'parseable')
+ linter.set_option("output-format", "parseable")
linter.open()
- linter.set_current_module('0123')
- linter.add_message('line-too-long', line=1, args=(1, 2))
- assert output.getvalue() == \
- '************* Module 0123\n' \
- '0123:1: [C0301(line-too-long), ] ' \
- 'Line too long (1/2)\n'
+ linter.set_current_module("0123")
+ linter.add_message("line-too-long", line=1, args=(1, 2))
+ assert (
+ output.getvalue() == "************* Module 0123\n"
+ "0123:1: [C0301(line-too-long), ] "
+ "Line too long (1/2)\n"
+ )
def test_display_results_is_renamed():
diff --git a/pylint/test/unittest_utils.py b/pylint/test/unittest_utils.py
index e0c453dcd..7bdf502ba 100644
--- a/pylint/test/unittest_utils.py
+++ b/pylint/test/unittest_utils.py
@@ -40,47 +40,47 @@ class TestPyLintASTWalker(object):
def __init__(self):
self.called = set()
- @check_messages('first-message')
+ @check_messages("first-message")
def visit_module(self, module):
- self.called.add('module')
+ self.called.add("module")
- @check_messages('second-message')
+ @check_messages("second-message")
def visit_call(self, module):
raise NotImplementedError
- @check_messages('second-message', 'third-message')
+ @check_messages("second-message", "third-message")
def visit_assignname(self, module):
- self.called.add('assignname')
+ self.called.add("assignname")
- @check_messages('second-message')
+ @check_messages("second-message")
def leave_assignname(self, module):
raise NotImplementedError
def test_check_messages(self):
- linter = self.MockLinter({'first-message': True,
- 'second-message': False,
- 'third-message': True})
+ linter = self.MockLinter(
+ {"first-message": True, "second-message": False, "third-message": True}
+ )
walker = utils.PyLintASTWalker(linter)
checker = self.Checker()
walker.add_checker(checker)
walker.walk(astroid.parse("x = func()"))
- assert {'module', 'assignname'} == checker.called
+ assert {"module", "assignname"} == checker.called
def test_deprecated_methods(self):
class Checker(object):
def __init__(self):
self.called = False
- @check_messages('first-message')
+ @check_messages("first-message")
def visit_assname(self, node):
self.called = True
- linter = self.MockLinter({'first-message': True})
+ linter = self.MockLinter({"first-message": True})
walker = utils.PyLintASTWalker(linter)
checker = Checker()
walker.add_checker(checker)
with warnings.catch_warnings(record=True):
- warnings.simplefilter('always')
+ warnings.simplefilter("always")
walker.walk(astroid.parse("x = 1"))
assert not checker.called
@@ -103,52 +103,112 @@ def store():
return utils.MessagesStore()
-@pytest.mark.parametrize("messages,expected", [
- ({'W1234': ('message one', 'msg-symbol-one', 'msg description'),
- 'W4321': ('message two', 'msg-symbol-two', 'msg description')},
- r"Inconsistent checker part in message id 'W4321' (expected 'x12xx' because we already had ['W1234'])."),
-
- ({'W1233': ('message two', 'msg-symbol-two', 'msg description',
- {'old_names': [('W1234', 'old-symbol')]}),
- 'W1234': ('message one', 'msg-symbol-one', 'msg description')},
- "Message id 'W1234' cannot have both 'old-symbol' and 'msg-symbol-one' as symbolic name."),
-
- ({'W1234': ('message one', 'msg-symbol-one', 'msg description'),
- 'W1235': ('message two', 'msg-symbol-two', 'msg description',
- {'old_names': [('W1234', 'old-symbol')]})},
- "Message id 'W1234' cannot have both 'msg-symbol-one' and 'old-symbol' as symbolic name."),
-
- ({'W1234': ('message one', 'msg-symbol-one', 'msg description',
- {'old_names': [('W1201', 'old-symbol-one')]}),
- 'W1235': ('message two', 'msg-symbol-two', 'msg description',
- {'old_names': [('W1201', 'old-symbol-two')]})},
- "Message id 'W1201' cannot have both 'old-symbol-one' and 'old-symbol-two' as symbolic name."),
-
- ({'W1234': ('message one', 'msg-symbol', 'msg description'),
- 'W1235': ('message two', 'msg-symbol', 'msg description')},
- "Message symbol 'msg-symbol' cannot be used for 'W1234' and 'W1235' at the same time."),
-
- ({'W1233': ('message two', 'msg-symbol-two', 'msg description',
- {'old_names': [('W1230', 'msg-symbol-one')]}),
- 'W1234': ('message one', 'msg-symbol-one', 'msg description')},
- "Message symbol 'msg-symbol-one' cannot be used for 'W1230' and 'W1234' at the same time."),
-
- ({'W1234': ('message one', 'msg-symbol-one', 'msg description'),
- 'W1235': ('message two', 'msg-symbol-two', 'msg description',
- {'old_names': [('W1230', 'msg-symbol-one')]})},
- "Message symbol 'msg-symbol-one' cannot be used for 'W1234' and 'W1235' at the same time."),
-
- ({'W1234': ('message one', 'msg-symbol-one', 'msg description',
- {'old_names': [('W1230', 'old-symbol-one')]}),
- 'W1235': ('message two', 'msg-symbol-two', 'msg description',
- {'old_names': [('W1231', 'old-symbol-one')]})},
- "Message symbol 'old-symbol-one' cannot be used for 'W1230' and 'W1235' at the same time."),
-
-])
+@pytest.mark.parametrize(
+ "messages,expected",
+ [
+ (
+ {
+ "W1234": ("message one", "msg-symbol-one", "msg description"),
+ "W4321": ("message two", "msg-symbol-two", "msg description"),
+ },
+ r"Inconsistent checker part in message id 'W4321' (expected 'x12xx' because we already had ['W1234']).",
+ ),
+ (
+ {
+ "W1233": (
+ "message two",
+ "msg-symbol-two",
+ "msg description",
+ {"old_names": [("W1234", "old-symbol")]},
+ ),
+ "W1234": ("message one", "msg-symbol-one", "msg description"),
+ },
+ "Message id 'W1234' cannot have both 'old-symbol' and 'msg-symbol-one' as symbolic name.",
+ ),
+ (
+ {
+ "W1234": ("message one", "msg-symbol-one", "msg description"),
+ "W1235": (
+ "message two",
+ "msg-symbol-two",
+ "msg description",
+ {"old_names": [("W1234", "old-symbol")]},
+ ),
+ },
+ "Message id 'W1234' cannot have both 'msg-symbol-one' and 'old-symbol' as symbolic name.",
+ ),
+ (
+ {
+ "W1234": (
+ "message one",
+ "msg-symbol-one",
+ "msg description",
+ {"old_names": [("W1201", "old-symbol-one")]},
+ ),
+ "W1235": (
+ "message two",
+ "msg-symbol-two",
+ "msg description",
+ {"old_names": [("W1201", "old-symbol-two")]},
+ ),
+ },
+ "Message id 'W1201' cannot have both 'old-symbol-one' and 'old-symbol-two' as symbolic name.",
+ ),
+ (
+ {
+ "W1234": ("message one", "msg-symbol", "msg description"),
+ "W1235": ("message two", "msg-symbol", "msg description"),
+ },
+ "Message symbol 'msg-symbol' cannot be used for 'W1234' and 'W1235' at the same time.",
+ ),
+ (
+ {
+ "W1233": (
+ "message two",
+ "msg-symbol-two",
+ "msg description",
+ {"old_names": [("W1230", "msg-symbol-one")]},
+ ),
+ "W1234": ("message one", "msg-symbol-one", "msg description"),
+ },
+ "Message symbol 'msg-symbol-one' cannot be used for 'W1230' and 'W1234' at the same time.",
+ ),
+ (
+ {
+ "W1234": ("message one", "msg-symbol-one", "msg description"),
+ "W1235": (
+ "message two",
+ "msg-symbol-two",
+ "msg description",
+ {"old_names": [("W1230", "msg-symbol-one")]},
+ ),
+ },
+ "Message symbol 'msg-symbol-one' cannot be used for 'W1234' and 'W1235' at the same time.",
+ ),
+ (
+ {
+ "W1234": (
+ "message one",
+ "msg-symbol-one",
+ "msg description",
+ {"old_names": [("W1230", "old-symbol-one")]},
+ ),
+ "W1235": (
+ "message two",
+ "msg-symbol-two",
+ "msg description",
+ {"old_names": [("W1231", "old-symbol-one")]},
+ ),
+ },
+ "Message symbol 'old-symbol-one' cannot be used for 'W1230' and 'W1235' at the same time.",
+ ),
+ ],
+)
def test_register_error(store, messages, expected):
class Checker(object):
- name = 'checker'
+ name = "checker"
msgs = messages
+
with pytest.raises(InvalidMessageError) as cm:
store.register_messages(Checker())
assert str(cm.value) == expected
@@ -156,32 +216,31 @@ def test_register_error(store, messages, expected):
def test_register_error_new_id_duplicate_of_new(store):
class CheckerOne(object):
- name = 'checker_one'
- msgs = {
- 'W1234': ('message one', 'msg-symbol-one', 'msg description.'),
- }
+ name = "checker_one"
+ msgs = {"W1234": ("message one", "msg-symbol-one", "msg description.")}
class CheckerTwo(object):
- name = 'checker_two'
- msgs = {
- 'W1234': ('message two', 'msg-symbol-two', 'another msg description.'),
- }
+ name = "checker_two"
+ msgs = {"W1234": ("message two", "msg-symbol-two", "another msg description.")}
store.register_messages(CheckerOne())
test_register_error(
- store, {'W1234': ('message two', 'msg-symbol-two', 'another msg description.')},
- "Message id 'W1234' cannot have both 'msg-symbol-one' and 'msg-symbol-two' as symbolic name."
+ store,
+ {"W1234": ("message two", "msg-symbol-two", "another msg description.")},
+ "Message id 'W1234' cannot have both 'msg-symbol-one' and 'msg-symbol-two' as symbolic name.",
)
-@pytest.mark.parametrize("msgid,expected", [
- ("Q1234", "Bad message type Q in 'Q1234'"),
- ("W12345", "Invalid message id 'W12345'"),
-])
+@pytest.mark.parametrize(
+ "msgid,expected",
+ [
+ ("Q1234", "Bad message type Q in 'Q1234'"),
+ ("W12345", "Invalid message id 'W12345'"),
+ ],
+)
def test_create_invalid_message_type(msgid, expected):
with pytest.raises(InvalidMessageError) as cm:
- utils.MessageDefinition('checker', msgid,
- 'msg', 'descr', 'symbol', 'scope')
+ utils.MessageDefinition("checker", msgid, "msg", "descr", "symbol", "scope")
assert str(cm.value) == expected
@@ -189,98 +248,107 @@ def test_decoding_stream_unknown_encoding():
"""decoding_stream should fall back to *some* decoding when given an
unknown encoding.
"""
- binary_io = io.BytesIO(b'foo\nbar')
- stream = utils.decoding_stream(binary_io, 'garbage-encoding')
+ binary_io = io.BytesIO(b"foo\nbar")
+ stream = utils.decoding_stream(binary_io, "garbage-encoding")
# should still act like a StreamReader
ret = stream.readlines()
- assert ret == ['foo\n', 'bar']
+ assert ret == ["foo\n", "bar"]
def test_decoding_stream_known_encoding():
- binary_io = io.BytesIO('€'.encode('cp1252'))
- stream = utils.decoding_stream(binary_io, 'cp1252')
- assert stream.read() == '€'
+ binary_io = io.BytesIO("€".encode("cp1252"))
+ stream = utils.decoding_stream(binary_io, "cp1252")
+ assert stream.read() == "€"
-class TestGetNodeLastLineno:
+class TestGetNodeLastLineno:
def test_get_node_last_lineno_simple(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
pass
- """)
+ """
+ )
assert get_node_last_lineno(node) == 2
-
def test_get_node_last_lineno_if_simple(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
if True:
print(1)
pass
- """)
+ """
+ )
assert get_node_last_lineno(node) == 4
-
def test_get_node_last_lineno_if_elseif_else(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
if True:
print(1)
elif False:
print(2)
else:
print(3)
- """)
+ """
+ )
assert get_node_last_lineno(node) == 7
-
def test_get_node_last_lineno_while(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
while True:
print(1)
- """)
+ """
+ )
assert get_node_last_lineno(node) == 3
-
def test_get_node_last_lineno_while_else(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
while True:
print(1)
else:
print(2)
- """)
+ """
+ )
assert get_node_last_lineno(node) == 5
-
def test_get_node_last_lineno_for(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
for x in range(0, 5):
print(1)
- """)
+ """
+ )
assert get_node_last_lineno(node) == 3
-
def test_get_node_last_lineno_for_else(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
for x in range(0, 5):
print(1)
else:
print(2)
- """)
+ """
+ )
assert get_node_last_lineno(node) == 5
-
def test_get_node_last_lineno_try(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
try:
print(1)
except ValueError:
print(2)
except Exception:
print(3)
- """)
+ """
+ )
assert get_node_last_lineno(node) == 7
-
def test_get_node_last_lineno_try_except_else(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
try:
print(1)
except Exception:
@@ -288,24 +356,26 @@ class TestGetNodeLastLineno:
print(3)
else:
print(4)
- """)
+ """
+ )
assert get_node_last_lineno(node) == 8
-
def test_get_node_last_lineno_try_except_finally(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
try:
print(1)
except Exception:
print(2)
finally:
print(4)
- """)
+ """
+ )
assert get_node_last_lineno(node) == 7
-
def test_get_node_last_lineno_try_except_else_finally(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
try:
print(1)
except Exception:
@@ -314,39 +384,44 @@ class TestGetNodeLastLineno:
print(3)
finally:
print(4)
- """)
+ """
+ )
assert get_node_last_lineno(node) == 9
-
def test_get_node_last_lineno_with(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
with x as y:
print(1)
pass
- """)
+ """
+ )
assert get_node_last_lineno(node) == 4
-
def test_get_node_last_lineno_method(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
def x(a, b):
print(a, b)
pass
- """)
+ """
+ )
assert get_node_last_lineno(node) == 4
-
def test_get_node_last_lineno_decorator(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
@decor()
def x(a, b):
print(a, b)
pass
- """)
+ """
+ )
assert get_node_last_lineno(node) == 5
def test_get_node_last_lineno_class(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class C(object):
CONST = True
@@ -356,12 +431,13 @@ class TestGetNodeLastLineno:
def y(self):
pass
pass
- """)
+ """
+ )
assert get_node_last_lineno(node) == 10
-
def test_get_node_last_lineno_combined(self):
- node = astroid.extract_node("""
+ node = astroid.extract_node(
+ """
class C(object):
CONST = True
@@ -372,5 +448,6 @@ class TestGetNodeLastLineno:
pass
finally:
pass
- """)
+ """
+ )
assert get_node_last_lineno(node) == 11
diff --git a/pylint/testutils.py b/pylint/testutils.py
index 0ed4b96a4..f22b897e8 100644
--- a/pylint/testutils.py
+++ b/pylint/testutils.py
@@ -44,8 +44,8 @@ from pylint.lint import PyLinter
# Utils
-SYS_VERS_STR = '%d%d%d' % sys.version_info[:3]
-TITLE_UNDERLINES = ['', '=', '-', '.']
+SYS_VERS_STR = "%d%d%d" % sys.version_info[:3]
+TITLE_UNDERLINES = ["", "=", "-", "."]
PREFIX = abspath(dirname(__file__))
PY3K = sys.version_info[0] == 3
@@ -62,28 +62,28 @@ def _get_tests_info(input_dir, msg_dir, prefix, suffix):
lower versions -> message with highest num
"""
result = []
- for fname in glob(join(input_dir, prefix + '*' + suffix)):
+ for fname in glob(join(input_dir, prefix + "*" + suffix)):
infile = basename(fname)
fbase = splitext(infile)[0]
# filter input files :
- pyrestr = fbase.rsplit('_py', 1)[-1] # like _26 or 26
- if pyrestr.isdigit(): # '24', '25'...
+ pyrestr = fbase.rsplit("_py", 1)[-1] # like _26 or 26
+ if pyrestr.isdigit(): # '24', '25'...
if SYS_VERS_STR < pyrestr:
continue
- if pyrestr.startswith('_') and pyrestr[1:].isdigit():
+ if pyrestr.startswith("_") and pyrestr[1:].isdigit():
# skip test for higher python versions
if SYS_VERS_STR >= pyrestr[1:]:
continue
- messages = glob(join(msg_dir, fbase + '*.txt'))
+ messages = glob(join(msg_dir, fbase + "*.txt"))
# the last one will be without ext, i.e. for all or upper versions:
if messages:
for outfile in sorted(messages, reverse=True):
- py_rest = outfile.rsplit('_py', 1)[-1][:-4]
+ py_rest = outfile.rsplit("_py", 1)[-1][:-4]
if py_rest.isdigit() and SYS_VERS_STR >= py_rest:
break
else:
# This will provide an error message indicating the missing filename.
- outfile = join(msg_dir, fbase + '.txt')
+ outfile = join(msg_dir, fbase + ".txt")
result.append((infile, outfile))
return result
@@ -93,7 +93,7 @@ class TestReporter(BaseReporter):
__implements__ = IReporter
- def __init__(self): # pylint: disable=super-init-not-called
+ def __init__(self): # pylint: disable=super-init-not-called
self.message_ids = {}
self.reset()
@@ -111,13 +111,13 @@ class TestReporter(BaseReporter):
msg = msg.msg
self.message_ids[msg_id] = 1
if obj:
- obj = ':%s' % obj
+ obj = ":%s" % obj
sigle = msg_id[0]
- if PY3K and linesep != '\n':
+ if PY3K and linesep != "\n":
# 2to3 writes os.linesep instead of using
# the previosly used line separators
- msg = msg.replace('\r\n', '\n')
- self.messages.append('%s:%3s%s: %s' % (sigle, line, obj, msg))
+ msg = msg.replace("\r\n", "\n")
+ self.messages.append("%s:%3s%s: %s" % (sigle, line, obj, msg))
def finalize(self):
self.messages.sort()
@@ -130,6 +130,7 @@ class TestReporter(BaseReporter):
# pylint: disable=unused-argument
def on_set_current_module(self, module, filepath):
pass
+
# pylint: enable=unused-argument
def display_reports(self, layout):
@@ -139,7 +140,6 @@ class TestReporter(BaseReporter):
class MinimalTestReporter(BaseReporter):
-
def handle_message(self, msg):
self.messages.append(msg)
@@ -149,8 +149,9 @@ class MinimalTestReporter(BaseReporter):
_display = None
-class Message(collections.namedtuple('Message',
- ['msg_id', 'line', 'node', 'args', 'confidence'])):
+class Message(
+ collections.namedtuple("Message", ["msg_id", "line", "node", "args", "confidence"])
+):
def __new__(cls, msg_id, line=None, node=None, args=None, confidence=None):
return tuple.__new__(cls, (msg_id, line, node, args, confidence))
@@ -166,6 +167,7 @@ class Message(collections.namedtuple('Message',
class UnittestLinter:
"""A fake linter class to capture checker messages."""
+
# pylint: disable=unused-argument, no-self-use
def __init__(self):
@@ -178,8 +180,9 @@ class UnittestLinter:
finally:
self._messages = []
- def add_message(self, msg_id, line=None, node=None, args=None, confidence=None,
- col_offset=None):
+ def add_message(
+ self, msg_id, line=None, node=None, args=None, confidence=None, col_offset=None
+ ):
# Do not test col_offset for now since changing Message breaks everything
self._messages.append(Message(msg_id, line, node, args, confidence))
@@ -195,8 +198,10 @@ class UnittestLinter:
def options_providers(self):
return linter.options_providers
+
def set_config(**kwargs):
"""Decorator for setting config values on a checker."""
+
def _wrapper(fun):
@functools.wraps(fun)
def _forward(self):
@@ -208,17 +213,19 @@ def set_config(**kwargs):
fun(self)
return _forward
+
return _wrapper
class CheckerTestCase:
"""A base testcase class for unit testing individual checker classes."""
+
CHECKER_CLASS = None
CONFIG = {}
def setup_method(self):
self.linter = UnittestLinter()
- self.checker = self.CHECKER_CLASS(self.linter) # pylint: disable=not-callable
+ self.checker = self.CHECKER_CLASS(self.linter) # pylint: disable=not-callable
for key, value in self.CONFIG.items():
setattr(self.checker.config, key, value)
self.checker.open()
@@ -239,9 +246,10 @@ class CheckerTestCase:
"""
yield
got = self.linter.release_messages()
- msg = ('Expected messages did not match actual.\n'
- 'Expected:\n%s\nGot:\n%s' % ('\n'.join(repr(m) for m in messages),
- '\n'.join(repr(m) for m in got)))
+ msg = "Expected messages did not match actual.\n" "Expected:\n%s\nGot:\n%s" % (
+ "\n".join(repr(m) for m in messages),
+ "\n".join(repr(m) for m in got),
+ )
assert list(messages) == got, msg
def walk(self, node):
@@ -278,7 +286,7 @@ def _create_tempfile(content=None):
if content:
if sys.version_info >= (3, 0):
# erff
- os.write(file_handle, bytes(content, 'ascii'))
+ os.write(file_handle, bytes(content, "ascii"))
else:
os.write(file_handle, content)
try:
diff --git a/pylint/utils.py b/pylint/utils.py
index 49216b675..7c7879cae 100644
--- a/pylint/utils.py
+++ b/pylint/utils.py
@@ -62,25 +62,18 @@ from pylint.exceptions import InvalidMessageError, UnknownMessageError, EmptyRep
MSG_TYPES = {
- 'I' : 'info',
- 'C' : 'convention',
- 'R' : 'refactor',
- 'W' : 'warning',
- 'E' : 'error',
- 'F' : 'fatal'
- }
+ "I": "info",
+ "C": "convention",
+ "R": "refactor",
+ "W": "warning",
+ "E": "error",
+ "F": "fatal",
+}
MSG_TYPES_LONG = {v: k for k, v in MSG_TYPES.items()}
-MSG_TYPES_STATUS = {
- 'I' : 0,
- 'C' : 16,
- 'R' : 8,
- 'W' : 4,
- 'E' : 2,
- 'F' : 1
- }
+MSG_TYPES_STATUS = {"I": 0, "C": 16, "R": 8, "W": 4, "E": 2, "F": 1}
-_MSG_ORDER = 'EWRCIF'
+_MSG_ORDER = "EWRCIF"
MSG_STATE_SCOPE_CONFIG = 0
MSG_STATE_SCOPE_MODULE = 1
MSG_STATE_CONFIDENCE = 2
@@ -88,27 +81,50 @@ MSG_STATE_CONFIDENCE = 2
# Allow stopping after the first semicolon encountered,
# so that an option can be continued with the reasons
# why it is active or disabled.
-OPTION_RGX = re.compile(r'\s*#.*\bpylint:\s*([^;]+);{0,1}')
+OPTION_RGX = re.compile(r"\s*#.*\bpylint:\s*([^;]+);{0,1}")
# The line/node distinction does not apply to fatal errors and reports.
-_SCOPE_EXEMPT = 'FR'
+_SCOPE_EXEMPT = "FR"
+
class WarningScope:
- LINE = 'line-based-msg'
- NODE = 'node-based-msg'
+ LINE = "line-based-msg"
+ NODE = "node-based-msg"
+
_MsgBase = collections.namedtuple(
- '_MsgBase',
- ['msg_id', 'symbol', 'msg', 'C', 'category', 'confidence',
- 'abspath', 'path', 'module', 'obj', 'line', 'column'])
+ "_MsgBase",
+ [
+ "msg_id",
+ "symbol",
+ "msg",
+ "C",
+ "category",
+ "confidence",
+ "abspath",
+ "path",
+ "module",
+ "obj",
+ "line",
+ "column",
+ ],
+)
class Message(_MsgBase):
"""This class represent a message to be issued by the reporters"""
+
def __new__(cls, msg_id, symbol, location, msg, confidence):
return _MsgBase.__new__(
- cls, msg_id, symbol, msg, msg_id[0], MSG_TYPES[msg_id[0]],
- confidence, *location)
+ cls,
+ msg_id,
+ symbol,
+ msg,
+ msg_id[0],
+ MSG_TYPES[msg_id[0]],
+ confidence,
+ *location
+ )
def format(self, template):
"""Format the message according to the given template.
@@ -124,18 +140,19 @@ class Message(_MsgBase):
def get_module_and_frameid(node):
"""return the module name and the frame id in the module"""
frame = node.frame()
- module, obj = '', []
+ module, obj = "", []
while frame:
if isinstance(frame, Module):
module = frame.name
else:
- obj.append(getattr(frame, 'name', '<lambda>'))
+ obj.append(getattr(frame, "name", "<lambda>"))
try:
frame = frame.parent.frame()
except AttributeError:
frame = None
obj.reverse()
- return module, '.'.join(obj)
+ return module, ".".join(obj)
+
def category_id(cid):
cid = cid.upper()
@@ -143,14 +160,16 @@ def category_id(cid):
return cid
return MSG_TYPES_LONG.get(cid)
+
def safe_decode(line, encoding, *args, **kwargs):
- '''return decoded line from encoding or decode with default encoding'''
+ """return decoded line from encoding or decode with default encoding"""
try:
return line.decode(encoding or sys.getdefaultencoding(), *args, **kwargs)
except LookupError:
return line.decode(sys.getdefaultencoding(), *args, **kwargs)
-def decoding_stream(stream, encoding, errors='strict'):
+
+def decoding_stream(stream, encoding, errors="strict"):
try:
reader_cls = codecs.getreader(encoding or sys.getdefaultencoding())
except LookupError:
@@ -163,6 +182,7 @@ def tokenize_module(module):
readline = stream.readline
return list(tokenize.tokenize(readline))
+
def build_message_def(checker, msgid, msg_tuple):
if implements(checker, (IRawChecker, ITokenChecker)):
default_scope = WarningScope.LINE
@@ -177,22 +197,34 @@ def build_message_def(checker, msgid, msg_tuple):
# messages should have a symbol, but for backward compatibility
# they may not.
(msg, descr) = msg_tuple
- warnings.warn("[pylint 0.26] description of message %s doesn't include "
- "a symbolic name" % msgid, DeprecationWarning)
+ warnings.warn(
+ "[pylint 0.26] description of message %s doesn't include "
+ "a symbolic name" % msgid,
+ DeprecationWarning,
+ )
symbol = None
- options.setdefault('scope', default_scope)
+ options.setdefault("scope", default_scope)
return MessageDefinition(checker, msgid, msg, descr, symbol, **options)
class MessageDefinition:
- def __init__(self, checker, msgid, msg, descr, symbol, scope,
- minversion=None, maxversion=None, old_names=None):
+ def __init__(
+ self,
+ checker,
+ msgid,
+ msg,
+ descr,
+ symbol,
+ scope,
+ minversion=None,
+ maxversion=None,
+ old_names=None,
+ ):
self.checker = checker
if len(msgid) != 5:
- raise InvalidMessageError('Invalid message id %r' % msgid)
+ raise InvalidMessageError("Invalid message id %r" % msgid)
if not msgid[0] in MSG_TYPES:
- raise InvalidMessageError(
- 'Bad message type %s in %r' % (msgid[0], msgid))
+ raise InvalidMessageError("Bad message type %s in %r" % (msgid[0], msgid))
self.msgid = msgid
self.msg = msg
self.descr = descr
@@ -217,37 +249,37 @@ class MessageDefinition:
"""return the help string for the given message id"""
desc = self.descr
if checkerref:
- desc += ' This message belongs to the %s checker.' % \
- self.checker.name
+ desc += " This message belongs to the %s checker." % self.checker.name
title = self.msg
if self.symbol:
- msgid = '%s (%s)' % (self.symbol, self.msgid)
+ msgid = "%s (%s)" % (self.symbol, self.msgid)
else:
msgid = self.msgid
if self.minversion or self.maxversion:
restr = []
if self.minversion:
- restr.append('< %s' % '.'.join([str(n) for n in self.minversion]))
+ restr.append("< %s" % ".".join([str(n) for n in self.minversion]))
if self.maxversion:
- restr.append('>= %s' % '.'.join([str(n) for n in self.maxversion]))
- restr = ' or '.join(restr)
+ restr.append(">= %s" % ".".join([str(n) for n in self.maxversion]))
+ restr = " or ".join(restr)
if checkerref:
desc += " It can't be emitted when using Python %s." % restr
else:
desc += " This message can't be emitted when using Python %s." % restr
- desc = _normalize_text(' '.join(desc.split()), indent=' ')
- if title != '%s':
+ desc = _normalize_text(" ".join(desc.split()), indent=" ")
+ if title != "%s":
title = title.splitlines()[0]
- return ':%s: *%s*\n%s' % (msgid, title.rstrip(" "), desc)
- return ':%s:\n%s' % (msgid, desc)
+ return ":%s: *%s*\n%s" % (msgid, title.rstrip(" "), desc)
+ return ":%s:\n%s" % (msgid, desc)
class MessagesHandlerMixIn:
"""a mix-in class containing all the messages related methods for the main
lint class
"""
- __by_id_managed_msgs = [] # type: ignore
+
+ __by_id_managed_msgs = [] # type: ignore
def __init__(self):
self._msgs_state = {}
@@ -273,31 +305,36 @@ class MessagesHandlerMixIn:
msg = self.msgs_store.get_message_definition(msgid)
if msgid == msg.msgid:
MessagesHandlerMixIn.__by_id_managed_msgs.append(
- (self.current_name, msg.msgid, msg.symbol, line, is_disabled))
+ (self.current_name, msg.msgid, msg.symbol, line, is_disabled)
+ )
except UnknownMessageError:
pass
- def disable(self, msgid, scope='package', line=None, ignore_unknown=False):
+ def disable(self, msgid, scope="package", line=None, ignore_unknown=False):
"""don't output message of the given id"""
- self._set_msg_status(msgid, enable=False, scope=scope,
- line=line, ignore_unknown=ignore_unknown)
+ self._set_msg_status(
+ msgid, enable=False, scope=scope, line=line, ignore_unknown=ignore_unknown
+ )
self._register_by_id_managed_msg(msgid, line)
- def enable(self, msgid, scope='package', line=None, ignore_unknown=False):
+ def enable(self, msgid, scope="package", line=None, ignore_unknown=False):
"""reenable message of the given id"""
- self._set_msg_status(msgid, enable=True, scope=scope,
- line=line, ignore_unknown=ignore_unknown)
+ self._set_msg_status(
+ msgid, enable=True, scope=scope, line=line, ignore_unknown=ignore_unknown
+ )
self._register_by_id_managed_msg(msgid, line, is_disabled=False)
- def _set_msg_status(self, msgid, enable, scope='package', line=None, ignore_unknown=False):
- assert scope in ('package', 'module')
+ def _set_msg_status(
+ self, msgid, enable, scope="package", line=None, ignore_unknown=False
+ ):
+ assert scope in ("package", "module")
- if msgid == 'all':
+ if msgid == "all":
for _msgid in MSG_TYPES:
self._set_msg_status(_msgid, enable, scope, line, ignore_unknown)
if enable and not self._python3_porting_mode:
# Don't activate the python 3 porting checker if it wasn't activated explicitly.
- self.disable('python3')
+ self.disable("python3")
return
# msgid is a category?
@@ -317,7 +354,7 @@ class MessagesHandlerMixIn:
return
# msgid is report id?
- if msgid.lower().startswith('rp'):
+ if msgid.lower().startswith("rp"):
if enable:
self.enable_report(msgid)
else:
@@ -332,19 +369,24 @@ class MessagesHandlerMixIn:
return
raise
- if scope == 'module':
+ if scope == "module":
self.file_state.set_msg_status(msg, line, enable)
- if not enable and msg.symbol != 'locally-disabled':
- self.add_message('locally-disabled', line=line,
- args=(msg.symbol, msg.msgid))
+ if not enable and msg.symbol != "locally-disabled":
+ self.add_message(
+ "locally-disabled", line=line, args=(msg.symbol, msg.msgid)
+ )
else:
msgs = self._msgs_state
msgs[msg.msgid] = enable
# sync configuration object
- self.config.enable = [self._message_symbol(mid) for mid, val
- in sorted(msgs.items()) if val]
- self.config.disable = [self._message_symbol(mid) for mid, val
- in sorted(msgs.items()) if not val]
+ self.config.enable = [
+ self._message_symbol(mid) for mid, val in sorted(msgs.items()) if val
+ ]
+ self.config.disable = [
+ self._message_symbol(mid)
+ for mid, val in sorted(msgs.items())
+ if not val
+ ]
def _message_symbol(self, msgid):
"""Get the message symbol of the given message id
@@ -391,16 +433,23 @@ class MessagesHandlerMixIn:
except KeyError:
# Check if the message's line is after the maximum line existing in ast tree.
# This line won't appear in the ast tree and won't be referred in
- # self.file_state._module_msgs_state
+ #  self.file_state._module_msgs_state
# This happens for example with a commented line at the end of a module.
max_line_number = self.file_state.get_effective_max_line_number()
- if (max_line_number and line > max_line_number):
+ if max_line_number and line > max_line_number:
fallback = msgid not in self.file_state._raw_module_msgs_state
return self._msgs_state.get(msgid, fallback)
return self._msgs_state.get(msgid, True)
- def add_message(self, msg_descr, line=None, node=None, args=None, confidence=UNDEFINED,
- col_offset=None):
+ def add_message(
+ self,
+ msg_descr,
+ line=None,
+ node=None,
+ args=None,
+ confidence=UNDEFINED,
+ col_offset=None,
+ ):
"""Adds a message given by ID or name.
If provided, the message string is expanded using args.
@@ -419,53 +468,69 @@ class MessagesHandlerMixIn:
if msg_info.scope == WarningScope.LINE:
if line is None:
raise InvalidMessageError(
- 'Message %s must provide line, got None' % msgid)
+ "Message %s must provide line, got None" % msgid
+ )
if node is not None:
raise InvalidMessageError(
- 'Message %s must only provide line, '
- 'got line=%s, node=%s' % (msgid, line, node))
+ "Message %s must only provide line, "
+ "got line=%s, node=%s" % (msgid, line, node)
+ )
elif msg_info.scope == WarningScope.NODE:
# Node-based warnings may provide an override line.
if node is None:
raise InvalidMessageError(
- 'Message %s must provide Node, got None' % msgid)
+ "Message %s must provide Node, got None" % msgid
+ )
if line is None and node is not None:
line = node.fromlineno
- if col_offset is None and hasattr(node, 'col_offset'):
- col_offset = node.col_offset # XXX measured in bytes for utf-8, divide by two for chars?
+ if col_offset is None and hasattr(node, "col_offset"):
+ col_offset = (
+ node.col_offset
+ ) # XXX measured in bytes for utf-8, divide by two for chars?
# should this message be displayed
if not self.is_message_enabled(msgid, line, confidence):
self.file_state.handle_ignored_message(
self.get_message_state_scope(msgid, line, confidence),
- msgid, line, node, args, confidence)
+ msgid,
+ line,
+ node,
+ args,
+ confidence,
+ )
return
# update stats
msg_cat = MSG_TYPES[msgid[0]]
self.msg_status |= MSG_TYPES_STATUS[msgid[0]]
self.stats[msg_cat] += 1
- self.stats['by_module'][self.current_name][msg_cat] += 1
+ self.stats["by_module"][self.current_name][msg_cat] += 1
try:
- self.stats['by_msg'][symbol] += 1
+ self.stats["by_msg"][symbol] += 1
except KeyError:
- self.stats['by_msg'][symbol] = 1
+ self.stats["by_msg"][symbol] = 1
# expand message ?
msg = msg_info.msg
if args:
msg %= args
# get module and object
if node is None:
- module, obj = self.current_name, ''
+ module, obj = self.current_name, ""
abspath = self.current_file
else:
module, obj = get_module_and_frameid(node)
abspath = node.root().file
- path = abspath.replace(self.reporter.path_strip_prefix, '', 1)
+ path = abspath.replace(self.reporter.path_strip_prefix, "", 1)
# add the message
self.reporter.handle_message(
- Message(msgid, symbol,
- (abspath, path, module, obj, line or 1, col_offset or 0), msg, confidence))
+ Message(
+ msgid,
+ symbol,
+ (abspath, path, module, obj, line or 1, col_offset or 0),
+ msg,
+ confidence,
+ )
+ )
def print_full_documentation(self, stream=None):
"""output a full documentation in ReST format"""
@@ -480,28 +545,28 @@ class MessagesHandlerMixIn:
by_checker = {}
for checker in self.get_checkers():
- if checker.name == 'master':
+ if checker.name == "master":
if checker.options:
for section, options in checker.options_by_section():
if section is None:
- title = 'General options'
+ title = "General options"
else:
- title = '%s options' % section.capitalize()
+ title = "%s options" % section.capitalize()
print(title, file=stream)
- print('~' * len(title), file=stream)
+ print("~" * len(title), file=stream)
_rest_format_section(stream, None, options)
print("", file=stream)
else:
name = checker.name
try:
- by_checker[name]['options'] += checker.options_and_values()
- by_checker[name]['msgs'].update(checker.msgs)
- by_checker[name]['reports'] += checker.reports
+ by_checker[name]["options"] += checker.options_and_values()
+ by_checker[name]["msgs"].update(checker.msgs)
+ by_checker[name]["reports"] += checker.reports
except KeyError:
by_checker[name] = {
- 'options': list(checker.options_and_values()),
- 'msgs': dict(checker.msgs),
- 'reports': list(checker.reports),
+ "options": list(checker.options_and_values()),
+ "msgs": dict(checker.msgs),
+ "reports": list(checker.reports),
}
print("Pylint checkers' options and switches", file=stream)
@@ -528,19 +593,19 @@ class MessagesHandlerMixIn:
if not stream:
stream = sys.stdout
- doc = info.get('doc')
- module = info.get('module')
- msgs = info.get('msgs')
- options = info.get('options')
- reports = info.get('reports')
+ doc = info.get("doc")
+ module = info.get("module")
+ msgs = info.get("msgs")
+ options = info.get("options")
+ reports = info.get("reports")
- checker_title = '%s checker' % (checker_name.replace("_", " ").title())
+ checker_title = "%s checker" % (checker_name.replace("_", " ").title())
if module:
# Provide anchor to link against
print(".. _%s:\n" % module, file=stream)
print(checker_title, file=stream)
- print('~' * len(checker_title), file=stream)
+ print("~" * len(checker_title), file=stream)
print("", file=stream)
if module:
print("This checker is provided by ``%s``." % module, file=stream)
@@ -548,35 +613,37 @@ class MessagesHandlerMixIn:
print("", file=stream)
if doc:
# Provide anchor to link against
- title = '{} Documentation'.format(checker_title)
+ title = "{} Documentation".format(checker_title)
print(title, file=stream)
- print('^' * len(title), file=stream)
+ print("^" * len(title), file=stream)
print(cleandoc(doc), file=stream)
print("", file=stream)
if options:
- title = '{} Options'.format(checker_title)
+ title = "{} Options".format(checker_title)
print(title, file=stream)
- print('^' * len(title), file=stream)
+ print("^" * len(title), file=stream)
_rest_format_section(stream, None, options)
print("", file=stream)
if msgs:
- title = '{} Messages'.format(checker_title)
+ title = "{} Messages".format(checker_title)
print(title, file=stream)
- print('^' * len(title), file=stream)
- for msgid, msg in sorted(msgs.items(),
- key=lambda kv: (_MSG_ORDER.index(kv[0][0]), kv[1])):
+ print("^" * len(title), file=stream)
+ for msgid, msg in sorted(
+ msgs.items(), key=lambda kv: (_MSG_ORDER.index(kv[0][0]), kv[1])
+ ):
msg = build_message_def(checker_name, msgid, msg)
print(msg.format_help(checkerref=False), file=stream)
print("", file=stream)
if reports:
- title = '{} Reports'.format(checker_title)
+ title = "{} Reports".format(checker_title)
print(title, file=stream)
- print('^' * len(title), file=stream)
+ print("^" * len(title), file=stream)
for report in reports:
- print(':%s: %s' % report[:2], file=stream)
+ print(":%s: %s" % report[:2], file=stream)
print("", file=stream)
print("", file=stream)
+
class FileState:
"""Hold internal state specific to the currently analyzed file"""
@@ -621,8 +688,10 @@ class FileState:
#
# this is necessary to disable locally messages applying to class /
# function using their fromlineno
- if (isinstance(node, (nodes.Module, nodes.ClassDef, nodes.FunctionDef))
- and node.body):
+ if (
+ isinstance(node, (nodes.Module, nodes.ClassDef, nodes.FunctionDef))
+ and node.body
+ ):
firstchildlineno = node.body[0].fromlineno
else:
firstchildlineno = last
@@ -640,11 +709,11 @@ class FileState:
else:
first_ = lineno
last_ = last
- for line in range(first_, last_+1):
+ for line in range(first_, last_ + 1):
# do not override existing entries
if line in self._module_msgs_state.get(msgid, ()):
continue
- if line in lines: # state change in the same block
+ if line in lines: # state change in the same block
state = lines[line]
original_lineno = line
if not state:
@@ -663,8 +732,9 @@ class FileState:
except KeyError:
self._module_msgs_state[msg.msgid] = {line: status}
- def handle_ignored_message(self, state_scope, msgid, line,
- node, args, confidence): # pylint: disable=unused-argument
+ def handle_ignored_message(
+ self, state_scope, msgid, line, node, args, confidence
+ ): # pylint: disable=unused-argument
"""Report an ignored message.
state_scope is either MSG_STATE_SCOPE_MODULE or MSG_STATE_SCOPE_CONFIG,
@@ -682,13 +752,16 @@ class FileState:
for warning, lines in self._raw_module_msgs_state.items():
for line, enable in lines.items():
if not enable and (warning, line) not in self._ignored_msgs:
- yield 'useless-suppression', line, \
- (msgs_store.get_msg_display_string(warning),)
+ yield "useless-suppression", line, (
+ msgs_store.get_msg_display_string(warning),
+ )
# don't use iteritems here, _ignored_msgs may be modified by add_message
for (warning, from_), lines in list(self._ignored_msgs.items()):
for line in lines:
- yield 'suppressed-message', line, \
- (msgs_store.get_msg_display_string(warning), from_)
+ yield "suppressed-message", line, (
+ msgs_store.get_msg_display_string(warning),
+ from_,
+ )
def get_effective_max_line_number(self):
return self._effective_max_line_number
@@ -794,7 +867,6 @@ class MessagesStore:
checker_id = message.msgid[1:3]
existing_ids.append(message.msgid)
-
def _register_alternative_name(self, msg, msgid, symbol):
"""helper for register_message()"""
self._check_id_and_symbol_consistency(msgid, symbol)
@@ -874,7 +946,9 @@ class MessagesStore:
:param str other_msgid: Other offending msgid
:raises InvalidMessageError: when a msgid is duplicated.
"""
- error_message = "Message symbol '{symbol}' cannot be used for ".format(symbol=symbol)
+ error_message = "Message symbol '{symbol}' cannot be used for ".format(
+ symbol=symbol
+ )
error_message += "'{other_msgid}' and '{msgid}' at the same time.".format(
other_msgid=other_msgid, msgid=msgid
)
@@ -896,7 +970,9 @@ class MessagesStore:
except KeyError:
pass
raise UnknownMessageError(
- 'No such message id {msgid_or_symbol}'.format(msgid_or_symbol=msgid_or_symbol)
+ "No such message id {msgid_or_symbol}".format(
+ msgid_or_symbol=msgid_or_symbol
+ )
)
def get_msg_display_string(self, msgid):
@@ -931,6 +1007,7 @@ class ReportsHandlerMixIn:
"""a mix-in class containing all the reports and stats manipulation
related methods for the main lint class
"""
+
def __init__(self):
self._reports = collections.defaultdict(list)
self._reports_state = {}
@@ -970,8 +1047,7 @@ class ReportsHandlerMixIn:
def make_reports(self, stats, old_stats):
"""render registered reports"""
- sect = Section('Report',
- '%s statements analysed.'% (self.stats['statement']))
+ sect = Section("Report", "%s statements analysed." % (self.stats["statement"]))
for checker in self.report_order():
for reportid, r_title, r_cb in self._reports[checker]:
if not self.report_is_enabled(reportid):
@@ -990,12 +1066,13 @@ class ReportsHandlerMixIn:
raise an AssertionError if there is a key conflict
"""
for key, value in kwargs.items():
- if key[-1] == '_':
+ if key[-1] == "_":
key = key[:-1]
assert key not in self.stats
self.stats[key] = value
return self.stats
+
def _basename_in_blacklist_re(base_name, black_list_re):
"""Determines if the basename is matched in a regex blacklist
@@ -1011,11 +1088,14 @@ def _basename_in_blacklist_re(base_name, black_list_re):
return True
return False
+
def _modpath_from_file(filename, is_namespace):
def _is_package_cb(path, parts):
return modutils.check_modpath_has_init(path, parts) or is_namespace
- return modutils.modpath_from_file_with_callback(filename, is_package_cb=_is_package_cb)
+ return modutils.modpath_from_file_with_callback(
+ filename, is_package_cb=_is_package_cb
+ )
def expand_modules(files_or_modules, black_list, black_list_re):
@@ -1032,28 +1112,28 @@ def expand_modules(files_or_modules, black_list, black_list_re):
if exists(something):
# this is a file or a directory
try:
- modname = '.'.join(modutils.modpath_from_file(something))
+ modname = ".".join(modutils.modpath_from_file(something))
except ImportError:
modname = splitext(basename(something))[0]
if isdir(something):
- filepath = join(something, '__init__.py')
+ filepath = join(something, "__init__.py")
else:
filepath = something
else:
# suppose it's a module or package
modname = something
try:
- filepath = modutils.file_from_modpath(modname.split('.'))
+ filepath = modutils.file_from_modpath(modname.split("."))
if filepath is None:
continue
except (ImportError, SyntaxError) as ex:
# FIXME p3k : the SyntaxError is a Python bug and should be
# removed as soon as possible http://bugs.python.org/issue10588
- errors.append({'key': 'fatal', 'mod': modname, 'ex': ex})
+ errors.append({"key": "fatal", "mod": modname, "ex": ex})
continue
filepath = normpath(filepath)
- modparts = (modname or something).split('.')
+ modparts = (modname or something).split(".")
try:
spec = modutils.file_info_from_modpath(modparts, path=sys.path)
@@ -1066,30 +1146,45 @@ def expand_modules(files_or_modules, black_list, black_list_re):
is_directory = modutils.is_directory(spec)
if not is_namespace:
- result.append({'path': filepath, 'name': modname, 'isarg': True,
- 'basepath': filepath, 'basename': modname})
-
- has_init = (not (modname.endswith('.__init__') or modname == '__init__')
- and basename(filepath) == '__init__.py')
+ result.append(
+ {
+ "path": filepath,
+ "name": modname,
+ "isarg": True,
+ "basepath": filepath,
+ "basename": modname,
+ }
+ )
+
+ has_init = (
+ not (modname.endswith(".__init__") or modname == "__init__")
+ and basename(filepath) == "__init__.py"
+ )
if has_init or is_namespace or is_directory:
- for subfilepath in modutils.get_module_files(dirname(filepath), black_list,
- list_all=is_namespace):
+ for subfilepath in modutils.get_module_files(
+ dirname(filepath), black_list, list_all=is_namespace
+ ):
if filepath == subfilepath:
continue
if _basename_in_blacklist_re(basename(subfilepath), black_list_re):
continue
modpath = _modpath_from_file(subfilepath, is_namespace)
- submodname = '.'.join(modpath)
- result.append({'path': subfilepath, 'name': submodname,
- 'isarg': False,
- 'basepath': filepath, 'basename': modname})
+ submodname = ".".join(modpath)
+ result.append(
+ {
+ "path": subfilepath,
+ "name": submodname,
+ "isarg": False,
+ "basepath": filepath,
+ "basename": modname,
+ }
+ )
return result, errors
class PyLintASTWalker:
-
def __init__(self, linter):
# callbacks per node types
self.nbstatements = 0
@@ -1098,7 +1193,7 @@ class PyLintASTWalker:
self.linter = linter
def _is_method_enabled(self, method):
- if not hasattr(method, 'checks_msgs'):
+ if not hasattr(method, "checks_msgs"):
return True
for msg_desc in method.checks_msgs:
if self.linter.is_message_enabled(msg_desc):
@@ -1114,21 +1209,21 @@ class PyLintASTWalker:
leaves = self.leave_events
for member in dir(checker):
cid = member[6:]
- if cid == 'default':
+ if cid == "default":
continue
- if member.startswith('visit_'):
+ if member.startswith("visit_"):
v_meth = getattr(checker, member)
# don't use visit_methods with no activated message:
if self._is_method_enabled(v_meth):
visits[cid].append(v_meth)
vcids.add(cid)
- elif member.startswith('leave_'):
+ elif member.startswith("leave_"):
l_meth = getattr(checker, member)
# don't use leave_methods with no activated message:
if self._is_method_enabled(l_meth):
leaves[cid].append(l_meth)
lcids.add(cid)
- visit_default = getattr(checker, 'visit_default', None)
+ visit_default = getattr(checker, "visit_default", None)
if visit_default:
for cls in nodes.ALL_NODE_CLASSES:
cid = cls.__name__.lower()
@@ -1161,7 +1256,8 @@ class PyLintASTWalker:
cb(astroid)
-PY_EXTS = ('.py', '.pyc', '.pyo', '.pyw', '.so', '.dll')
+PY_EXTS = (".py", ".pyc", ".pyo", ".pyw", ".so", ".dll")
+
def register_plugins(linter, directory):
"""load all module and package in the given directory, looking for a
@@ -1170,23 +1266,28 @@ def register_plugins(linter, directory):
imported = {}
for filename in os.listdir(directory):
base, extension = splitext(filename)
- if base in imported or base == '__pycache__':
+ if base in imported or base == "__pycache__":
continue
- if extension in PY_EXTS and base != '__init__' or (
- not extension and isdir(join(directory, base))):
+ if (
+ extension in PY_EXTS
+ and base != "__init__"
+ or (not extension and isdir(join(directory, base)))
+ ):
try:
module = modutils.load_module_from_file(join(directory, filename))
except ValueError:
# empty module name (usually emacs auto-save files)
continue
except ImportError as exc:
- print("Problem importing module %s: %s" % (filename, exc),
- file=sys.stderr)
+ print(
+ "Problem importing module %s: %s" % (filename, exc), file=sys.stderr
+ )
else:
- if hasattr(module, 'register'):
+ if hasattr(module, "register"):
module.register(linter)
imported[base] = 1
+
def get_global_option(checker, option, default=None):
""" Retrieve an option defined by the given *checker* or
by all known option providers.
@@ -1209,25 +1310,27 @@ def get_global_option(checker, option, default=None):
return default
-def deprecated_option(shortname=None, opt_type=None, help_msg=None, deprecation_msg=None):
- def _warn_deprecated(option, optname, *args): # pylint: disable=unused-argument
+def deprecated_option(
+ shortname=None, opt_type=None, help_msg=None, deprecation_msg=None
+):
+ def _warn_deprecated(option, optname, *args): # pylint: disable=unused-argument
if deprecation_msg:
sys.stderr.write(deprecation_msg % (optname,))
option = {
- 'help': help_msg,
- 'hide': True,
- 'type': opt_type,
- 'action': 'callback',
- 'callback': _warn_deprecated,
- 'deprecated': True
+ "help": help_msg,
+ "hide": True,
+ "type": opt_type,
+ "action": "callback",
+ "callback": _warn_deprecated,
+ "deprecated": True,
}
if shortname:
- option['shortname'] = shortname
+ option["shortname"] = shortname
return option
-def _splitstrip(string, sep=','):
+def _splitstrip(string, sep=","):
"""return a list of stripped string by splitting the string given as
argument on `sep` (',' by default). Empty string are discarded.
@@ -1261,17 +1364,20 @@ def _unquote(string):
"""
if not string:
return string
- if string[0] in '"\'':
+ if string[0] in "\"'":
string = string[1:]
- if string[-1] in '"\'':
+ if string[-1] in "\"'":
string = string[:-1]
return string
-def _normalize_text(text, line_len=80, indent=''):
+def _normalize_text(text, line_len=80, indent=""):
"""Wrap the text on the given line length."""
- return '\n'.join(textwrap.wrap(text, width=line_len, initial_indent=indent,
- subsequent_indent=indent))
+ return "\n".join(
+ textwrap.wrap(
+ text, width=line_len, initial_indent=indent, subsequent_indent=indent
+ )
+ )
def _check_csv(value):
@@ -1283,20 +1389,20 @@ def _check_csv(value):
def _comment(string):
"""return string as a comment"""
lines = [line.strip() for line in string.splitlines()]
- return '# ' + ('%s# ' % os.linesep).join(lines)
+ return "# " + ("%s# " % os.linesep).join(lines)
def _format_option_value(optdict, value):
"""return the user input's value from a 'compiled' value"""
if isinstance(value, (list, tuple)):
- value = ','.join(_format_option_value(optdict, item) for item in value)
+ value = ",".join(_format_option_value(optdict, item) for item in value)
elif isinstance(value, dict):
- value = ','.join('%s:%s' % (k, v) for k, v in value.items())
- elif hasattr(value, 'match'): # optdict.get('type') == 'regexp'
+ value = ",".join("%s:%s" % (k, v) for k, v in value.items())
+ elif hasattr(value, "match"): # optdict.get('type') == 'regexp'
# compiled regexp
value = value.pattern
- elif optdict.get('type') == 'yn':
- value = 'yes' if value else 'no'
+ elif optdict.get("type") == "yn":
+ value = "yes" if value else "no"
elif isinstance(value, str) and value.isspace():
value = "'%s'" % value
return value
@@ -1306,7 +1412,7 @@ def _ini_format_section(stream, section, options, doc=None):
"""format an options section using the INI format"""
if doc:
print(_comment(doc), file=stream)
- print('[%s]' % section, file=stream)
+ print("[%s]" % section, file=stream)
_ini_format(stream, options)
@@ -1314,24 +1420,24 @@ def _ini_format(stream, options):
"""format options using the INI format"""
for optname, optdict, value in options:
value = _format_option_value(optdict, value)
- help_opt = optdict.get('help')
+ help_opt = optdict.get("help")
if help_opt:
- help_opt = _normalize_text(help_opt, line_len=79, indent='# ')
+ help_opt = _normalize_text(help_opt, line_len=79, indent="# ")
print(file=stream)
print(help_opt, file=stream)
else:
print(file=stream)
if value is None:
- print('#%s=' % optname, file=stream)
+ print("#%s=" % optname, file=stream)
else:
value = str(value).strip()
- if re.match(r'^([\w-]+,)+[\w-]+$', str(value)):
- separator = '\n ' + ' ' * len(optname)
- value = separator.join(
- x + ',' for x in str(value).split(','))
+ if re.match(r"^([\w-]+,)+[\w-]+$", str(value)):
+ separator = "\n " + " " * len(optname)
+ value = separator.join(x + "," for x in str(value).split(","))
# remove trailing ',' from last element of the list
value = value[:-1]
- print('%s=%s' % (optname, value), file=stream)
+ print("%s=%s" % (optname, value), file=stream)
+
format_section = _ini_format_section
@@ -1339,17 +1445,17 @@ format_section = _ini_format_section
def _rest_format_section(stream, section, options, doc=None):
"""format an options section using as ReST formatted output"""
if section:
- print('%s\n%s' % (section, "'"*len(section)), file=stream)
+ print("%s\n%s" % (section, "'" * len(section)), file=stream)
if doc:
- print(_normalize_text(doc, line_len=79, indent=''), file=stream)
+ print(_normalize_text(doc, line_len=79, indent=""), file=stream)
print(file=stream)
for optname, optdict, value in options:
- help_opt = optdict.get('help')
- print(':%s:' % optname, file=stream)
+ help_opt = optdict.get("help")
+ print(":%s:" % optname, file=stream)
if help_opt:
- help_opt = _normalize_text(help_opt, line_len=79, indent=' ')
+ help_opt = _normalize_text(help_opt, line_len=79, indent=" ")
print(help_opt, file=stream)
if value:
value = str(_format_option_value(optdict, value))
print(file=stream)
- print(' Default: ``%s``' % value.replace("`` ", "```` ``"), file=stream)
+ print(" Default: ``%s``" % value.replace("`` ", "```` ``"), file=stream)
diff --git a/pylintrc b/pylintrc
index bbcfe911c..112277e0a 100644
--- a/pylintrc
+++ b/pylintrc
@@ -60,6 +60,8 @@ disable=
missing-docstring,
protected-access,
too-few-public-methods,
+ # handled by black
+ format
[REPORTS]