summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJacob Walls <jacobtylerwalls@gmail.com>2023-05-14 09:06:16 -0400
committerGitHub <noreply@github.com>2023-05-14 09:06:16 -0400
commit4e48d46b7239c2d7a70a05929fd49595d45cc29f (patch)
treeacec40ebd24109f8ef06ed8b93c0f940fa24ea9a
parentaed3c080388a8dc1d44c1a14a5ed243233f77c1c (diff)
downloadpylint-git-4e48d46b7239c2d7a70a05929fd49595d45cc29f.tar.gz
Load custom plugins when linting in parallel (#8683)
-rw-r--r--doc/user_guide/usage/run.rst8
-rw-r--r--doc/whatsnew/fragments/4874.bugfix5
-rw-r--r--pylint/lint/parallel.py5
-rw-r--r--pylint/lint/pylinter.py9
-rw-r--r--tests/test_check_parallel.py17
5 files changed, 36 insertions, 8 deletions
diff --git a/doc/user_guide/usage/run.rst b/doc/user_guide/usage/run.rst
index 7e6e1a830..5cb02e557 100644
--- a/doc/user_guide/usage/run.rst
+++ b/doc/user_guide/usage/run.rst
@@ -160,10 +160,10 @@ This will spawn 4 parallel Pylint sub-process, where each provided module will
be checked in parallel. Discovered problems by checkers are not displayed
immediately. They are shown just after checking a module is complete.
-There are some limitations in running checks in parallel in the current
-implementation. It is not possible to use custom plugins
-(i.e. ``--load-plugins`` option), nor it is not possible to use
-initialization hooks (i.e. the ``--init-hook`` option).
+There is one known limitation with running checks in parallel as currently
+implemented. Since the division of files into worker processes is indeterminate,
+checkers that depend on comparing multiple files (e.g. ``cyclic-import``
+and ``duplicate-code``) can produce indeterminate results.
Exit codes
----------
diff --git a/doc/whatsnew/fragments/4874.bugfix b/doc/whatsnew/fragments/4874.bugfix
new file mode 100644
index 000000000..9dcb34c1f
--- /dev/null
+++ b/doc/whatsnew/fragments/4874.bugfix
@@ -0,0 +1,5 @@
+``--jobs`` can now be used with ``--load-plugins``.
+
+This had regressed in astroid 2.5.0.
+
+Closes #4874
diff --git a/pylint/lint/parallel.py b/pylint/lint/parallel.py
index e57c7e8ab..72f770043 100644
--- a/pylint/lint/parallel.py
+++ b/pylint/lint/parallel.py
@@ -52,6 +52,11 @@ def _worker_initialize(
_worker_linter.set_reporter(reporters.CollectingReporter())
_worker_linter.open()
+ # Re-register dynamic plugins, since the pool does not have access to the
+ # astroid module that existed when the linter was pickled.
+ _worker_linter.load_plugin_modules(_worker_linter._dynamic_plugins, force=True)
+ _worker_linter.load_plugin_configuration()
+
if extra_packages_paths:
_augment_sys_path(extra_packages_paths)
diff --git a/pylint/lint/pylinter.py b/pylint/lint/pylinter.py
index ed607aca5..324c54b1b 100644
--- a/pylint/lint/pylinter.py
+++ b/pylint/lint/pylinter.py
@@ -13,7 +13,7 @@ import sys
import tokenize
import traceback
from collections import defaultdict
-from collections.abc import Callable, Iterator, Sequence
+from collections.abc import Callable, Iterable, Iterator, Sequence
from io import TextIOWrapper
from pathlib import Path
from re import Pattern
@@ -363,15 +363,18 @@ class PyLinter(
checkers.initialize(self)
reporters.initialize(self)
- def load_plugin_modules(self, modnames: list[str]) -> None:
+ def load_plugin_modules(self, modnames: Iterable[str], force: bool = False) -> None:
"""Check a list of pylint plugins modules, load and register them.
If a module cannot be loaded, never try to load it again and instead
store the error message for later use in ``load_plugin_configuration``
below.
+
+ If `force` is True (useful when multiprocessing), then the plugin is
+ reloaded regardless of whether an entry exists in self._dynamic_plugins.
"""
for modname in modnames:
- if modname in self._dynamic_plugins:
+ if modname in self._dynamic_plugins and not force:
continue
try:
module = astroid.modutils.load_module_from_name(modname)
diff --git a/tests/test_check_parallel.py b/tests/test_check_parallel.py
index 24f958406..d56502eaf 100644
--- a/tests/test_check_parallel.py
+++ b/tests/test_check_parallel.py
@@ -14,10 +14,11 @@ import sys
from concurrent.futures import ProcessPoolExecutor
from concurrent.futures.process import BrokenProcessPool
from pickle import PickleError
+from typing import TYPE_CHECKING
+from unittest.mock import patch
import dill
import pytest
-from astroid import nodes
import pylint.interfaces
import pylint.lint.parallel
@@ -30,6 +31,9 @@ from pylint.testutils import GenericTestReporter as Reporter
from pylint.typing import FileItem
from pylint.utils import LinterStats, ModuleStats
+if TYPE_CHECKING:
+ from astroid import nodes
+
def _gen_file_data(idx: int = 0) -> FileItem:
"""Generates a file to use as a stream."""
@@ -182,6 +186,17 @@ class TestCheckParallelFramework:
)
assert "fake-path" in sys.path
+ def test_worker_initialize_reregisters_custom_plugins(self) -> None:
+ linter = PyLinter(reporter=Reporter())
+ linter.load_plugin_modules(["pylint.extensions.private_import"])
+
+ pickled = dill.dumps(linter)
+ with patch(
+ "pylint.extensions.private_import.register", side_effect=AssertionError
+ ):
+ with pytest.raises(AssertionError):
+ worker_initialize(linter=pickled)
+
@pytest.mark.needs_two_cores
def test_worker_initialize_pickling(self) -> None:
"""Test that we can pickle objects that standard pickling in multiprocessing can't.