author     Pierre Sassoulas <pierre.sassoulas@gmail.com>  2020-09-10 22:53:36 +0200
committer  Pierre Sassoulas <pierre.sassoulas@gmail.com>  2020-09-10 23:23:11 +0200
commit     74d5bcb352141bc742488e74d1b37c1cb381a7ce (patch)
tree       fb0491d8c46eb85b8557e1efc2deee66b6173ecb /tests/benchmark
parent     9ed9065e19dfcccecab04eba71bf557e91f2379c (diff)
download   pylint-git-74d5bcb352141bc742488e74d1b37c1cb381a7ce.tar.gz
Update formatting following black and isort update
Diffstat (limited to 'tests/benchmark')
-rw-r--r--  tests/benchmark/test_baseline_benchmarks.py | 150
1 file changed, 90 insertions(+), 60 deletions(-)
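The diff below is purely mechanical: the file was re-run through black (the 20.8b0 line, which introduced the docstring and magic-trailing-comma behaviour visible in the hunks) and isort. The project most likely ran the CLIs (`black`/`isort`, possibly via pre-commit); as a hedged sketch, the same result could be reproduced through the libraries' Python entry points:

    # Hypothetical reproduction of this commit's reformatting via library APIs;
    # black.format_str and isort.code are real entry points, but the version
    # pins and this exact invocation are assumptions.
    import black
    import isort

    path = "tests/benchmark/test_baseline_benchmarks.py"
    with open(path) as fh:
        src = fh.read()

    src = isort.code(src)                               # isort >= 5: sort import blocks
    src = black.format_str(src, mode=black.FileMode())  # black's default style

    with open(path, "w") as fh:
        fh.write(src)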
diff --git a/tests/benchmark/test_baseline_benchmarks.py b/tests/benchmark/test_baseline_benchmarks.py
index d9899ce99..ed9f599d3 100644
--- a/tests/benchmark/test_baseline_benchmarks.py
+++ b/tests/benchmark/test_baseline_benchmarks.py
@@ -29,40 +29,52 @@ def _empty_filepath():
class SleepingChecker(BaseChecker):
- """ A checker that sleeps, the wall-clock time should reduce as we add workers
+ """A checker that sleeps, the wall-clock time should reduce as we add workers
As we apply a roughly constant amount of "work" in this checker, any variance is
- likely to be caused by the pylint system. """
+ likely to be caused by the pylint system."""
__implements__ = (pylint.interfaces.IRawChecker,)
name = "sleeper"
- msgs = {"R9999": ("Test", "test-check", "Some helpful text.",)}
+ msgs = {
+ "R9999": (
+ "Test",
+ "test-check",
+ "Some helpful text.",
+ )
+ }
sleep_duration = 0.5 # the time to pretend we're doing work for
def process_module(self, _astroid):
- """ Sleeps for `sleep_duration` on each call
+ """Sleeps for `sleep_duration` on each call
- This effectively means each file costs ~`sleep_duration`+framework overhead """
+ This effectively means each file costs ~`sleep_duration`+framework overhead"""
time.sleep(self.sleep_duration)
class SleepingCheckerLong(BaseChecker):
- """ A checker that sleeps, the wall-clock time should reduce as we add workers
+ """A checker that sleeps, the wall-clock time should reduce as we add workers
As we apply a roughly constant amount of "work" in this checker, any variance is
- likely to be caused by the pylint system. """
+ likely to be caused by the pylint system."""
__implements__ = (pylint.interfaces.IRawChecker,)
name = "long-sleeper"
- msgs = {"R9999": ("Test", "test-check", "Some helpful text.",)}
+ msgs = {
+ "R9999": (
+ "Test",
+ "test-check",
+ "Some helpful text.",
+ )
+ }
sleep_duration = 0.5 # the time to pretend we're doing work for
def process_module(self, _astroid):
- """ Sleeps for `sleep_duration` on each call
+ """Sleeps for `sleep_duration` on each call
- This effectively means each file costs ~`sleep_duration`+framework overhead """
+ This effectively means each file costs ~`sleep_duration`+framework overhead"""
time.sleep(self.sleep_duration)
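Aside: the `msgs` rewrite in this hunk (and its repeats below, including the `@pytest.mark.benchmark` decorator) is black's "magic trailing comma", new in 20.8b0: a trailing comma inside brackets forces one element per line even when the literal would fit on one. The same release also stopped allowing whitespace just inside the `"""` of a docstring, which accounts for every docstring hunk here. A minimal before/after sketch of the trailing-comma rule:

    # Without a trailing comma, black keeps a short literal on one line:
    msgs = {"R9999": ("Test", "test-check", "Some helpful text.")}

    # With a trailing comma after the last element, black >= 20.8b0 explodes
    # the collection, one element per line -- exactly the shape in this diff:
    msgs = {
        "R9999": (
            "Test",
            "test-check",
            "Some helpful text.",
        )
    }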
@@ -72,18 +84,26 @@ class NoWorkChecker(BaseChecker):
__implements__ = (pylint.interfaces.IRawChecker,)
name = "sleeper"
- msgs = {"R9999": ("Test", "test-check", "Some helpful text.",)}
+ msgs = {
+ "R9999": (
+ "Test",
+ "test-check",
+ "Some helpful text.",
+ )
+ }
def process_module(self, _astroid):
pass
-@pytest.mark.benchmark(group="baseline",)
+@pytest.mark.benchmark(
+ group="baseline",
+)
class TestEstablishBaselineBenchmarks:
- """ Naive benchmarks for the high-level pylint framework
+ """Naive benchmarks for the high-level pylint framework
Because this benchmarks the fundamental and common parts, changes seen here will
- impact everything else """
+ impact everything else"""
empty_filepath = _empty_filepath()
empty_file_info = (
@@ -94,7 +114,7 @@ class TestEstablishBaselineBenchmarks:
lot_of_files = 500
def test_baseline_benchmark_j1(self, benchmark):
- """ Establish a baseline of pylint performance with no work
+ """Establish a baseline of pylint performance with no work
We will add extra Checkers in other benchmarks.
@@ -105,13 +125,14 @@ class TestEstablishBaselineBenchmarks:
assert linter.config.jobs == 1
assert len(linter._checkers) == 1, "Should just have 'master'"
benchmark(linter.check, fileinfos)
- assert linter.msg_status == 0, (
- "Expected no errors to be thrown: %s"
- % pprint.pformat(linter.reporter.messages)
+ assert (
+ linter.msg_status == 0
+ ), "Expected no errors to be thrown: %s" % pprint.pformat(
+ linter.reporter.messages
)
def test_baseline_benchmark_j10(self, benchmark):
- """ Establish a baseline of pylint performance with no work across threads
+ """Establish a baseline of pylint performance with no work across threads
Same as `test_baseline_benchmark_j1` but we use -j10 with 10 fake files to
ensure the end-to-end system is invoked.
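The recurring assert rewrite in these hunks is also black's doing: when `assert condition, message` overflows the line, black parenthesizes the condition and wraps the %-formatted message at the call, rather than parenthesizing the message. A standalone sketch of the two shapes (`msg_status` and `messages` are stand-ins for the linter attributes):

    import pprint

    msg_status = 0  # stand-in for linter.msg_status
    messages = []   # stand-in for linter.reporter.messages

    # Hand-wrapped shape on the removed lines: parenthesized message.
    assert msg_status == 0, (
        "Expected no errors to be thrown: %s" % pprint.pformat(messages)
    )

    # Shape black produces on the added lines: parenthesized condition,
    # message expression wrapped at the pprint call.
    assert (
        msg_status == 0
    ), "Expected no errors to be thrown: %s" % pprint.pformat(messages)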
@@ -128,9 +149,10 @@ class TestEstablishBaselineBenchmarks:
assert linter.config.jobs == 10
assert len(linter._checkers) == 1, "Should have 'master'"
benchmark(linter.check, fileinfos)
- assert linter.msg_status == 0, (
- "Expected no errors to be thrown: %s"
- % pprint.pformat(linter.reporter.messages)
+ assert (
+ linter.msg_status == 0
+ ), "Expected no errors to be thrown: %s" % pprint.pformat(
+ linter.reporter.messages
)
def test_baseline_benchmark_check_parallel_j10(self, benchmark):
@@ -142,16 +164,17 @@ class TestEstablishBaselineBenchmarks:
assert len(linter._checkers) == 1, "Should have 'master'"
benchmark(check_parallel, linter, jobs=10, files=fileinfos)
- assert linter.msg_status == 0, (
- "Expected no errors to be thrown: %s"
- % pprint.pformat(linter.reporter.messages)
+ assert (
+ linter.msg_status == 0
+ ), "Expected no errors to be thrown: %s" % pprint.pformat(
+ linter.reporter.messages
)
def test_baseline_lots_of_files_j1(self, benchmark):
- """ Establish a baseline with only 'master' checker being run in -j1
+ """Establish a baseline with only 'master' checker being run in -j1
We do not register any checkers except the default 'master', so the cost is just
- that of the system with a lot of files registered """
+ that of the system with a lot of files registered"""
if benchmark.disabled:
benchmark(print, "skipping, only benchmark large file counts")
return # _only_ run this test if profiling
@@ -161,17 +184,18 @@ class TestEstablishBaselineBenchmarks:
assert linter.config.jobs == 1
assert len(linter._checkers) == 1, "Should have 'master'"
benchmark(linter.check, fileinfos)
- assert linter.msg_status == 0, (
- "Expected no errors to be thrown: %s"
- % pprint.pformat(linter.reporter.messages)
+ assert (
+ linter.msg_status == 0
+ ), "Expected no errors to be thrown: %s" % pprint.pformat(
+ linter.reporter.messages
)
def test_baseline_lots_of_files_j10(self, benchmark):
- """ Establish a baseline with only 'master' checker being run in -j10
+ """Establish a baseline with only 'master' checker being run in -j10
As with the -j1 variant above `test_baseline_lots_of_files_j1`, we do not
register any checkers except the default 'master', so the cost is just that of
- the check_parallel system across 10 workers, plus the overhead of PyLinter """
+ the check_parallel system across 10 workers, plus the overhead of PyLinter"""
if benchmark.disabled:
benchmark(print, "skipping, only benchmark large file counts")
return # _only_ run this test if profiling
@@ -181,16 +205,17 @@ class TestEstablishBaselineBenchmarks:
assert linter.config.jobs == 10
assert len(linter._checkers) == 1, "Should have 'master'"
benchmark(linter.check, fileinfos)
- assert linter.msg_status == 0, (
- "Expected no errors to be thrown: %s"
- % pprint.pformat(linter.reporter.messages)
+ assert (
+ linter.msg_status == 0
+ ), "Expected no errors to be thrown: %s" % pprint.pformat(
+ linter.reporter.messages
)
def test_baseline_lots_of_files_j1_empty_checker(self, benchmark):
- """ Baselines pylint for a single extra checker being run in -j1, for N-files
+ """Baselines pylint for a single extra checker being run in -j1, for N-files
We use a checker that does no work, so the cost is just that of the system at
- scale """
+ scale"""
if benchmark.disabled:
benchmark(print, "skipping, only benchmark large file counts")
return # _only_ run this test if profiling
@@ -201,16 +226,17 @@ class TestEstablishBaselineBenchmarks:
assert linter.config.jobs == 1
assert len(linter._checkers) == 2, "Should have 'master' and 'sleeper'"
benchmark(linter.check, fileinfos)
- assert linter.msg_status == 0, (
- "Expected no errors to be thrown: %s"
- % pprint.pformat(linter.reporter.messages)
+ assert (
+ linter.msg_status == 0
+ ), "Expected no errors to be thrown: %s" % pprint.pformat(
+ linter.reporter.messages
)
def test_baseline_lots_of_files_j10_empty_checker(self, benchmark):
- """ Baselines pylint for a single extra checker being run in -j10, for N-files
+ """Baselines pylint for a single extra checker being run in -j10, for N-files
We use a checker that does no work, so the cost is just that of the system at
- scale, across workers """
+ scale, across workers"""
if benchmark.disabled:
benchmark(print, "skipping, only benchmark large file counts")
return # _only_ run this test if profiling
@@ -221,19 +247,20 @@ class TestEstablishBaselineBenchmarks:
assert linter.config.jobs == 10
assert len(linter._checkers) == 2, "Should have 'master' and 'sleeper'"
benchmark(linter.check, fileinfos)
- assert linter.msg_status == 0, (
- "Expected no errors to be thrown: %s"
- % pprint.pformat(linter.reporter.messages)
+ assert (
+ linter.msg_status == 0
+ ), "Expected no errors to be thrown: %s" % pprint.pformat(
+ linter.reporter.messages
)
def test_baseline_benchmark_j1_single_working_checker(self, benchmark):
- """ Establish a baseline of single-worker performance for PyLinter
+ """Establish a baseline of single-worker performance for PyLinter
Here we mimic a single Checker that does some work so that we can see the
impact of running a simple system with -j1 against the same system with -j10.
We expect this benchmark to take very close to
- `numfiles*SleepingChecker.sleep_duration` """
+ `numfiles*SleepingChecker.sleep_duration`"""
if benchmark.disabled:
benchmark(print, "skipping, do not want to sleep in main tests")
return # _only_ run this test if profiling
@@ -247,19 +274,20 @@ class TestEstablishBaselineBenchmarks:
assert linter.config.jobs == 1
assert len(linter._checkers) == 2, "Should have 'master' and 'sleeper'"
benchmark(linter.check, fileinfos)
- assert linter.msg_status == 0, (
- "Expected no errors to be thrown: %s"
- % pprint.pformat(linter.reporter.messages)
+ assert (
+ linter.msg_status == 0
+ ), "Expected no errors to be thrown: %s" % pprint.pformat(
+ linter.reporter.messages
)
def test_baseline_benchmark_j10_single_working_checker(self, benchmark):
- """ Establishes baseline of multi-worker performance for PyLinter/check_parallel
+ """Establishes baseline of multi-worker performance for PyLinter/check_parallel
We expect this benchmark to take less time than test_baseline_benchmark_j1,
`error_margin*(1/J)*(numfiles*SleepingChecker.sleep_duration)`
Because of the cost of the framework and system, the performance difference will
- *not* be 1/10 of -j1 versions. """
+ *not* be 1/10 of -j1 versions."""
if benchmark.disabled:
benchmark(print, "skipping, do not want to sleep in main tests")
return # _only_ run this test if profiling
@@ -274,13 +302,14 @@ class TestEstablishBaselineBenchmarks:
assert linter.config.jobs == 10
assert len(linter._checkers) == 2, "Should have 'master' and 'sleeper'"
benchmark(linter.check, fileinfos)
- assert linter.msg_status == 0, (
- "Expected no errors to be thrown: %s"
- % pprint.pformat(linter.reporter.messages)
+ assert (
+ linter.msg_status == 0
+ ), "Expected no errors to be thrown: %s" % pprint.pformat(
+ linter.reporter.messages
)
def test_baseline_benchmark_j1_all_checks_single_file(self, benchmark):
- """ Runs a single file, with -j1, against all plug-ins
+ """Runs a single file, with -j1, against all plug-ins
... that's the intent at least.
"""
@@ -292,13 +321,14 @@ class TestEstablishBaselineBenchmarks:
print("len(runner.linter._checkers)", len(runner.linter._checkers))
assert len(runner.linter._checkers) > 1, "Should have more than 'master'"
- assert runner.linter.msg_status == 0, (
- "Expected no errors to be thrown: %s"
- % pprint.pformat(runner.linter.reporter.messages)
+ assert (
+ runner.linter.msg_status == 0
+ ), "Expected no errors to be thrown: %s" % pprint.pformat(
+ runner.linter.reporter.messages
)
def test_baseline_benchmark_j1_all_checks_lots_of_files(self, benchmark):
- """ Runs lots of files, with -j1, against all plug-ins
+ """Runs lots of files, with -j1, against all plug-ins
... that's the intent at least.
"""