author     Pierre Sassoulas <pierre.sassoulas@gmail.com>  2022-07-15 20:56:58 +0200
committer  GitHub <noreply@github.com>  2022-07-15 20:56:58 +0200
commit     6ca2786e2287e44a2d4520f445c949352052409a (patch)
tree       7c08bae642622d70ebc21484a5b8ba5c714f5fb3 /pylint/checkers/similar.py
parent     748426c9b1da06bd3b5872fecc2ffc4733925643 (diff)
download   pylint-git-6ca2786e2287e44a2d4520f445c949352052409a.tar.gz
[typing] Add typing to the similar checker (#7191)
Co-authored-by: Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
Diffstat (limited to 'pylint/checkers/similar.py')
-rw-r--r--  pylint/checkers/similar.py | 42
1 file changed, 20 insertions(+), 22 deletions(-)
diff --git a/pylint/checkers/similar.py b/pylint/checkers/similar.py
index 77f2fc835..7938c27e8 100644
--- a/pylint/checkers/similar.py
+++ b/pylint/checkers/similar.py
@@ -229,18 +229,16 @@ def hash_lineset(
# Need different iterators on same lines but each one is shifted 1 from the precedent
shifted_lines = [iter(lines[i:]) for i in range(min_common_lines)]
- for index_i, *succ_lines in enumerate(zip(*shifted_lines)):
- start_linenumber = lineset.stripped_lines[index_i].line_number
+ for i, *succ_lines in enumerate(zip(*shifted_lines)):
+ start_linenumber = LineNumber(lineset.stripped_lines[i].line_number)
try:
- end_linenumber = lineset.stripped_lines[
- index_i + min_common_lines
- ].line_number
+ end_linenumber = lineset.stripped_lines[i + min_common_lines].line_number
except IndexError:
- end_linenumber = lineset.stripped_lines[-1].line_number + 1
+ end_linenumber = LineNumber(lineset.stripped_lines[-1].line_number + 1)
- index = Index(index_i)
+ index = Index(i)
index2lines[index] = SuccessiveLinesLimits(
- start=LineNumber(start_linenumber), end=LineNumber(end_linenumber)
+ start=start_linenumber, end=end_linenumber
)
l_c = LinesChunk(lineset.name, index, *succ_lines)
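Note on the hunk above: the change moves the LineNumber() wrapping to where the values are produced, so SuccessiveLinesLimits receives already-typed arguments instead of wrapping plain ints at the call site. A minimal sketch of how a typing.NewType such as LineNumber or Index behaves (the helper function below is hypothetical; only the NewType pattern mirrors the checker):

from typing import NewType

# Hypothetical stand-ins for the NewTypes the hunk above relies on; the real
# definitions live elsewhere in pylint/checkers/similar.py.
Index = NewType("Index", int)
LineNumber = NewType("LineNumber", int)

def first_line_of_chunk(line_numbers: list[int], i: int) -> LineNumber:
    # At runtime LineNumber(x) just returns x unchanged; the wrapper exists so
    # a type checker can tell a LineNumber apart from an arbitrary int.
    return LineNumber(line_numbers[i])

print(first_line_of_chunk([3, 5, 8], 1))  # prints 5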
@@ -542,7 +540,7 @@ class Similar:
for lineset2 in self.linesets[idx + 1 :]:
yield from self._find_common(lineset, lineset2)
- def get_map_data(self):
+ def get_map_data(self) -> list[LineSet]:
"""Returns the data we can use for a map/reduce process.
In this case we are returning this instance's Linesets, that is all file
@@ -550,7 +548,7 @@ class Similar:
"""
return self.linesets
- def combine_mapreduce_data(self, linesets_collection):
+ def combine_mapreduce_data(self, linesets_collection: list[list[LineSet]]) -> None:
"""Reduces and recombines data into a format that we can report on.
The partner function of get_map_data()
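The two annotations above spell out the map/reduce contract: get_map_data() hands back one worker's LineSets as a list[LineSet], and combine_mapreduce_data() receives one such list per worker, i.e. a list[list[LineSet]]. A rough sketch of that shape, assuming a simple concatenate-then-recompute reducer (the Lineset stand-in and the functions below are illustrative, not pylint's implementation):

from __future__ import annotations

# Lineset is a stand-in for pylint's LineSet.
class Lineset:
    def __init__(self, name: str) -> None:
        self.name = name

def get_map_data(linesets: list[Lineset]) -> list[Lineset]:
    # Each worker simply exposes its own linesets.
    return linesets

def combine_mapreduce_data(linesets_collection: list[list[Lineset]]) -> list[Lineset]:
    # One inner list per worker; concatenate them so similarities can be
    # recomputed over the combined set.
    combined: list[Lineset] = []
    for per_worker in linesets_collection:
        combined.extend(per_worker)
    return combined

print(len(combine_mapreduce_data([[Lineset("a.py")], [Lineset("b.py"), Lineset("c.py")]])))  # 3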
@@ -694,29 +692,29 @@ class LineSet:
def __str__(self):
return f"<Lineset for {self.name}>"
- def __len__(self):
+ def __len__(self) -> int:
return len(self._real_lines)
- def __getitem__(self, index):
+ def __getitem__(self, index: int) -> LineSpecifs:
return self._stripped_lines[index]
- def __lt__(self, other):
+ def __lt__(self, other: LineSet) -> bool:
return self.name < other.name
- def __hash__(self):
+ def __hash__(self) -> int:
return id(self)
- def __eq__(self, other):
+ def __eq__(self, other: Any) -> bool:
if not isinstance(other, LineSet):
return False
return self.__dict__ == other.__dict__
@property
- def stripped_lines(self):
+ def stripped_lines(self) -> list[LineSpecifs]:
return self._stripped_lines
@property
- def real_lines(self):
+ def real_lines(self) -> list[str]:
return self._real_lines
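The dunder annotations above follow the usual typing convention: __lt__ may require the concrete LineSet operand, while __eq__ accepts Any and narrows with isinstance so that comparing against an unrelated object returns False instead of raising. A small hypothetical illustration of that convention, unrelated to pylint's classes:

from __future__ import annotations

from typing import Any

# Hypothetical class showing the __lt__/__eq__ annotation convention.
class Item:
    def __init__(self, name: str) -> None:
        self.name = name

    def __lt__(self, other: Item) -> bool:
        return self.name < other.name

    def __eq__(self, other: Any) -> bool:
        # Narrow from Any: unrelated operands compare unequal rather than failing.
        if not isinstance(other, Item):
            return False
        return self.name == other.name

    def __hash__(self) -> int:
        return id(self)

print(Item("a") < Item("b"), Item("a") == "not an Item")  # True False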
@@ -817,7 +815,7 @@ class SimilarChecker(BaseRawFileChecker, Similar):
ignore_signatures=self.linter.config.ignore_signatures,
)
- def open(self):
+ def open(self) -> None:
"""Init the checkers: reset linesets and statistics information."""
self.linesets = []
self.linter.stats.reset_duplicated_lines()
@@ -840,7 +838,7 @@ class SimilarChecker(BaseRawFileChecker, Similar):
with node.stream() as stream:
self.append_stream(self.linter.current_name, stream, node.file_encoding) # type: ignore[arg-type]
- def close(self):
+ def close(self) -> None:
"""Compute and display similarities on closing (i.e. end of parsing)."""
total = sum(len(lineset) for lineset in self.linesets)
duplicated = 0
@@ -861,11 +859,11 @@ class SimilarChecker(BaseRawFileChecker, Similar):
stats.nb_duplicated_lines += int(duplicated)
stats.percent_duplicated_lines += float(total and duplicated * 100.0 / total)
- def get_map_data(self):
+ def get_map_data(self) -> list[LineSet]:
"""Passthru override."""
return Similar.get_map_data(self)
- def reduce_map_data(self, linter, data):
+ def reduce_map_data(self, linter: PyLinter, data: list[list[LineSet]]) -> None:
"""Reduces and recombines data into a format that we can report on.
The partner function of get_map_data()
@@ -877,7 +875,7 @@ def register(linter: PyLinter) -> None:
linter.register_checker(SimilarChecker(linter))
-def usage(status=0):
+def usage(status: int = 0) -> NoReturn:
"""Display command line usage information."""
print("finds copy pasted blocks in a set of files")
print()
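Annotating usage() with NoReturn is the standard way to tell a type checker that a function never returns normally, which matches a helper that ends by exiting the process. A minimal sketch of the pattern (assuming, as the NoReturn annotation implies, that the body ends by calling sys.exit(status)):

import sys
from typing import NoReturn

# Hypothetical miniature of the pattern: NoReturn promises the function never
# returns normally, which holds because sys.exit() raises SystemExit.
def show_usage(status: int = 0) -> NoReturn:
    print("finds copy pasted blocks in a set of files")
    sys.exit(status)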