author     Ned Batchelder <ned@nedbatchelder.com>  2022-12-29 16:53:54 -0500
committer  Ned Batchelder <ned@nedbatchelder.com>  2022-12-29 17:29:50 -0500
commit     21d66355a392d3d3dec8f79770e4be7673edf1dd (patch)
tree       42182e5a89f76d86d4eb85d13667c983cd02ef0b
parent     aeb32f806e46fe920e2742133598f3d5cd0c5749 (diff)
mypy: check python.py
-rw-r--r--  coverage/parser.py      |  4
-rw-r--r--  coverage/phystokens.py  |  4
-rw-r--r--  coverage/plugin.py      |  4
-rw-r--r--  coverage/python.py      | 82
-rw-r--r--  coverage/types.py       |  7
-rw-r--r--  tox.ini                 |  6
6 files changed, 63 insertions(+), 44 deletions(-)
diff --git a/coverage/parser.py b/coverage/parser.py
index 3e3b92ad..9c71e2d3 100644
--- a/coverage/parser.py
+++ b/coverage/parser.py
@@ -238,9 +238,9 @@ class PythonParser:
"""Implement `FileReporter.translate_lines`."""
return self.first_lines(lines)
- def translate_arcs(self, arcs: Iterable[TArc]) -> List[TArc]:
+ def translate_arcs(self, arcs: Iterable[TArc]) -> Set[TArc]:
"""Implement `FileReporter.translate_arcs`."""
- return [(self.first_line(a), self.first_line(b)) for (a, b) in arcs]
+ return {(self.first_line(a), self.first_line(b)) for (a, b) in arcs}
def parse_source(self) -> None:
"""Parse source text to find executable lines, excluded lines, etc.
diff --git a/coverage/phystokens.py b/coverage/phystokens.py
index 0842f658..a45242fd 100644
--- a/coverage/phystokens.py
+++ b/coverage/phystokens.py
@@ -14,7 +14,7 @@ import tokenize
from typing import Iterable, List, Optional, Set, Tuple
from coverage import env
-from coverage.types import TLineNo
+from coverage.types import TLineNo, TSourceTokenLines
TokenInfos = Iterable[tokenize.TokenInfo]
@@ -91,7 +91,7 @@ class MatchCaseFinder(ast.NodeVisitor):
self.generic_visit(node)
-def source_token_lines(source: str) -> Iterable[List[Tuple[str, str]]]:
+def source_token_lines(source: str) -> TSourceTokenLines:
"""Generate a series of lines, one for each line in `source`.
Each line is a list of pairs, each pair is a token::
diff --git a/coverage/plugin.py b/coverage/plugin.py
index 5f101aaa..ee1ae365 100644
--- a/coverage/plugin.py
+++ b/coverage/plugin.py
@@ -121,7 +121,7 @@ from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
from coverage import files
from coverage.misc import _needs_to_implement
-from coverage.types import TArc, TConfigurable, TLineNo
+from coverage.types import TArc, TConfigurable, TLineNo, TSourceTokenLines
class CoveragePlugin:
@@ -504,7 +504,7 @@ class FileReporter:
"""
return f"Line {start} didn't jump to line {end}"
- def source_token_lines(self) -> Iterable[List[Tuple[str, str]]]:
+ def source_token_lines(self) -> TSourceTokenLines:
"""Generate a series of tokenized lines, one for each line in `source`.
These tokens are used for syntax-colored reports.
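
For plugin authors, TSourceTokenLines only names the shape this method already produced: an iterable of lines, where each line is a list of (token_class, text) pairs. A hedged sketch of the simplest possible producer (plain_token_lines is an illustrative stand-in, not the base-class implementation):

    from typing import Iterable, List, Tuple

    TSourceTokenLines = Iterable[List[Tuple[str, str]]]

    def plain_token_lines(source: str) -> TSourceTokenLines:
        """Yield every source line as a single un-highlighted "txt" token."""
        for line in source.splitlines():
            yield [("txt", line)]

    for tokens in plain_token_lines("def hello():\n    return 42\n"):
        print(tokens)
    # [('txt', 'def hello():')]
    # [('txt', '    return 42')]
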
diff --git a/coverage/python.py b/coverage/python.py
index b3232085..5716eb27 100644
--- a/coverage/python.py
+++ b/coverage/python.py
@@ -3,23 +3,30 @@
"""Python source expertise for coverage.py"""
+from __future__ import annotations
+
import os.path
import types
import zipimport
+from typing import cast, Dict, Iterable, Optional, Set, TYPE_CHECKING
+
from coverage import env
from coverage.exceptions import CoverageException, NoSource
from coverage.files import canonical_filename, relative_filename, zip_location
-from coverage.misc import contract, expensive, isolate_module, join_regex
+from coverage.misc import expensive, isolate_module, join_regex
from coverage.parser import PythonParser
from coverage.phystokens import source_token_lines, source_encoding
from coverage.plugin import FileReporter
+from coverage.types import TArc, TLineNo, TMorf, TSourceTokenLines
+
+if TYPE_CHECKING:
+ from coverage import Coverage
os = isolate_module(os)
-@contract(returns='bytes')
-def read_python_source(filename):
+def read_python_source(filename: str) -> bytes:
"""Read the Python source text from `filename`.
Returns bytes.
@@ -35,8 +42,7 @@ def read_python_source(filename):
return source.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
-@contract(returns='unicode')
-def get_python_source(filename):
+def get_python_source(filename: str) -> str:
"""Return the source code, as unicode."""
base, ext = os.path.splitext(filename)
if ext == ".py" and env.WINDOWS:
@@ -44,24 +50,25 @@ def get_python_source(filename):
else:
exts = [ext]
+ source_bytes: Optional[bytes]
for ext in exts:
try_filename = base + ext
if os.path.exists(try_filename):
# A regular text file: open it.
- source = read_python_source(try_filename)
+ source_bytes = read_python_source(try_filename)
break
# Maybe it's in a zip file?
- source = get_zip_bytes(try_filename)
- if source is not None:
+ source_bytes = get_zip_bytes(try_filename)
+ if source_bytes is not None:
break
else:
# Couldn't find source.
raise NoSource(f"No source for code: '{filename}'.")
# Replace \f because of http://bugs.python.org/issue19035
- source = source.replace(b'\f', b' ')
- source = source.decode(source_encoding(source), "replace")
+ source_bytes = source_bytes.replace(b'\f', b' ')
+ source = source_bytes.decode(source_encoding(source_bytes), "replace")
# Python code should always end with a line with a newline.
if source and source[-1] != '\n':
@@ -70,8 +77,7 @@ def get_python_source(filename):
return source
-@contract(returns='bytes|None')
-def get_zip_bytes(filename):
+def get_zip_bytes(filename: str) -> Optional[bytes]:
"""Get data from `filename` if it is a zip file path.
Returns the bytestring data read from the zip file, or None if no zip file
@@ -87,14 +93,15 @@ def get_zip_bytes(filename):
except zipimport.ZipImportError:
return None
try:
- data = zi.get_data(inner)
+ # typeshed is wrong for get_data: https://github.com/python/typeshed/pull/9428
+ data = cast(bytes, zi.get_data(inner))
except OSError:
return None
return data
return None
-def source_for_file(filename):
+def source_for_file(filename: str) -> str:
"""Return the source filename for `filename`.
Given a file name being traced, return the best guess as to the source
@@ -127,7 +134,7 @@ def source_for_file(filename):
return filename
-def source_for_morf(morf):
+def source_for_morf(morf: TMorf) -> str:
"""Get the source filename for the module-or-file `morf`."""
if hasattr(morf, '__file__') and morf.__file__:
filename = morf.__file__
@@ -145,7 +152,7 @@ def source_for_morf(morf):
class PythonFileReporter(FileReporter):
"""Report support for a Python file."""
- def __init__(self, morf, coverage=None):
+ def __init__(self, morf: TMorf, coverage: Optional[Coverage]=None) -> None:
self.coverage = coverage
filename = source_for_morf(morf)
@@ -153,6 +160,7 @@ class PythonFileReporter(FileReporter):
fname = filename
canonicalize = True
if self.coverage is not None:
+ assert self.coverage.config is not None
if self.coverage.config.relative_files:
canonicalize = False
if canonicalize:
@@ -168,20 +176,20 @@ class PythonFileReporter(FileReporter):
name = relative_filename(filename)
self.relname = name
- self._source = None
- self._parser = None
+ self._source: Optional[str] = None
+ self._parser: Optional[PythonParser] = None
self._excluded = None
- def __repr__(self):
+ def __repr__(self) -> str:
return f"<PythonFileReporter {self.filename!r}>"
- @contract(returns='unicode')
- def relative_filename(self):
+ def relative_filename(self) -> str:
return self.relname
@property
- def parser(self):
+ def parser(self) -> PythonParser:
"""Lazily create a :class:`PythonParser`."""
+ assert self.coverage is not None
if self._parser is None:
self._parser = PythonParser(
filename=self.filename,
@@ -190,22 +198,24 @@ class PythonFileReporter(FileReporter):
self._parser.parse_source()
return self._parser
- def lines(self):
+ def lines(self) -> Set[TLineNo]:
"""Return the line numbers of statements in the file."""
return self.parser.statements
- def excluded_lines(self):
+ def excluded_lines(self) -> Set[TLineNo]:
"""Return the line numbers of statements in the file."""
return self.parser.excluded
- def translate_lines(self, lines):
+ def translate_lines(self, lines: Iterable[TLineNo]) -> Set[TLineNo]:
return self.parser.translate_lines(lines)
- def translate_arcs(self, arcs):
+ def translate_arcs(self, arcs: Iterable[TArc]) -> Set[TArc]:
return self.parser.translate_arcs(arcs)
@expensive
- def no_branch_lines(self):
+ def no_branch_lines(self) -> Set[TLineNo]:
+ assert self.coverage is not None
+ assert self.coverage.config is not None
no_branch = self.parser.lines_matching(
join_regex(self.coverage.config.partial_list),
join_regex(self.coverage.config.partial_always_list),
@@ -213,23 +223,27 @@ class PythonFileReporter(FileReporter):
return no_branch
@expensive
- def arcs(self):
+ def arcs(self) -> Set[TArc]:
return self.parser.arcs()
@expensive
- def exit_counts(self):
+ def exit_counts(self) -> Dict[TLineNo, int]:
return self.parser.exit_counts()
- def missing_arc_description(self, start, end, executed_arcs=None):
+ def missing_arc_description(
+ self,
+ start: TLineNo,
+ end: TLineNo,
+ executed_arcs: Optional[Set[TArc]]=None,
+ ) -> str:
return self.parser.missing_arc_description(start, end, executed_arcs)
- @contract(returns='unicode')
- def source(self):
+ def source(self) -> str:
if self._source is None:
self._source = get_python_source(self.filename)
return self._source
- def should_be_python(self):
+ def should_be_python(self) -> bool:
"""Does it seem like this file should contain Python?
This is used to decide if a file reported as part of the execution of
@@ -249,5 +263,5 @@ class PythonFileReporter(FileReporter):
# Everything else is probably not Python.
return False
- def source_token_lines(self):
+ def source_token_lines(self) -> TSourceTokenLines:
return source_token_lines(self.source())
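
Much of the python.py churn above follows one pattern: values that may legitimately be None (coverage, coverage.config, source_bytes) are declared Optional, and each use site narrows them with an assert or an explicit None check so mypy can verify the rest of the method. A stand-alone sketch of the assert flavor (the Reporter class and its config dict are invented for illustration):

    from typing import Dict, Optional

    class Reporter:
        def __init__(self, config: Optional[Dict[str, int]] = None) -> None:
            self.config = config

        def threshold(self) -> int:
            # Narrow the Optional attribute for mypy; at runtime the assert
            # documents an invariant callers are expected to uphold.
            assert self.config is not None
            return self.config.get("threshold", 0)

    print(Reporter({"threshold": 3}).threshold())  # 3
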
diff --git a/coverage/types.py b/coverage/types.py
index 015c3747..d138b2f2 100644
--- a/coverage/types.py
+++ b/coverage/types.py
@@ -5,7 +5,8 @@
Types for use throughout coverage.py.
"""
-from typing import Dict, List, Optional, Tuple, Union, TYPE_CHECKING
+from types import ModuleType
+from typing import Dict, Iterable, List, Optional, Tuple, Union, TYPE_CHECKING
if TYPE_CHECKING:
# Protocol is new in 3.8. PYVERSIONS
@@ -53,6 +54,10 @@ TLineNo = int
TArc = Tuple[TLineNo, TLineNo]
+TMorf = Union[ModuleType, str]
+
+TSourceTokenLines = Iterable[List[Tuple[str, str]]]
+
## Debugging
class TWarnFn(Protocol):
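
TMorf names the "module or file name" convention used by coverage.py's entry points: a value is either an imported module object or a path string. A small usage sketch (describe_morf is an invented helper, not part of coverage.py):

    import json
    from types import ModuleType
    from typing import Union

    TMorf = Union[ModuleType, str]

    def describe_morf(morf: TMorf) -> str:
        """Label a module-or-file value, branching on its runtime type."""
        if isinstance(morf, ModuleType):
            return f"module {morf.__name__}"
        return f"file {morf}"

    print(describe_morf(json))                   # module json
    print(describe_morf("coverage/python.py"))   # file coverage/python.py
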
diff --git a/tox.ini b/tox.ini
index 5cf5e68c..e1785719 100644
--- a/tox.ini
+++ b/tox.ini
@@ -98,9 +98,9 @@ deps =
setenv =
{[testenv]setenv}
T_AN=coverage/config.py coverage/files.py coverage/numbits.py
- T_OR=coverage/parser.py coverage/phystokens.py coverage/plugin.py coverage/results.py
- T_SZ=coverage/sqldata.py coverage/tomlconfig.py coverage/types.py
- TYPEABLE={env:T_AN} {env:T_OR} {env:T_SZ}
+ T_OP=coverage/parser.py coverage/phystokens.py coverage/plugin.py coverage/python.py
+ T_QZ=coverage/results.py coverage/sqldata.py coverage/tomlconfig.py coverage/types.py
+ TYPEABLE={env:T_AN} {env:T_OP} {env:T_QZ}
commands =
# PYVERSIONS