author     Ned Batchelder <ned@nedbatchelder.com>   2023-03-22 17:12:30 -0400
committer  Ned Batchelder <ned@nedbatchelder.com>   2023-03-22 18:34:50 -0400
commit     b7be64538aa480fce641349d3053e9a84862d571 (patch)
tree       a5e195f650afffd026a662a628eda7b37fc5fece
parent     5a94109e646870aef6188de25ba314f73fa1245d (diff)
download   python-coveragepy-git-b7be64538aa480fce641349d3053e9a84862d571.tar.gz

style: double quotes
-rw-r--r--  coverage/config.py          126
-rw-r--r--  coverage/control.py          74
-rw-r--r--  coverage/data.py              6
-rw-r--r--  coverage/debug.py            28
-rw-r--r--  coverage/env.py              18
-rw-r--r--  coverage/execfile.py         10
-rw-r--r--  coverage/files.py             6
-rw-r--r--  coverage/inorout.py          34
-rw-r--r--  coverage/misc.py             14
-rw-r--r--  coverage/multiproc.py         6
-rw-r--r--  coverage/numbits.py           6
-rw-r--r--  coverage/parser.py           12
-rw-r--r--  coverage/phystokens.py       12
-rw-r--r--  coverage/plugin.py           22
-rw-r--r--  coverage/plugin_support.py    4
-rw-r--r--  coverage/python.py           16
-rw-r--r--  coverage/pytracer.py         20
-rw-r--r--  coverage/results.py           2
-rw-r--r--  coverage/templite.py         38
-rw-r--r--  coverage/version.py           6
20 files changed, 230 insertions(+), 230 deletions(-)
diff --git a/coverage/config.py b/coverage/config.py
index 9518e535..1edbe0de 100644
--- a/coverage/config.py
+++ b/coverage/config.py
@@ -114,8 +114,8 @@ class HandyConfigParser(configparser.ConfigParser):
"""
value_list = self.get(section, option)
values = []
- for value_line in value_list.split('\n'):
- for value in value_line.split(','):
+ for value_line in value_list.split("\n"):
+ for value in value_line.split(","):
value = value.strip()
if value:
values.append(value)
@@ -150,20 +150,20 @@ TConfigParser = Union[HandyConfigParser, TomlConfigParser]
# The default line exclusion regexes.
DEFAULT_EXCLUDE = [
- r'#\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(cover|COVER)',
+ r"#\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(cover|COVER)",
]
# The default partial branch regexes, to be modified by the user.
DEFAULT_PARTIAL = [
- r'#\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(branch|BRANCH)',
+ r"#\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(branch|BRANCH)",
]
# The default partial branch regexes, based on Python semantics.
# These are any Python branching constructs that can't actually execute all
# their branches.
DEFAULT_PARTIAL_ALWAYS = [
- 'while (True|1|False|0):',
- 'if (True|1|False|0):',
+ "while (True|1|False|0):",
+ "if (True|1|False|0):",
]
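
As an aside (not part of the patch): the default exclusion regex above is what
makes "# pragma: no cover" comments work. A minimal check with the standard
re module:

    import re

    # Copied from DEFAULT_EXCLUDE above.
    EXCLUDE_RE = r"#\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(cover|COVER)"

    line = "    return impossible()  # pragma: no cover"
    assert re.search(EXCLUDE_RE, line) is not None
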
@@ -286,7 +286,7 @@ class CoverageConfig(TConfigurable, TPluginConfig):
"""
_, ext = os.path.splitext(filename)
cp: TConfigParser
- if ext == '.toml':
+ if ext == ".toml":
cp = TomlConfigParser(our_file)
else:
cp = HandyConfigParser(our_file)
@@ -328,9 +328,9 @@ class CoverageConfig(TConfigurable, TPluginConfig):
)
# [paths] is special
- if cp.has_section('paths'):
- for option in cp.options('paths'):
- self.paths[option] = cp.getlist('paths', option)
+ if cp.has_section("paths"):
+ for option in cp.options("paths"):
+ self.paths[option] = cp.getlist("paths", option)
any_set = True
# plugins can have options
@@ -370,64 +370,64 @@ class CoverageConfig(TConfigurable, TPluginConfig):
# configuration value from the file.
# [run]
- ('branch', 'run:branch', 'boolean'),
- ('command_line', 'run:command_line'),
- ('concurrency', 'run:concurrency', 'list'),
- ('context', 'run:context'),
- ('cover_pylib', 'run:cover_pylib', 'boolean'),
- ('data_file', 'run:data_file'),
- ('debug', 'run:debug', 'list'),
- ('debug_file', 'run:debug_file'),
- ('disable_warnings', 'run:disable_warnings', 'list'),
- ('dynamic_context', 'run:dynamic_context'),
- ('parallel', 'run:parallel', 'boolean'),
- ('plugins', 'run:plugins', 'list'),
- ('relative_files', 'run:relative_files', 'boolean'),
- ('run_include', 'run:include', 'list'),
- ('run_omit', 'run:omit', 'list'),
- ('sigterm', 'run:sigterm', 'boolean'),
- ('source', 'run:source', 'list'),
- ('source_pkgs', 'run:source_pkgs', 'list'),
- ('timid', 'run:timid', 'boolean'),
- ('_crash', 'run:_crash'),
+ ("branch", "run:branch", "boolean"),
+ ("command_line", "run:command_line"),
+ ("concurrency", "run:concurrency", "list"),
+ ("context", "run:context"),
+ ("cover_pylib", "run:cover_pylib", "boolean"),
+ ("data_file", "run:data_file"),
+ ("debug", "run:debug", "list"),
+ ("debug_file", "run:debug_file"),
+ ("disable_warnings", "run:disable_warnings", "list"),
+ ("dynamic_context", "run:dynamic_context"),
+ ("parallel", "run:parallel", "boolean"),
+ ("plugins", "run:plugins", "list"),
+ ("relative_files", "run:relative_files", "boolean"),
+ ("run_include", "run:include", "list"),
+ ("run_omit", "run:omit", "list"),
+ ("sigterm", "run:sigterm", "boolean"),
+ ("source", "run:source", "list"),
+ ("source_pkgs", "run:source_pkgs", "list"),
+ ("timid", "run:timid", "boolean"),
+ ("_crash", "run:_crash"),
# [report]
- ('exclude_list', 'report:exclude_lines', 'regexlist'),
- ('exclude_also', 'report:exclude_also', 'regexlist'),
- ('fail_under', 'report:fail_under', 'float'),
- ('format', 'report:format', 'boolean'),
- ('ignore_errors', 'report:ignore_errors', 'boolean'),
- ('include_namespace_packages', 'report:include_namespace_packages', 'boolean'),
- ('partial_always_list', 'report:partial_branches_always', 'regexlist'),
- ('partial_list', 'report:partial_branches', 'regexlist'),
- ('precision', 'report:precision', 'int'),
- ('report_contexts', 'report:contexts', 'list'),
- ('report_include', 'report:include', 'list'),
- ('report_omit', 'report:omit', 'list'),
- ('show_missing', 'report:show_missing', 'boolean'),
- ('skip_covered', 'report:skip_covered', 'boolean'),
- ('skip_empty', 'report:skip_empty', 'boolean'),
- ('sort', 'report:sort'),
+ ("exclude_list", "report:exclude_lines", "regexlist"),
+ ("exclude_also", "report:exclude_also", "regexlist"),
+ ("fail_under", "report:fail_under", "float"),
+ ("format", "report:format", "boolean"),
+ ("ignore_errors", "report:ignore_errors", "boolean"),
+ ("include_namespace_packages", "report:include_namespace_packages", "boolean"),
+ ("partial_always_list", "report:partial_branches_always", "regexlist"),
+ ("partial_list", "report:partial_branches", "regexlist"),
+ ("precision", "report:precision", "int"),
+ ("report_contexts", "report:contexts", "list"),
+ ("report_include", "report:include", "list"),
+ ("report_omit", "report:omit", "list"),
+ ("show_missing", "report:show_missing", "boolean"),
+ ("skip_covered", "report:skip_covered", "boolean"),
+ ("skip_empty", "report:skip_empty", "boolean"),
+ ("sort", "report:sort"),
# [html]
- ('extra_css', 'html:extra_css'),
- ('html_dir', 'html:directory'),
- ('html_skip_covered', 'html:skip_covered', 'boolean'),
- ('html_skip_empty', 'html:skip_empty', 'boolean'),
- ('html_title', 'html:title'),
- ('show_contexts', 'html:show_contexts', 'boolean'),
+ ("extra_css", "html:extra_css"),
+ ("html_dir", "html:directory"),
+ ("html_skip_covered", "html:skip_covered", "boolean"),
+ ("html_skip_empty", "html:skip_empty", "boolean"),
+ ("html_title", "html:title"),
+ ("show_contexts", "html:show_contexts", "boolean"),
# [xml]
- ('xml_output', 'xml:output'),
- ('xml_package_depth', 'xml:package_depth', 'int'),
+ ("xml_output", "xml:output"),
+ ("xml_package_depth", "xml:package_depth", "int"),
# [json]
- ('json_output', 'json:output'),
- ('json_pretty_print', 'json:pretty_print', 'boolean'),
- ('json_show_contexts', 'json:show_contexts', 'boolean'),
+ ("json_output", "json:output"),
+ ("json_pretty_print", "json:pretty_print", "boolean"),
+ ("json_show_contexts", "json:show_contexts", "boolean"),
# [lcov]
- ('lcov_output', 'lcov:output'),
+ ("lcov_output", "lcov:output"),
]
def _set_attr_from_config_option(
@@ -435,7 +435,7 @@ class CoverageConfig(TConfigurable, TPluginConfig):
cp: TConfigParser,
attr: str,
where: str,
- type_: str = '',
+ type_: str = "",
) -> bool:
"""Set an attribute on self if it exists in the ConfigParser.
@@ -444,7 +444,7 @@ class CoverageConfig(TConfigurable, TPluginConfig):
"""
section, option = where.split(":")
if cp.has_option(section, option):
- method = getattr(cp, 'get' + type_)
+ method = getattr(cp, "get" + type_)
setattr(self, attr, method(section, option))
return True
return False
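
Side note (a sketch, not code from this commit): the getattr(cp, "get" + type_)
dispatch above means an entry such as ("branch", "run:branch", "boolean") ends
up calling cp.getboolean("run", "branch"). With a plain ConfigParser:

    import configparser

    cp = configparser.ConfigParser()
    cp.read_string("[run]\nbranch = true\n")

    where, type_ = "run:branch", "boolean"
    section, option = where.split(":")
    method = getattr(cp, "get" + type_)   # -> cp.getboolean
    assert method(section, option) is True
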
@@ -548,7 +548,7 @@ def config_files_to_try(config_file: Union[bool, str]) -> List[Tuple[str, bool,
specified_file = (config_file is not True)
if not specified_file:
# No file was specified. Check COVERAGE_RCFILE.
- rcfile = os.environ.get('COVERAGE_RCFILE')
+ rcfile = os.environ.get("COVERAGE_RCFILE")
if rcfile:
config_file = rcfile
specified_file = True
@@ -602,10 +602,10 @@ def read_coverage_config(
# $set_env.py: COVERAGE_DEBUG - Options for --debug.
# 3) from environment variables:
- env_data_file = os.environ.get('COVERAGE_FILE')
+ env_data_file = os.environ.get("COVERAGE_FILE")
if env_data_file:
config.data_file = env_data_file
- debugs = os.environ.get('COVERAGE_DEBUG')
+ debugs = os.environ.get("COVERAGE_DEBUG")
if debugs:
config.debug.extend(d.strip() for d in debugs.split(","))
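
For context (assumed usage, not from the patch): the two environment variables
read here override the data file and extend the debug options, with the same
comma splitting as above:

    import os

    os.environ["COVERAGE_FILE"] = ".coverage.example"
    os.environ["COVERAGE_DEBUG"] = "trace, dataio"

    debugs = os.environ.get("COVERAGE_DEBUG")
    assert [d.strip() for d in debugs.split(",")] == ["trace", "dataio"]
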
diff --git a/coverage/control.py b/coverage/control.py
index 290da655..acce622d 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -84,7 +84,7 @@ class Coverage(TConfigurable):
cov.start()
#.. call your code ..
cov.stop()
- cov.html_report(directory='covhtml')
+ cov.html_report(directory="covhtml")
Note: in keeping with Python custom, names starting with underscore are
not part of the public API. They might stop working at any point. Please
@@ -343,7 +343,7 @@ class Coverage(TConfigurable):
self._should_write_debug = False
self._write_startup_debug()
- # '[run] _crash' will raise an exception if the value is close by in
+ # "[run] _crash" will raise an exception if the value is close by in
# the call stack, for testing error handling.
if self.config._crash and self.config._crash in short_stack(limit=4):
raise RuntimeError(f"Crashing because called by {self.config._crash}")
@@ -380,7 +380,7 @@ class Coverage(TConfigurable):
"""
assert self._inorout is not None
disp = self._inorout.should_trace(filename, frame)
- if self._debug.should('trace'):
+ if self._debug.should("trace"):
self._debug.write(disposition_debug_msg(disp))
return disp
@@ -392,7 +392,7 @@ class Coverage(TConfigurable):
"""
assert self._inorout is not None
reason = self._inorout.check_include_omit_etc(filename, frame)
- if self._debug.should('trace'):
+ if self._debug.should("trace"):
if not reason:
msg = f"Including {filename!r}"
else:
@@ -420,7 +420,7 @@ class Coverage(TConfigurable):
self._warnings.append(msg)
if slug:
msg = f"{msg} ({slug})"
- if self._debug.should('pid'):
+ if self._debug.should("pid"):
msg = f"[{os.getpid()}] {msg}"
warnings.warn(msg, category=CoverageWarning, stacklevel=2)
@@ -566,7 +566,7 @@ class Coverage(TConfigurable):
self._inorout = InOrOut(
config=self.config,
warn=self._warn,
- debug=(self._debug if self._debug.should('trace') else None),
+ debug=(self._debug if self._debug.should("trace") else None),
include_namespace_packages=self.config.include_namespace_packages,
)
self._inorout.plugins = self._plugins
@@ -703,13 +703,13 @@ class Coverage(TConfigurable):
self._collector.switch_context(new_context)
- def clear_exclude(self, which: str = 'exclude') -> None:
+ def clear_exclude(self, which: str = "exclude") -> None:
"""Clear the exclude list."""
self._init()
setattr(self.config, which + "_list", [])
self._exclude_regex_stale()
- def exclude(self, regex: str, which: str = 'exclude') -> None:
+ def exclude(self, regex: str, which: str = "exclude") -> None:
"""Exclude source lines from execution consideration.
A number of lists of regular expressions are maintained. Each list
@@ -740,7 +740,7 @@ class Coverage(TConfigurable):
self._exclude_re[which] = join_regex(excl_list)
return self._exclude_re[which]
- def get_exclude_list(self, which: str = 'exclude') -> List[str]:
+ def get_exclude_list(self, which: str = "exclude") -> List[str]:
"""Return a list of excluded regex strings.
`which` indicates which list is desired. See :meth:`exclude` for the
@@ -969,7 +969,7 @@ class Coverage(TConfigurable):
return file_reporters
def _prepare_data_for_reporting(self) -> None:
- """Re-map data before reporting, to get implicit 'combine' behavior."""
+ """Re-map data before reporting, to get implicit "combine" behavior."""
if self.config.paths:
mapped_data = CoverageData(warn=self._warn, debug=self._debug, no_disk=True)
if self._data is not None:
@@ -1238,10 +1238,10 @@ class Coverage(TConfigurable):
) -> float:
"""Generate an LCOV report of coverage results.
- Each module in 'morfs' is included in the report. 'outfile' is the
+ Each module in `morfs` is included in the report. `outfile` is the
path to write the file to, "-" will write to stdout.
- See :meth 'report' for other arguments.
+ See :meth:`report` for other arguments.
.. versionadded:: 6.3
"""
@@ -1275,30 +1275,30 @@ class Coverage(TConfigurable):
return entries
info = [
- ('coverage_version', covmod.__version__),
- ('coverage_module', covmod.__file__),
- ('tracer', self._collector.tracer_name() if self._collector is not None else "-none-"),
- ('CTracer', 'available' if HAS_CTRACER else "unavailable"),
- ('plugins.file_tracers', plugin_info(self._plugins.file_tracers)),
- ('plugins.configurers', plugin_info(self._plugins.configurers)),
- ('plugins.context_switchers', plugin_info(self._plugins.context_switchers)),
- ('configs_attempted', self.config.attempted_config_files),
- ('configs_read', self.config.config_files_read),
- ('config_file', self.config.config_file),
- ('config_contents',
- repr(self.config._config_contents) if self.config._config_contents else '-none-'
+ ("coverage_version", covmod.__version__),
+ ("coverage_module", covmod.__file__),
+ ("tracer", self._collector.tracer_name() if self._collector is not None else "-none-"),
+ ("CTracer", "available" if HAS_CTRACER else "unavailable"),
+ ("plugins.file_tracers", plugin_info(self._plugins.file_tracers)),
+ ("plugins.configurers", plugin_info(self._plugins.configurers)),
+ ("plugins.context_switchers", plugin_info(self._plugins.context_switchers)),
+ ("configs_attempted", self.config.attempted_config_files),
+ ("configs_read", self.config.config_files_read),
+ ("config_file", self.config.config_file),
+ ("config_contents",
+ repr(self.config._config_contents) if self.config._config_contents else "-none-"
),
- ('data_file', self._data.data_filename() if self._data is not None else "-none-"),
- ('python', sys.version.replace('\n', '')),
- ('platform', platform.platform()),
- ('implementation', platform.python_implementation()),
- ('executable', sys.executable),
- ('def_encoding', sys.getdefaultencoding()),
- ('fs_encoding', sys.getfilesystemencoding()),
- ('pid', os.getpid()),
- ('cwd', os.getcwd()),
- ('path', sys.path),
- ('environment', human_sorted(
+ ("data_file", self._data.data_filename() if self._data is not None else "-none-"),
+ ("python", sys.version.replace("\n", "")),
+ ("platform", platform.platform()),
+ ("implementation", platform.python_implementation()),
+ ("executable", sys.executable),
+ ("def_encoding", sys.getdefaultencoding()),
+ ("fs_encoding", sys.getfilesystemencoding()),
+ ("pid", os.getpid()),
+ ("cwd", os.getcwd()),
+ ("path", sys.path),
+ ("environment", human_sorted(
f"{k} = {v}"
for k, v in os.environ.items()
if (
@@ -1306,7 +1306,7 @@ class Coverage(TConfigurable):
(k in ("HOME", "TEMP", "TMP"))
)
)),
- ('command_line', " ".join(getattr(sys, 'argv', ['-none-']))),
+ ("command_line", " ".join(getattr(sys, "argv", ["-none-"]))),
]
if self._inorout is not None:
@@ -1324,7 +1324,7 @@ if int(os.environ.get("COVERAGE_DEBUG_CALLS", 0)): # pragma: debugg
Coverage = decorate_methods( # type: ignore[misc]
show_calls(show_args=True),
- butnot=['get_data']
+ butnot=["get_data"]
)(Coverage)
diff --git a/coverage/data.py b/coverage/data.py
index c737d593..c196ac7a 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -133,7 +133,7 @@ def combine_parallel_data(
if f == data.data_filename():
# Sometimes we are combining into a file which is one of the
# parallel files. Skip that file.
- if data._debug.should('dataio'):
+ if data._debug.should("dataio"):
data._debug.write(f"Skipping combining ourself: {f!r}")
continue
@@ -153,7 +153,7 @@ def combine_parallel_data(
delete_this_one = not keep
if combine_this_one:
- if data._debug.should('dataio'):
+ if data._debug.should("dataio"):
data._debug.write(f"Combining data file {f!r}")
file_hashes.add(sha)
try:
@@ -177,7 +177,7 @@ def combine_parallel_data(
message(f"Skipping duplicate data {rel_file_name}")
if delete_this_one:
- if data._debug.should('dataio'):
+ if data._debug.should("dataio"):
data._debug.write(f"Deleting data file {f!r}")
file_be_gone(f)
diff --git a/coverage/debug.py b/coverage/debug.py
index d56a66bb..3ef6dae8 100644
--- a/coverage/debug.py
+++ b/coverage/debug.py
@@ -50,12 +50,12 @@ class DebugControl:
self.suppress_callers = False
filters = []
- if self.should('pid'):
+ if self.should("pid"):
filters.append(add_pid_and_tid)
self.output = DebugOutputFile.get_one(
output,
file_name=file_name,
- show_process=self.should('process'),
+ show_process=self.should("process"),
filters=filters,
)
self.raw_output = self.output.outfile
@@ -86,11 +86,11 @@ class DebugControl:
"""
self.output.write(msg+"\n")
- if self.should('self'):
- caller_self = inspect.stack()[1][0].f_locals.get('self')
+ if self.should("self"):
+ caller_self = inspect.stack()[1][0].f_locals.get("self")
if caller_self is not None:
self.output.write(f"self: {caller_self!r}\n")
- if self.should('callers'):
+ if self.should("callers"):
dump_stack_frames(out=self.output, skip=1)
self.output.flush()
@@ -228,7 +228,7 @@ def add_pid_and_tid(text: str) -> str:
class AutoReprMixin:
"""A mixin implementing an automatic __repr__ for debugging."""
- auto_repr_ignore = ['auto_repr_ignore', '$coverage.object_id']
+ auto_repr_ignore = ["auto_repr_ignore", "$coverage.object_id"]
def __repr__(self) -> str:
show_attrs = (
@@ -251,7 +251,7 @@ def simplify(v: Any) -> Any: # pragma: debugging
elif isinstance(v, (list, tuple)):
return type(v)(simplify(vv) for vv in v)
elif hasattr(v, "__dict__"):
- return simplify({'.'+k: v for k, v in v.__dict__.items()})
+ return simplify({"."+k: v for k, v in v.__dict__.items()})
else:
return v
@@ -312,8 +312,8 @@ class DebugOutputFile:
if self.show_process:
self.filters.insert(0, CwdTracker().filter)
self.write(f"New process: executable: {sys.executable!r}\n")
- self.write("New process: cmd: {!r}\n".format(getattr(sys, 'argv', None)))
- if hasattr(os, 'getppid'):
+ self.write("New process: cmd: {!r}\n".format(getattr(sys, "argv", None)))
+ if hasattr(os, "getppid"):
self.write(f"New process: pid: {os.getpid()!r}, parent pid: {os.getppid()!r}\n")
@classmethod
@@ -367,8 +367,8 @@ class DebugOutputFile:
# a process-wide singleton. So stash it in sys.modules instead of
# on a class attribute. Yes, this is aggressively gross.
- SYS_MOD_NAME = '$coverage.debug.DebugOutputFile.the_one'
- SINGLETON_ATTR = 'the_one_and_is_interim'
+ SYS_MOD_NAME = "$coverage.debug.DebugOutputFile.the_one"
+ SINGLETON_ATTR = "the_one_and_is_interim"
@classmethod
def _set_singleton_data(cls, the_one: DebugOutputFile, interim: bool) -> None:
@@ -485,7 +485,7 @@ def show_calls(
def _clean_stack_line(s: str) -> str: # pragma: debugging
"""Simplify some paths in a stack trace, for compactness."""
s = s.strip()
- s = s.replace(os.path.dirname(__file__) + '/', '')
- s = s.replace(os.path.dirname(os.__file__) + '/', '')
- s = s.replace(sys.prefix + '/', '')
+ s = s.replace(os.path.dirname(__file__) + "/", "")
+ s = s.replace(os.path.dirname(os.__file__) + "/", "")
+ s = s.replace(sys.prefix + "/", "")
return s
diff --git a/coverage/env.py b/coverage/env.py
index b2229281..bdc2c785 100644
--- a/coverage/env.py
+++ b/coverage/env.py
@@ -40,7 +40,7 @@ class PYBEHAVIOR:
# Does Python conform to PEP626, Precise line numbers for debugging and other tools.
# https://www.python.org/dev/peps/pep-0626
- pep626 = CPYTHON and (PYVERSION > (3, 10, 0, 'alpha', 4))
+ pep626 = CPYTHON and (PYVERSION > (3, 10, 0, "alpha", 4))
# Is "if __debug__" optimized away?
if PYPY:
@@ -60,7 +60,7 @@ class PYBEHAVIOR:
else:
optimize_if_not_debug = 1
else:
- if PYVERSION >= (3, 8, 0, 'beta', 1):
+ if PYVERSION >= (3, 8, 0, "beta", 1):
optimize_if_not_debug = 2
else:
optimize_if_not_debug = 1
@@ -69,7 +69,7 @@ class PYBEHAVIOR:
negative_lnotab = not (PYPY and PYPYVERSION < (7, 2))
# 3.7 changed how functions with only docstrings are numbered.
- docstring_only_function = (not PYPY) and ((3, 7, 0, 'beta', 5) <= PYVERSION <= (3, 10))
+ docstring_only_function = (not PYPY) and ((3, 7, 0, "beta", 5) <= PYVERSION <= (3, 10))
# When a break/continue/return statement in a try block jumps to a finally
# block, does the finally block do the break/continue/return (pre-3.8), or
@@ -93,7 +93,7 @@ class PYBEHAVIOR:
# CPython 3.11 now jumps to the decorator line again while executing
# the decorator.
- trace_decorator_line_again = (CPYTHON and PYVERSION > (3, 11, 0, 'alpha', 3, 0))
+ trace_decorator_line_again = (CPYTHON and PYVERSION > (3, 11, 0, "alpha", 3, 0))
# Are while-true loops optimized into absolute jumps with no loop setup?
nix_while_true = (PYVERSION >= (3, 8))
@@ -125,7 +125,7 @@ class PYBEHAVIOR:
keep_constant_test = pep626
# When leaving a with-block, do we visit the with-line again for the exit?
- exit_through_with = (PYVERSION >= (3, 10, 0, 'beta'))
+ exit_through_with = (PYVERSION >= (3, 10, 0, "beta"))
# Match-case construct.
match_case = (PYVERSION >= (3, 10))
@@ -135,20 +135,20 @@ class PYBEHAVIOR:
# Modules start with a line numbered zero. This means empty modules have
# only a 0-number line, which is ignored, giving a truly empty module.
- empty_is_empty = (PYVERSION >= (3, 11, 0, 'beta', 4))
+ empty_is_empty = (PYVERSION >= (3, 11, 0, "beta", 4))
# Coverage.py specifics.
# Are we using the C-implemented trace function?
-C_TRACER = os.getenv('COVERAGE_TEST_TRACER', 'c') == 'c'
+C_TRACER = os.getenv("COVERAGE_TEST_TRACER", "c") == "c"
# Are we coverage-measuring ourselves?
-METACOV = os.getenv('COVERAGE_COVERAGE', '') != ''
+METACOV = os.getenv("COVERAGE_COVERAGE", "") != ""
# Are we running our test suite?
# Even when running tests, you can use COVERAGE_TESTING=0 to disable the
# test-specific behavior like AST checking.
-TESTING = os.getenv('COVERAGE_TESTING', '') == 'True'
+TESTING = os.getenv("COVERAGE_TESTING", "") == "True"
def debug_info() -> Iterable[Tuple[str, Any]]:
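
Background (standard Python behavior, not part of this patch): the PYBEHAVIOR
checks above compare sys.version_info-style tuples, so release levels sort
lexicographically ("alpha" < "beta" < "candidate" < "final"):

    import sys

    assert (3, 10, 0, "beta", 1) < (3, 10, 0, "final", 0)
    assert sys.version_info >= (3,)   # any Python 3 interpreter
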
diff --git a/coverage/execfile.py b/coverage/execfile.py
index ef0277d6..aac4d30b 100644
--- a/coverage/execfile.py
+++ b/coverage/execfile.py
@@ -172,7 +172,7 @@ class PyRunner:
self._prepare2()
# Create a module to serve as __main__
- main_mod = ModuleType('__main__')
+ main_mod = ModuleType("__main__")
from_pyc = self.arg0.endswith((".pyc", ".pyo"))
main_mod.__file__ = self.arg0
@@ -184,9 +184,9 @@ class PyRunner:
if self.spec is not None:
main_mod.__spec__ = self.spec
- main_mod.__builtins__ = sys.modules['builtins'] # type: ignore[attr-defined]
+ main_mod.__builtins__ = sys.modules["builtins"] # type: ignore[attr-defined]
- sys.modules['__main__'] = main_mod
+ sys.modules["__main__"] = main_mod
# Set sys.argv properly.
sys.argv = self.args
@@ -228,7 +228,7 @@ class PyRunner:
# is non-None when the exception is reported at the upper layer,
# and a nested exception is shown to the user. This getattr fixes
# it somehow? https://bitbucket.org/pypy/pypy/issue/1903
- getattr(err, '__context__', None)
+ getattr(err, "__context__", None)
# Call the excepthook.
try:
@@ -311,7 +311,7 @@ def make_code_from_pyc(filename: str) -> CodeType:
if magic != PYC_MAGIC_NUMBER:
raise NoCode(f"Bad magic number in .pyc file: {magic!r} != {PYC_MAGIC_NUMBER!r}")
- flags = struct.unpack('<L', fpyc.read(4))[0]
+ flags = struct.unpack("<L", fpyc.read(4))[0]
hash_based = flags & 0x01
if hash_based:
fpyc.read(8) # Skip the hash.
diff --git a/coverage/files.py b/coverage/files.py
index 962a9d10..2a117734 100644
--- a/coverage/files.py
+++ b/coverage/files.py
@@ -163,7 +163,7 @@ def zip_location(filename: str) -> Optional[Tuple[str, str]]:
name is in the zipfile.
"""
- for ext in ['.zip', '.whl', '.egg', '.pex']:
+ for ext in [".zip", ".whl", ".egg", ".pex"]:
zipbase, extension, inner = filename.partition(ext + sep(filename))
if extension:
zipfile = zipbase + ext
@@ -273,7 +273,7 @@ class ModuleMatcher:
if module_name.startswith(m):
if module_name == m:
return True
- if module_name[len(m)] == '.':
+ if module_name[len(m)] == ".":
# This is a module in the package
return True
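
Illustration only (re-implemented from the check above, not new code in the
patch): a module matches a package prefix when it is the package itself or a
dotted submodule of it:

    def matches(module_name: str, m: str) -> bool:
        if module_name.startswith(m):
            if module_name == m:
                return True
            if module_name[len(m)] == ".":
                return True
        return False

    assert matches("pkg.sub", "pkg") is True
    assert matches("pkgother", "pkg") is False
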
@@ -433,7 +433,7 @@ class PathAliases:
# The pattern is meant to match a file path. Let's make it absolute
# unless it already is, or is meant to match any prefix.
if not self.relative:
- if not pattern.startswith('*') and not isabs_anywhere(pattern + pattern_sep):
+ if not pattern.startswith("*") and not isabs_anywhere(pattern + pattern_sep):
pattern = abs_file(pattern)
if not pattern.endswith(pattern_sep):
pattern += pattern_sep
diff --git a/coverage/inorout.py b/coverage/inorout.py
index d0d0ef91..ff46bac0 100644
--- a/coverage/inorout.py
+++ b/coverage/inorout.py
@@ -83,20 +83,20 @@ def name_for_module(filename: str, frame: Optional[FrameType]) -> str:
"""
module_globals = frame.f_globals if frame is not None else {}
- dunder_name: str = module_globals.get('__name__', None)
+ dunder_name: str = module_globals.get("__name__", None)
- if isinstance(dunder_name, str) and dunder_name != '__main__':
+ if isinstance(dunder_name, str) and dunder_name != "__main__":
# This is the usual case: an imported module.
return dunder_name
- loader = module_globals.get('__loader__', None)
- for attrname in ('fullname', 'name'): # attribute renamed in py3.2
+ loader = module_globals.get("__loader__", None)
+ for attrname in ("fullname", "name"): # attribute renamed in py3.2
if hasattr(loader, attrname):
fullname = getattr(loader, attrname)
else:
continue
- if isinstance(fullname, str) and fullname != '__main__':
+ if isinstance(fullname, str) and fullname != "__main__":
# Module loaded via: runpy -m
return fullname
@@ -110,12 +110,12 @@ def name_for_module(filename: str, frame: Optional[FrameType]) -> str:
def module_is_namespace(mod: ModuleType) -> bool:
"""Is the module object `mod` a PEP420 namespace module?"""
- return hasattr(mod, '__path__') and getattr(mod, '__file__', None) is None
+ return hasattr(mod, "__path__") and getattr(mod, "__file__", None) is None
def module_has_file(mod: ModuleType) -> bool:
"""Does the module object `mod` have an existing __file__ ?"""
- mod__file__ = getattr(mod, '__file__', None)
+ mod__file__ = getattr(mod, "__file__", None)
if mod__file__ is None:
return False
return os.path.exists(mod__file__)
@@ -313,7 +313,7 @@ class InOrOut:
disp.reason = reason
return disp
- if original_filename.startswith('<'):
+ if original_filename.startswith("<"):
return nope(disp, "original file name is not real")
if frame is not None:
@@ -323,10 +323,10 @@ class InOrOut:
# .pyc files can be moved after compilation (for example, by being
# installed), we look for __file__ in the frame and prefer it to the
# co_filename value.
- dunder_file = frame.f_globals and frame.f_globals.get('__file__')
+ dunder_file = frame.f_globals and frame.f_globals.get("__file__")
if dunder_file:
filename = source_for_file(dunder_file)
- if original_filename and not original_filename.startswith('<'):
+ if original_filename and not original_filename.startswith("<"):
orig = os.path.basename(original_filename)
if orig != os.path.basename(filename):
# Files shouldn't be renamed when moved. This happens when
@@ -338,10 +338,10 @@ class InOrOut:
# Empty string is pretty useless.
return nope(disp, "empty string isn't a file name")
- if filename.startswith('memory:'):
+ if filename.startswith("memory:"):
return nope(disp, "memory isn't traceable")
- if filename.startswith('<'):
+ if filename.startswith("<"):
# Lots of non-file execution is represented with artificial
# file names like "<string>", "<doctest readme.txt[0]>", or
# "<exec_function>". Don't ever trace these executions, since we
@@ -484,7 +484,7 @@ class InOrOut:
msg = f"Already imported a file that will be measured: {filename}"
self.warn(msg, slug="already-imported")
warned.add(filename)
- elif self.debug and self.debug.should('trace'):
+ elif self.debug and self.debug.should("trace"):
self.debug.write(
"Didn't trace already imported file {!r}: {}".format(
disp.original_filename, disp.reason
@@ -580,9 +580,9 @@ class InOrOut:
]
matcher_names = [
- 'source_match', 'source_pkgs_match',
- 'include_match', 'omit_match',
- 'cover_match', 'pylib_match', 'third_match', 'source_in_third_match',
+ "source_match", "source_pkgs_match",
+ "include_match", "omit_match",
+ "cover_match", "pylib_match", "third_match", "source_in_third_match",
]
for matcher_name in matcher_names:
@@ -590,7 +590,7 @@ class InOrOut:
if matcher:
matcher_info = matcher.info()
else:
- matcher_info = '-none-'
+ matcher_info = "-none-"
info.append((matcher_name, matcher_info))
return info
diff --git a/coverage/misc.py b/coverage/misc.py
index e0658eb1..8cefa12e 100644
--- a/coverage/misc.py
+++ b/coverage/misc.py
@@ -218,14 +218,14 @@ class Hasher:
self.update(v[k])
else:
for k in dir(v):
- if k.startswith('__'):
+ if k.startswith("__"):
continue
a = getattr(v, k)
if inspect.isroutine(a):
continue
self.update(k)
self.update(a)
- self.hash.update(b'.')
+ self.hash.update(b".")
def hexdigest(self) -> str:
"""Retrieve the hex digest of the hash."""
@@ -292,7 +292,7 @@ def substitute_variables(text: str, variables: Mapping[str, str]) -> str:
)
"""
- dollar_groups = ('dollar', 'word1', 'word2')
+ dollar_groups = ("dollar", "word1", "word2")
def dollar_replace(match: re.Match[str]) -> str:
"""Called for each $replacement."""
@@ -302,11 +302,11 @@ def substitute_variables(text: str, variables: Mapping[str, str]) -> str:
return "$"
elif word in variables:
return variables[word]
- elif match['strict']:
+ elif match["strict"]:
msg = f"Variable {word} is undefined: {text!r}"
raise CoverageException(msg)
else:
- return match['defval']
+ return match["defval"]
text = re.sub(dollar_pattern, dollar_replace, text)
return text
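
A hedged usage sketch (behavior inferred from the branches above, not spelled
out in the patch): plain $WORD references pull from the mapping and "$$"
collapses to a literal dollar sign:

    from coverage.misc import substitute_variables

    text = substitute_variables("$NAME costs $$5", {"NAME": "coverage"})
    assert text == "coverage costs $5"
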
@@ -315,7 +315,7 @@ def substitute_variables(text: str, variables: Mapping[str, str]) -> str:
def format_local_datetime(dt: datetime.datetime) -> str:
"""Return a string with local timezone representing the date.
"""
- return dt.astimezone().strftime('%Y-%m-%d %H:%M %z')
+ return dt.astimezone().strftime("%Y-%m-%d %H:%M %z")
def import_local_file(modname: str, modfile: Optional[str] = None) -> ModuleType:
@@ -327,7 +327,7 @@ def import_local_file(modname: str, modfile: Optional[str] = None) -> ModuleType
"""
if modfile is None:
- modfile = modname + '.py'
+ modfile = modname + ".py"
spec = importlib.util.spec_from_file_location(modname, modfile)
assert spec is not None
mod = importlib.util.module_from_spec(spec)
diff --git a/coverage/multiproc.py b/coverage/multiproc.py
index e11ca7b7..2fd8ad5d 100644
--- a/coverage/multiproc.py
+++ b/coverage/multiproc.py
@@ -56,10 +56,10 @@ class Stowaway:
self.rcfile = rcfile
def __getstate__(self) -> Dict[str, str]:
- return {'rcfile': self.rcfile}
+ return {"rcfile": self.rcfile}
def __setstate__(self, state: Dict[str, str]) -> None:
- patch_multiprocessing(state['rcfile'])
+ patch_multiprocessing(state["rcfile"])
def patch_multiprocessing(rcfile: str) -> None:
@@ -96,7 +96,7 @@ def patch_multiprocessing(rcfile: str) -> None:
def get_preparation_data_with_stowaway(name: str) -> Dict[str, Any]:
"""Get the original preparation data, and also insert our stowaway."""
d = original_get_preparation_data(name)
- d['stowaway'] = Stowaway(rcfile)
+ d["stowaway"] = Stowaway(rcfile)
return d
spawn.get_preparation_data = get_preparation_data_with_stowaway
diff --git a/coverage/numbits.py b/coverage/numbits.py
index 26e5c272..71b974de 100644
--- a/coverage/numbits.py
+++ b/coverage/numbits.py
@@ -36,7 +36,7 @@ def nums_to_numbits(nums: Iterable[int]) -> bytes:
nbytes = max(nums) // 8 + 1
except ValueError:
# nums was empty.
- return b''
+ return b""
b = bytearray(nbytes)
for num in nums:
b[num//8] |= 1 << num % 8
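
Worked example (re-implemented from the loop above for illustration): line
numbers 1 and 9 each set bit 1 of their byte, so the packed form is two bytes
of value 2:

    def pack(nums):
        nbytes = max(nums) // 8 + 1
        b = bytearray(nbytes)
        for num in nums:
            b[num // 8] |= 1 << num % 8
        return bytes(b)

    assert pack([1, 9]) == b"\x02\x02"
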
@@ -82,7 +82,7 @@ def numbits_intersection(numbits1: bytes, numbits2: bytes) -> bytes:
"""
byte_pairs = zip_longest(numbits1, numbits2, fillvalue=0)
intersection_bytes = bytes(b1 & b2 for b1, b2 in byte_pairs)
- return intersection_bytes.rstrip(b'\0')
+ return intersection_bytes.rstrip(b"\0")
def numbits_any_intersection(numbits1: bytes, numbits2: bytes) -> bool:
@@ -130,7 +130,7 @@ def register_sqlite_functions(connection: sqlite3.Connection) -> None:
import sqlite3
from coverage.numbits import register_sqlite_functions
- conn = sqlite3.connect('example.db')
+ conn = sqlite3.connect("example.db")
register_sqlite_functions(conn)
c = conn.cursor()
# Kind of a nonsense query:
diff --git a/coverage/parser.py b/coverage/parser.py
index ae70b4f0..e653a9cc 100644
--- a/coverage/parser.py
+++ b/coverage/parser.py
@@ -60,7 +60,7 @@ class PythonParser:
self.exclude = exclude
# The text lines of the parsed code.
- self.lines: List[str] = self.text.split('\n')
+ self.lines: List[str] = self.text.split("\n")
# The normalized line numbers of the statements in the code. Exclusions
# are taken into account, and statements are adjusted to their first
@@ -149,13 +149,13 @@ class PythonParser:
elif toktype == token.DEDENT:
indent -= 1
elif toktype == token.NAME:
- if ttext == 'class':
+ if ttext == "class":
# Class definitions look like branches in the bytecode, so
# we need to exclude them. The simplest way is to note the
- # lines with the 'class' keyword.
+ # lines with the "class" keyword.
self.raw_classdefs.add(slineno)
elif toktype == token.OP:
- if ttext == ':' and nesting == 0:
+ if ttext == ":" and nesting == 0:
should_exclude = (elineno in self.raw_excluded) or excluding_decorators
if not excluding and should_exclude:
# Start excluding a suite. We trigger off of the colon
@@ -165,7 +165,7 @@ class PythonParser:
exclude_indent = indent
excluding = True
excluding_decorators = False
- elif ttext == '@' and first_on_line:
+ elif ttext == "@" and first_on_line:
# A decorator.
if elineno in self.raw_excluded:
excluding_decorators = True
@@ -763,7 +763,7 @@ class AstArcAnalyzer:
if node.keys[0] is not None:
return node.keys[0].lineno
else:
- # Unpacked dict literals `{**{'a':1}}` have None as the key,
+ # Unpacked dict literals `{**{"a":1}}` have None as the key,
# use the value in that case.
return node.values[0].lineno
else:
diff --git a/coverage/phystokens.py b/coverage/phystokens.py
index 4d1ee46e..d5659268 100644
--- a/coverage/phystokens.py
+++ b/coverage/phystokens.py
@@ -57,7 +57,7 @@ def _phys_tokens(toks: TokenInfos) -> TokenInfos:
if last_ttext.endswith("\\"):
inject_backslash = False
elif ttype == token.STRING:
- if "\n" in ttext and ttext.split('\n', 1)[0][-1] == '\\':
+ if "\n" in ttext and ttext.split("\n", 1)[0][-1] == "\\":
# It's a multi-line string and the first line ends with
# a backslash, so we don't need to inject another.
inject_backslash = False
@@ -113,7 +113,7 @@ def source_token_lines(source: str) -> TSourceTokenLines:
line: List[Tuple[str, str]] = []
col = 0
- source = source.expandtabs(8).replace('\r\n', '\n')
+ source = source.expandtabs(8).replace("\r\n", "\n")
tokgen = generate_tokens(source)
if env.PYBEHAVIOR.soft_keywords:
@@ -121,13 +121,13 @@ def source_token_lines(source: str) -> TSourceTokenLines:
for ttype, ttext, (sline, scol), (_, ecol), _ in _phys_tokens(tokgen):
mark_start = True
- for part in re.split('(\n)', ttext):
- if part == '\n':
+ for part in re.split("(\n)", ttext):
+ if part == "\n":
yield line
line = []
col = 0
mark_end = False
- elif part == '':
+ elif part == "":
mark_end = False
elif ttype in ws_tokens:
mark_end = False
@@ -135,7 +135,7 @@ def source_token_lines(source: str) -> TSourceTokenLines:
if mark_start and scol > col:
line.append(("ws", " " * (scol - col)))
mark_start = False
- tok_class = tokenize.tok_name.get(ttype, 'xx').lower()[:3]
+ tok_class = tokenize.tok_name.get(ttype, "xx").lower()[:3]
if ttype == token.NAME:
if keyword.iskeyword(ttext):
# Hard keywords are always keywords.
diff --git a/coverage/plugin.py b/coverage/plugin.py
index 5279c4d0..2c1ffada 100644
--- a/coverage/plugin.py
+++ b/coverage/plugin.py
@@ -519,29 +519,29 @@ class FileReporter(CoveragePluginBase):
Each line is a list of pairs, each pair is a token::
- [('key', 'def'), ('ws', ' '), ('nam', 'hello'), ('op', '('), ... ]
+ [("key", "def"), ("ws", " "), ("nam", "hello"), ("op", "("), ... ]
Each pair has a token class, and the token text. The token classes
are:
- * ``'com'``: a comment
- * ``'key'``: a keyword
- * ``'nam'``: a name, or identifier
- * ``'num'``: a number
- * ``'op'``: an operator
- * ``'str'``: a string literal
- * ``'ws'``: some white space
- * ``'txt'``: some other kind of text
+ * ``"com"``: a comment
+ * ``"key"``: a keyword
+ * ``"nam"``: a name, or identifier
+ * ``"num"``: a number
+ * ``"op"``: an operator
+ * ``"str"``: a string literal
+ * ``"ws"``: some white space
+ * ``"txt"``: some other kind of text
If you concatenate all the token texts, and then join them with
newlines, you should have your original source back.
The default implementation simply returns each line tagged as
- ``'txt'``.
+ ``"txt"``.
"""
for line in self.source().splitlines():
- yield [('txt', line)]
+ yield [("txt", line)]
def __eq__(self, other: Any) -> bool:
return isinstance(other, FileReporter) and self.filename == other.filename
diff --git a/coverage/plugin_support.py b/coverage/plugin_support.py
index 4ed02c5c..c99fb5e3 100644
--- a/coverage/plugin_support.py
+++ b/coverage/plugin_support.py
@@ -114,7 +114,7 @@ class Plugins:
"""
plugin_name = f"{self.current_module}.{plugin.__class__.__name__}"
- if self.debug and self.debug.should('plugin'):
+ if self.debug and self.debug.should("plugin"):
self.debug.write(f"Loaded plugin {self.current_module!r}: {plugin!r}")
labelled = LabelledDebug(f"plugin {self.current_module!r}", self.debug)
plugin = DebugPluginWrapper(plugin, labelled)
@@ -150,7 +150,7 @@ class LabelledDebug:
def message_prefix(self) -> str:
"""The prefix to use on messages, combining the labels."""
- prefixes = self.labels + ['']
+ prefixes = self.labels + [""]
return ":\n".join(" "*i+label for i, label in enumerate(prefixes))
def write(self, message: str) -> None:
diff --git a/coverage/python.py b/coverage/python.py
index 744ab4cb..3deb6819 100644
--- a/coverage/python.py
+++ b/coverage/python.py
@@ -63,12 +63,12 @@ def get_python_source(filename: str) -> str:
raise NoSource(f"No source for code: '{filename}'.")
# Replace \f because of http://bugs.python.org/issue19035
- source_bytes = source_bytes.replace(b'\f', b' ')
+ source_bytes = source_bytes.replace(b"\f", b" ")
source = source_bytes.decode(source_encoding(source_bytes), "replace")
# Python code should always end with a line with a newline.
- if source and source[-1] != '\n':
- source += '\n'
+ if source and source[-1] != "\n":
+ source += "\n"
return source
@@ -127,7 +127,7 @@ def source_for_file(filename: str) -> str:
def source_for_morf(morf: TMorf) -> str:
"""Get the source filename for the module-or-file `morf`."""
- if hasattr(morf, '__file__') and morf.__file__:
+ if hasattr(morf, "__file__") and morf.__file__:
filename = morf.__file__
elif isinstance(morf, types.ModuleType):
# A module should have had .__file__, otherwise we can't use it.
@@ -157,9 +157,9 @@ class PythonFileReporter(FileReporter):
fname = canonical_filename(filename)
super().__init__(fname)
- if hasattr(morf, '__name__'):
+ if hasattr(morf, "__name__"):
name = morf.__name__.replace(".", os.sep)
- if os.path.basename(filename).startswith('__init__.'):
+ if os.path.basename(filename).startswith("__init__."):
name += os.sep + "__init__"
name += ".py"
else:
@@ -183,7 +183,7 @@ class PythonFileReporter(FileReporter):
if self._parser is None:
self._parser = PythonParser(
filename=self.filename,
- exclude=self.coverage._exclude_regex('exclude'),
+ exclude=self.coverage._exclude_regex("exclude"),
)
self._parser.parse_source()
return self._parser
@@ -244,7 +244,7 @@ class PythonFileReporter(FileReporter):
_, ext = os.path.splitext(self.filename)
# Anything named *.py* should be Python.
- if ext.startswith('.py'):
+ if ext.startswith(".py"):
return True
# A file with no extension should be Python.
if not ext:
diff --git a/coverage/pytracer.py b/coverage/pytracer.py
index 6723c2a1..81832b0f 100644
--- a/coverage/pytracer.py
+++ b/coverage/pytracer.py
@@ -20,11 +20,11 @@ from coverage.types import (
)
# We need the YIELD_VALUE opcode below, in a comparison-friendly form.
-RESUME = dis.opmap.get('RESUME')
-RETURN_VALUE = dis.opmap['RETURN_VALUE']
+RESUME = dis.opmap.get("RESUME")
+RETURN_VALUE = dis.opmap["RETURN_VALUE"]
if RESUME is None:
- YIELD_VALUE = dis.opmap['YIELD_VALUE']
- YIELD_FROM = dis.opmap['YIELD_FROM']
+ YIELD_VALUE = dis.opmap["YIELD_VALUE"]
+ YIELD_FROM = dis.opmap["YIELD_FROM"]
YIELD_FROM_OFFSET = 0 if env.PYPY else 2
# When running meta-coverage, this file can try to trace itself, which confuses
@@ -78,7 +78,7 @@ class PyTracer(TTracer):
self.in_atexit = False
# On exit, self.in_atexit = True
- atexit.register(setattr, self, 'in_atexit', True)
+ atexit.register(setattr, self, "in_atexit", True)
# Cache a bound method on the instance, so that we don't have to
# re-create a bound method object all the time.
@@ -150,10 +150,10 @@ class PyTracer(TTracer):
)
return None
- # if event != 'call' and frame.f_code.co_filename != self.cur_file_name:
+ # if event != "call" and frame.f_code.co_filename != self.cur_file_name:
# self.log("---\n*", frame.f_code.co_filename, self.cur_file_name, frame.f_lineno)
- if event == 'call':
+ if event == "call":
# Should we start a new context?
if self.should_start_context and self.context is None:
context_maybe = self.should_start_context(frame)
@@ -215,13 +215,13 @@ class PyTracer(TTracer):
oparg = frame.f_code.co_code[frame.f_lasti + 1]
real_call = (oparg == 0)
else:
- real_call = (getattr(frame, 'f_lasti', -1) < 0)
+ real_call = (getattr(frame, "f_lasti", -1) < 0)
if real_call:
self.last_line = -frame.f_code.co_firstlineno
else:
self.last_line = frame.f_lineno
- elif event == 'line':
+ elif event == "line":
# Record an executed line.
if self.cur_file_data is not None:
flineno: TLineNo = frame.f_lineno
@@ -232,7 +232,7 @@ class PyTracer(TTracer):
cast(Set[TLineNo], self.cur_file_data).add(flineno)
self.last_line = flineno
- elif event == 'return':
+ elif event == "return":
if self.trace_arcs and self.cur_file_data:
# Record an arc leaving the function, but beware that a
# "return" event might just mean yielding from a generator.
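
Background (standard-library behavior, not part of the patch): the "call",
"line" and "return" strings compared above are the event names CPython passes
to a trace function installed with sys.settrace:

    import sys

    def tracer(frame, event, arg):
        # event is one of "call", "line", "return", "exception", ...
        print(event, frame.f_lineno)
        return tracer          # keep tracing inside this frame

    def demo():
        x = 1
        return x

    sys.settrace(tracer)
    demo()
    sys.settrace(None)
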
diff --git a/coverage/results.py b/coverage/results.py
index 2731700e..ea6dc207 100644
--- a/coverage/results.py
+++ b/coverage/results.py
@@ -357,7 +357,7 @@ def format_lines(
dest = (ex if ex > 0 else "exit")
line_items.append((line, f"{line}->{dest}"))
- ret = ', '.join(t[-1] for t in sorted(line_items))
+ ret = ", ".join(t[-1] for t in sorted(line_items))
return ret
diff --git a/coverage/templite.py b/coverage/templite.py
index 897a58f9..11ea847b 100644
--- a/coverage/templite.py
+++ b/coverage/templite.py
@@ -109,11 +109,11 @@ class Templite:
<p>You are interested in {{topic}}.</p>
{% endif %}
''',
- {'upper': str.upper},
+ {"upper": str.upper},
)
text = templite.render({
- 'name': "Ned",
- 'topics': ['Python', 'Geometry', 'Juggling'],
+ "name": "Ned",
+ "topics": ["Python", "Geometry", "Juggling"],
})
"""
@@ -161,37 +161,37 @@ class Templite:
squash = in_joined = False
for token in tokens:
- if token.startswith('{'):
+ if token.startswith("{"):
start, end = 2, -2
- squash = (token[-3] == '-')
+ squash = (token[-3] == "-")
if squash:
end = -3
- if token.startswith('{#'):
+ if token.startswith("{#"):
# Comment: ignore it and move on.
continue
- elif token.startswith('{{'):
+ elif token.startswith("{{"):
# An expression to evaluate.
expr = self._expr_code(token[start:end].strip())
buffered.append("to_str(%s)" % expr)
else:
- # token.startswith('{%')
+ # token.startswith("{%")
# Action tag: split into words and parse further.
flush_output()
words = token[start:end].strip().split()
- if words[0] == 'if':
+ if words[0] == "if":
# An if statement: evaluate the expression to determine if.
if len(words) != 2:
self._syntax_error("Don't understand if", token)
- ops_stack.append('if')
+ ops_stack.append("if")
code.add_line("if %s:" % self._expr_code(words[1]))
code.indent()
- elif words[0] == 'for':
+ elif words[0] == "for":
# A loop: iterate over expression result.
- if len(words) != 4 or words[2] != 'in':
+ if len(words) != 4 or words[2] != "in":
self._syntax_error("Don't understand for", token)
- ops_stack.append('for')
+ ops_stack.append("for")
self._variable(words[1], self.loop_vars)
code.add_line(
"for c_{} in {}:".format(
@@ -200,10 +200,10 @@ class Templite:
)
)
code.indent()
- elif words[0] == 'joined':
- ops_stack.append('joined')
+ elif words[0] == "joined":
+ ops_stack.append("joined")
in_joined = True
- elif words[0].startswith('end'):
+ elif words[0].startswith("end"):
# Endsomething. Pop the ops stack.
if len(words) != 1:
self._syntax_error("Don't understand end", token)
@@ -213,7 +213,7 @@ class Templite:
start_what = ops_stack.pop()
if start_what != end_what:
self._syntax_error("Mismatched end tag", end_what)
- if end_what == 'joined':
+ if end_what == "joined":
in_joined = False
else:
code.dedent()
@@ -236,14 +236,14 @@ class Templite:
for var_name in self.all_vars - self.loop_vars:
vars_code.add_line(f"c_{var_name} = context[{var_name!r}]")
- code.add_line('return "".join(result)')
+ code.add_line("return ''.join(result)")
code.dedent()
self._render_function = cast(
Callable[
[Dict[str, Any], Callable[..., Any]],
str
],
- code.get_globals()['render_function'],
+ code.get_globals()["render_function"],
)
def _expr_code(self, expr: str) -> str:
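
A minimal usage sketch of Templite, following its docstring above (the
"|upper" filter syntax is assumed from the filter dict shown there):

    from coverage.templite import Templite

    templite = Templite(
        "Hello {{name|upper}}: {% for t in topics %}{{t}} {% endfor %}",
        {"upper": str.upper},
    )
    text = templite.render({"name": "Ned", "topics": ["Python", "Geometry"]})
    # -> "Hello NED: Python Geometry "
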
diff --git a/coverage/version.py b/coverage/version.py
index ace1d259..3a27a388 100644
--- a/coverage/version.py
+++ b/coverage/version.py
@@ -21,10 +21,10 @@ def _make_version(
dev: int = 0,
) -> str:
"""Create a readable version string from version_info tuple components."""
- assert releaselevel in ['alpha', 'beta', 'candidate', 'final']
+ assert releaselevel in ["alpha", "beta", "candidate", "final"]
version = "%d.%d.%d" % (major, minor, micro)
- if releaselevel != 'final':
- short = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}[releaselevel]
+ if releaselevel != "final":
+ short = {"alpha": "a", "beta": "b", "candidate": "rc"}[releaselevel]
version += f"{short}{serial}"
if dev != 0:
version += f".dev{dev}"
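
Worked example (illustration only; the argument order is assumed to mirror the
version_info tuple):

    from coverage.version import _make_version

    assert _make_version(7, 2, 2, "final", 0, 0) == "7.2.2"
    assert _make_version(7, 2, 2, "beta", 1, 0) == "7.2.2b1"
    assert _make_version(7, 3, 0, "alpha", 0, 2) == "7.3.0a0.dev2"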