Diffstat:
-rw-r--r--  coverage/annotate.py         21
-rw-r--r--  coverage/cmdline.py          21
-rw-r--r--  coverage/collector.py        18
-rw-r--r--  coverage/config.py           10
-rw-r--r--  coverage/control.py          20
-rw-r--r--  coverage/data.py              8
-rw-r--r--  coverage/debug.py            36
-rw-r--r--  coverage/disposition.py       6
-rw-r--r--  coverage/env.py               2
-rw-r--r--  coverage/execfile.py         12
-rw-r--r--  coverage/files.py            16
-rw-r--r--  coverage/html.py             24
-rw-r--r--  coverage/inorout.py          30
-rw-r--r--  coverage/jsonreport.py        3
-rw-r--r--  coverage/misc.py              6
-rw-r--r--  coverage/multiproc.py         2
-rw-r--r--  coverage/parser.py           73
-rw-r--r--  coverage/phystokens.py        4
-rw-r--r--  coverage/plugin.py            8
-rw-r--r--  coverage/plugin_support.py   54
-rw-r--r--  coverage/python.py           10
-rw-r--r--  coverage/pytracer.py          4
-rw-r--r--  coverage/report.py            2
-rw-r--r--  coverage/results.py           4
-rw-r--r--  coverage/sqldata.py          42
-rw-r--r--  coverage/summary.py          30
-rw-r--r--  coverage/templite.py         16
-rw-r--r--  coverage/tomlconfig.py        7
-rw-r--r--  coverage/xmlreport.py         5
29 files changed, 243 insertions(+), 251 deletions(-)
diff --git a/coverage/annotate.py b/coverage/annotate.py
index 999ab6e5..a6ee4636 100644
--- a/coverage/annotate.py
+++ b/coverage/annotate.py
@@ -3,7 +3,6 @@
"""Source file annotation for coverage.py."""
-import io
import os
import re
@@ -14,7 +13,7 @@ from coverage.report import get_analysis_to_report
os = isolate_module(os)
-class AnnotateReporter(object):
+class AnnotateReporter:
"""Generate annotated source files showing line coverage.
This reporter creates annotated copies of the measured source files. Each
@@ -74,7 +73,7 @@ class AnnotateReporter(object):
else:
dest_file = fr.filename + ",cover"
- with io.open(dest_file, 'w', encoding='utf8') as dest:
+ with open(dest_file, 'w', encoding='utf8') as dest:
i = 0
j = 0
covered = True
@@ -87,22 +86,22 @@ class AnnotateReporter(object):
if i < len(statements) and statements[i] == lineno:
covered = j >= len(missing) or missing[j] > lineno
if self.blank_re.match(line):
- dest.write(u' ')
+ dest.write(' ')
elif self.else_re.match(line):
# Special logic for lines containing only 'else:'.
if i >= len(statements) and j >= len(missing):
- dest.write(u'! ')
+ dest.write('! ')
elif i >= len(statements) or j >= len(missing):
- dest.write(u'> ')
+ dest.write('> ')
elif statements[i] == missing[j]:
- dest.write(u'! ')
+ dest.write('! ')
else:
- dest.write(u'> ')
+ dest.write('> ')
elif lineno in excluded:
- dest.write(u'- ')
+ dest.write('- ')
elif covered:
- dest.write(u'> ')
+ dest.write('> ')
else:
- dest.write(u'! ')
+ dest.write('! ')
dest.write(line)
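
Note: this hunk leans on two Python 3 guarantees — the built-in open() takes an encoding argument, making io.open redundant, and every str literal is already Unicode, so the u"" prefixes add nothing. A minimal standalone sketch of the same pattern (the annotate_demo.cover path is made up for illustration):

    # Python 3 only: built-in open() accepts encoding; u"" prefixes are no-ops.
    from pathlib import Path

    dest_file = Path("annotate_demo.cover")        # hypothetical output path
    with open(dest_file, "w", encoding="utf8") as dest:
        assert "! " == u"! "                       # same type, same value in Python 3
        dest.write("! ")                           # coverage marker, as in annotate.py
        dest.write("print('hello')\n")             # the source line being annotated
    print(dest_file.read_text(encoding="utf8"))
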
diff --git a/coverage/cmdline.py b/coverage/cmdline.py
index fa473509..318cd5a0 100644
--- a/coverage/cmdline.py
+++ b/coverage/cmdline.py
@@ -3,7 +3,6 @@
"""Command-line support for coverage.py."""
-from __future__ import print_function
import glob
import optparse
@@ -24,7 +23,7 @@ from coverage.misc import BaseCoverageException, ExceptionDuringRun, NoSource
from coverage.results import should_fail_under
-class Opts(object):
+class Opts:
"""A namespace class for individual options we'll build parsers from."""
append = optparse.make_option(
@@ -195,7 +194,7 @@ class Opts(object):
)
-class CoverageOptionParser(optparse.OptionParser, object):
+class CoverageOptionParser(optparse.OptionParser):
"""Base OptionParser for coverage.py.
Problems don't exit the program.
@@ -204,7 +203,7 @@ class CoverageOptionParser(optparse.OptionParser, object):
"""
def __init__(self, *args, **kwargs):
- super(CoverageOptionParser, self).__init__(
+ super().__init__(
add_help_option=False, *args, **kwargs
)
self.set_defaults(
@@ -251,7 +250,7 @@ class CoverageOptionParser(optparse.OptionParser, object):
"""
try:
- options, args = super(CoverageOptionParser, self).parse_args(args, options)
+ options, args = super().parse_args(args, options)
except self.OptionParserError:
return False, None, None
return True, options, args
@@ -266,7 +265,7 @@ class GlobalOptionParser(CoverageOptionParser):
"""Command-line parser for coverage.py global option arguments."""
def __init__(self):
- super(GlobalOptionParser, self).__init__()
+ super().__init__()
self.add_options([
Opts.help,
@@ -289,7 +288,7 @@ class CmdOptionParser(CoverageOptionParser):
"""
if usage:
usage = "%prog " + usage
- super(CmdOptionParser, self).__init__(
+ super().__init__(
usage=usage,
description=description,
)
@@ -306,10 +305,10 @@ class CmdOptionParser(CoverageOptionParser):
def get_prog_name(self):
"""Override of an undocumented function in optparse.OptionParser."""
- program_name = super(CmdOptionParser, self).get_prog_name()
+ program_name = super().get_prog_name()
# Include the sub-command for this parser as part of the command.
- return "{command} {subcommand}".format(command=program_name, subcommand=self.cmd)
+ return f"{program_name} {self.cmd}"
GLOBAL_ARGS = [
@@ -498,7 +497,7 @@ def show_help(error=None, topic=None, parser=None):
if error:
print(error, file=sys.stderr)
- print("Use '%s help' for help." % (program_name,), file=sys.stderr)
+ print(f"Use '{program_name} help' for help.", file=sys.stderr)
elif parser:
print(parser.format_help().strip())
print()
@@ -514,7 +513,7 @@ def show_help(error=None, topic=None, parser=None):
OK, ERR, FAIL_UNDER = 0, 1, 2
-class CoverageScript(object):
+class CoverageScript:
"""The command-line interface to coverage.py."""
def __init__(self):
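
Note: the cmdline.py changes drop "from __future__ import print_function" (print is always a function on Python 3) and collapse super(Class, self) to the zero-argument form, which Python 3 resolves from the enclosing class. A small sketch of the super() pattern, using made-up parser classes in place of the real ones:

    import optparse

    class BaseParser(optparse.OptionParser):        # stand-in for CoverageOptionParser
        def __init__(self, *args, **kwargs):
            # Zero-argument super() is equivalent to super(BaseParser, self) here.
            super().__init__(add_help_option=False, *args, **kwargs)

    class SubcommandParser(BaseParser):             # stand-in for CmdOptionParser
        def __init__(self, cmd):
            super().__init__(usage="%prog " + cmd)
            self.cmd = cmd

        def get_prog_name(self):
            program_name = super().get_prog_name()
            return f"{program_name} {self.cmd}"     # same f-string shape as the patch

    print(SubcommandParser("run").get_prog_name())
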
diff --git a/coverage/collector.py b/coverage/collector.py
index e6bb9829..fd88e37d 100644
--- a/coverage/collector.py
+++ b/coverage/collector.py
@@ -32,7 +32,7 @@ except ImportError:
CTracer = None
-class Collector(object):
+class Collector:
"""Collects trace data.
Creates a Tracer object for each thread, since they track stack
@@ -138,7 +138,7 @@ class Collector(object):
raise CoverageException("Don't understand concurrency=%s" % concurrency)
except ImportError:
raise CoverageException(
- "Couldn't trace with concurrency=%s, the module isn't installed." % (
+ "Couldn't trace with concurrency={}, the module isn't installed.".format(
self.concurrency,
)
)
@@ -161,7 +161,7 @@ class Collector(object):
self.supports_plugins = False
def __repr__(self):
- return "<Collector at 0x%x: %s>" % (id(self), self.tracer_name())
+ return f"<Collector at 0x{id(self):x}: {self.tracer_name()}>"
def use_data(self, covdata, context):
"""Use `covdata` for recording data."""
@@ -243,7 +243,7 @@ class Collector(object):
tracer.concur_id_func = self.concur_id_func
elif self.concur_id_func:
raise CoverageException(
- "Can't support concurrency=%s with %s, only threads are supported" % (
+ "Can't support concurrency={} with {}, only threads are supported".format(
self.concurrency, self.tracer_name(),
)
)
@@ -331,9 +331,9 @@ class Collector(object):
if self._collectors[-1] is not self:
print("self._collectors:")
for c in self._collectors:
- print(" {!r}\n{}".format(c, c.origin))
+ print(f" {c!r}\n{c.origin}")
assert self._collectors[-1] is self, (
- "Expected current collector to be %r, but it's %r" % (self, self._collectors[-1])
+ f"Expected current collector to be {self!r}, but it's {self._collectors[-1]!r}"
)
self.pause()
@@ -352,7 +352,7 @@ class Collector(object):
if stats:
print("\nCoverage.py tracer stats:")
for k in sorted(stats.keys()):
- print("%20s: %s" % (k, stats[k]))
+ print(f"{k:>20}: {stats[k]}")
if self.threading:
self.threading.settrace(None)
@@ -389,7 +389,7 @@ class Collector(object):
file_tracer = disposition.file_tracer
plugin = file_tracer._coverage_plugin
plugin_name = plugin._coverage_plugin_name
- self.warn("Disabling plug-in {!r} due to previous exception".format(plugin_name))
+ self.warn(f"Disabling plug-in {plugin_name!r} due to previous exception")
plugin._coverage_enabled = False
disposition.trace = False
@@ -418,7 +418,7 @@ class Collector(object):
else:
raise runtime_err
- return dict((self.cached_mapped_file(k), v) for k, v in items if v)
+ return {self.cached_mapped_file(k): v for k, v in items if v}
def plugin_was_disabled(self, plugin):
"""Record that `plugin` was disabled during the run."""
diff --git a/coverage/config.py b/coverage/config.py
index 608c027a..136e2976 100644
--- a/coverage/config.py
+++ b/coverage/config.py
@@ -128,7 +128,7 @@ class HandyConfigParser(configparser.RawConfigParser):
re.compile(value)
except re.error as e:
raise CoverageException(
- "Invalid [%s].%s value %r: %s" % (section, option, value, e)
+ f"Invalid [{section}].{option} value {value!r}: {e}"
)
if value:
value_list.append(value)
@@ -154,7 +154,7 @@ DEFAULT_PARTIAL_ALWAYS = [
]
-class CoverageConfig(object):
+class CoverageConfig:
"""Coverage.py configuration.
The attributes of this class are the various settings that control the
@@ -276,7 +276,7 @@ class CoverageConfig(object):
try:
files_read = cp.read(filename)
except (configparser.Error, TomlDecodeError) as err:
- raise CoverageException("Couldn't read config file %s: %s" % (filename, err))
+ raise CoverageException(f"Couldn't read config file {filename}: {err}")
if not files_read:
return False
@@ -289,7 +289,7 @@ class CoverageConfig(object):
if was_set:
any_set = True
except ValueError as err:
- raise CoverageException("Couldn't read config file %s: %s" % (filename, err))
+ raise CoverageException(f"Couldn't read config file {filename}: {err}")
# Check that there are no unrecognized options.
all_options = collections.defaultdict(set)
@@ -302,7 +302,7 @@ class CoverageConfig(object):
if real_section:
for unknown in set(cp.options(section)) - options:
raise CoverageException(
- "Unrecognized option '[%s] %s=' in config file %s" % (
+ "Unrecognized option '[{}] {}=' in config file {}".format(
real_section, unknown, filename
)
)
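
Note: not every message becomes an f-string; multi-line calls such as the "Unrecognized option" error keep str.format(), presumably because inlining the long expressions would push past the line-length limit. The two forms are equivalent; a quick sketch with placeholder values:

    real_section, unknown, filename = "run", "branchh", "setup.cfg"   # placeholder values

    msg_format = "Unrecognized option '[{}] {}=' in config file {}".format(
        real_section, unknown, filename
    )
    msg_fstring = f"Unrecognized option '[{real_section}] {unknown}=' in config file {filename}"

    assert msg_format == msg_fstring
    print(msg_fstring)
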
diff --git a/coverage/control.py b/coverage/control.py
index 3ccf313e..b3c5b7dc 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -60,7 +60,7 @@ def override_config(cov, **kwargs):
_DEFAULT_DATAFILE = DefaultValue("MISSING")
-class Coverage(object):
+class Coverage:
"""Programmatic access to coverage.py.
To use::
@@ -290,7 +290,7 @@ class Coverage(object):
# '[run] _crash' will raise an exception if the value is close by in
# the call stack, for testing error handling.
if self.config._crash and self.config._crash in short_stack(limit=4):
- raise Exception("Crashing because called by {}".format(self.config._crash))
+ raise Exception(f"Crashing because called by {self.config._crash}")
def _write_startup_debug(self):
"""Write out debug info at startup if needed."""
@@ -333,9 +333,9 @@ class Coverage(object):
reason = self._inorout.check_include_omit_etc(filename, frame)
if self._debug.should('trace'):
if not reason:
- msg = "Including %r" % (filename,)
+ msg = f"Including {filename!r}"
else:
- msg = "Not including %r: %s" % (filename, reason)
+ msg = f"Not including {filename!r}: {reason}"
self._debug.write(msg)
return not reason
@@ -358,7 +358,7 @@ class Coverage(object):
self._warnings.append(msg)
if slug:
- msg = "%s (%s)" % (msg, slug)
+ msg = f"{msg} ({slug})"
if self._debug.should('pid'):
msg = "[%d] %s" % (os.getpid(), msg)
sys.stderr.write("Coverage.py warning: %s\n" % msg)
@@ -442,7 +442,7 @@ class Coverage(object):
context_switchers = [should_start_context_test_function]
else:
raise CoverageException(
- "Don't understand dynamic_context setting: {!r}".format(dycon)
+ f"Don't understand dynamic_context setting: {dycon!r}"
)
context_switchers.extend(
@@ -477,7 +477,7 @@ class Coverage(object):
# Early warning if we aren't going to be able to support plugins.
if self._plugins.file_tracers and not self._collector.supports_plugins:
self._warn(
- "Plugin file tracers (%s) aren't supported with %s" % (
+ "Plugin file tracers ({}) aren't supported with {}".format(
", ".join(
plugin._coverage_plugin_name
for plugin in self._plugins.file_tracers
@@ -561,7 +561,7 @@ class Coverage(object):
def _atexit(self):
"""Clean up on process shutdown."""
if self._debug.should("process"):
- self._debug.write("atexit: pid: {}, instance: {!r}".format(os.getpid(), self))
+ self._debug.write(f"atexit: pid: {os.getpid()}, instance: {self!r}")
if self._started:
self.stop()
if self._auto_save:
@@ -821,7 +821,7 @@ class Coverage(object):
file_reporter = plugin.file_reporter(mapped_morf)
if file_reporter is None:
raise CoverageException(
- "Plugin %r did not provide a file reporter for %r." % (
+ "Plugin {!r} did not provide a file reporter for {!r}.".format(
plugin._coverage_plugin_name, morf
)
)
@@ -1061,7 +1061,7 @@ class Coverage(object):
('cwd', os.getcwd()),
('path', sys.path),
('environment', sorted(
- ("%s = %s" % (k, v))
+ f"{k} = {v}"
for k, v in os.environ.items()
if any(slug in k for slug in ("COV", "PY"))
)),
diff --git a/coverage/data.py b/coverage/data.py
index 5dd1dfe3..cf258328 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -90,7 +90,7 @@ def combine_parallel_data(data, aliases=None, data_paths=None, strict=False, kee
pattern = os.path.join(os.path.abspath(p), localdot)
files_to_combine.extend(glob.glob(pattern))
else:
- raise CoverageException("Couldn't combine from non-existent path '%s'" % (p,))
+ raise CoverageException(f"Couldn't combine from non-existent path '{p}'")
if strict and not files_to_combine:
raise CoverageException("No data to combine")
@@ -101,10 +101,10 @@ def combine_parallel_data(data, aliases=None, data_paths=None, strict=False, kee
# Sometimes we are combining into a file which is one of the
# parallel files. Skip that file.
if data._debug.should('dataio'):
- data._debug.write("Skipping combining ourself: %r" % (f,))
+ data._debug.write(f"Skipping combining ourself: {f!r}")
continue
if data._debug.should('dataio'):
- data._debug.write("Combining data file %r" % (f,))
+ data._debug.write(f"Combining data file {f!r}")
try:
new_data = CoverageData(f, debug=data._debug)
new_data.read()
@@ -118,7 +118,7 @@ def combine_parallel_data(data, aliases=None, data_paths=None, strict=False, kee
files_combined += 1
if not keep:
if data._debug.should('dataio'):
- data._debug.write("Deleting combined data file %r" % (f,))
+ data._debug.write(f"Deleting combined data file {f!r}")
file_be_gone(f)
if strict and not files_combined:
diff --git a/coverage/debug.py b/coverage/debug.py
index efcaca2a..f86e0244 100644
--- a/coverage/debug.py
+++ b/coverage/debug.py
@@ -29,7 +29,7 @@ FORCED_DEBUG = []
FORCED_DEBUG_FILE = None
-class DebugControl(object):
+class DebugControl:
"""Control and output for debugging."""
show_repr_attr = False # For SimpleReprMixin
@@ -50,7 +50,7 @@ class DebugControl(object):
self.raw_output = self.output.outfile
def __repr__(self):
- return "<DebugControl options=%r raw_output=%r>" % (self.options, self.raw_output)
+ return f"<DebugControl options={self.options!r} raw_output={self.raw_output!r}>"
def should(self, option):
"""Decide whether to output debug information in category `option`."""
@@ -78,7 +78,7 @@ class DebugControl(object):
if self.should('self'):
caller_self = inspect.stack()[1][0].f_locals.get('self')
if caller_self is not None:
- self.output.write("self: {!r}\n".format(caller_self))
+ self.output.write(f"self: {caller_self!r}\n")
if self.should('callers'):
dump_stack_frames(out=self.output, skip=1)
self.output.flush()
@@ -87,14 +87,14 @@ class DebugControl(object):
class DebugControlString(DebugControl):
"""A `DebugControl` that writes to a StringIO, for testing."""
def __init__(self, options):
- super(DebugControlString, self).__init__(options, io.StringIO())
+ super().__init__(options, io.StringIO())
def get_output(self):
"""Get the output text from the `DebugControl`."""
return self.raw_output.getvalue()
-class NoDebugging(object):
+class NoDebugging:
"""A replacement for DebugControl that will never try to do anything."""
def should(self, option): # pylint: disable=unused-argument
"""Should we write debug messages? Never."""
@@ -184,12 +184,12 @@ def short_id(id64):
def add_pid_and_tid(text):
"""A filter to add pid and tid to debug messages."""
# Thread ids are useful, but too long. Make a shorter one.
- tid = "{:04x}".format(short_id(_thread.get_ident()))
- text = "{:5d}.{}: {}".format(os.getpid(), tid, text)
+ tid = f"{short_id(_thread.get_ident()):04x}"
+ text = f"{os.getpid():5d}.{tid}: {text}"
return text
-class SimpleReprMixin(object):
+class SimpleReprMixin:
"""A mixin implementing a simple __repr__."""
simple_repr_ignore = ['simple_repr_ignore', '$coverage.object_id']
@@ -203,7 +203,7 @@ class SimpleReprMixin(object):
return "<{klass} @0x{id:x} {attrs}>".format(
klass=self.__class__.__name__,
id=id(self),
- attrs=" ".join("{}={!r}".format(k, v) for k, v in show_attrs),
+ attrs=" ".join(f"{k}={v!r}" for k, v in show_attrs),
)
@@ -246,7 +246,7 @@ def filter_text(text, filters):
return text + ending
-class CwdTracker(object): # pragma: debugging
+class CwdTracker: # pragma: debugging
"""A class to add cwd info to debug messages."""
def __init__(self):
self.cwd = None
@@ -255,12 +255,12 @@ class CwdTracker(object): # pragma: debugging
"""Add a cwd message for each new cwd."""
cwd = os.getcwd()
if cwd != self.cwd:
- text = "cwd is now {!r}\n".format(cwd) + text
+ text = f"cwd is now {cwd!r}\n" + text
self.cwd = cwd
return text
-class DebugOutputFile(object): # pragma: debugging
+class DebugOutputFile: # pragma: debugging
"""A file-like object that includes pid and cwd information."""
def __init__(self, outfile, show_process, filters):
self.outfile = outfile
@@ -269,10 +269,10 @@ class DebugOutputFile(object): # pragma: debugging
if self.show_process:
self.filters.insert(0, CwdTracker().filter)
- self.write("New process: executable: %r\n" % (sys.executable,))
- self.write("New process: cmd: %r\n" % (getattr(sys, 'argv', None),))
+ self.write(f"New process: executable: {sys.executable!r}\n")
+ self.write("New process: cmd: {!r}\n".format(getattr(sys, 'argv', None)))
if hasattr(os, 'getppid'):
- self.write("New process: pid: %r, parent pid: %r\n" % (os.getpid(), os.getppid()))
+ self.write(f"New process: pid: {os.getpid()!r}, parent pid: {os.getppid()!r}\n")
SYS_MOD_NAME = '$coverage.debug.DebugOutputFile.the_one'
@@ -371,7 +371,7 @@ def show_calls(show_args=True, show_stack=False, show_return=False): # pragma
def _wrapper(self, *args, **kwargs):
oid = getattr(self, OBJ_ID_ATTR, None)
if oid is None:
- oid = "{:08d} {:04d}".format(os.getpid(), next(OBJ_IDS))
+ oid = f"{os.getpid():08d} {next(OBJ_IDS):04d}"
setattr(self, OBJ_ID_ATTR, oid)
extra = ""
if show_args:
@@ -387,11 +387,11 @@ def show_calls(show_args=True, show_stack=False, show_return=False): # pragma
extra += " @ "
extra += "; ".join(_clean_stack_line(l) for l in short_stack().splitlines())
callid = next(CALLS)
- msg = "{} {:04d} {}{}\n".format(oid, callid, func.__name__, extra)
+ msg = f"{oid} {callid:04d} {func.__name__}{extra}\n"
DebugOutputFile.get_one(interim=True).write(msg)
ret = func(self, *args, **kwargs)
if show_return:
- msg = "{} {:04d} {} return {!r}\n".format(oid, callid, func.__name__, ret)
+ msg = f"{oid} {callid:04d} {func.__name__} return {ret!r}\n"
DebugOutputFile.get_one(interim=True).write(msg)
return ret
return _wrapper
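
Note: debug.py shows that f-strings carry the full format-spec mini-language: !r for repr, widths like :5d, alignment like :>20, and zero-padded hex like :04x behave exactly as they did with str.format(). A sketch with a simplified thread-id shortener (not the real short_id):

    import os
    import _thread

    def add_pid_and_tid_demo(text):
        # Simplified stand-in for debug.add_pid_and_tid / short_id.
        tid = f"{_thread.get_ident() & 0xFFFF:04x}"   # 4-digit zero-padded hex
        return f"{os.getpid():5d}.{tid}: {text}"      # pid right-aligned in 5 columns

    print(add_pid_and_tid_demo("starting tracer"))
    print(f"{'k':>20}: value")                        # width/alignment, like the stats dump
    print(f"repr conversion: {[1, 2]!r}")             # !r works inside f-strings too
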
diff --git a/coverage/disposition.py b/coverage/disposition.py
index 9b9a997d..dfcc6def 100644
--- a/coverage/disposition.py
+++ b/coverage/disposition.py
@@ -4,7 +4,7 @@
"""Simple value objects for tracking what to do with files."""
-class FileDisposition(object):
+class FileDisposition:
"""A simple value type for recording what to do with a file."""
pass
@@ -29,9 +29,9 @@ def disposition_init(cls, original_filename):
def disposition_debug_msg(disp):
"""Make a nice debug message of what the FileDisposition is doing."""
if disp.trace:
- msg = "Tracing %r" % (disp.original_filename,)
+ msg = f"Tracing {disp.original_filename!r}"
if disp.file_tracer:
msg += ": will be traced by %r" % disp.file_tracer
else:
- msg = "Not tracing %r: %s" % (disp.original_filename, disp.reason)
+ msg = f"Not tracing {disp.original_filename!r}: {disp.reason}"
return msg
diff --git a/coverage/env.py b/coverage/env.py
index ab59e275..ce6d42c5 100644
--- a/coverage/env.py
+++ b/coverage/env.py
@@ -28,7 +28,7 @@ if PYPY:
PYPY3 = PYPY and PY3
# Python behavior.
-class PYBEHAVIOR(object):
+class PYBEHAVIOR:
"""Flags indicating this Python's behavior."""
# Does Python conform to PEP626, Precise line numbers for debugging and other tools.
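
Note: env.py contains only the change repeated throughout this patch — explicit object bases disappear, because every class in Python 3 is new-style, so class PYBEHAVIOR: and class PYBEHAVIOR(object): define identical classes. A quick demonstration with throwaway names:

    class WithExplicitBase(object):   # Python 2 era spelling, still legal
        pass

    class WithoutBase:                # spelling used throughout this patch
        pass

    # Both are new-style classes with identical semantics in Python 3.
    assert WithExplicitBase.__mro__ == (WithExplicitBase, object)
    assert WithoutBase.__mro__ == (WithoutBase, object)
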
diff --git a/coverage/execfile.py b/coverage/execfile.py
index e96a5265..c2709a74 100644
--- a/coverage/execfile.py
+++ b/coverage/execfile.py
@@ -23,7 +23,7 @@ os = isolate_module(os)
PYC_MAGIC_NUMBER = importlib.util.MAGIC_NUMBER
-class DummyLoader(object):
+class DummyLoader:
"""A shim for the pep302 __loader__, emulating pkgutil.ImpLoader.
Currently only implements the .fullname attribute
@@ -43,7 +43,7 @@ def find_module(modulename):
except ImportError as err:
raise NoSource(str(err))
if not spec:
- raise NoSource("No module named %r" % (modulename,))
+ raise NoSource(f"No module named {modulename!r}")
pathname = spec.origin
packagename = spec.name
if spec.submodule_search_locations:
@@ -61,7 +61,7 @@ def find_module(modulename):
return pathname, packagename, spec
-class PyRunner(object):
+class PyRunner:
"""Multi-stage execution of Python code.
This is meant to emulate real Python execution as closely as possible.
@@ -271,7 +271,7 @@ def make_code_from_py(filename):
# Open the source file.
try:
source = get_python_source(filename)
- except (IOError, NoSource):
+ except (OSError, NoSource):
raise NoSource("No file to run: '%s'" % filename)
code = compile_unicode(source, filename, "exec")
@@ -282,7 +282,7 @@ def make_code_from_pyc(filename):
"""Get a code object from a .pyc file."""
try:
fpyc = open(filename, "rb")
- except IOError:
+ except OSError:
raise NoCode("No file to run: '%s'" % filename)
with fpyc:
@@ -290,7 +290,7 @@ def make_code_from_pyc(filename):
# match or we won't run the file.
magic = fpyc.read(4)
if magic != PYC_MAGIC_NUMBER:
- raise NoCode("Bad magic number in .pyc file: {} != {}".format(magic, PYC_MAGIC_NUMBER))
+ raise NoCode(f"Bad magic number in .pyc file: {magic} != {PYC_MAGIC_NUMBER}")
date_based = True
if env.PYBEHAVIOR.hashed_pyc_pep552:
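
Note: the except clauses switch from IOError to OSError because, since Python 3.3 (PEP 3151), IOError is just an alias for OSError; catching OSError is the canonical spelling. A tiny check, using a path that is assumed not to exist:

    assert IOError is OSError          # PEP 3151: alias since Python 3.3

    try:
        open("no/such/file.pyc", "rb")             # assumed missing, for illustration
    except OSError as err:
        # FileNotFoundError subclasses OSError, so this handler still fires.
        print(f"No file to run: {err.filename!r} ({type(err).__name__})")
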
diff --git a/coverage/files.py b/coverage/files.py
index f7272bd7..1f78e0b6 100644
--- a/coverage/files.py
+++ b/coverage/files.py
@@ -195,7 +195,7 @@ def prep_patterns(patterns):
return prepped
-class TreeMatcher(object):
+class TreeMatcher:
"""A matcher for files in a tree.
Construct with a list of paths, either files or directories. Paths match
@@ -209,7 +209,7 @@ class TreeMatcher(object):
self.name = name
def __repr__(self):
- return "<TreeMatcher {!r} {!r}>".format(self.name, self.original_paths)
+ return f"<TreeMatcher {self.name!r} {self.original_paths!r}>"
def info(self):
"""A list of strings for displaying when dumping state."""
@@ -229,14 +229,14 @@ class TreeMatcher(object):
return False
-class ModuleMatcher(object):
+class ModuleMatcher:
"""A matcher for modules in a tree."""
def __init__(self, module_names, name):
self.modules = list(module_names)
self.name = name
def __repr__(self):
- return "<ModuleMatcher {!r} {!r}>".format(self.name, self.modules)
+ return f"<ModuleMatcher {self.name!r} {self.modules!r}>"
def info(self):
"""A list of strings for displaying when dumping state."""
@@ -258,7 +258,7 @@ class ModuleMatcher(object):
return False
-class FnmatchMatcher(object):
+class FnmatchMatcher:
"""A matcher for files by file name pattern."""
def __init__(self, pats, name):
self.pats = list(pats)
@@ -266,7 +266,7 @@ class FnmatchMatcher(object):
self.name = name
def __repr__(self):
- return "<FnmatchMatcher {!r} {!r}>".format(self.name, self.pats)
+ return f"<FnmatchMatcher {self.name!r} {self.pats!r}>"
def info(self):
"""A list of strings for displaying when dumping state."""
@@ -320,7 +320,7 @@ def fnmatches_to_regex(patterns, case_insensitive=False, partial=False):
return compiled
-class PathAliases(object):
+class PathAliases:
"""A collection of aliases for paths.
When combining data files from remote machines, often the paths to source
@@ -337,7 +337,7 @@ class PathAliases(object):
def pprint(self): # pragma: debugging
"""Dump the important parts of the PathAliases, for debugging."""
for regex, result in self.aliases:
- print("{!r} --> {!r}".format(regex.pattern, result))
+ print(f"{regex.pattern!r} --> {result!r}")
def add(self, pattern, result):
"""Add the `pattern`/`result` pair to the list of aliases.
diff --git a/coverage/html.py b/coverage/html.py
index f4670caf..5965b048 100644
--- a/coverage/html.py
+++ b/coverage/html.py
@@ -56,7 +56,7 @@ def data_filename(fname, pkgdir=""):
else:
tried.append(static_filename)
raise CoverageException(
- "Couldn't find static file %r from %r, tried: %r" % (fname, os.getcwd(), tried)
+ f"Couldn't find static file {fname!r} from {os.getcwd()!r}, tried: {tried!r}"
)
@@ -73,7 +73,7 @@ def write_html(fname, html):
fout.write(html.encode('ascii', 'xmlcharrefreplace'))
-class HtmlDataGeneration(object):
+class HtmlDataGeneration:
"""Generate structured data to be turned into HTML reports."""
EMPTY = "(empty)"
@@ -127,7 +127,7 @@ class HtmlDataGeneration(object):
if contexts == [self.EMPTY]:
contexts_label = self.EMPTY
else:
- contexts_label = "{} ctx".format(len(contexts))
+ contexts_label = f"{len(contexts)} ctx"
context_list = contexts
lines.append(types.SimpleNamespace(
@@ -151,7 +151,7 @@ class HtmlDataGeneration(object):
return file_data
-class HtmlReporter(object):
+class HtmlReporter:
"""HTML reporting."""
# These files will be copied from the htmlfiles directory to the output
@@ -308,15 +308,15 @@ class HtmlReporter(object):
else:
tok_html = escape(tok_text) or '&nbsp;'
html.append(
- u'<span class="{}">{}</span>'.format(tok_type, tok_html)
+ f'<span class="{tok_type}">{tok_html}</span>'
)
ldata.html = ''.join(html)
if ldata.short_annotations:
# 202F is NARROW NO-BREAK SPACE.
# 219B is RIGHTWARDS ARROW WITH STROKE.
- ldata.annotate = u",&nbsp;&nbsp; ".join(
- u"{}&#x202F;&#x219B;&#x202F;{}".format(ldata.number, d)
+ ldata.annotate = ",&nbsp;&nbsp; ".join(
+ f"{ldata.number}&#x202F;&#x219B;&#x202F;{d}"
for d in ldata.short_annotations
)
else:
@@ -327,10 +327,10 @@ class HtmlReporter(object):
if len(longs) == 1:
ldata.annotate_long = longs[0]
else:
- ldata.annotate_long = u"{:d} missed branches: {}".format(
+ ldata.annotate_long = "{:d} missed branches: {}".format(
len(longs),
- u", ".join(
- u"{:d}) {}".format(num, ann_long)
+ ", ".join(
+ f"{num:d}) {ann_long}"
for num, ann_long in enumerate(longs, start=1)
),
)
@@ -369,7 +369,7 @@ class HtmlReporter(object):
self.incr.write()
-class IncrementalChecker(object):
+class IncrementalChecker:
"""Logic and data to support incremental reporting."""
STATUS_FILE = "status.json"
@@ -419,7 +419,7 @@ class IncrementalChecker(object):
status_file = os.path.join(self.directory, self.STATUS_FILE)
with open(status_file) as fstatus:
status = json.load(fstatus)
- except (IOError, ValueError):
+ except (OSError, ValueError):
usable = False
else:
usable = True
diff --git a/coverage/inorout.py b/coverage/inorout.py
index f4d99772..b46162ee 100644
--- a/coverage/inorout.py
+++ b/coverage/inorout.py
@@ -176,7 +176,7 @@ def add_coverage_paths(paths):
paths.add(canonical_path(mod))
-class InOrOut(object):
+class InOrOut:
"""Machinery for determining what files to measure."""
def __init__(self, warn, debug):
@@ -237,36 +237,36 @@ class InOrOut(object):
against = []
if self.source:
self.source_match = TreeMatcher(self.source, "source")
- against.append("trees {!r}".format(self.source_match))
+ against.append(f"trees {self.source_match!r}")
if self.source_pkgs:
self.source_pkgs_match = ModuleMatcher(self.source_pkgs, "source_pkgs")
- against.append("modules {!r}".format(self.source_pkgs_match))
+ against.append(f"modules {self.source_pkgs_match!r}")
debug("Source matching against " + " and ".join(against))
else:
if self.pylib_paths:
self.pylib_match = TreeMatcher(self.pylib_paths, "pylib")
- debug("Python stdlib matching: {!r}".format(self.pylib_match))
+ debug(f"Python stdlib matching: {self.pylib_match!r}")
if self.include:
self.include_match = FnmatchMatcher(self.include, "include")
- debug("Include matching: {!r}".format(self.include_match))
+ debug(f"Include matching: {self.include_match!r}")
if self.omit:
self.omit_match = FnmatchMatcher(self.omit, "omit")
- debug("Omit matching: {!r}".format(self.omit_match))
+ debug(f"Omit matching: {self.omit_match!r}")
self.cover_match = TreeMatcher(self.cover_paths, "coverage")
- debug("Coverage code matching: {!r}".format(self.cover_match))
+ debug(f"Coverage code matching: {self.cover_match!r}")
self.third_match = TreeMatcher(self.third_paths, "third")
- debug("Third-party lib matching: {!r}".format(self.third_match))
+ debug(f"Third-party lib matching: {self.third_match!r}")
# Check if the source we want to measure has been installed as a
# third-party package.
for pkg in self.source_pkgs:
try:
modfile = file_for_module(pkg)
- debug("Imported {} as {}".format(pkg, modfile))
+ debug(f"Imported {pkg} as {modfile}")
except CoverageException as exc:
- debug("Couldn't import {}: {}".format(pkg, exc))
+ debug(f"Couldn't import {pkg}: {exc}")
continue
if modfile and self.third_match.match(modfile):
self.source_in_third = True
@@ -401,7 +401,7 @@ class InOrOut(object):
if modulename in self.source_pkgs_unmatched:
self.source_pkgs_unmatched.remove(modulename)
else:
- extra = "module {!r} ".format(modulename)
+ extra = f"module {modulename!r} "
if not ok and self.source_match:
if self.source_match.match(filename):
ok = True
@@ -465,7 +465,7 @@ class InOrOut(object):
# of tracing anyway.
continue
if disp.trace:
- msg = "Already imported a file that will be measured: {}".format(filename)
+ msg = f"Already imported a file that will be measured: {filename}"
self.warn(msg, slug="already-imported")
warned.add(filename)
elif self.debug and self.debug.should('trace'):
@@ -518,12 +518,10 @@ class InOrOut(object):
not module_has_file(sys.modules[pkg])):
continue
pkg_file = source_for_file(sys.modules[pkg].__file__)
- for ret in self._find_executable_files(canonical_path(pkg_file)):
- yield ret
+ yield from self._find_executable_files(canonical_path(pkg_file))
for src in self.source:
- for ret in self._find_executable_files(src):
- yield ret
+ yield from self._find_executable_files(src)
def _find_plugin_files(self, src_dir):
"""Get executable files from the plugins."""
diff --git a/coverage/jsonreport.py b/coverage/jsonreport.py
index ccb46a89..70ceb71f 100644
--- a/coverage/jsonreport.py
+++ b/coverage/jsonreport.py
@@ -1,4 +1,3 @@
-# coding: utf-8
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
@@ -12,7 +11,7 @@ from coverage.report import get_analysis_to_report
from coverage.results import Numbers
-class JsonReporter(object):
+class JsonReporter:
"""A reporter for writing JSON coverage results."""
def __init__(self, coverage):
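
Note: dropping the "# coding: utf-8" line is safe because PEP 3120 made UTF-8 the default source encoding in Python 3; the declaration only matters for files in some other encoding. For illustration, a module with non-ASCII literals needs no declaration (module name is made up):

    # demo_report.py - hypothetical module; no coding declaration required.
    SUMMARY_MARK = "coverage ✓ café"         # UTF-8 source is the default (PEP 3120)

    if __name__ == "__main__":
        print(SUMMARY_MARK.encode("utf8"))
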
diff --git a/coverage/misc.py b/coverage/misc.py
index 6f104ac0..52583589 100644
--- a/coverage/misc.py
+++ b/coverage/misc.py
@@ -196,7 +196,7 @@ def filename_suffix(suffix):
return suffix
-class Hasher(object):
+class Hasher:
"""Hashes Python data into md5."""
def __init__(self):
self.md5 = hashlib.md5()
@@ -253,7 +253,7 @@ def _needs_to_implement(that, func_name):
)
-class DefaultValue(object):
+class DefaultValue:
"""A sentinel object to use for unusual default-value needs.
Construct with a string that will be used as the repr, for display in help
@@ -307,7 +307,7 @@ def substitute_variables(text, variables):
elif word in variables:
return variables[word]
elif match.group('strict'):
- msg = "Variable {} is undefined: {!r}".format(word, text)
+ msg = f"Variable {word} is undefined: {text!r}"
raise CoverageException(msg)
else:
return match.group('defval')
diff --git a/coverage/multiproc.py b/coverage/multiproc.py
index 8b6651bc..6a104520 100644
--- a/coverage/multiproc.py
+++ b/coverage/multiproc.py
@@ -53,7 +53,7 @@ class ProcessWithCoverage(OriginalProcess): # pylint: disable=abstract-m
if debug.should("multiproc"):
debug.write("Saved multiprocessing data")
-class Stowaway(object):
+class Stowaway:
"""An object to pickle, so when it is unpickled, it can apply the monkey-patch."""
def __init__(self, rcfile):
self.rcfile = rcfile
diff --git a/coverage/parser.py b/coverage/parser.py
index 61ef7539..f847d970 100644
--- a/coverage/parser.py
+++ b/coverage/parser.py
@@ -18,7 +18,7 @@ from coverage.misc import NoSource, NotPython, StopEverything
from coverage.phystokens import compile_unicode, generate_tokens, neuter_encoding_declaration
-class PythonParser(object):
+class PythonParser:
"""Parse code to find executable lines, excluded lines, etc.
This information is all based on static analysis: no code execution is
@@ -40,9 +40,9 @@ class PythonParser(object):
from coverage.python import get_python_source
try:
self.text = get_python_source(self.filename)
- except IOError as err:
+ except OSError as err:
raise NoSource(
- "No source for code: '%s': %s" % (self.filename, err)
+ f"No source for code: '{self.filename}': {err}"
)
self.exclude = exclude
@@ -247,7 +247,7 @@ class PythonParser(object):
else:
lineno = err.args[1][0] # TokenError
raise NotPython(
- u"Couldn't parse '%s' as Python source: '%s' at line %d" % (
+ "Couldn't parse '%s' as Python source: '%s' at line %d" % (
self.filename, err.args[0], lineno
)
)
@@ -345,16 +345,16 @@ class PythonParser(object):
emsg = "didn't jump to line {lineno}"
emsg = emsg.format(lineno=end)
- msg = "line {start} {emsg}".format(start=actual_start, emsg=emsg)
+ msg = f"line {actual_start} {emsg}"
if smsg is not None:
- msg += ", because {smsg}".format(smsg=smsg.format(lineno=actual_start))
+ msg += f", because {smsg.format(lineno=actual_start)}"
msgs.append(msg)
return " or ".join(msgs)
-class ByteParser(object):
+class ByteParser:
"""Parse bytecode to understand the structure of code."""
@contract(text='unicode')
@@ -367,7 +367,7 @@ class ByteParser(object):
self.code = compile_unicode(text, filename, "exec")
except SyntaxError as synerr:
raise NotPython(
- u"Couldn't parse '%s' as Python source: '%s' at line %d" % (
+ "Couldn't parse '%s' as Python source: '%s' at line %d" % (
filename, synerr.msg, synerr.lineno
)
)
@@ -428,15 +428,14 @@ class ByteParser(object):
"""
for bp in self.child_parsers():
# Get all of the lineno information from this code.
- for l in bp._line_numbers():
- yield l
+ yield from bp._line_numbers()
#
# AST analysis
#
-class LoopBlock(object):
+class LoopBlock:
"""A block on the block stack representing a `for` or `while` loop."""
@contract(start=int)
def __init__(self, start):
@@ -446,7 +445,7 @@ class LoopBlock(object):
self.break_exits = set()
-class FunctionBlock(object):
+class FunctionBlock:
"""A block on the block stack representing a function definition."""
@contract(start=int, name=str)
def __init__(self, start, name):
@@ -456,7 +455,7 @@ class FunctionBlock(object):
self.name = name
-class TryBlock(object):
+class TryBlock:
"""A block on the block stack representing a `try` block."""
@contract(handler_start='int|None', final_start='int|None')
def __init__(self, handler_start, final_start):
@@ -486,7 +485,7 @@ class ArcStart(collections.namedtuple("Arc", "lineno, cause")):
"""
def __new__(cls, lineno, cause=None):
- return super(ArcStart, cls).__new__(cls, lineno, cause)
+ return super().__new__(cls, lineno, cause)
# Define contract words that PyContract doesn't have.
@@ -498,7 +497,7 @@ new_contract('ArcStarts', lambda seq: all(isinstance(x, ArcStart) for x in seq))
# $set_env.py: COVERAGE_AST_DUMP - Dump the AST nodes when parsing code.
AST_DUMP = bool(int(os.environ.get("COVERAGE_AST_DUMP", 0)))
-class NodeList(object):
+class NodeList:
"""A synthetic fictitious node, containing a sequence of nodes.
This is used when collapsing optimized if-statements, to represent the
@@ -514,7 +513,7 @@ class NodeList(object):
# TODO: the cause messages have too many commas.
# TODO: Shouldn't the cause messages join with "and" instead of "or"?
-class AstArcAnalyzer(object):
+class AstArcAnalyzer:
"""Analyze source text with an AST to find executable code paths."""
@contract(text='unicode', statements=set)
@@ -526,8 +525,8 @@ class AstArcAnalyzer(object):
if AST_DUMP: # pragma: debugging
# Dump the AST so that failing tests have helpful output.
- print("Statements: {}".format(self.statements))
- print("Multiline map: {}".format(self.multiline))
+ print(f"Statements: {self.statements}")
+ print(f"Multiline map: {self.multiline}")
ast_dump(self.root_node)
self.arcs = set()
@@ -560,7 +559,7 @@ class AstArcAnalyzer(object):
def add_arc(self, start, end, smsg=None, emsg=None):
"""Add an arc, including message fragments to use if it is missing."""
if self.debug: # pragma: debugging
- print("\nAdding arc: ({}, {}): {!r}, {!r}".format(start, end, smsg, emsg))
+ print(f"\nAdding arc: ({start}, {end}): {smsg!r}, {emsg!r}")
print(short_stack(limit=6))
self.arcs.add((start, end))
@@ -661,7 +660,7 @@ class AstArcAnalyzer(object):
# to see if it's overlooked.
if 0:
if node_name not in self.OK_TO_DEFAULT:
- print("*** Unhandled: {}".format(node))
+ print(f"*** Unhandled: {node}")
# Default for simple statements: one exit from this node.
return {ArcStart(self.line_for_node(node))}
@@ -831,7 +830,7 @@ class AstArcAnalyzer(object):
for xit in exits:
self.add_arc(
xit.lineno, -block.start, xit.cause,
- "didn't except from function {!r}".format(block.name),
+ f"didn't except from function {block.name!r}",
)
break
@@ -846,7 +845,7 @@ class AstArcAnalyzer(object):
for xit in exits:
self.add_arc(
xit.lineno, -block.start, xit.cause,
- "didn't return from function {!r}".format(block.name),
+ f"didn't return from function {block.name!r}",
)
break
@@ -1174,17 +1173,17 @@ class AstArcAnalyzer(object):
for xit in exits:
self.add_arc(
xit.lineno, -start, xit.cause,
- "didn't exit the body of class {!r}".format(node.name),
+ f"didn't exit the body of class {node.name!r}",
)
def _make_oneline_code_method(noun): # pylint: disable=no-self-argument
"""A function to make methods for online callable _code_object__ methods."""
def _code_object__oneline_callable(self, node):
start = self.line_for_node(node)
- self.add_arc(-start, start, None, "didn't run the {} on line {}".format(noun, start))
+ self.add_arc(-start, start, None, f"didn't run the {noun} on line {start}")
self.add_arc(
start, -start, None,
- "didn't finish the {} on line {}".format(noun, start),
+ f"didn't finish the {noun} on line {start}",
)
return _code_object__oneline_callable
@@ -1215,20 +1214,20 @@ if AST_DUMP: # pragma: debugging
"""
indent = " " * depth
if not isinstance(node, ast.AST):
- print("{}<{} {!r}>".format(indent, node.__class__.__name__, node))
+ print(f"{indent}<{node.__class__.__name__} {node!r}>")
return
lineno = getattr(node, "lineno", None)
if lineno is not None:
- linemark = " @ {},{}".format(node.lineno, node.col_offset)
+ linemark = f" @ {node.lineno},{node.col_offset}"
if hasattr(node, "end_lineno"):
linemark += ":"
if node.end_lineno != node.lineno:
- linemark += "{},".format(node.end_lineno)
- linemark += "{}".format(node.end_col_offset)
+ linemark += f"{node.end_lineno},"
+ linemark += f"{node.end_col_offset}"
else:
linemark = ""
- head = "{}<{}{}".format(indent, node.__class__.__name__, linemark)
+ head = f"{indent}<{node.__class__.__name__}{linemark}"
named_fields = [
(name, value)
@@ -1236,10 +1235,10 @@ if AST_DUMP: # pragma: debugging
if name not in SKIP_DUMP_FIELDS
]
if not named_fields:
- print("{}>".format(head))
+ print(f"{head}>")
elif len(named_fields) == 1 and _is_simple_value(named_fields[0][1]):
field_name, value = named_fields[0]
- print("{} {}: {!r}>".format(head, field_name, value))
+ print(f"{head} {field_name}: {value!r}>")
else:
print(head)
if 0:
@@ -1248,16 +1247,16 @@ if AST_DUMP: # pragma: debugging
))
next_indent = indent + " "
for field_name, value in named_fields:
- prefix = "{}{}:".format(next_indent, field_name)
+ prefix = f"{next_indent}{field_name}:"
if _is_simple_value(value):
- print("{} {!r}".format(prefix, value))
+ print(f"{prefix} {value!r}")
elif isinstance(value, list):
- print("{} [".format(prefix))
+ print(f"{prefix} [")
for n in value:
ast_dump(n, depth + 8)
- print("{}]".format(next_indent))
+ print(f"{next_indent}]")
else:
print(prefix)
ast_dump(value, depth + 8)
- print("{}>".format(indent))
+ print(f"{indent}>")
diff --git a/coverage/phystokens.py b/coverage/phystokens.py
index 4b69c476..52c2aa06 100644
--- a/coverage/phystokens.py
+++ b/coverage/phystokens.py
@@ -104,7 +104,7 @@ def source_token_lines(source):
mark_end = False
else:
if mark_start and scol > col:
- line.append(("ws", u" " * (scol - col)))
+ line.append(("ws", " " * (scol - col)))
mark_start = False
tok_class = tokenize.tok_name.get(ttype, 'xx').lower()[:3]
if ttype == token.NAME and keyword.iskeyword(ttext):
@@ -119,7 +119,7 @@ def source_token_lines(source):
yield line
-class CachedTokenizer(object):
+class CachedTokenizer:
"""A one-element cache around tokenize.generate_tokens.
When reporting, coverage.py tokenizes files twice, once to find the
diff --git a/coverage/plugin.py b/coverage/plugin.py
index 6997b489..5b38e336 100644
--- a/coverage/plugin.py
+++ b/coverage/plugin.py
@@ -116,7 +116,7 @@ from coverage import files
from coverage.misc import contract, _needs_to_implement
-class CoveragePlugin(object):
+class CoveragePlugin:
"""Base class for coverage.py plug-ins."""
def file_tracer(self, filename): # pylint: disable=unused-argument
@@ -232,7 +232,7 @@ class CoveragePlugin(object):
return []
-class FileTracer(object):
+class FileTracer:
"""Support needed for files during the execution phase.
File tracer plug-ins implement subclasses of FileTracer to return from
@@ -315,7 +315,7 @@ class FileTracer(object):
return lineno, lineno
-class FileReporter(object):
+class FileReporter:
"""Support needed for files during the analysis and reporting phases.
File tracer plug-ins implement a subclass of `FileReporter`, and return
@@ -476,7 +476,7 @@ class FileReporter(object):
to {end}".
"""
- return "Line {start} didn't jump to line {end}".format(start=start, end=end)
+ return f"Line {start} didn't jump to line {end}"
def source_token_lines(self):
"""Generate a series of tokenized lines, one for each line in `source`.
diff --git a/coverage/plugin_support.py b/coverage/plugin_support.py
index 89c1c765..cf7ef80f 100644
--- a/coverage/plugin_support.py
+++ b/coverage/plugin_support.py
@@ -13,7 +13,7 @@ from coverage.plugin import CoveragePlugin, FileTracer, FileReporter
os = isolate_module(os)
-class Plugins(object):
+class Plugins:
"""The currently loaded collection of coverage.py plugins."""
def __init__(self):
@@ -95,10 +95,10 @@ class Plugins(object):
is a list to append the plugin to.
"""
- plugin_name = "%s.%s" % (self.current_module, plugin.__class__.__name__)
+ plugin_name = f"{self.current_module}.{plugin.__class__.__name__}"
if self.debug and self.debug.should('plugin'):
- self.debug.write("Loaded plugin %r: %r" % (self.current_module, plugin))
- labelled = LabelledDebug("plugin %r" % (self.current_module,), self.debug)
+ self.debug.write(f"Loaded plugin {self.current_module!r}: {plugin!r}")
+ labelled = LabelledDebug(f"plugin {self.current_module!r}", self.debug)
plugin = DebugPluginWrapper(plugin, labelled)
# pylint: disable=attribute-defined-outside-init
@@ -122,7 +122,7 @@ class Plugins(object):
return self.names[plugin_name]
-class LabelledDebug(object):
+class LabelledDebug:
"""A Debug writer, but with labels for prepending to the messages."""
def __init__(self, label, debug, prev_labels=()):
@@ -140,45 +140,45 @@ class LabelledDebug(object):
def write(self, message):
"""Write `message`, but with the labels prepended."""
- self.debug.write("%s%s" % (self.message_prefix(), message))
+ self.debug.write(f"{self.message_prefix()}{message}")
class DebugPluginWrapper(CoveragePlugin):
"""Wrap a plugin, and use debug to report on what it's doing."""
def __init__(self, plugin, debug):
- super(DebugPluginWrapper, self).__init__()
+ super().__init__()
self.plugin = plugin
self.debug = debug
def file_tracer(self, filename):
tracer = self.plugin.file_tracer(filename)
- self.debug.write("file_tracer(%r) --> %r" % (filename, tracer))
+ self.debug.write(f"file_tracer({filename!r}) --> {tracer!r}")
if tracer:
- debug = self.debug.add_label("file %r" % (filename,))
+ debug = self.debug.add_label(f"file {filename!r}")
tracer = DebugFileTracerWrapper(tracer, debug)
return tracer
def file_reporter(self, filename):
reporter = self.plugin.file_reporter(filename)
- self.debug.write("file_reporter(%r) --> %r" % (filename, reporter))
+ self.debug.write(f"file_reporter({filename!r}) --> {reporter!r}")
if reporter:
- debug = self.debug.add_label("file %r" % (filename,))
+ debug = self.debug.add_label(f"file {filename!r}")
reporter = DebugFileReporterWrapper(filename, reporter, debug)
return reporter
def dynamic_context(self, frame):
context = self.plugin.dynamic_context(frame)
- self.debug.write("dynamic_context(%r) --> %r" % (frame, context))
+ self.debug.write(f"dynamic_context({frame!r}) --> {context!r}")
return context
def find_executable_files(self, src_dir):
executable_files = self.plugin.find_executable_files(src_dir)
- self.debug.write("find_executable_files(%r) --> %r" % (src_dir, executable_files))
+ self.debug.write(f"find_executable_files({src_dir!r}) --> {executable_files!r}")
return executable_files
def configure(self, config):
- self.debug.write("configure(%r)" % (config,))
+ self.debug.write(f"configure({config!r})")
self.plugin.configure(config)
def sys_info(self):
@@ -201,24 +201,24 @@ class DebugFileTracerWrapper(FileTracer):
def source_filename(self):
sfilename = self.tracer.source_filename()
- self.debug.write("source_filename() --> %r" % (sfilename,))
+ self.debug.write(f"source_filename() --> {sfilename!r}")
return sfilename
def has_dynamic_source_filename(self):
has = self.tracer.has_dynamic_source_filename()
- self.debug.write("has_dynamic_source_filename() --> %r" % (has,))
+ self.debug.write(f"has_dynamic_source_filename() --> {has!r}")
return has
def dynamic_source_filename(self, filename, frame):
dyn = self.tracer.dynamic_source_filename(filename, frame)
- self.debug.write("dynamic_source_filename(%r, %s) --> %r" % (
+ self.debug.write("dynamic_source_filename({!r}, {}) --> {!r}".format(
filename, self._show_frame(frame), dyn,
))
return dyn
def line_number_range(self, frame):
pair = self.tracer.line_number_range(frame)
- self.debug.write("line_number_range(%s) --> %r" % (self._show_frame(frame), pair))
+ self.debug.write(f"line_number_range({self._show_frame(frame)}) --> {pair!r}")
return pair
@@ -226,48 +226,48 @@ class DebugFileReporterWrapper(FileReporter):
"""A debugging `FileReporter`."""
def __init__(self, filename, reporter, debug):
- super(DebugFileReporterWrapper, self).__init__(filename)
+ super().__init__(filename)
self.reporter = reporter
self.debug = debug
def relative_filename(self):
ret = self.reporter.relative_filename()
- self.debug.write("relative_filename() --> %r" % (ret,))
+ self.debug.write(f"relative_filename() --> {ret!r}")
return ret
def lines(self):
ret = self.reporter.lines()
- self.debug.write("lines() --> %r" % (ret,))
+ self.debug.write(f"lines() --> {ret!r}")
return ret
def excluded_lines(self):
ret = self.reporter.excluded_lines()
- self.debug.write("excluded_lines() --> %r" % (ret,))
+ self.debug.write(f"excluded_lines() --> {ret!r}")
return ret
def translate_lines(self, lines):
ret = self.reporter.translate_lines(lines)
- self.debug.write("translate_lines(%r) --> %r" % (lines, ret))
+ self.debug.write(f"translate_lines({lines!r}) --> {ret!r}")
return ret
def translate_arcs(self, arcs):
ret = self.reporter.translate_arcs(arcs)
- self.debug.write("translate_arcs(%r) --> %r" % (arcs, ret))
+ self.debug.write(f"translate_arcs({arcs!r}) --> {ret!r}")
return ret
def no_branch_lines(self):
ret = self.reporter.no_branch_lines()
- self.debug.write("no_branch_lines() --> %r" % (ret,))
+ self.debug.write(f"no_branch_lines() --> {ret!r}")
return ret
def exit_counts(self):
ret = self.reporter.exit_counts()
- self.debug.write("exit_counts() --> %r" % (ret,))
+ self.debug.write(f"exit_counts() --> {ret!r}")
return ret
def arcs(self):
ret = self.reporter.arcs()
- self.debug.write("arcs() --> %r" % (ret,))
+ self.debug.write(f"arcs() --> {ret!r}")
return ret
def source(self):
diff --git a/coverage/python.py b/coverage/python.py
index 81aa66ba..7b6a6d8a 100644
--- a/coverage/python.py
+++ b/coverage/python.py
@@ -56,7 +56,7 @@ def get_python_source(filename):
break
else:
# Couldn't find source.
- exc_msg = "No source for code: '%s'.\n" % (filename,)
+ exc_msg = f"No source for code: '{filename}'.\n"
exc_msg += "Aborting report output, consider using -i."
raise NoSource(exc_msg)
@@ -90,7 +90,7 @@ def get_zip_bytes(filename):
continue
try:
data = zi.get_data(parts[1])
- except IOError:
+ except OSError:
continue
return data
return None
@@ -136,7 +136,7 @@ def source_for_morf(morf):
elif isinstance(morf, types.ModuleType):
# A module should have had .__file__, otherwise we can't use it.
# This could be a PEP-420 namespace package.
- raise CoverageException("Module {} has no file".format(morf))
+ raise CoverageException(f"Module {morf} has no file")
else:
filename = morf
@@ -152,7 +152,7 @@ class PythonFileReporter(FileReporter):
filename = source_for_morf(morf)
- super(PythonFileReporter, self).__init__(files.canonical_filename(filename))
+ super().__init__(files.canonical_filename(filename))
if hasattr(morf, '__name__'):
name = morf.__name__.replace(".", os.sep)
@@ -169,7 +169,7 @@ class PythonFileReporter(FileReporter):
self._excluded = None
def __repr__(self):
- return "<PythonFileReporter {!r}>".format(self.filename)
+ return f"<PythonFileReporter {self.filename!r}>"
@contract(returns='unicode')
def relative_filename(self):
diff --git a/coverage/pytracer.py b/coverage/pytracer.py
index ccc913a8..51f08a1b 100644
--- a/coverage/pytracer.py
+++ b/coverage/pytracer.py
@@ -18,7 +18,7 @@ YIELD_VALUE = dis.opmap['YIELD_VALUE']
THIS_FILE = __file__.rstrip("co")
-class PyTracer(object):
+class PyTracer:
"""Python implementation of the raw data tracer."""
# Because of poor implementations of trace-function-manipulating tools,
@@ -255,7 +255,7 @@ class PyTracer(object):
dont_warn = (env.PYPY and env.PYPYVERSION >= (5, 4) and self.in_atexit and tf is None)
if (not dont_warn) and tf != self._trace: # pylint: disable=comparison-with-callable
self.warn(
- "Trace function changed, measurement is likely wrong: %r" % (tf,),
+ f"Trace function changed, measurement is likely wrong: {tf!r}",
slug="trace-changed",
)
diff --git a/coverage/report.py b/coverage/report.py
index 0ddb5e10..4849fe80 100644
--- a/coverage/report.py
+++ b/coverage/report.py
@@ -76,7 +76,7 @@ def get_analysis_to_report(coverage, morfs):
# should_be_python() method.
if fr.should_be_python():
if config.ignore_errors:
- msg = "Couldn't parse Python file '{}'".format(fr.filename)
+ msg = f"Couldn't parse Python file '{fr.filename}'"
coverage._warn(msg, slug="couldnt-parse")
else:
raise
diff --git a/coverage/results.py b/coverage/results.py
index 35f79ded..0a7a6135 100644
--- a/coverage/results.py
+++ b/coverage/results.py
@@ -9,7 +9,7 @@ from coverage.debug import SimpleReprMixin
from coverage.misc import contract, CoverageException, nice_pair
-class Analysis(object):
+class Analysis:
"""The results of analyzing a FileReporter."""
def __init__(self, data, file_reporter, file_mapper):
@@ -332,7 +332,7 @@ def should_fail_under(total, fail_under, precision):
"""
# We can never achieve higher than 100% coverage, or less than zero.
if not (0 <= fail_under <= 100.0):
- msg = "fail_under={} is invalid. Must be between 0 and 100.".format(fail_under)
+ msg = f"fail_under={fail_under} is invalid. Must be between 0 and 100."
raise CoverageException(msg)
# Special case for fail_under=100, it must really be 100.
diff --git a/coverage/sqldata.py b/coverage/sqldata.py
index b85da057..0e31a358 100644
--- a/coverage/sqldata.py
+++ b/coverage/sqldata.py
@@ -243,7 +243,7 @@ class CoverageData(SimpleReprMixin):
Initializes the schema and certain metadata.
"""
if self._debug.should('dataio'):
- self._debug.write("Creating data file {!r}".format(self._filename))
+ self._debug.write(f"Creating data file {self._filename!r}")
self._dbs[threading.get_ident()] = db = SqliteDb(self._filename, self._debug)
with db:
db.executescript(SCHEMA)
@@ -260,7 +260,7 @@ class CoverageData(SimpleReprMixin):
def _open_db(self):
"""Open an existing db file, and read its metadata."""
if self._debug.should('dataio'):
- self._debug.write("Opening data file {!r}".format(self._filename))
+ self._debug.write(f"Opening data file {self._filename!r}")
self._dbs[threading.get_ident()] = SqliteDb(self._filename, self._debug)
self._read_db()
@@ -330,7 +330,7 @@ class CoverageData(SimpleReprMixin):
"""
if self._debug.should('dataio'):
- self._debug.write("Dumping data from data file {!r}".format(self._filename))
+ self._debug.write(f"Dumping data from data file {self._filename!r}")
with self._connect() as con:
return b'z' + zlib.compress(con.dump().encode("utf8"))
@@ -351,10 +351,10 @@ class CoverageData(SimpleReprMixin):
"""
if self._debug.should('dataio'):
- self._debug.write("Loading data into data file {!r}".format(self._filename))
+ self._debug.write(f"Loading data into data file {self._filename!r}")
if data[:1] != b'z':
raise CoverageException(
- "Unrecognized serialization: {!r} (head of {} bytes)".format(data[:40], len(data))
+ f"Unrecognized serialization: {data[:40]!r} (head of {len(data)} bytes)"
)
script = zlib.decompress(data[1:]).decode("utf8")
self._dbs[threading.get_ident()] = db = SqliteDb(self._filename, self._debug)
@@ -397,7 +397,7 @@ class CoverageData(SimpleReprMixin):
"""
if self._debug.should('dataop'):
- self._debug.write("Setting context: %r" % (context,))
+ self._debug.write(f"Setting context: {context!r}")
self._current_context = context
self._current_context_id = None
@@ -520,14 +520,14 @@ class CoverageData(SimpleReprMixin):
file_id = self._file_id(filename)
if file_id is None:
raise CoverageException(
- "Can't add file tracer data for unmeasured file '%s'" % (filename,)
+ f"Can't add file tracer data for unmeasured file '{filename}'"
)
existing_plugin = self.file_tracer(filename)
if existing_plugin:
if existing_plugin != plugin_name:
raise CoverageException(
- "Conflicting file tracer name for '%s': %r vs %r" % (
+ "Conflicting file tracer name for '{}': {!r} vs {!r}".format(
filename, existing_plugin, plugin_name,
)
)
@@ -552,7 +552,7 @@ class CoverageData(SimpleReprMixin):
to associate the right filereporter, etc.
"""
if self._debug.should('dataop'):
- self._debug.write("Touching %r" % (filenames,))
+ self._debug.write(f"Touching {filenames!r}")
self._start_using()
with self._connect(): # Use this to get one transaction.
if not self._has_arcs and not self._has_lines:
@@ -571,7 +571,7 @@ class CoverageData(SimpleReprMixin):
re-map paths to match the local machine's.
"""
if self._debug.should('dataop'):
- self._debug.write("Updating with data from %r" % (
+ self._debug.write("Updating with data from {!r}".format(
getattr(other_data, '_filename', '???'),
))
if self._has_lines and other_data._has_arcs:
@@ -674,7 +674,7 @@ class CoverageData(SimpleReprMixin):
# If there is no tracer, there is always the None tracer.
if this_tracer is not None and this_tracer != other_tracer:
raise CoverageException(
- "Conflicting file tracer name for '%s': %r vs %r" % (
+ "Conflicting file tracer name for '{}': {!r} vs {!r}".format(
path, this_tracer, other_tracer
)
)
@@ -743,7 +743,7 @@ class CoverageData(SimpleReprMixin):
if self._no_disk:
return
if self._debug.should('dataio'):
- self._debug.write("Erasing data file {!r}".format(self._filename))
+ self._debug.write(f"Erasing data file {self._filename!r}")
file_be_gone(self._filename)
if parallel:
data_dir, local = os.path.split(self._filename)
@@ -751,7 +751,7 @@ class CoverageData(SimpleReprMixin):
pattern = os.path.join(os.path.abspath(data_dir), localdot)
for filename in glob.glob(pattern):
if self._debug.should('dataio'):
- self._debug.write("Erasing parallel data file {!r}".format(filename))
+ self._debug.write(f"Erasing parallel data file {filename!r}")
file_be_gone(filename)
def read(self):
@@ -1007,7 +1007,7 @@ class SqliteDb(SimpleReprMixin):
# nature of the tracer operations, sharing a connection among threads
# is not a problem.
if self.debug:
- self.debug.write("Connecting to {!r}".format(self.filename))
+ self.debug.write(f"Connecting to {self.filename!r}")
self.con = sqlite3.connect(self.filename, check_same_thread=False)
self.con.create_function('REGEXP', 2, _regexp)
@@ -1039,14 +1039,14 @@ class SqliteDb(SimpleReprMixin):
self.close()
except Exception as exc:
if self.debug:
- self.debug.write("EXCEPTION from __exit__: {}".format(exc))
+ self.debug.write(f"EXCEPTION from __exit__: {exc}")
raise
def execute(self, sql, parameters=()):
"""Same as :meth:`python:sqlite3.Connection.execute`."""
if self.debug:
- tail = " with {!r}".format(parameters) if parameters else ""
- self.debug.write("Executing {!r}{}".format(sql, tail))
+ tail = f" with {parameters!r}" if parameters else ""
+ self.debug.write(f"Executing {sql!r}{tail}")
try:
try:
return self.con.execute(sql, parameters)
@@ -1070,8 +1070,8 @@ class SqliteDb(SimpleReprMixin):
except Exception:
pass
if self.debug:
- self.debug.write("EXCEPTION from execute: {}".format(msg))
- raise CoverageException("Couldn't use data file {!r}: {}".format(self.filename, msg))
+ self.debug.write(f"EXCEPTION from execute: {msg}")
+ raise CoverageException(f"Couldn't use data file {self.filename!r}: {msg}")
def execute_one(self, sql, parameters=()):
"""Execute a statement and return the one row that results.
@@ -1088,13 +1088,13 @@ class SqliteDb(SimpleReprMixin):
elif len(rows) == 1:
return rows[0]
else:
- raise CoverageException("Sql {!r} shouldn't return {} rows".format(sql, len(rows)))
+ raise CoverageException(f"Sql {sql!r} shouldn't return {len(rows)} rows")
def executemany(self, sql, data):
"""Same as :meth:`python:sqlite3.Connection.executemany`."""
if self.debug:
data = list(data)
- self.debug.write("Executing many {!r} with {} rows".format(sql, len(data)))
+ self.debug.write(f"Executing many {sql!r} with {len(data)} rows")
return self.con.executemany(sql, data)
def executescript(self, script):
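A pattern that recurs through these SqliteDb hunks: the debug message is only built inside `if self.debug:`. Because f-strings are evaluated eagerly, guarding the formatting this way avoids paying for repr() of SQL text and parameters when no debug output is wanted. A stand-alone sketch with a stand-in debug object (FakeDebug and the SQL text are invented for illustration; only the .write() call mirrors the hunks above):

    class FakeDebug:
        # stand-in for coverage's debug sink; only .write() is assumed here
        def write(self, msg):
            print(msg)

    debug = FakeDebug()                                # None/falsy would skip the formatting entirely
    sql = "insert into example (a, b) values (?, ?)"   # hypothetical statement
    parameters = (1, 2)

    if debug:
        tail = f" with {parameters!r}" if parameters else ""
        debug.write(f"Executing {sql!r}{tail}")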
diff --git a/coverage/summary.py b/coverage/summary.py
index d526d0bc..7d000150 100644
--- a/coverage/summary.py
+++ b/coverage/summary.py
@@ -10,7 +10,7 @@ from coverage.results import Numbers
from coverage.misc import CoverageException
-class SummaryReporter(object):
+class SummaryReporter:
"""A reporter for writing the summary report."""
def __init__(self, coverage):
@@ -22,7 +22,7 @@ class SummaryReporter(object):
self.skipped_count = 0
self.empty_count = 0
self.total = Numbers()
- self.fmt_err = u"%s %s: %s"
+ self.fmt_err = "%s %s: %s"
def writeout(self, line):
"""Write a line to the output, adding a newline."""
@@ -44,22 +44,22 @@ class SummaryReporter(object):
# Prepare the formatting strings, header, and column sorting.
max_name = max([len(fr.relative_filename()) for (fr, analysis) in self.fr_analysis] + [5])
- fmt_name = u"%%- %ds " % max_name
- fmt_skip_covered = u"\n%s file%s skipped due to complete coverage."
- fmt_skip_empty = u"\n%s empty file%s skipped."
+ fmt_name = "%%- %ds " % max_name
+ fmt_skip_covered = "\n%s file%s skipped due to complete coverage."
+ fmt_skip_empty = "\n%s empty file%s skipped."
- header = (fmt_name % "Name") + u" Stmts Miss"
- fmt_coverage = fmt_name + u"%6d %6d"
+ header = (fmt_name % "Name") + " Stmts Miss"
+ fmt_coverage = fmt_name + "%6d %6d"
if self.branches:
- header += u" Branch BrPart"
- fmt_coverage += u" %6d %6d"
+ header += " Branch BrPart"
+ fmt_coverage += " %6d %6d"
width100 = Numbers.pc_str_width()
- header += u"%*s" % (width100+4, "Cover")
- fmt_coverage += u"%%%ds%%%%" % (width100+3,)
+ header += "%*s" % (width100+4, "Cover")
+ fmt_coverage += "%%%ds%%%%" % (width100+3,)
if self.config.show_missing:
- header += u" Missing"
- fmt_coverage += u" %s"
- rule = u"-" * len(header)
+ header += " Missing"
+ fmt_coverage += " %s"
+ rule = "-" * len(header)
column_order = dict(name=0, stmts=1, miss=2, cover=-1)
if self.branches:
@@ -100,7 +100,7 @@ class SummaryReporter(object):
position = column_order.get(sort_option)
if position is None:
- raise CoverageException("Invalid sorting option: {!r}".format(self.config.sort))
+ raise CoverageException(f"Invalid sorting option: {self.config.sort!r}")
lines.sort(key=lambda l: (l[1][position], l[0]), reverse=reverse)
for line in lines:
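The dynamically built %-format strings in this hunk are the least obvious part: doubled percent signs survive the first interpolation and become conversion specifiers in the second. A worked example with made-up widths (max_name and width100 here are arbitrary, not taken from a real report):

    max_name = 20
    width100 = 3

    fmt_name = "%%- %ds " % max_name                 # -> "%- 20s "
    fmt_coverage = fmt_name + "%6d %6d"
    fmt_coverage += "%%%ds%%%%" % (width100 + 3,)    # -> appends "%6s%%"

    assert fmt_name == "%- 20s "
    print(fmt_coverage % ("my_module.py", 120, 7, "94"))   # prints one aligned summary row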
diff --git a/coverage/templite.py b/coverage/templite.py
index 82673886..2ceeb6e2 100644
--- a/coverage/templite.py
+++ b/coverage/templite.py
@@ -23,7 +23,7 @@ class TempliteValueError(ValueError):
pass
-class CodeBuilder(object):
+class CodeBuilder:
"""Build source code conveniently."""
def __init__(self, indent=0):
@@ -69,7 +69,7 @@ class CodeBuilder(object):
return global_namespace
-class Templite(object):
+class Templite:
"""A simple template renderer, for a nano-subset of Django syntax.
Supported constructs are extended variable access::
@@ -188,7 +188,7 @@ class Templite(object):
ops_stack.append('for')
self._variable(words[1], self.loop_vars)
code.add_line(
- "for c_%s in %s:" % (
+ "for c_{} in {}:".format(
words[1],
self._expr_code(words[3])
)
@@ -228,7 +228,7 @@ class Templite(object):
flush_output()
for var_name in self.all_vars - self.loop_vars:
- vars_code.add_line("c_%s = context[%r]" % (var_name, var_name))
+ vars_code.add_line(f"c_{var_name} = context[{var_name!r}]")
code.add_line('return "".join(result)')
code.dedent()
@@ -241,12 +241,12 @@ class Templite(object):
code = self._expr_code(pipes[0])
for func in pipes[1:]:
self._variable(func, self.all_vars)
- code = "c_%s(%s)" % (func, code)
+ code = f"c_{func}({code})"
elif "." in expr:
dots = expr.split(".")
code = self._expr_code(dots[0])
args = ", ".join(repr(d) for d in dots[1:])
- code = "do_dots(%s, %s)" % (code, args)
+ code = f"do_dots({code}, {args})"
else:
self._variable(expr, self.all_vars)
code = "c_%s" % expr
@@ -254,7 +254,7 @@ class Templite(object):
def _syntax_error(self, msg, thing):
"""Raise a syntax error using `msg`, and showing `thing`."""
- raise TempliteSyntaxError("%s: %r" % (msg, thing))
+ raise TempliteSyntaxError(f"{msg}: {thing!r}")
def _variable(self, name, vars_set):
"""Track that `name` is used as a variable.
@@ -290,7 +290,7 @@ class Templite(object):
value = value[dot]
except (TypeError, KeyError):
raise TempliteValueError(
- "Couldn't evaluate %r.%s" % (value, dot)
+ f"Couldn't evaluate {value!r}.{dot}"
)
if callable(value):
value = value()
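To make the generated-code hunks concrete: template variables are emitted as c_-prefixed locals, and each free variable is pulled out of the context dict up front. The snippet below reproduces just the two string-building expressions from the hunks, assuming _expr_code("items") yields "c_items", which is what the plain-variable branch (`code = "c_%s" % expr`) produces:

    var_name = "items"

    # the for-loop hunk, for a template line like "{% for item in items %}":
    loop_line = "for c_{} in {}:".format("item", "c_" + var_name)
    assert loop_line == "for c_item in c_items:"

    # the context-unpacking hunk, emitted once per free variable:
    assert f"c_{var_name} = context[{var_name!r}]" == "c_items = context['items']"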
diff --git a/coverage/tomlconfig.py b/coverage/tomlconfig.py
index 5f8c154d..d8055455 100644
--- a/coverage/tomlconfig.py
+++ b/coverage/tomlconfig.py
@@ -4,7 +4,6 @@
"""TOML configuration support for coverage.py"""
import configparser
-import io
import os
import re
@@ -43,9 +42,9 @@ class TomlConfigParser:
filename = os.fspath(filename)
try:
- with io.open(filename, encoding='utf-8') as fp:
+ with open(filename, encoding='utf-8') as fp:
toml_text = fp.read()
- except IOError:
+ except OSError:
return []
if toml:
toml_text = substitute_variables(toml_text, os.environ)
@@ -151,7 +150,7 @@ class TomlConfigParser:
re.compile(value)
except re.error as e:
raise CoverageException(
- "Invalid [%s].%s value %r: %s" % (name, option, value, e)
+ f"Invalid [{name}].{option} value {value!r}: {e}"
)
return values
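On Python 3 the two substitutions in this file are pure renames: io.open is the same object as the builtin open, and IOError is an alias of OSError, so catching OSError covers exactly the same exceptions. A two-line check:

    import io

    assert io.open is open
    assert IOError is OSError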
diff --git a/coverage/xmlreport.py b/coverage/xmlreport.py
index db1d0116..0538bfd5 100644
--- a/coverage/xmlreport.py
+++ b/coverage/xmlreport.py
@@ -1,4 +1,3 @@
-# coding: utf-8
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
@@ -28,7 +27,7 @@ def rate(hit, num):
return "%.4g" % (float(hit) / num)
-class XmlReporter(object):
+class XmlReporter:
"""A reporter for writing Cobertura-style XML coverage results."""
def __init__(self, coverage):
@@ -156,7 +155,7 @@ class XmlReporter(object):
rel_name = fr.relative_filename()
self.source_paths.add(fr.filename[:-len(rel_name)].rstrip(r"\/"))
- dirname = os.path.dirname(rel_name) or u"."
+ dirname = os.path.dirname(rel_name) or "."
dirname = "/".join(dirname.split("/")[:self.config.xml_package_depth])
package_name = dirname.replace("/", ".")
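The xmlreport.py changes finish the same Python-2 cleanup seen throughout: the `# coding: utf-8` declaration is redundant because UTF-8 is Python 3's default source encoding, and dropping the explicit `(object)` base changes nothing because every Python 3 class is new-style. A quick illustration with a hypothetical stand-in class (not the real XmlReporter):

    class ReporterLike:        # hypothetical; mirrors `class XmlReporter:` after the patch
        pass

    assert ReporterLike.__mro__ == (ReporterLike, object)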