summaryrefslogtreecommitdiff
path: root/coverage
diff options
context:
space:
mode:
authorNed Batchelder <nedbat@gmail.com>2017-05-04 01:37:56 +0000
committerNed Batchelder <nedbat@gmail.com>2017-05-04 01:37:56 +0000
commitf1539ee72a2e920f3379a9d30e30ca5828bb8bec (patch)
tree5a6d60393b6a778543de592e39fad29b223301f2 /coverage
parentb2f19c1ed21629d5383532cfb2a77be9a7389ea4 (diff)
parent20dc040e7a10632c90015f6211cafab5d66fd6a2 (diff)
downloadpython-coveragepy-f1539ee72a2e920f3379a9d30e30ca5828bb8bec.tar.gz
Merged in dirk-thomas/coverage.py/fix_init_name (pull request #91)
fix name for module in __init__.py file
Diffstat (limited to 'coverage')
-rw-r--r--coverage/backward.py23
-rw-r--r--coverage/cmdline.py37
-rw-r--r--coverage/collector.py31
-rw-r--r--coverage/config.py98
-rw-r--r--coverage/control.py287
-rw-r--r--coverage/ctracer/datastack.c10
-rw-r--r--coverage/ctracer/datastack.h10
-rw-r--r--coverage/ctracer/stats.h2
-rw-r--r--coverage/ctracer/tracer.c119
-rw-r--r--coverage/ctracer/tracer.h6
-rw-r--r--coverage/ctracer/util.h12
-rw-r--r--coverage/data.py30
-rw-r--r--coverage/debug.py193
-rw-r--r--coverage/env.py8
-rw-r--r--coverage/execfile.py42
-rw-r--r--coverage/files.py36
-rw-r--r--coverage/html.py32
-rw-r--r--coverage/misc.py85
-rw-r--r--coverage/parser.py361
-rw-r--r--coverage/phystokens.py10
-rw-r--r--coverage/plugin.py13
-rw-r--r--coverage/python.py51
-rw-r--r--coverage/pytracer.py51
-rw-r--r--coverage/results.py24
-rw-r--r--coverage/summary.py74
-rw-r--r--coverage/version.py2
-rw-r--r--coverage/xmlreport.py20
27 files changed, 1163 insertions, 504 deletions
diff --git a/coverage/backward.py b/coverage/backward.py
index 700c3eb..62ca495 100644
--- a/coverage/backward.py
+++ b/coverage/backward.py
@@ -3,10 +3,8 @@
"""Add things to old Pythons so I can pretend they are newer."""
-# This file does lots of tricky stuff, so disable a bunch of pylint warnings.
-# pylint: disable=redefined-builtin
+# This file does tricky stuff, so disable a pylint warning.
# pylint: disable=unused-import
-# pxlint: disable=no-name-in-module
import sys
@@ -19,11 +17,14 @@ try:
except ImportError:
from io import StringIO
-# In py3, ConfigParser was renamed to the more-standard configparser
+# In py3, ConfigParser was renamed to the more-standard configparser.
+# But there's a py3 backport that installs "configparser" in py2, and I don't
+# want it because it has annoying deprecation warnings. So try the real py2
+# import first.
try:
- import configparser
-except ImportError:
import ConfigParser as configparser
+except ImportError:
+ import configparser
# What's a string called?
try:
@@ -45,9 +46,9 @@ except ImportError:
# range or xrange?
try:
- range = xrange
+ range = xrange # pylint: disable=redefined-builtin
except NameError:
- range = range # pylint: disable=redefined-variable-type
+ range = range
# shlex.quote is new, but there's an undocumented implementation in "pipes",
# who knew!?
@@ -143,6 +144,12 @@ except AttributeError:
PYC_MAGIC_NUMBER = imp.get_magic()
+def invalidate_import_caches():
+ """Invalidate any import caches that may or may not exist."""
+ if importlib and hasattr(importlib, "invalidate_caches"):
+ importlib.invalidate_caches()
+
+
def import_local_file(modname, modfile=None):
"""Import a local file as a module.
diff --git a/coverage/cmdline.py b/coverage/cmdline.py
index 8942024..63e4eb1 100644
--- a/coverage/cmdline.py
+++ b/coverage/cmdline.py
@@ -3,6 +3,8 @@
"""Command-line support for coverage.py."""
+from __future__ import print_function
+
import glob
import optparse
import os.path
@@ -12,9 +14,10 @@ import traceback
from coverage import env
from coverage.collector import CTracer
-from coverage.execfile import run_python_file, run_python_module
-from coverage.misc import CoverageException, ExceptionDuringRun, NoSource
from coverage.debug import info_formatter, info_header
+from coverage.execfile import run_python_file, run_python_module
+from coverage.misc import BaseCoverageException, ExceptionDuringRun, NoSource
+from coverage.results import should_fail_under
class Opts(object):
@@ -248,7 +251,7 @@ class CmdOptionParser(CoverageOptionParser):
program_name = super(CmdOptionParser, self).get_prog_name()
# Include the sub-command for this parser as part of the command.
- return "%(command)s %(subcommand)s" % {'command': program_name, 'subcommand': self.cmd}
+ return "{command} {subcommand}".format(command=program_name, subcommand=self.cmd)
GLOBAL_ARGS = [
@@ -320,6 +323,7 @@ CMDS = {
Opts.include,
Opts.omit,
Opts.title,
+ Opts.skip_covered,
] + GLOBAL_ARGS,
usage="[options] [modules]",
description=(
@@ -458,7 +462,7 @@ class CoverageScript(object):
debug = unshell_list(options.debug)
# Do something.
- self.coverage = self.covpkg.coverage(
+ self.coverage = self.covpkg.Coverage(
data_suffix=options.parallel_mode,
cover_pylib=options.pylib,
timid=options.timid,
@@ -510,7 +514,7 @@ class CoverageScript(object):
elif options.action == "html":
total = self.coverage.html_report(
directory=options.directory, title=options.title,
- **report_args)
+ skip_covered=options.skip_covered, **report_args)
elif options.action == "xml":
outfile = options.outfile
total = self.coverage.xml_report(outfile=outfile, **report_args)
@@ -521,18 +525,9 @@ class CoverageScript(object):
if options.fail_under is not None:
self.coverage.set_option("report:fail_under", options.fail_under)
- if self.coverage.get_option("report:fail_under"):
- # Total needs to be rounded, but don't want to report 100
- # unless it is really 100.
- if 99 < total < 100:
- total = 99
- else:
- total = round(total)
-
- if total >= self.coverage.get_option("report:fail_under"):
- return OK
- else:
- return FAIL_UNDER
+ fail_under = self.coverage.get_option("report:fail_under")
+ if should_fail_under(total, fail_under):
+ return FAIL_UNDER
return OK
@@ -540,8 +535,8 @@ class CoverageScript(object):
"""Display an error message, or the named topic."""
assert error or topic or parser
if error:
- print(error)
- print("Use '%s help' for help." % (self.program_name,))
+ print(error, file=sys.stderr)
+ print("Use '%s help' for help." % (self.program_name,), file=sys.stderr)
elif parser:
print(parser.format_help().strip())
else:
@@ -757,9 +752,9 @@ def main(argv=None):
# An exception was caught while running the product code. The
# sys.exc_info() return tuple is packed into an ExceptionDuringRun
# exception.
- traceback.print_exception(*err.args)
+ traceback.print_exception(*err.args) # pylint: disable=no-value-for-parameter
status = ERR
- except CoverageException as err:
+ except BaseCoverageException as err:
# A controlled error inside coverage.py: print the message to the user.
print(err)
status = ERR
diff --git a/coverage/collector.py b/coverage/collector.py
index 3e28b3b..cfdcf40 100644
--- a/coverage/collector.py
+++ b/coverage/collector.py
@@ -162,6 +162,13 @@ class Collector(object):
"""Return the class name of the tracer we're using."""
return self._trace_class.__name__
+ def _clear_data(self):
+ """Clear out existing data, but stay ready for more collection."""
+ self.data.clear()
+
+ for tracer in self.tracers:
+ tracer.reset_activity()
+
def reset(self):
"""Clear collected data, and prepare to collect more."""
# A dictionary mapping file names to dicts with line number keys (if not
@@ -208,6 +215,8 @@ class Collector(object):
# Our active Tracers.
self.tracers = []
+ self._clear_data()
+
def _start_tracer(self):
"""Start a new Tracer object, and store it in self.tracers."""
tracer = self._trace_class()
@@ -267,6 +276,8 @@ class Collector(object):
if self._collectors:
self._collectors[-1].pause()
+ self.tracers = []
+
# Check to see whether we had a fullcoverage tracer installed. If so,
# get the stack frames it stashed away for us.
traces0 = []
@@ -296,7 +307,7 @@ class Collector(object):
except TypeError:
raise Exception("fullcoverage must be run with the C trace function.")
- # Install our installation tracer in threading, to jump start other
+ # Install our installation tracer in threading, to jump-start other
# threads.
if self.threading:
self.threading.settrace(self._installation_trace)
@@ -309,7 +320,6 @@ class Collector(object):
)
self.pause()
- self.tracers = []
# Remove this Collector from the stack, and resume the one underneath
# (if any).
@@ -338,6 +348,14 @@ class Collector(object):
else:
self._start_tracer()
+ def _activity(self):
+ """Has any activity been traced?
+
+ Returns a boolean, True if any trace function was invoked.
+
+ """
+ return any(tracer.activity() for tracer in self.tracers)
+
def switch_context(self, new_context):
"""Who-Tests-What hack: switch to a new who-context."""
# Make a new data dict, or find the existing one, and switch all the
@@ -349,9 +367,11 @@ class Collector(object):
def save_data(self, covdata):
"""Save the collected data to a `CoverageData`.
- Also resets the collector.
-
+ Returns True if there was data to save, False if not.
"""
+ if not self._activity():
+ return False
+
def abs_file_dict(d):
"""Return a dict like d, but with keys modified by `abs_file`."""
return dict((abs_file(k), v) for k, v in iitems(d))
@@ -369,4 +389,5 @@ class Collector(object):
with open(out_file, "w") as wtw_out:
pprint.pprint(self.contexts, wtw_out)
- self.reset()
+ self._clear_data()
+ return True
diff --git a/coverage/config.py b/coverage/config.py
index c7d6555..3fa6449 100644
--- a/coverage/config.py
+++ b/coverage/config.py
@@ -21,12 +21,12 @@ class HandyConfigParser(configparser.RawConfigParser):
configparser.RawConfigParser.__init__(self)
self.section_prefix = section_prefix
- def read(self, filename): # pylint: disable=arguments-differ
+ def read(self, filenames):
"""Read a file name as UTF-8 configuration data."""
kwargs = {}
if sys.version_info >= (3, 2):
kwargs['encoding'] = "utf-8"
- return configparser.RawConfigParser.read(self, filename, **kwargs)
+ return configparser.RawConfigParser.read(self, filenames, **kwargs)
def has_option(self, section, option):
section = self.section_prefix + section
@@ -47,7 +47,7 @@ class HandyConfigParser(configparser.RawConfigParser):
d[opt] = self.get(section, opt)
return d
- def get(self, section, *args, **kwargs):
+ def get(self, section, *args, **kwargs): # pylint: disable=arguments-differ
"""Get a value, replacing environment variables also.
The arguments are the same as `RawConfigParser.get`, but in the found
@@ -122,12 +122,12 @@ class HandyConfigParser(configparser.RawConfigParser):
# The default line exclusion regexes.
DEFAULT_EXCLUDE = [
- r'(?i)#\s*pragma[:\s]?\s*no\s*cover',
+ r'#\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(cover|COVER)',
]
# The default partial branch regexes, to be modified by the user.
DEFAULT_PARTIAL = [
- r'(?i)#\s*pragma[:\s]?\s*no\s*branch',
+ r'#\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(branch|BRANCH)',
]
# The default partial branch regexes, based on Python semantics.
@@ -158,6 +158,7 @@ class CoverageConfig(object):
self.cover_pylib = False
self.data_file = ".coverage"
self.debug = []
+ self.disable_warnings = []
self.note = None
self.parallel = False
self.plugins = []
@@ -191,7 +192,7 @@ class CoverageConfig(object):
# Options for plugins
self.plugin_options = {}
- MUST_BE_LIST = ["omit", "include", "debug", "plugins", "concurrency"]
+ MUST_BE_LIST = ["concurrency", "debug", "disable_warnings", "include", "omit", "plugins"]
def from_args(self, **kwargs):
"""Read config values from `kwargs`."""
@@ -207,7 +208,8 @@ class CoverageConfig(object):
`filename` is a file name to read.
- Returns True or False, whether the file could be read.
+ Returns True or False, whether the file could be read, and it had some
+ coverage.py settings in it.
"""
self.attempted_config_files.append(filename)
@@ -222,9 +224,12 @@ class CoverageConfig(object):
self.config_files.extend(files_read)
+ any_set = False
try:
for option_spec in self.CONFIG_FILE_OPTIONS:
- self._set_attr_from_config_option(cp, *option_spec)
+ was_set = self._set_attr_from_config_option(cp, *option_spec)
+ if was_set:
+ any_set = True
except ValueError as err:
raise CoverageException("Couldn't read config file %s: %s" % (filename, err))
@@ -249,13 +254,20 @@ class CoverageConfig(object):
if cp.has_section('paths'):
for option in cp.options('paths'):
self.paths[option] = cp.getlist('paths', option)
+ any_set = True
# plugins can have options
for plugin in self.plugins:
if cp.has_section(plugin):
self.plugin_options[plugin] = cp.get_section(plugin)
+ any_set = True
- return True
+ # Was this file used as a config file? If no prefix, then it was used.
+ # If a prefix, then it was only used if we found some settings in it.
+ if section_prefix:
+ return any_set
+ else:
+ return True
CONFIG_FILE_OPTIONS = [
# These are *args for _set_attr_from_config_option:
@@ -272,6 +284,7 @@ class CoverageConfig(object):
('cover_pylib', 'run:cover_pylib', 'boolean'),
('data_file', 'run:data_file'),
('debug', 'run:debug', 'list'),
+ ('disable_warnings', 'run:disable_warnings', 'list'),
('include', 'run:include', 'list'),
('note', 'run:note'),
('omit', 'run:omit', 'list'),
@@ -304,11 +317,17 @@ class CoverageConfig(object):
]
def _set_attr_from_config_option(self, cp, attr, where, type_=''):
- """Set an attribute on self if it exists in the ConfigParser."""
+ """Set an attribute on self if it exists in the ConfigParser.
+
+ Returns True if the attribute was set.
+
+ """
section, option = where.split(":")
if cp.has_option(section, option):
method = getattr(cp, 'get' + type_)
setattr(self, attr, method(section, option))
+ return True
+ return False
def get_plugin_options(self, plugin):
"""Get a dictionary of options for the plugin named `plugin`."""
@@ -351,7 +370,6 @@ class CoverageConfig(object):
Returns the value of the option.
"""
-
# Check all the hard-coded options.
for option_spec in self.CONFIG_FILE_OPTIONS:
attr, where = option_spec[:2]
@@ -365,3 +383,61 @@ class CoverageConfig(object):
# If we get here, we didn't find the option.
raise CoverageException("No such option: %r" % option_name)
+
+
+def read_coverage_config(config_file, **kwargs):
+ """Read the coverage.py configuration.
+
+ Arguments:
+ config_file: a boolean or string, see the `Coverage` class for the
+ tricky details.
+ all others: keyword arguments from the `Coverage` class, used for
+ setting values in the configuration.
+
+ Returns:
+ config_file, config:
+ config_file is the value to use for config_file in other
+ invocations of coverage.
+
+ config is a CoverageConfig object read from the appropriate
+ configuration file.
+
+ """
+ # Build the configuration from a number of sources:
+ # 1) defaults:
+ config = CoverageConfig()
+
+ # 2) from a file:
+ if config_file:
+ # Some API users were specifying ".coveragerc" to mean the same as
+ # True, so make it so.
+ if config_file == ".coveragerc":
+ config_file = True
+ specified_file = (config_file is not True)
+ if not specified_file:
+ config_file = ".coveragerc"
+
+ for fname, prefix in [(config_file, ""),
+ ("setup.cfg", "coverage:"),
+ ("tox.ini", "coverage:")]:
+ config_read = config.from_file(fname, section_prefix=prefix)
+ is_config_file = fname == config_file
+
+ if not config_read and is_config_file and specified_file:
+ raise CoverageException("Couldn't read '%s' as a config file" % fname)
+
+ if config_read:
+ break
+
+ # 3) from environment variables:
+ env_data_file = os.environ.get('COVERAGE_FILE')
+ if env_data_file:
+ config.data_file = env_data_file
+ debugs = os.environ.get('COVERAGE_DEBUG')
+ if debugs:
+ config.debug.extend(d.strip() for d in debugs.split(","))
+
+ # 4) from constructor arguments:
+ config.from_args(**kwargs)
+
+ return config_file, config
diff --git a/coverage/control.py b/coverage/control.py
index 351992f..fb03361 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -3,39 +3,47 @@
"""Core control stuff for coverage.py."""
+
import atexit
import inspect
+import itertools
import os
import platform
import re
import sys
import traceback
-from coverage import env, files
+from coverage import env
from coverage.annotate import AnnotateReporter
from coverage.backward import string_class, iitems
from coverage.collector import Collector
-from coverage.config import CoverageConfig
+from coverage.config import read_coverage_config
from coverage.data import CoverageData, CoverageDataFiles
-from coverage.debug import DebugControl
+from coverage.debug import DebugControl, write_formatted_info
from coverage.files import TreeMatcher, FnmatchMatcher
from coverage.files import PathAliases, find_python_files, prep_patterns
+from coverage.files import canonical_filename, set_relative_directory
from coverage.files import ModuleMatcher, abs_file
from coverage.html import HtmlReporter
from coverage.misc import CoverageException, bool_or_none, join_regex
from coverage.misc import file_be_gone, isolate_module
-from coverage.multiproc import patch_multiprocessing
from coverage.plugin import FileReporter
from coverage.plugin_support import Plugins
-from coverage.python import PythonFileReporter
+from coverage.python import PythonFileReporter, source_for_file
from coverage.results import Analysis, Numbers
from coverage.summary import SummaryReporter
from coverage.xmlreport import XmlReporter
+try:
+ from coverage.multiproc import patch_multiprocessing
+except ImportError: # pragma: only jython
+ # Jython has no multiprocessing module.
+ patch_multiprocessing = None
+
os = isolate_module(os)
# Pypy has some unusual stuff in the "stdlib". Consider those locations
-# when deciding where the stdlib is. This modules are not used for anything,
+# when deciding where the stdlib is. These modules are not used for anything,
# they are modules importable from the pypy lib directories, so that we can
# find those directories.
_structseq = _pypy_irc_topic = None
@@ -101,8 +109,8 @@ class Coverage(object):
file can't be read, it is an error.
* If it is True, then a few standard files names are tried
- (".coveragerc", "setup.cfg"). It is not an error for these files
- to not be found.
+ (".coveragerc", "setup.cfg", "tox.ini"). It is not an error for
+ these files to not be found.
* If it is False, then no configuration file is read.
@@ -130,49 +138,18 @@ class Coverage(object):
The `concurrency` parameter can now be a list of strings.
"""
- # Build our configuration from a number of sources:
- # 1: defaults:
- self.config = CoverageConfig()
-
- # 2: from the rcfile, .coveragerc or setup.cfg file:
- if config_file:
- # pylint: disable=redefined-variable-type
- did_read_rc = False
- # Some API users were specifying ".coveragerc" to mean the same as
- # True, so make it so.
- if config_file == ".coveragerc":
- config_file = True
- specified_file = (config_file is not True)
- if not specified_file:
- config_file = ".coveragerc"
- self.config_file = config_file
-
- did_read_rc = self.config.from_file(config_file)
-
- if not did_read_rc:
- if specified_file:
- raise CoverageException(
- "Couldn't read '%s' as a config file" % config_file
- )
- self.config.from_file("setup.cfg", section_prefix="coverage:")
-
- # 3: from environment variables:
- env_data_file = os.environ.get('COVERAGE_FILE')
- if env_data_file:
- self.config.data_file = env_data_file
- debugs = os.environ.get('COVERAGE_DEBUG')
- if debugs:
- self.config.debug.extend(debugs.split(","))
-
- # 4: from constructor arguments:
- self.config.from_args(
+ # Build our configuration from a number of sources.
+ self.config_file, self.config = read_coverage_config(
+ config_file=config_file,
data_file=data_file, cover_pylib=cover_pylib, timid=timid,
branch=branch, parallel=bool_or_none(data_suffix),
source=source, omit=omit, include=include, debug=debug,
concurrency=concurrency,
)
+ # This is injectable by tests.
self._debug_file = None
+
self._auto_load = self._auto_save = auto_data
self._data_suffix = data_suffix
@@ -191,10 +168,11 @@ class Coverage(object):
# Other instance attributes, set later.
self.omit = self.include = self.source = None
+ self.source_pkgs_unmatched = None
self.source_pkgs = None
self.data = self.data_files = self.collector = None
self.plugins = None
- self.pylib_dirs = self.cover_dirs = None
+ self.pylib_paths = self.cover_paths = None
self.data_suffix = self.run_suffix = None
self._exclude_re = None
self.debug = None
@@ -204,8 +182,6 @@ class Coverage(object):
self._inited = False
# Have we started collecting and not stopped it?
self._started = False
- # Have we measured some data and not harvested it?
- self._measured = False
# If we have sub-process measurement happening automatically, then we
# want any explicit creation of a Coverage object to mean, this process
@@ -226,6 +202,8 @@ class Coverage(object):
if self._inited:
return
+ self._inited = True
+
# Create and configure the debugging controller. COVERAGE_DEBUG_FILE
# is an environment variable, the name of a file to append debug logs
# to.
@@ -245,22 +223,27 @@ class Coverage(object):
self._exclude_re = {}
self._exclude_regex_stale()
- files.set_relative_directory()
+ set_relative_directory()
# The source argument can be directories or package names.
self.source = []
self.source_pkgs = []
for src in self.config.source or []:
- if os.path.exists(src):
- self.source.append(files.canonical_filename(src))
+ if os.path.isdir(src):
+ self.source.append(canonical_filename(src))
else:
self.source_pkgs.append(src)
+ self.source_pkgs_unmatched = self.source_pkgs[:]
self.omit = prep_patterns(self.config.omit)
self.include = prep_patterns(self.config.include)
concurrency = self.config.concurrency or []
if "multiprocessing" in concurrency:
+ if not patch_multiprocessing:
+ raise CoverageException( # pragma: only jython
+ "multiprocessing is not supported on this Python"
+ )
patch_multiprocessing(rcfile=self.config_file)
# Multi-processing uses parallel for the subprocesses, so also use
# it for the main process.
@@ -306,10 +289,12 @@ class Coverage(object):
# data file will be written into the directory where the process
# started rather than wherever the process eventually chdir'd to.
self.data = CoverageData(debug=self.debug)
- self.data_files = CoverageDataFiles(basename=self.config.data_file, warn=self._warn)
+ self.data_files = CoverageDataFiles(
+ basename=self.config.data_file, warn=self._warn, debug=self.debug,
+ )
# The directories for files considered "installed with the interpreter".
- self.pylib_dirs = set()
+ self.pylib_paths = set()
if not self.config.cover_pylib:
# Look at where some standard modules are located. That's the
# indication for "installed with the interpreter". In some
@@ -318,7 +303,7 @@ class Coverage(object):
# we've imported, and take all the different ones.
for m in (atexit, inspect, os, platform, _pypy_irc_topic, re, _structseq, traceback):
if m is not None and hasattr(m, "__file__"):
- self.pylib_dirs.add(self._canonical_dir(m))
+ self.pylib_paths.add(self._canonical_path(m, directory=True))
if _structseq and not hasattr(_structseq, '__file__'):
# PyPy 2.4 has no __file__ in the builtin modules, but the code
@@ -329,96 +314,77 @@ class Coverage(object):
structseq_file = structseq_new.func_code.co_filename
except AttributeError:
structseq_file = structseq_new.__code__.co_filename
- self.pylib_dirs.add(self._canonical_dir(structseq_file))
+ self.pylib_paths.add(self._canonical_path(structseq_file))
# To avoid tracing the coverage.py code itself, we skip anything
# located where we are.
- self.cover_dirs = [self._canonical_dir(__file__)]
+ self.cover_paths = [self._canonical_path(__file__, directory=True)]
if env.TESTING:
+ # Don't include our own test code.
+ self.cover_paths.append(os.path.join(self.cover_paths[0], "tests"))
+
# When testing, we use PyContracts, which should be considered
# part of coverage.py, and it uses six. Exclude those directories
# just as we exclude ourselves.
import contracts
import six
for mod in [contracts, six]:
- self.cover_dirs.append(self._canonical_dir(mod))
+ self.cover_paths.append(self._canonical_path(mod))
# Set the reporting precision.
Numbers.set_precision(self.config.precision)
atexit.register(self._atexit)
- self._inited = True
-
# Create the matchers we need for _should_trace
if self.source or self.source_pkgs:
self.source_match = TreeMatcher(self.source)
self.source_pkgs_match = ModuleMatcher(self.source_pkgs)
else:
- if self.cover_dirs:
- self.cover_match = TreeMatcher(self.cover_dirs)
- if self.pylib_dirs:
- self.pylib_match = TreeMatcher(self.pylib_dirs)
+ if self.cover_paths:
+ self.cover_match = TreeMatcher(self.cover_paths)
+ if self.pylib_paths:
+ self.pylib_match = TreeMatcher(self.pylib_paths)
if self.include:
self.include_match = FnmatchMatcher(self.include)
if self.omit:
self.omit_match = FnmatchMatcher(self.omit)
# The user may want to debug things, show info if desired.
+ self._write_startup_debug()
+
+ def _write_startup_debug(self):
+ """Write out debug info at startup if needed."""
wrote_any = False
with self.debug.without_callers():
if self.debug.should('config'):
config_info = sorted(self.config.__dict__.items())
- self.debug.write_formatted_info("config", config_info)
+ write_formatted_info(self.debug, "config", config_info)
wrote_any = True
if self.debug.should('sys'):
- self.debug.write_formatted_info("sys", self.sys_info())
+ write_formatted_info(self.debug, "sys", self.sys_info())
for plugin in self.plugins:
header = "sys: " + plugin._coverage_plugin_name
info = plugin.sys_info()
- self.debug.write_formatted_info(header, info)
+ write_formatted_info(self.debug, header, info)
wrote_any = True
if wrote_any:
- self.debug.write_formatted_info("end", ())
+ write_formatted_info(self.debug, "end", ())
- def _canonical_dir(self, morf):
- """Return the canonical directory of the module or file `morf`."""
- morf_filename = PythonFileReporter(morf, self).filename
- return os.path.split(morf_filename)[0]
+ def _canonical_path(self, morf, directory=False):
+ """Return the canonical path of the module or file `morf`.
- def _source_for_file(self, filename):
- """Return the source file for `filename`.
-
- Given a file name being traced, return the best guess as to the source
- file to attribute it to.
+ If the module is a package, then return its directory. If it is a
+ module, then return its file, unless `directory` is True, in which
+ case return its enclosing directory.
"""
- if filename.endswith(".py"):
- # .py files are themselves source files.
- return filename
-
- elif filename.endswith((".pyc", ".pyo")):
- # Bytecode files probably have source files near them.
- py_filename = filename[:-1]
- if os.path.exists(py_filename):
- # Found a .py file, use that.
- return py_filename
- if env.WINDOWS:
- # On Windows, it could be a .pyw file.
- pyw_filename = py_filename + "w"
- if os.path.exists(pyw_filename):
- return pyw_filename
- # Didn't find source, but it's probably the .py file we want.
- return py_filename
-
- elif filename.endswith("$py.class"):
- # Jython is easy to guess.
- return filename[:-9] + ".py"
-
- # No idea, just use the file name as-is.
- return filename
+ morf_path = PythonFileReporter(morf, self).filename
+ if morf_path.endswith("__init__.py") or directory:
+ morf_path = os.path.split(morf_path)[0]
+ return morf_path
def _name_for_module(self, module_globals, filename):
"""Get the name of the module for a set of globals and file name.
@@ -432,6 +398,10 @@ class Coverage(object):
can't be determined, None is returned.
"""
+ if module_globals is None: # pragma: only ironpython
+ # IronPython doesn't provide globals: https://github.com/IronLanguages/main/issues/1296
+ module_globals = {}
+
dunder_name = module_globals.get('__name__', None)
if isinstance(dunder_name, str) and dunder_name != '__main__':
@@ -480,9 +450,9 @@ class Coverage(object):
# .pyc files can be moved after compilation (for example, by being
# installed), we look for __file__ in the frame and prefer it to the
# co_filename value.
- dunder_file = frame.f_globals.get('__file__')
+ dunder_file = frame.f_globals and frame.f_globals.get('__file__')
if dunder_file:
- filename = self._source_for_file(dunder_file)
+ filename = source_for_file(dunder_file)
if original_filename and not original_filename.startswith('<'):
orig = os.path.basename(original_filename)
if orig != os.path.basename(filename):
@@ -514,7 +484,7 @@ class Coverage(object):
if filename.endswith("$py.class"):
filename = filename[:-9] + ".py"
- canonical = files.canonical_filename(filename)
+ canonical = canonical_filename(filename)
disp.canonical_filename = canonical
# Try the plugins, see if they have an opinion about the file.
@@ -532,7 +502,7 @@ class Coverage(object):
if file_tracer.has_dynamic_source_filename():
disp.has_dynamic_filename = True
else:
- disp.source_filename = files.canonical_filename(
+ disp.source_filename = canonical_filename(
file_tracer.source_filename()
)
break
@@ -579,8 +549,8 @@ class Coverage(object):
# stdlib and coverage.py directories.
if self.source_match:
if self.source_pkgs_match.match(modulename):
- if modulename in self.source_pkgs:
- self.source_pkgs.remove(modulename)
+ if modulename in self.source_pkgs_unmatched:
+ self.source_pkgs_unmatched.remove(modulename)
return None # There's no reason to skip this file.
if not self.source_match.match(filename):
@@ -633,9 +603,18 @@ class Coverage(object):
return not reason
- def _warn(self, msg):
- """Use `msg` as a warning."""
+ def _warn(self, msg, slug=None):
+ """Use `msg` as a warning.
+
+ For warning suppression, use `slug` as the shorthand.
+ """
+ if slug in self.config.disable_warnings:
+ # Don't issue the warning
+ return
+
self._warnings.append(msg)
+ if slug:
+ msg = "%s (%s)" % (msg, slug)
if self.debug.should('pid'):
msg = "[%d] %s" % (os.getpid(), msg)
sys.stderr.write("Coverage.py warning: %s\n" % msg)
@@ -694,7 +673,7 @@ class Coverage(object):
def start(self):
"""Start measuring code coverage.
- Coverage measurement actually occurs in functions called after
+ Coverage measurement only occurs in functions called after
:meth:`start` is invoked. Statements in the same scope as
:meth:`start` won't be measured.
@@ -712,7 +691,6 @@ class Coverage(object):
self.collector.start()
self._started = True
- self._measured = True
def stop(self):
"""Stop measuring code coverage."""
@@ -722,8 +700,8 @@ class Coverage(object):
def _atexit(self):
"""Clean up on process shutdown."""
- if self.debug and self.debug.should('dataio'):
- self.debug.write("Inside _atexit: self._auto_save = %r" % (self._auto_save,))
+ if self.debug.should("process"):
+ self.debug.write("atexit: {0!r}".format(self))
if self._started:
self.stop()
if self._auto_save:
@@ -832,7 +810,7 @@ class Coverage(object):
)
def get_data(self):
- """Get the collected data and reset the collector.
+ """Get the collected data.
Also warn about various problems collecting data.
@@ -842,46 +820,78 @@ class Coverage(object):
"""
self._init()
- if not self._measured:
- return self.data
- self.collector.save_data(self.data)
+ if self.collector.save_data(self.data):
+ self._post_save_work()
+
+ return self.data
+
+ def _post_save_work(self):
+ """After saving data, look for warnings, post-work, etc.
- # If there are still entries in the source_pkgs list, then we never
- # encountered those packages.
+ Warn about things that should have happened but didn't.
+ Look for unexecuted files.
+
+ """
+ # If there are still entries in the source_pkgs_unmatched list,
+ # then we never encountered those packages.
if self._warn_unimported_source:
- for pkg in self.source_pkgs:
+ for pkg in self.source_pkgs_unmatched:
if pkg not in sys.modules:
- self._warn("Module %s was never imported." % pkg)
+ self._warn("Module %s was never imported." % pkg, slug="module-not-imported")
elif not (
hasattr(sys.modules[pkg], '__file__') and
os.path.exists(sys.modules[pkg].__file__)
):
- self._warn("Module %s has no Python source." % pkg)
+ self._warn("Module %s has no Python source." % pkg, slug="module-not-python")
else:
- self._warn("Module %s was previously imported, but not measured." % pkg)
+ self._warn(
+ "Module %s was previously imported, but not measured." % pkg,
+ slug="module-not-measured",
+ )
# Find out if we got any data.
if not self.data and self._warn_no_data:
- self._warn("No data was collected.")
+ self._warn("No data was collected.", slug="no-data-collected")
# Find files that were never executed at all.
- for src in self.source:
- for py_file in find_python_files(src):
- py_file = files.canonical_filename(py_file)
-
- if self.omit_match and self.omit_match.match(py_file):
- # Turns out this file was omitted, so don't pull it back
- # in as unexecuted.
- continue
+ for pkg in self.source_pkgs:
+ if (not pkg in sys.modules or
+ not hasattr(sys.modules[pkg], '__file__') or
+ not os.path.exists(sys.modules[pkg].__file__)):
+ continue
+ pkg_file = source_for_file(sys.modules[pkg].__file__)
+ self._find_unexecuted_files(self._canonical_path(pkg_file))
- self.data.touch_file(py_file)
+ for src in self.source:
+ self._find_unexecuted_files(src)
if self.config.note:
self.data.add_run_info(note=self.config.note)
- self._measured = False
- return self.data
+ def _find_plugin_files(self, src_dir):
+ """Get executable files from the plugins."""
+ for plugin in self.plugins:
+ for x_file in plugin.find_executable_files(src_dir):
+ yield x_file, plugin._coverage_plugin_name
+
+ def _find_unexecuted_files(self, src_dir):
+ """Find unexecuted files in `src_dir`.
+
+ Search for files in `src_dir` that are probably importable,
+ and add them as unexecuted files in `self.data`.
+
+ """
+ py_files = ((py_file, None) for py_file in find_python_files(src_dir))
+ plugin_files = self._find_plugin_files(src_dir)
+
+ for file_path, plugin_name in itertools.chain(py_files, plugin_files):
+ file_path = canonical_filename(file_path)
+ if self.omit_match and self.omit_match.match(file_path):
+ # Turns out this file was omitted, so don't pull it back
+ # in as unexecuted.
+ continue
+ self.data.touch_file(file_path, plugin_name)
# Backward compatibility with version 1.
def analysis(self, morf):
@@ -949,7 +959,6 @@ class Coverage(object):
)
if file_reporter == "python":
- # pylint: disable=redefined-variable-type
file_reporter = PythonFileReporter(morf, self)
return file_reporter
@@ -993,6 +1002,8 @@ class Coverage(object):
included in the report. Files matching `omit` will not be included in
the report.
+ If `skip_covered` is True, don't report on files with 100% coverage.
+
Returns a float, the total percentage covered.
"""
@@ -1026,7 +1037,8 @@ class Coverage(object):
reporter.report(morfs, directory=directory)
def html_report(self, morfs=None, directory=None, ignore_errors=None,
- omit=None, include=None, extra_css=None, title=None):
+ omit=None, include=None, extra_css=None, title=None,
+ skip_covered=None):
"""Generate an HTML report.
The HTML is written to `directory`. The file "index.html" is the
@@ -1048,6 +1060,7 @@ class Coverage(object):
self.config.from_args(
ignore_errors=ignore_errors, omit=omit, include=include,
html_dir=directory, extra_css=extra_css, html_title=title,
+ skip_covered=skip_covered,
)
reporter = HtmlReporter(self, self.config)
return reporter.report(morfs)
@@ -1120,8 +1133,8 @@ class Coverage(object):
info = [
('version', covmod.__version__),
('coverage', covmod.__file__),
- ('cover_dirs', self.cover_dirs),
- ('pylib_dirs', self.pylib_dirs),
+ ('cover_paths', self.cover_paths),
+ ('pylib_paths', self.pylib_paths),
('tracer', self.collector.tracer_name()),
('plugins.file_tracers', ft_plugins),
('config_files', self.config.attempted_config_files),
diff --git a/coverage/ctracer/datastack.c b/coverage/ctracer/datastack.c
index 5a384e6..515ba92 100644
--- a/coverage/ctracer/datastack.c
+++ b/coverage/ctracer/datastack.c
@@ -4,7 +4,7 @@
#include "util.h"
#include "datastack.h"
-#define STACK_DELTA 100
+#define STACK_DELTA 20
int
DataStack_init(Stats *pstats, DataStack *pdata_stack)
@@ -18,6 +18,11 @@ DataStack_init(Stats *pstats, DataStack *pdata_stack)
void
DataStack_dealloc(Stats *pstats, DataStack *pdata_stack)
{
+ int i;
+
+ for (i = 0; i < pdata_stack->alloc; i++) {
+ Py_XDECREF(pdata_stack->stack[i].file_data);
+ }
PyMem_Free(pdata_stack->stack);
}
@@ -35,6 +40,9 @@ DataStack_grow(Stats *pstats, DataStack *pdata_stack)
pdata_stack->depth--;
return RET_ERROR;
}
+ /* Zero the new entries. */
+ memset(bigger_data_stack + pdata_stack->alloc, 0, STACK_DELTA * sizeof(DataStackEntry));
+
pdata_stack->stack = bigger_data_stack;
pdata_stack->alloc = bigger;
}
diff --git a/coverage/ctracer/datastack.h b/coverage/ctracer/datastack.h
index b63af2c..b2dbeb9 100644
--- a/coverage/ctracer/datastack.h
+++ b/coverage/ctracer/datastack.h
@@ -9,18 +9,16 @@
/* An entry on the data stack. For each call frame, we need to record all
* the information needed for CTracer_handle_line to operate as quickly as
- * possible. All PyObject* here are borrowed references.
+ * possible.
*/
typedef struct DataStackEntry {
- /* The current file_data dictionary. Borrowed, owned by self->data. */
+ /* The current file_data dictionary. Owned. */
PyObject * file_data;
- /* The disposition object for this frame. If collector.py and control.py
- * are working properly, this will be an instance of CFileDisposition.
- */
+ /* The disposition object for this frame. A borrowed instance of CFileDisposition. */
PyObject * disposition;
- /* The FileTracer handling this frame, or None if it's Python. */
+ /* The FileTracer handling this frame, or None if it's Python. Borrowed. */
PyObject * file_tracer;
/* The line number of the last line recorded, for tracing arcs.
diff --git a/coverage/ctracer/stats.h b/coverage/ctracer/stats.h
index a72117c..c5ffdf5 100644
--- a/coverage/ctracer/stats.h
+++ b/coverage/ctracer/stats.h
@@ -19,7 +19,7 @@ typedef struct Stats {
unsigned int returns;
unsigned int exceptions;
unsigned int others;
- unsigned int new_files;
+ unsigned int files;
unsigned int missed_returns;
unsigned int stack_reallocs;
unsigned int errors;
diff --git a/coverage/ctracer/tracer.c b/coverage/ctracer/tracer.c
index ac16b6b..095df11 100644
--- a/coverage/ctracer/tracer.c
+++ b/coverage/ctracer/tracer.c
@@ -71,9 +71,8 @@ CTracer_init(CTracer *self, PyObject *args_unused, PyObject *kwds_unused)
self->pdata_stack = &self->data_stack;
- self->cur_entry.last_line = -1;
-
self->context = Py_None;
+ Py_INCREF(self->context);
ret = RET_OK;
goto ok;
@@ -168,7 +167,7 @@ showlog(int depth, int lineno, PyObject * filename, const char * msg)
static const char * what_sym[] = {"CALL", "EXC ", "LINE", "RET "};
#endif
-/* Record a pair of integers in self->cur_entry.file_data. */
+/* Record a pair of integers in self->pcur_entry->file_data. */
static int
CTracer_record_pair(CTracer *self, int l1, int l2)
{
@@ -181,7 +180,7 @@ CTracer_record_pair(CTracer *self, int l1, int l2)
goto error;
}
- if (PyDict_SetItem(self->cur_entry.file_data, t, Py_None) < 0) {
+ if (PyDict_SetItem(self->pcur_entry->file_data, t, Py_None) < 0) {
goto error;
}
@@ -300,14 +299,14 @@ CTracer_check_missing_return(CTracer *self, PyFrameObject *frame)
goto error;
}
if (self->pdata_stack->depth >= 0) {
- if (self->tracing_arcs && self->cur_entry.file_data) {
- if (CTracer_record_pair(self, self->cur_entry.last_line, -self->last_exc_firstlineno) < 0) {
+ if (self->tracing_arcs && self->pcur_entry->file_data) {
+ if (CTracer_record_pair(self, self->pcur_entry->last_line, -self->last_exc_firstlineno) < 0) {
goto error;
}
}
SHOWLOG(self->pdata_stack->depth, frame->f_lineno, frame->f_code->co_filename, "missedreturn");
- self->cur_entry = self->pdata_stack->stack[self->pdata_stack->depth];
self->pdata_stack->depth--;
+ self->pcur_entry = &self->pdata_stack->stack[self->pdata_stack->depth];
}
}
self->last_exc_back = NULL;
@@ -341,8 +340,8 @@ CTracer_handle_call(CTracer *self, PyFrameObject *frame)
CFileDisposition * pdisp = NULL;
-
STATS( self->stats.calls++; )
+ self->activity = TRUE;
/* Grow the stack. */
if (CTracer_set_pdata_stack(self) < 0) {
@@ -351,9 +350,7 @@ CTracer_handle_call(CTracer *self, PyFrameObject *frame)
if (DataStack_grow(&self->stats, self->pdata_stack) < 0) {
goto error;
}
-
- /* Push the current state on the stack. */
- self->pdata_stack->stack[self->pdata_stack->depth] = self->cur_entry;
+ self->pcur_entry = &self->pdata_stack->stack[self->pdata_stack->depth];
/* See if this frame begins a new context. */
if (self->should_start_context && self->context == Py_None) {
@@ -369,7 +366,7 @@ CTracer_handle_call(CTracer *self, PyFrameObject *frame)
PyObject * val;
Py_DECREF(self->context);
self->context = context;
- self->cur_entry.started_context = TRUE;
+ self->pcur_entry->started_context = TRUE;
STATS( self->stats.pycalls++; )
val = PyObject_CallFunctionObjArgs(self->switch_context, context, NULL);
if (val == NULL) {
@@ -379,11 +376,11 @@ CTracer_handle_call(CTracer *self, PyFrameObject *frame)
}
else {
Py_DECREF(context);
- self->cur_entry.started_context = FALSE;
+ self->pcur_entry->started_context = FALSE;
}
}
else {
- self->cur_entry.started_context = FALSE;
+ self->pcur_entry->started_context = FALSE;
}
/* Check if we should trace this line. */
@@ -393,7 +390,7 @@ CTracer_handle_call(CTracer *self, PyFrameObject *frame)
if (PyErr_Occurred()) {
goto error;
}
- STATS( self->stats.new_files++; )
+ STATS( self->stats.files++; )
/* We've never considered this file before. */
/* Ask should_trace about it. */
@@ -474,7 +471,7 @@ CTracer_handle_call(CTracer *self, PyFrameObject *frame)
if (PyErr_Occurred()) {
goto error;
}
- STATS( self->stats.new_files++; )
+ STATS( self->stats.files++; )
STATS( self->stats.pycalls++; )
should_include_bool = PyObject_CallFunctionObjArgs(self->check_include, tracename, frame, NULL);
if (should_include_bool == NULL) {
@@ -511,7 +508,6 @@ CTracer_handle_call(CTracer *self, PyFrameObject *frame)
goto error;
}
ret2 = PyDict_SetItem(self->data, tracename, file_data);
- Py_DECREF(file_data);
if (ret2 < 0) {
goto error;
}
@@ -524,32 +520,39 @@ CTracer_handle_call(CTracer *self, PyFrameObject *frame)
}
}
}
+ else {
+ /* PyDict_GetItem gives a borrowed reference. Own it. */
+ Py_INCREF(file_data);
+ }
- self->cur_entry.file_data = file_data;
- self->cur_entry.file_tracer = file_tracer;
+ Py_XDECREF(self->pcur_entry->file_data);
+ self->pcur_entry->file_data = file_data;
+ self->pcur_entry->file_tracer = file_tracer;
- /* Make the frame right in case settrace(gettrace()) happens. */
- Py_INCREF(self);
- frame->f_trace = (PyObject*)self;
SHOWLOG(self->pdata_stack->depth, frame->f_lineno, filename, "traced");
}
else {
- self->cur_entry.file_data = NULL;
- self->cur_entry.file_tracer = Py_None;
+ Py_XDECREF(self->pcur_entry->file_data);
+ self->pcur_entry->file_data = NULL;
+ self->pcur_entry->file_tracer = Py_None;
SHOWLOG(self->pdata_stack->depth, frame->f_lineno, filename, "skipped");
}
- self->cur_entry.disposition = disposition;
+ self->pcur_entry->disposition = disposition;
+
+ /* Make the frame right in case settrace(gettrace()) happens. */
+ Py_INCREF(self);
+ My_XSETREF(frame->f_trace, (PyObject*)self);
/* A call event is really a "start frame" event, and can happen for
* re-entering a generator also. f_lasti is -1 for a true call, and a
* real byte offset for a generator re-entry.
*/
if (frame->f_lasti < 0) {
- self->cur_entry.last_line = -frame->f_code->co_firstlineno;
+ self->pcur_entry->last_line = -frame->f_code->co_firstlineno;
}
else {
- self->cur_entry.last_line = frame->f_lineno;
+ self->pcur_entry->last_line = frame->f_lineno;
}
ok:
@@ -673,22 +676,22 @@ CTracer_handle_line(CTracer *self, PyFrameObject *frame)
STATS( self->stats.lines++; )
if (self->pdata_stack->depth >= 0) {
SHOWLOG(self->pdata_stack->depth, frame->f_lineno, frame->f_code->co_filename, "line");
- if (self->cur_entry.file_data) {
+ if (self->pcur_entry->file_data) {
int lineno_from = -1;
int lineno_to = -1;
/* We're tracing in this frame: record something. */
- if (self->cur_entry.file_tracer != Py_None) {
+ if (self->pcur_entry->file_tracer != Py_None) {
PyObject * from_to = NULL;
STATS( self->stats.pycalls++; )
- from_to = PyObject_CallMethodObjArgs(self->cur_entry.file_tracer, str_line_number_range, frame, NULL);
+ from_to = PyObject_CallMethodObjArgs(self->pcur_entry->file_tracer, str_line_number_range, frame, NULL);
if (from_to == NULL) {
goto error;
}
ret2 = CTracer_unpack_pair(self, from_to, &lineno_from, &lineno_to);
Py_DECREF(from_to);
if (ret2 < 0) {
- CTracer_disable_plugin(self, self->cur_entry.disposition);
+ CTracer_disable_plugin(self, self->pcur_entry->disposition);
goto ok;
}
}
@@ -700,7 +703,7 @@ CTracer_handle_line(CTracer *self, PyFrameObject *frame)
for (; lineno_from <= lineno_to; lineno_from++) {
if (self->tracing_arcs) {
/* Tracing arcs: key is (last_line,this_line). */
- if (CTracer_record_pair(self, self->cur_entry.last_line, lineno_from) < 0) {
+ if (CTracer_record_pair(self, self->pcur_entry->last_line, lineno_from) < 0) {
goto error;
}
}
@@ -711,14 +714,14 @@ CTracer_handle_line(CTracer *self, PyFrameObject *frame)
goto error;
}
- ret2 = PyDict_SetItem(self->cur_entry.file_data, this_line, Py_None);
+ ret2 = PyDict_SetItem(self->pcur_entry->file_data, this_line, Py_None);
Py_DECREF(this_line);
if (ret2 < 0) {
goto error;
}
}
- self->cur_entry.last_line = lineno_from;
+ self->pcur_entry->last_line = lineno_from;
}
}
}
@@ -742,8 +745,10 @@ CTracer_handle_return(CTracer *self, PyFrameObject *frame)
if (CTracer_set_pdata_stack(self) < 0) {
goto error;
}
+ self->pcur_entry = &self->pdata_stack->stack[self->pdata_stack->depth];
+
if (self->pdata_stack->depth >= 0) {
- if (self->tracing_arcs && self->cur_entry.file_data) {
+ if (self->tracing_arcs && self->pcur_entry->file_data) {
/* Need to distinguish between RETURN_VALUE and YIELD_VALUE. Read
* the current bytecode to see what it is. In unusual circumstances
* (Cython code), co_code can be the empty string, so range-check
@@ -758,14 +763,14 @@ CTracer_handle_return(CTracer *self, PyFrameObject *frame)
}
if (bytecode != YIELD_VALUE) {
int first = frame->f_code->co_firstlineno;
- if (CTracer_record_pair(self, self->cur_entry.last_line, -first) < 0) {
+ if (CTracer_record_pair(self, self->pcur_entry->last_line, -first) < 0) {
goto error;
}
}
}
/* If this frame started a context, then returning from it ends the context. */
- if (self->cur_entry.started_context) {
+ if (self->pcur_entry->started_context) {
PyObject * val;
Py_DECREF(self->context);
self->context = Py_None;
@@ -781,8 +786,8 @@ CTracer_handle_return(CTracer *self, PyFrameObject *frame)
/* Pop the stack. */
SHOWLOG(self->pdata_stack->depth, frame->f_lineno, frame->f_code->co_filename, "return");
- self->cur_entry = self->pdata_stack->stack[self->pdata_stack->depth];
self->pdata_stack->depth--;
+ self->pcur_entry = &self->pdata_stack->stack[self->pdata_stack->depth];
}
ret = RET_OK;
@@ -824,6 +829,10 @@ CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unuse
{
int ret = RET_ERROR;
+ #if DO_NOTHING
+ return RET_OK;
+ #endif
+
#if WHAT_LOG || TRACE_LOG
PyObject * ascii = NULL;
#endif
@@ -922,6 +931,10 @@ CTracer_call(CTracer *self, PyObject *args, PyObject *kwds)
PyObject *ret = NULL;
PyObject * ascii = NULL;
+ #if DO_NOTHING
+ CRASH
+ #endif
+
static char *what_names[] = {
"call", "exception", "line", "return",
"c_call", "c_exception", "c_return",
@@ -1022,7 +1035,25 @@ CTracer_stop(CTracer *self, PyObject *args_unused)
}
static PyObject *
-CTracer_get_stats(CTracer *self)
+CTracer_activity(CTracer *self, PyObject *args_unused)
+{
+ if (self->activity) {
+ Py_RETURN_TRUE;
+ }
+ else {
+ Py_RETURN_FALSE;
+ }
+}
+
+static PyObject *
+CTracer_reset_activity(CTracer *self, PyObject *args_unused)
+{
+ self->activity = FALSE;
+ Py_RETURN_NONE;
+}
+
+static PyObject *
+CTracer_get_stats(CTracer *self, PyObject *args_unused)
{
#if COLLECT_STATS
return Py_BuildValue(
@@ -1032,7 +1063,7 @@ CTracer_get_stats(CTracer *self)
"returns", self->stats.returns,
"exceptions", self->stats.exceptions,
"others", self->stats.others,
- "new_files", self->stats.new_files,
+ "files", self->stats.files,
"missed_returns", self->stats.missed_returns,
"stack_reallocs", self->stats.stack_reallocs,
"stack_alloc", self->pdata_stack->alloc,
@@ -1075,7 +1106,7 @@ CTracer_members[] = {
PyDoc_STR("Function for starting contexts.") },
{ "switch_context", T_OBJECT, offsetof(CTracer, switch_context), 0,
- PyDoc_STR("Function for switch to a new context.") },
+ PyDoc_STR("Function for switching to a new context.") },
{ NULL }
};
@@ -1091,6 +1122,12 @@ CTracer_methods[] = {
{ "get_stats", (PyCFunction) CTracer_get_stats, METH_VARARGS,
PyDoc_STR("Get statistics about the tracing") },
+ { "activity", (PyCFunction) CTracer_activity, METH_VARARGS,
+ PyDoc_STR("Has there been any activity?") },
+
+ { "reset_activity", (PyCFunction) CTracer_reset_activity, METH_VARARGS,
+ PyDoc_STR("Reset the activity flag") },
+
{ NULL }
};
diff --git a/coverage/ctracer/tracer.h b/coverage/ctracer/tracer.h
index 438317b..d5d630f 100644
--- a/coverage/ctracer/tracer.h
+++ b/coverage/ctracer/tracer.h
@@ -33,6 +33,8 @@ typedef struct CTracer {
BOOL started;
/* Are we tracing arcs, or just lines? */
BOOL tracing_arcs;
+ /* Have we had any activity? */
+ BOOL activity;
/*
The data stack is a stack of dictionaries. Each dictionary collects
@@ -54,8 +56,8 @@ typedef struct CTracer {
int data_stacks_used;
DataStack * pdata_stack;
- /* The current file's data stack entry, copied from the stack. */
- DataStackEntry cur_entry;
+ /* The current file's data stack entry. */
+ DataStackEntry * pcur_entry;
/* The parent frame for the last exception event, to fix missing returns. */
PyFrameObject * last_exc_back;
diff --git a/coverage/ctracer/util.h b/coverage/ctracer/util.h
index cafcc28..f0c302c 100644
--- a/coverage/ctracer/util.h
+++ b/coverage/ctracer/util.h
@@ -10,6 +10,7 @@
#undef WHAT_LOG /* Define to log the WHAT params in the trace function. */
#undef TRACE_LOG /* Define to log our bookkeeping. */
#undef COLLECT_STATS /* Collect counters: stats are printed when tracer is stopped. */
+#undef DO_NOTHING /* Define this to make the tracer do nothing. */
/* Py 2.x and 3.x compatibility */
@@ -43,6 +44,14 @@
#endif /* Py3k */
+// Undocumented, and not in 2.6, so our own copy of it.
+#define My_XSETREF(op, op2) \
+ do { \
+ PyObject *_py_tmp = (PyObject *)(op); \
+ (op) = (op2); \
+ Py_XDECREF(_py_tmp); \
+ } while (0)
+
/* The values returned to indicate ok or error. */
#define RET_OK 0
#define RET_ERROR -1
@@ -52,4 +61,7 @@ typedef int BOOL;
#define FALSE 0
#define TRUE 1
+/* Only for extreme machete-mode debugging! */
+#define CRASH { printf("*** CRASH! ***\n"); *((int*)1) = 1; }
+
#endif /* _COVERAGE_UTIL_H */
diff --git a/coverage/data.py b/coverage/data.py
index 95b6888..ecfb86b 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -144,9 +144,6 @@ class CoverageData(object):
# A list of dicts of information about the coverage.py runs.
self._runs = []
- if self._debug and self._debug.should('dataio'):
- self._debug.write("Creating CoverageData object")
-
def __repr__(self):
return "<{klass} lines={lines} arcs={arcs} tracers={tracers} runs={runs}>".format(
klass=self.__class__.__name__,
@@ -419,8 +416,12 @@ class CoverageData(object):
self._runs[0].update(kwargs)
self._validate()
- def touch_file(self, filename):
- """Ensure that `filename` appears in the data, empty if needed."""
+ def touch_file(self, filename, plugin_name=""):
+ """Ensure that `filename` appears in the data, empty if needed.
+
+    `plugin_name` is the name of the plugin responsible for this file. It is used
+ to associate the right filereporter, etc.
+ """
if self._debug and self._debug.should('dataop'):
self._debug.write("Touching %r" % (filename,))
if not self._has_arcs() and not self._has_lines():
@@ -431,6 +432,9 @@ class CoverageData(object):
else:
where = self._lines
where.setdefault(filename, [])
+ if plugin_name:
+ # Set the tracer for this file
+ self._file_tracers[filename] = plugin_name
self._validate()
@@ -608,15 +612,19 @@ class CoverageData(object):
class CoverageDataFiles(object):
"""Manage the use of coverage data files."""
- def __init__(self, basename=None, warn=None):
+ def __init__(self, basename=None, warn=None, debug=None):
"""Create a CoverageDataFiles to manage data files.
`warn` is the warning function to use.
`basename` is the name of the file to use for storing data.
+ `debug` is a `DebugControl` object for writing debug messages.
+
"""
self.warn = warn
+ self.debug = debug
+
# Construct the file name that will be used for data storage.
self.filename = os.path.abspath(basename or ".coverage")
@@ -627,12 +635,16 @@ class CoverageDataFiles(object):
basename by parallel-mode.
"""
+ if self.debug and self.debug.should('dataio'):
+ self.debug.write("Erasing data file %r" % (self.filename,))
file_be_gone(self.filename)
if parallel:
data_dir, local = os.path.split(self.filename)
localdot = local + '.*'
pattern = os.path.join(os.path.abspath(data_dir), localdot)
for filename in glob.glob(pattern):
+ if self.debug and self.debug.should('dataio'):
+ self.debug.write("Erasing parallel data file %r" % (filename,))
file_be_gone(filename)
def read(self, data):
@@ -660,7 +672,7 @@ class CoverageDataFiles(object):
with open(_TEST_NAME_FILE) as f:
test_name = f.read()
extra = "." + test_name
- dice = random.Random().randint(0, 999999)
+ dice = random.Random(os.urandom(8)).randint(0, 999999)
suffix = "%s%s.%s.%06d" % (socket.gethostname(), extra, os.getpid(), dice)
if suffix:
@@ -711,7 +723,7 @@ class CoverageDataFiles(object):
raise CoverageException("No data to combine")
for f in files_to_combine:
- new_data = CoverageData()
+ new_data = CoverageData(debug=self.debug)
try:
new_data.read_file(f)
except CoverageException as exc:
@@ -721,6 +733,8 @@ class CoverageDataFiles(object):
self.warn(str(exc))
else:
data.update(new_data, aliases=aliases)
+ if self.debug and self.debug.should('dataio'):
+ self.debug.write("Deleting combined data file %r" % (f,))
file_be_gone(f)
diff --git a/coverage/debug.py b/coverage/debug.py
index dff8beb..e68736f 100644
--- a/coverage/debug.py
+++ b/coverage/debug.py
@@ -8,7 +8,12 @@ import inspect
import os
import re
import sys
+try:
+ import _thread
+except ImportError:
+ import thread as _thread
+from coverage.backward import StringIO
from coverage.misc import isolate_module
os = isolate_module(os)
@@ -28,18 +33,27 @@ class DebugControl(object):
def __init__(self, options, output):
"""Configure the options and output file for debugging."""
- self.options = options
- self.output = output
+ self.options = list(options) + FORCED_DEBUG
+ self.raw_output = output
self.suppress_callers = False
+ filters = []
+ if self.should('pid'):
+ filters.append(add_pid_and_tid)
+ self.output = DebugOutputFile(
+ self.raw_output,
+ show_process=self.should('process'),
+ filters=filters,
+ )
+
def __repr__(self):
- return "<DebugControl options=%r output=%r>" % (self.options, self.output)
+ return "<DebugControl options=%r raw_output=%r>" % (self.options, self.raw_output)
def should(self, option):
"""Decide whether to output debug information in category `option`."""
if option == "callers" and self.suppress_callers:
return False
- return (option in self.options or option in FORCED_DEBUG)
+ return (option in self.options)
@contextlib.contextmanager
def without_callers(self):
@@ -57,18 +71,20 @@ class DebugControl(object):
`msg` is the line to write. A newline will be appended.
"""
- if self.should('pid'):
- msg = "pid %5d: %s" % (os.getpid(), msg)
self.output.write(msg+"\n")
if self.should('callers'):
dump_stack_frames(out=self.output, skip=1)
self.output.flush()
- def write_formatted_info(self, header, info):
- """Write a sequence of (label,data) pairs nicely."""
- self.write(info_header(header))
- for line in info_formatter(info):
- self.write(" %s" % line)
+
+class DebugControlString(DebugControl):
+ """A `DebugControl` that writes to a StringIO, for testing."""
+ def __init__(self, options):
+ super(DebugControlString, self).__init__(options, StringIO())
+
+ def get_output(self):
+ """Get the output text from the `DebugControl`."""
+ return self.raw_output.getvalue()
def info_header(label):
@@ -99,6 +115,13 @@ def info_formatter(info):
yield "%*s: %s" % (label_len, label, data)
+def write_formatted_info(writer, header, info):
+ """Write a sequence of (label,data) pairs nicely."""
+ writer.write(info_header(header))
+ for line in info_formatter(info):
+ writer.write(" %s" % line)
+
+
def short_stack(limit=None, skip=0):
"""Return a string summarizing the call stack.
@@ -122,18 +145,122 @@ def short_stack(limit=None, skip=0):
def dump_stack_frames(limit=None, out=None, skip=0):
- """Print a summary of the stack to stdout, or some place else."""
+ """Print a summary of the stack to stdout, or someplace else."""
out = out or sys.stdout
out.write(short_stack(limit=limit, skip=skip+1))
out.write("\n")
+def short_id(id64):
+ """Given a 64-bit id, make a shorter 16-bit one."""
+ id16 = 0
+ for offset in range(0, 64, 16):
+ id16 ^= id64 >> offset
+ return id16 & 0xFFFF
+
+
+def add_pid_and_tid(text):
+ """A filter to add pid and tid to debug messages."""
+ # Thread ids are useful, but too long. Make a shorter one.
+ tid = "{0:04x}".format(short_id(_thread.get_ident()))
+ text = "{0:5d}.{1}: {2}".format(os.getpid(), tid, text)
+ return text
+
+
+def filter_text(text, filters):
+ """Run `text` through a series of filters.
+
+ `filters` is a list of functions. Each takes a string and returns a
+ string. Each is run in turn.
+
+ Returns: the final string that results after all of the filters have
+ run.
+
+ """
+ clean_text = text.rstrip()
+ ending = text[len(clean_text):]
+ text = clean_text
+ for fn in filters:
+ lines = []
+ for line in text.splitlines():
+ lines.extend(fn(line).splitlines())
+ text = "\n".join(lines)
+ return text + ending
+
+
+class CwdTracker(object): # pragma: debugging
+ """A class to add cwd info to debug messages."""
+ def __init__(self):
+ self.cwd = None
+
+ def filter(self, text):
+ """Add a cwd message for each new cwd."""
+ cwd = os.getcwd()
+ if cwd != self.cwd:
+ text = "cwd is now {0!r}\n".format(cwd) + text
+ self.cwd = cwd
+ return text
+
+
+class DebugOutputFile(object): # pragma: debugging
+ """A file-like object that includes pid and cwd information."""
+ def __init__(self, outfile, show_process, filters):
+ self.outfile = outfile
+ self.show_process = show_process
+ self.filters = list(filters)
+
+ if self.show_process:
+ self.filters.append(CwdTracker().filter)
+ cmd = " ".join(getattr(sys, 'argv', ['???']))
+ self.write("New process: executable: %s\n" % (sys.executable,))
+ self.write("New process: cmd: %s\n" % (cmd,))
+ if hasattr(os, 'getppid'):
+ self.write("New process: parent pid: %s\n" % (os.getppid(),))
+
+ SYS_MOD_NAME = '$coverage.debug.DebugOutputFile.the_one'
+
+ @classmethod
+ def the_one(cls, fileobj=None, show_process=True, filters=()):
+ """Get the process-wide singleton DebugOutputFile.
+
+ If it doesn't exist yet, then create it as a wrapper around the file
+ object `fileobj`. `show_process` controls whether the debug file adds
+ process-level information.
+
+ """
+ # Because of the way igor.py deletes and re-imports modules,
+ # this class can be defined more than once. But we really want
+ # a process-wide singleton. So stash it in sys.modules instead of
+ # on a class attribute. Yes, this is aggressively gross.
+ the_one = sys.modules.get(cls.SYS_MOD_NAME)
+ if the_one is None:
+ assert fileobj is not None
+ sys.modules[cls.SYS_MOD_NAME] = the_one = cls(fileobj, show_process, filters)
+ return the_one
+
+ def write(self, text):
+ """Just like file.write, but filter through all our filters."""
+ self.outfile.write(filter_text(text, self.filters))
+ self.outfile.flush()
+
+ def flush(self):
+ """Flush our file."""
+ self.outfile.flush()
+
+
def log(msg, stack=False): # pragma: debugging
"""Write a log message as forcefully as possible."""
- with open("/tmp/covlog.txt", "a") as f:
- f.write("{pid}: {msg}\n".format(pid=os.getpid(), msg=msg))
- if stack:
- dump_stack_frames(out=f, skip=1)
+ out = DebugOutputFile.the_one()
+ out.write(msg+"\n")
+ if stack:
+ dump_stack_frames(out=out, skip=1)
+
+
+def filter_aspectlib_frames(text): # pragma: debugging
+ """Aspectlib prints stack traces, but includes its own frames. Scrub those out."""
+ # <<< aspectlib/__init__.py:257:function_wrapper < igor.py:143:run_tests < ...
+ text = re.sub(r"(?<= )aspectlib/[^.]+\.py:\d+:\w+ < ", "", text)
+ return text
def enable_aspectlib_maybe(): # pragma: debugging
@@ -142,7 +269,9 @@ def enable_aspectlib_maybe(): # pragma: debugging
Define COVERAGE_ASPECTLIB to enable and configure aspectlib to trace
execution::
- COVERAGE_ASPECTLIB=covaspect.txt:coverage.Coverage:coverage.data.CoverageData program...
+ $ export COVERAGE_LOG=covaspect.txt
+ $ export COVERAGE_ASPECTLIB=coverage.Coverage:coverage.data.CoverageData
+ $ coverage run blah.py ...
This will trace all the public methods on Coverage and CoverageData,
writing the information to covaspect.txt.
@@ -155,28 +284,12 @@ def enable_aspectlib_maybe(): # pragma: debugging
import aspectlib # pylint: disable=import-error
import aspectlib.debug # pylint: disable=import-error
- class AspectlibOutputFile(object):
- """A file-like object that includes pid and cwd information."""
- def __init__(self, outfile):
- self.outfile = outfile
- self.cwd = None
-
- def write(self, text):
- """Just like file.write"""
- cwd = os.getcwd()
- if cwd != self.cwd:
- self._write("cwd is now {0!r}\n".format(cwd))
- self.cwd = cwd
- self._write(text)
-
- def _write(self, text):
- """The raw text-writer, so that we can use it ourselves."""
- self.outfile.write("{0:5d}: {1}".format(os.getpid(), text))
-
- aspects = aspects.split(':')
- aspects_file = AspectlibOutputFile(open(aspects[0], "a"))
- aspect_log = aspectlib.debug.log(print_to=aspects_file, use_logging=False)
- aspects = aspects[1:]
+ filename = os.environ.get("COVERAGE_LOG", "/tmp/covlog.txt")
+ filters = [add_pid_and_tid, filter_aspectlib_frames]
+ aspects_file = DebugOutputFile.the_one(open(filename, "a"), show_process=True, filters=filters)
+ aspect_log = aspectlib.debug.log(
+ print_to=aspects_file, attributes=['id'], stacktrace=30, use_logging=False
+ )
public_methods = re.compile(r'^(__init__|[a-zA-Z].*)$')
- for aspect in aspects:
+ for aspect in aspects.split(':'):
aspectlib.weave(aspect, aspect_log, methods=public_methods)
diff --git a/coverage/env.py b/coverage/env.py
index 4cd02c0..4699a1e 100644
--- a/coverage/env.py
+++ b/coverage/env.py
@@ -4,6 +4,7 @@
"""Determine facts about the environment."""
import os
+import platform
import sys
# Operating systems.
@@ -11,7 +12,12 @@ WINDOWS = sys.platform == "win32"
LINUX = sys.platform == "linux2"
# Python implementations.
-PYPY = '__pypy__' in sys.builtin_module_names
+PYPY = (platform.python_implementation() == 'PyPy')
+if PYPY:
+ PYPYVERSION = sys.pypy_version_info
+
+JYTHON = (platform.python_implementation() == 'Jython')
+IRONPYTHON = (platform.python_implementation() == 'IronPython')
# Python versions.
PYVERSION = sys.version_info
diff --git a/coverage/execfile.py b/coverage/execfile.py
index 3e20a52..693f54f 100644
--- a/coverage/execfile.py
+++ b/coverage/execfile.py
@@ -10,7 +10,7 @@ import types
from coverage.backward import BUILTINS
from coverage.backward import PYC_MAGIC_NUMBER, imp, importlib_util_find_spec
-from coverage.misc import ExceptionDuringRun, NoCode, NoSource, isolate_module
+from coverage.misc import CoverageException, ExceptionDuringRun, NoCode, NoSource, isolate_module
from coverage.phystokens import compile_unicode
from coverage.python import get_python_source
@@ -166,11 +166,17 @@ def run_python_file(filename, args, package=None, modulename=None, path0=None):
sys.path[0] = path0 if path0 is not None else my_path0
try:
- # Make a code object somehow.
- if filename.endswith((".pyc", ".pyo")):
- code = make_code_from_pyc(filename)
- else:
- code = make_code_from_py(filename)
+ try:
+ # Make a code object somehow.
+ if filename.endswith((".pyc", ".pyo")):
+ code = make_code_from_pyc(filename)
+ else:
+ code = make_code_from_py(filename)
+ except CoverageException:
+ raise
+ except Exception as exc:
+ msg = "Couldn't run {filename!r} as Python code: {exc.__class__.__name__}: {exc}"
+ raise CoverageException(msg.format(filename=filename, exc=exc))
# Execute the code object.
try:
@@ -179,7 +185,7 @@ def run_python_file(filename, args, package=None, modulename=None, path0=None):
# The user called sys.exit(). Just pass it along to the upper
# layers, where it will be handled.
raise
- except:
+ except Exception:
# Something went wrong while executing the user code.
# Get the exc_info, and pack them into an exception that we can
# throw up to the outer loop. We peel one layer off the traceback
@@ -193,7 +199,27 @@ def run_python_file(filename, args, package=None, modulename=None, path0=None):
# it somehow? https://bitbucket.org/pypy/pypy/issue/1903
getattr(err, '__context__', None)
- raise ExceptionDuringRun(typ, err, tb.tb_next)
+ # Call the excepthook.
+ try:
+ if hasattr(err, "__traceback__"):
+ err.__traceback__ = err.__traceback__.tb_next
+ sys.excepthook(typ, err, tb.tb_next)
+ except SystemExit:
+ raise
+ except Exception:
+ # Getting the output right in the case of excepthook
+ # shenanigans is kind of involved.
+ sys.stderr.write("Error in sys.excepthook:\n")
+ typ2, err2, tb2 = sys.exc_info()
+ err2.__suppress_context__ = True
+ if hasattr(err2, "__traceback__"):
+ err2.__traceback__ = err2.__traceback__.tb_next
+ sys.__excepthook__(typ2, err2, tb2.tb_next)
+ sys.stderr.write("\nOriginal exception was:\n")
+ raise ExceptionDuringRun(typ, err, tb.tb_next)
+ else:
+ sys.exit(1)
+
finally:
# Restore the old __main__, argv, and path.
sys.modules['__main__'] = old_main_mod
diff --git a/coverage/files.py b/coverage/files.py
index 9de4849..d2c2b89 100644
--- a/coverage/files.py
+++ b/coverage/files.py
@@ -63,7 +63,11 @@ def canonical_filename(filename):
if path is None:
continue
f = os.path.join(path, filename)
- if os.path.exists(f):
+ try:
+ exists = os.path.exists(f)
+ except UnicodeError:
+ exists = False
+ if exists:
filename = f
break
cf = abs_file(filename)
@@ -147,7 +151,11 @@ else:
def abs_file(filename):
"""Return the absolute normalized form of `filename`."""
path = os.path.expandvars(os.path.expanduser(filename))
- path = os.path.abspath(os.path.realpath(path))
+ try:
+ path = os.path.realpath(path)
+ except UnicodeError:
+ pass
+ path = os.path.abspath(path)
path = actual_path(path)
path = unicode_filename(path)
return path
@@ -183,25 +191,31 @@ def prep_patterns(patterns):
class TreeMatcher(object):
- """A matcher for files in a tree."""
- def __init__(self, directories):
- self.dirs = list(directories)
+ """A matcher for files in a tree.
+
+ Construct with a list of paths, either files or directories. Paths match
+ with the `match` method if they are one of the files, or if they are
+ somewhere in a subtree rooted at one of the directories.
+
+ """
+ def __init__(self, paths):
+ self.paths = list(paths)
def __repr__(self):
- return "<TreeMatcher %r>" % self.dirs
+ return "<TreeMatcher %r>" % self.paths
def info(self):
"""A list of strings for displaying when dumping state."""
- return self.dirs
+ return self.paths
def match(self, fpath):
"""Does `fpath` indicate a file in one of our trees?"""
- for d in self.dirs:
- if fpath.startswith(d):
- if fpath == d:
+ for p in self.paths:
+ if fpath.startswith(p):
+ if fpath == p:
# This is the same file!
return True
- if fpath[len(d)] == os.sep:
+ if fpath[len(p)] == os.sep:
# This is a file in the directory
return True
return False
diff --git a/coverage/html.py b/coverage/html.py
index f04339d..b0c6164 100644
--- a/coverage/html.py
+++ b/coverage/html.py
@@ -12,7 +12,7 @@ import coverage
from coverage import env
from coverage.backward import iitems
from coverage.files import flat_rootname
-from coverage.misc import CoverageException, Hasher, isolate_module
+from coverage.misc import CoverageException, file_be_gone, Hasher, isolate_module
from coverage.report import Reporter
from coverage.results import Numbers
from coverage.templite import Templite
@@ -105,6 +105,7 @@ class HtmlReporter(Reporter):
self.coverage = cov
self.files = []
+ self.all_files_nums = []
self.has_arcs = self.coverage.data.has_arcs()
self.status = HtmlStatus()
self.extra_css = None
@@ -137,7 +138,7 @@ class HtmlReporter(Reporter):
# Process all the files.
self.report_files(self.html_file, morfs, self.config.html_dir)
- if not self.files:
+ if not self.all_files_nums:
raise CoverageException("No data to report.")
# Write the index file.
@@ -171,10 +172,26 @@ class HtmlReporter(Reporter):
def html_file(self, fr, analysis):
"""Generate an HTML file for one source file."""
+ rootname = flat_rootname(fr.relative_filename())
+ html_filename = rootname + ".html"
+ html_path = os.path.join(self.directory, html_filename)
+
+ # Get the numbers for this file.
+ nums = analysis.numbers
+ self.all_files_nums.append(nums)
+
+ if self.config.skip_covered:
+ # Don't report on 100% files.
+ no_missing_lines = (nums.n_missing == 0)
+ no_missing_branches = (nums.n_partial_branches == 0)
+ if no_missing_lines and no_missing_branches:
+ # If there's an existing file, remove it.
+ file_be_gone(html_path)
+ return
+
source = fr.source()
# Find out if the file on disk is already correct.
- rootname = flat_rootname(fr.relative_filename())
this_hash = self.file_hash(source.encode('utf-8'), fr)
that_hash = self.status.file_hash(rootname)
if this_hash == that_hash:
@@ -184,9 +201,6 @@ class HtmlReporter(Reporter):
self.status.set_file_hash(rootname, this_hash)
- # Get the numbers for this file.
- nums = analysis.numbers
-
if self.has_arcs:
missing_branch_arcs = analysis.missing_branch_arcs()
arcs_executed = analysis.arcs_executed()
@@ -269,8 +283,6 @@ class HtmlReporter(Reporter):
'time_stamp': self.time_stamp,
})
- html_filename = rootname + ".html"
- html_path = os.path.join(self.directory, html_filename)
write_html(html_path, html)
# Save this file's information for the index file.
@@ -286,7 +298,7 @@ class HtmlReporter(Reporter):
"""Write the index.html file for this report."""
index_tmpl = Templite(read_data("index.html"), self.template_globals)
- self.totals = sum(f['nums'] for f in self.files)
+ self.totals = sum(self.all_files_nums)
html = index_tmpl.render({
'has_arcs': self.has_arcs,
@@ -384,7 +396,7 @@ class HtmlStatus(object):
'files': files,
}
with open(status_file, "w") as fout:
- json.dump(status, fout)
+ json.dump(status, fout, separators=(',', ':'))
# Older versions of ShiningPanda look for the old name, status.dat.
# Accommodate them if we are running under Jenkins.
diff --git a/coverage/misc.py b/coverage/misc.py
index f376346..28aa3b0 100644
--- a/coverage/misc.py
+++ b/coverage/misc.py
@@ -12,7 +12,7 @@ import sys
import types
from coverage import env
-from coverage.backward import string_class, to_bytes, unicode_class
+from coverage.backward import to_bytes, unicode_class
ISOLATED_MODULES = {}
@@ -38,6 +38,13 @@ def isolate_module(mod):
os = isolate_module(os)
+def dummy_decorator_with_args(*args_unused, **kwargs_unused):
+ """Dummy no-op implementation of a decorator with arguments."""
+ def _decorator(func):
+ return func
+ return _decorator
+
+
# Use PyContracts for assertion testing on parameters and returns, but only if
# we are running our own test suite.
if env.TESTING:
@@ -57,12 +64,22 @@ if env.TESTING:
new_contract('bytes', lambda v: isinstance(v, bytes))
if env.PY3:
new_contract('unicode', lambda v: isinstance(v, unicode_class))
-else: # pragma: not covered
- # We aren't using real PyContracts, so just define a no-op decorator as a
- # stunt double.
- def contract(**unused):
- """Dummy no-op implementation of `contract`."""
- return lambda func: func
+
+ def one_of(argnames):
+ """Ensure that only one of the argnames is non-None."""
+ def _decorator(func):
+ argnameset = set(name.strip() for name in argnames.split(","))
+ def _wrapped(*args, **kwargs):
+ vals = [kwargs.get(name) for name in argnameset]
+ assert sum(val is not None for val in vals) == 1
+ return func(*args, **kwargs)
+ return _wrapped
+ return _decorator
+else: # pragma: not testing
+ # We aren't using real PyContracts, so just define our decorators as
+ # stunt-double no-ops.
+ contract = dummy_decorator_with_args
+ one_of = dummy_decorator_with_args
def new_contract(*args_unused, **kwargs_unused):
"""Dummy no-op implementation of `new_contract`."""
@@ -93,23 +110,28 @@ def format_lines(statements, lines):
For example, if `statements` is [1,2,3,4,5,10,11,12,13,14] and
`lines` is [1,2,5,10,11,13,14] then the result will be "1-2, 5-11, 13-14".
+ Both `lines` and `statements` can be any iterable. All of the elements of
+ `lines` must be in `statements`, and all of the values must be positive
+ integers.
+
"""
- pairs = []
- i = 0
- j = 0
- start = None
statements = sorted(statements)
lines = sorted(lines)
- while i < len(statements) and j < len(lines):
- if statements[i] == lines[j]:
- if start is None:
- start = lines[j]
- end = lines[j]
- j += 1
+
+ pairs = []
+ start = None
+ lidx = 0
+ for stmt in statements:
+ if lidx >= len(lines):
+ break
+ if stmt == lines[lidx]:
+ lidx += 1
+ if not start:
+ start = stmt
+ end = stmt
elif start:
pairs.append((start, end))
start = None
- i += 1
if start:
pairs.append((start, end))
ret = ', '.join(map(nice_pair, pairs))
@@ -129,12 +151,12 @@ def expensive(fn):
def _wrapped(self):
"""Inner function that checks the cache."""
if hasattr(self, attr):
- raise Exception("Shouldn't have called %s more than once" % fn.__name__)
+ raise AssertionError("Shouldn't have called %s more than once" % fn.__name__)
setattr(self, attr, True)
return fn(self)
return _wrapped
else:
- return fn
+ return fn # pragma: not testing
def bool_or_none(b):
@@ -179,8 +201,8 @@ class Hasher(object):
def update(self, v):
"""Add `v` to the hash, recursively if needed."""
self.md5.update(to_bytes(str(type(v))))
- if isinstance(v, string_class):
- self.md5.update(to_bytes(v))
+ if isinstance(v, unicode_class):
+ self.md5.update(v.encode('utf8'))
elif isinstance(v, bytes):
self.md5.update(v)
elif v is None:
@@ -237,8 +259,13 @@ class SimpleRepr(object):
)
-class CoverageException(Exception):
- """An exception specific to coverage.py."""
+class BaseCoverageException(Exception):
+ """The base of all Coverage exceptions."""
+ pass
+
+
+class CoverageException(BaseCoverageException):
+ """A run-of-the-mill exception specific to coverage.py."""
pass
@@ -264,3 +291,13 @@ class ExceptionDuringRun(CoverageException):
"""
pass
+
+
+class StopEverything(BaseCoverageException):
+ """An exception that means everything should stop.
+
+ The CoverageTest class converts these to SkipTest, so that when running
+ tests, raising this exception will automatically skip the test.
+
+ """
+ pass
diff --git a/coverage/parser.py b/coverage/parser.py
index c3dba83..590eace 100644
--- a/coverage/parser.py
+++ b/coverage/parser.py
@@ -15,8 +15,8 @@ from coverage.backward import range # pylint: disable=redefined-builtin
from coverage.backward import bytes_to_ints, string_class
from coverage.bytecode import CodeObjects
from coverage.debug import short_stack
-from coverage.misc import contract, new_contract, nice_pair, join_regex
-from coverage.misc import CoverageException, NoSource, NotPython
+from coverage.misc import contract, join_regex, new_contract, nice_pair, one_of
+from coverage.misc import NoSource, NotPython, StopEverything
from coverage.phystokens import compile_unicode, generate_tokens, neuter_encoding_declaration
@@ -106,7 +106,6 @@ class PythonParser(object):
"""
combined = join_regex(regexes)
if env.PY2:
- # pylint: disable=redefined-variable-type
combined = combined.decode("utf8")
regex_c = re.compile(combined)
matches = set()
@@ -138,7 +137,7 @@ class PythonParser(object):
tokgen = generate_tokens(self.text)
for toktype, ttext, (slineno, _), (elineno, _), ltext in tokgen:
- if self.show_tokens: # pragma: not covered
+ if self.show_tokens: # pragma: debugging
print("%10s %5s %-20r %r" % (
tokenize.tok_name.get(toktype, toktype),
nice_pair((slineno, elineno)), ttext, ltext
@@ -371,11 +370,11 @@ class ByteParser(object):
# Alternative Python implementations don't always provide all the
# attributes on code objects that we need to do the analysis.
- for attr in ['co_lnotab', 'co_firstlineno', 'co_consts']:
+ for attr in ['co_lnotab', 'co_firstlineno']:
if not hasattr(self.code, attr):
- raise CoverageException(
+ raise StopEverything( # pragma: only jython
"This implementation of Python doesn't support code analysis.\n"
- "Run coverage.py under CPython for this command."
+ "Run coverage.py under another Python for this command."
)
def child_parsers(self):
@@ -433,23 +432,35 @@ class ByteParser(object):
class LoopBlock(object):
"""A block on the block stack representing a `for` or `while` loop."""
+ @contract(start=int)
def __init__(self, start):
+ # The line number where the loop starts.
self.start = start
+ # A set of ArcStarts, the arcs from break statements exiting this loop.
self.break_exits = set()
class FunctionBlock(object):
"""A block on the block stack representing a function definition."""
+ @contract(start=int, name=str)
def __init__(self, start, name):
+ # The line number where the function starts.
self.start = start
+ # The name of the function.
self.name = name
class TryBlock(object):
"""A block on the block stack representing a `try` block."""
- def __init__(self, handler_start=None, final_start=None):
+ @contract(handler_start='int|None', final_start='int|None')
+ def __init__(self, handler_start, final_start):
+ # The line number of the first "except" handler, if any.
self.handler_start = handler_start
+ # The line number of the "finally:" clause, if any.
self.final_start = final_start
+
+ # The ArcStarts for breaks/continues/returns/raises inside the "try:"
+ # that need to route through the "finally:" clause.
self.break_from = set()
self.continue_from = set()
self.return_from = set()
@@ -459,8 +470,13 @@ class TryBlock(object):
class ArcStart(collections.namedtuple("Arc", "lineno, cause")):
"""The information needed to start an arc.
- `lineno` is the line number the arc starts from. `cause` is a fragment
- used as the startmsg for AstArcAnalyzer.missing_arc_fragments.
+ `lineno` is the line number the arc starts from.
+
+ `cause` is an English text fragment used as the `startmsg` for
+ AstArcAnalyzer.missing_arc_fragments. It will be used to describe why an
+ arc wasn't executed, so should fit well into a sentence of the form,
+ "Line 17 didn't run because {cause}." The fragment can include "{lineno}"
+ to have `lineno` interpolated into it.
"""
def __new__(cls, lineno, cause=None):
@@ -472,6 +488,21 @@ class ArcStart(collections.namedtuple("Arc", "lineno, cause")):
new_contract('ArcStarts', lambda seq: all(isinstance(x, ArcStart) for x in seq))
+# Turn on AST dumps with an environment variable.
+AST_DUMP = bool(int(os.environ.get("COVERAGE_AST_DUMP", 0)))
+
+class NodeList(object):
+ """A synthetic fictitious node, containing a sequence of nodes.
+
+ This is used when collapsing optimized if-statements, to represent the
+ unconditional execution of one of the clauses.
+
+ """
+ def __init__(self, body):
+ self.body = body
+ self.lineno = body[0].lineno
+
+
class AstArcAnalyzer(object):
"""Analyze source text with an AST to find executable code paths."""
@@ -482,15 +513,17 @@ class AstArcAnalyzer(object):
self.statements = set(multiline.get(l, l) for l in statements)
self.multiline = multiline
- if int(os.environ.get("COVERAGE_ASTDUMP", 0)): # pragma: debugging
+ if AST_DUMP: # pragma: debugging
# Dump the AST so that failing tests have helpful output.
- print("Statements: {}".format(self.statements))
- print("Multiline map: {}".format(self.multiline))
+ print("Statements: {0}".format(self.statements))
+ print("Multiline map: {0}".format(self.multiline))
ast_dump(self.root_node)
self.arcs = set()
- # A map from arc pairs to a pair of sentence fragments: (startmsg, endmsg).
+ # A map from arc pairs to a list of pairs of sentence fragments:
+ # { (start, end): [(startmsg, endmsg), ...], }
+ #
# For an arc from line 17, they should be usable like:
# "Line 17 {endmsg}, because {startmsg}"
self.missing_arc_fragments = collections.defaultdict(list)
@@ -513,7 +546,7 @@ class AstArcAnalyzer(object):
def add_arc(self, start, end, smsg=None, emsg=None):
"""Add an arc, including message fragments to use if it is missing."""
- if self.debug:
+ if self.debug: # pragma: debugging
print("\nAdding arc: ({}, {}): {!r}, {!r}".format(start, end, smsg, emsg))
print(short_stack(limit=6))
self.arcs.add((start, end))
@@ -564,9 +597,10 @@ class AstArcAnalyzer(object):
if node.body:
return self.line_for_node(node.body[0])
else:
- # Modules have no line number, they always start at 1.
+ # Empty modules have no line number, they always start at 1.
return 1
+ # The node types that just flow to the next node with no complications.
OK_TO_DEFAULT = set([
"Assign", "Assert", "AugAssign", "Delete", "Exec", "Expr", "Global",
"Import", "ImportFrom", "Nonlocal", "Pass", "Print",
@@ -576,20 +610,35 @@ class AstArcAnalyzer(object):
def add_arcs(self, node):
"""Add the arcs for `node`.
- Return a set of ArcStarts, exits from this node to the next.
+ Return a set of ArcStarts, exits from this node to the next. Because a
+ node represents an entire sub-tree (including its children), the exits
+ from a node can be arbitrarily complex::
+
+ if something(1):
+ if other(2):
+ doit(3)
+ else:
+ doit(5)
+
+ There are two exits from line 1: they start at line 3 and line 5.
"""
node_name = node.__class__.__name__
handler = getattr(self, "_handle__" + node_name, None)
if handler is not None:
return handler(node)
+ else:
+ # No handler: either it's something that's ok to default (a simple
+ # statement), or it's something we overlooked. Change this 0 to 1
+ # to see if it's overlooked.
+ if 0:
+ if node_name not in self.OK_TO_DEFAULT:
+ print("*** Unhandled: {0}".format(node))
- if 0:
- node_name = node.__class__.__name__
- if node_name not in self.OK_TO_DEFAULT:
- print("*** Unhandled: {0}".format(node))
- return set([ArcStart(self.line_for_node(node), cause=None)])
+ # Default for simple statements: one exit from this node.
+ return set([ArcStart(self.line_for_node(node))])
+ @one_of("from_start, prev_starts")
@contract(returns='ArcStarts')
def add_body_arcs(self, body, from_start=None, prev_starts=None):
"""Add arcs for the body of a compound statement.
@@ -608,28 +657,91 @@ class AstArcAnalyzer(object):
lineno = self.line_for_node(body_node)
first_line = self.multiline.get(lineno, lineno)
if first_line not in self.statements:
- continue
+ body_node = self.find_non_missing_node(body_node)
+ if body_node is None:
+ continue
+ lineno = self.line_for_node(body_node)
for prev_start in prev_starts:
self.add_arc(prev_start.lineno, lineno, prev_start.cause)
prev_starts = self.add_arcs(body_node)
return prev_starts
+ def find_non_missing_node(self, node):
+ """Search `node` looking for a child that has not been optimized away.
+
+ This might return the node you started with, or it will work recursively
+ to find a child node in self.statements.
+
+ Returns a node, or None if none of the node remains.
+
+ """
+ # This repeats work just done in add_body_arcs, but this duplication
+ # means we can avoid a function call in the 99.9999% case of not
+ # optimizing away statements.
+ lineno = self.line_for_node(node)
+ first_line = self.multiline.get(lineno, lineno)
+ if first_line in self.statements:
+ return node
+
+ missing_fn = getattr(self, "_missing__" + node.__class__.__name__, None)
+ if missing_fn:
+ node = missing_fn(node)
+ else:
+ node = None
+ return node
+
+ def _missing__If(self, node):
+ # If the if-node is missing, then one of its children might still be
+ # here, but not both. So return the first of the two that isn't missing.
+ # Use a NodeList to hold the clauses as a single node.
+ non_missing = self.find_non_missing_node(NodeList(node.body))
+ if non_missing:
+ return non_missing
+ if node.orelse:
+ return self.find_non_missing_node(NodeList(node.orelse))
+ return None
+
+ def _missing__NodeList(self, node):
+ # A NodeList might be a mixture of missing and present nodes. Find the
+ # ones that are present.
+ non_missing_children = []
+ for child in node.body:
+ child = self.find_non_missing_node(child)
+ if child is not None:
+ non_missing_children.append(child)
+
+ # Return the simplest representation of the present children.
+ if not non_missing_children:
+ return None
+ if len(non_missing_children) == 1:
+ return non_missing_children[0]
+ return NodeList(non_missing_children)
+
def is_constant_expr(self, node):
"""Is this a compile-time constant?"""
node_name = node.__class__.__name__
if node_name in ["NameConstant", "Num"]:
- return True
+ return "Num"
elif node_name == "Name":
- if env.PY3 and node.id in ["True", "False", "None"]:
- return True
- return False
-
- # tests to write:
- # TODO: while EXPR:
- # TODO: while False:
- # TODO: listcomps hidden deep in other expressions
- # TODO: listcomps hidden in lists: x = [[i for i in range(10)]]
- # TODO: nested function definitions
+ if node.id in ["True", "False", "None", "__debug__"]:
+ return "Name"
+ return None
+
+ # In the fullness of time, these might be good tests to write:
+ # while EXPR:
+ # while False:
+ # listcomps hidden deep in other expressions
+ # listcomps hidden in lists: x = [[i for i in range(10)]]
+ # nested function definitions
+
+
+ # Exit processing: process_*_exits
+ #
+ # These functions process the four kinds of jump exits: break, continue,
+ # raise, and return. To figure out where an exit goes, we have to look at
+ # the block stack context. For example, a break will jump to the nearest
+ # enclosing loop block, or the nearest enclosing finally block, whichever
+ # is nearer.
@contract(exits='ArcStarts')
def process_break_exits(self, exits):
@@ -689,7 +801,14 @@ class AstArcAnalyzer(object):
)
break
- ## Handlers
+
+ # Handlers: _handle__*
+ #
+ # Each handler deals with a specific AST node type, dispatched from
+ # add_arcs. Each deals with a particular kind of node type, and returns
+ # the set of exits from that node. These functions mirror the Python
+ # semantics of each syntactic construct. See the docstring for add_arcs to
+ # understand the concept of exits from a node.
@contract(returns='ArcStarts')
def _handle__Break(self, node):
@@ -719,7 +838,7 @@ class AstArcAnalyzer(object):
self.add_arc(last, lineno)
last = lineno
# The body is handled in collect_arcs.
- return set([ArcStart(last, cause=None)])
+ return set([ArcStart(last)])
_handle__ClassDef = _handle_decorated
@@ -746,7 +865,7 @@ class AstArcAnalyzer(object):
else_exits = self.add_body_arcs(node.orelse, from_start=from_start)
exits |= else_exits
else:
- # no else clause: exit from the for line.
+ # No else clause: exit from the for line.
exits.add(from_start)
return exits
@@ -765,6 +884,12 @@ class AstArcAnalyzer(object):
return exits
@contract(returns='ArcStarts')
+ def _handle__NodeList(self, node):
+ start = self.line_for_node(node)
+ exits = self.add_body_arcs(node.body, from_start=ArcStart(start))
+ return exits
+
+ @contract(returns='ArcStarts')
def _handle__Raise(self, node):
here = self.line_for_node(node)
raise_start = ArcStart(here, cause="the raise on line {lineno} wasn't executed")
@@ -792,11 +917,11 @@ class AstArcAnalyzer(object):
else:
final_start = None
- try_block = TryBlock(handler_start=handler_start, final_start=final_start)
+ try_block = TryBlock(handler_start, final_start)
self.block_stack.append(try_block)
start = self.line_for_node(node)
- exits = self.add_body_arcs(node.body, from_start=ArcStart(start, cause=None))
+ exits = self.add_body_arcs(node.body, from_start=ArcStart(start))
# We're done with the `try` body, so this block no longer handles
# exceptions. We keep the block so the `finally` clause can pick up
@@ -839,30 +964,46 @@ class AstArcAnalyzer(object):
try_block.return_from # or a `return`.
)
- exits = self.add_body_arcs(node.finalbody, prev_starts=final_from)
+ final_exits = self.add_body_arcs(node.finalbody, prev_starts=final_from)
+
if try_block.break_from:
- break_exits = self._combine_finally_starts(try_block.break_from, exits)
- self.process_break_exits(break_exits)
+ self.process_break_exits(
+ self._combine_finally_starts(try_block.break_from, final_exits)
+ )
if try_block.continue_from:
- continue_exits = self._combine_finally_starts(try_block.continue_from, exits)
- self.process_continue_exits(continue_exits)
+ self.process_continue_exits(
+ self._combine_finally_starts(try_block.continue_from, final_exits)
+ )
if try_block.raise_from:
- raise_exits = self._combine_finally_starts(try_block.raise_from, exits)
- self.process_raise_exits(raise_exits)
+ self.process_raise_exits(
+ self._combine_finally_starts(try_block.raise_from, final_exits)
+ )
if try_block.return_from:
- return_exits = self._combine_finally_starts(try_block.return_from, exits)
- self.process_return_exits(return_exits)
+ self.process_return_exits(
+ self._combine_finally_starts(try_block.return_from, final_exits)
+ )
+
+ if exits:
+ # The finally clause's exits are only exits for the try block
+ # as a whole if the try block had some exits to begin with.
+ exits = final_exits
return exits
+ @contract(starts='ArcStarts', exits='ArcStarts', returns='ArcStarts')
def _combine_finally_starts(self, starts, exits):
- """Helper for building the cause of `finally` branches."""
+ """Helper for building the cause of `finally` branches.
+
+ "finally" clauses might not execute their exits, and the causes could
+ be due to a failure to execute any of the exits in the try block. So
+ we use the causes from `starts` as the causes for `exits`.
+ """
causes = []
- for lineno, cause in sorted(starts):
- if cause is not None:
- causes.append(cause.format(lineno=lineno))
+ for start in sorted(starts):
+ if start.cause is not None:
+ causes.append(start.cause.format(lineno=start.lineno))
cause = " or ".join(causes)
- exits = set(ArcStart(ex.lineno, cause) for ex in exits)
+ exits = set(ArcStart(xit.lineno, cause) for xit in exits)
return exits
@contract(returns='ArcStarts')
@@ -894,9 +1035,9 @@ class AstArcAnalyzer(object):
def _handle__While(self, node):
constant_test = self.is_constant_expr(node.test)
start = to_top = self.line_for_node(node.test)
- if constant_test:
+ if constant_test and (env.PY3 or constant_test == "Num"):
to_top = self.line_for_node(node.body[0])
- self.block_stack.append(LoopBlock(start=start))
+ self.block_stack.append(LoopBlock(start=to_top))
from_start = ArcStart(start, cause="the condition on line {lineno} was never true")
exits = self.add_body_arcs(node.body, from_start=from_start)
for xit in exits:
@@ -971,62 +1112,64 @@ class AstArcAnalyzer(object):
_code_object__ListComp = _make_oneline_code_method("list comprehension")
-SKIP_DUMP_FIELDS = ["ctx"]
+if AST_DUMP: # pragma: debugging
+ # Code only used when dumping the AST for debugging.
-def _is_simple_value(value):
- """Is `value` simple enough to be displayed on a single line?"""
- return (
- value in [None, [], (), {}, set()] or
- isinstance(value, (string_class, int, float))
- )
+ SKIP_DUMP_FIELDS = ["ctx"]
-# TODO: a test of ast_dump?
-def ast_dump(node, depth=0):
- """Dump the AST for `node`.
+ def _is_simple_value(value):
+ """Is `value` simple enough to be displayed on a single line?"""
+ return (
+ value in [None, [], (), {}, set()] or
+ isinstance(value, (string_class, int, float))
+ )
- This recursively walks the AST, printing a readable version.
+ def ast_dump(node, depth=0):
+ """Dump the AST for `node`.
- """
- indent = " " * depth
- if not isinstance(node, ast.AST):
- print("{0}<{1} {2!r}>".format(indent, node.__class__.__name__, node))
- return
-
- lineno = getattr(node, "lineno", None)
- if lineno is not None:
- linemark = " @ {0}".format(node.lineno)
- else:
- linemark = ""
- head = "{0}<{1}{2}".format(indent, node.__class__.__name__, linemark)
-
- named_fields = [
- (name, value)
- for name, value in ast.iter_fields(node)
- if name not in SKIP_DUMP_FIELDS
- ]
- if not named_fields:
- print("{0}>".format(head))
- elif len(named_fields) == 1 and _is_simple_value(named_fields[0][1]):
- field_name, value = named_fields[0]
- print("{0} {1}: {2!r}>".format(head, field_name, value))
- else:
- print(head)
- if 0:
- print("{0}# mro: {1}".format(
- indent, ", ".join(c.__name__ for c in node.__class__.__mro__[1:]),
- ))
- next_indent = indent + " "
- for field_name, value in named_fields:
- prefix = "{0}{1}:".format(next_indent, field_name)
- if _is_simple_value(value):
- print("{0} {1!r}".format(prefix, value))
- elif isinstance(value, list):
- print("{0} [".format(prefix))
- for n in value:
- ast_dump(n, depth + 8)
- print("{0}]".format(next_indent))
- else:
- print(prefix)
- ast_dump(value, depth + 8)
+ This recursively walks the AST, printing a readable version.
+
+ """
+ indent = " " * depth
+ if not isinstance(node, ast.AST):
+ print("{0}<{1} {2!r}>".format(indent, node.__class__.__name__, node))
+ return
+
+ lineno = getattr(node, "lineno", None)
+ if lineno is not None:
+ linemark = " @ {0}".format(node.lineno)
+ else:
+ linemark = ""
+ head = "{0}<{1}{2}".format(indent, node.__class__.__name__, linemark)
+
+ named_fields = [
+ (name, value)
+ for name, value in ast.iter_fields(node)
+ if name not in SKIP_DUMP_FIELDS
+ ]
+ if not named_fields:
+ print("{0}>".format(head))
+ elif len(named_fields) == 1 and _is_simple_value(named_fields[0][1]):
+ field_name, value = named_fields[0]
+ print("{0} {1}: {2!r}>".format(head, field_name, value))
+ else:
+ print(head)
+ if 0:
+ print("{0}# mro: {1}".format(
+ indent, ", ".join(c.__name__ for c in node.__class__.__mro__[1:]),
+ ))
+ next_indent = indent + " "
+ for field_name, value in named_fields:
+ prefix = "{0}{1}:".format(next_indent, field_name)
+ if _is_simple_value(value):
+ print("{0} {1!r}".format(prefix, value))
+ elif isinstance(value, list):
+ print("{0} [".format(prefix))
+ for n in value:
+ ast_dump(n, depth + 8)
+ print("{0}]".format(next_indent))
+ else:
+ print(prefix)
+ ast_dump(value, depth + 8)
- print("{0}>".format(indent))
+ print("{0}>".format(indent))
diff --git a/coverage/phystokens.py b/coverage/phystokens.py
index 5e80ed5..a2b23cf 100644
--- a/coverage/phystokens.py
+++ b/coverage/phystokens.py
@@ -11,7 +11,7 @@ import token
import tokenize
from coverage import env
-from coverage.backward import iternext
+from coverage.backward import iternext, unicode_class
from coverage.misc import contract
@@ -281,7 +281,7 @@ def compile_unicode(source, filename, mode):
"""
source = neuter_encoding_declaration(source)
- if env.PY2 and isinstance(filename, unicode):
+ if env.PY2 and isinstance(filename, unicode_class):
filename = filename.encode(sys.getfilesystemencoding(), "replace")
code = compile(source, filename, mode)
return code
@@ -290,5 +290,9 @@ def compile_unicode(source, filename, mode):
@contract(source='unicode', returns='unicode')
def neuter_encoding_declaration(source):
"""Return `source`, with any encoding declaration neutered."""
- source = COOKIE_RE.sub("# (deleted declaration)", source, count=2)
+ if COOKIE_RE.search(source):
+ source_lines = source.splitlines(True)
+ for lineno in range(min(2, len(source_lines))):
+ source_lines[lineno] = COOKIE_RE.sub("# (deleted declaration)", source_lines[lineno])
+ source = "".join(source_lines)
return source
diff --git a/coverage/plugin.py b/coverage/plugin.py
index fc95eee..3e0e483 100644
--- a/coverage/plugin.py
+++ b/coverage/plugin.py
@@ -89,6 +89,19 @@ class CoveragePlugin(object):
"""
_needs_to_implement(self, "file_reporter")
+ def find_executable_files(self, src_dir): # pylint: disable=unused-argument
+ """Yield all of the executable files in `src_dir`, recursively.
+
+ Executability is a plugin-specific property, but generally means files
+ which would have been considered for coverage analysis, had they been
+ included automatically.
+
+ Returns or yields a sequence of strings, the paths to files that could
+ have been executed, including files that had been executed.
+
+ """
+ return []
+
def sys_info(self):
"""Get a list of information useful for debugging.
diff --git a/coverage/python.py b/coverage/python.py
index 7109ece..dacdf61 100644
--- a/coverage/python.py
+++ b/coverage/python.py
@@ -26,7 +26,13 @@ def read_python_source(filename):
"""
with open(filename, "rb") as f:
- return f.read().replace(b"\r\n", b"\n").replace(b"\r", b"\n")
+ source = f.read()
+
+ if env.IRONPYTHON:
+ # IronPython reads Unicode strings even for "rb" files.
+ source = bytes(source)
+
+ return source.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
@contract(returns='unicode')
@@ -75,7 +81,7 @@ def get_zip_bytes(filename):
an empty string if the file is empty.
"""
- markers = ['.zip'+os.sep, '.egg'+os.sep]
+ markers = ['.zip'+os.sep, '.egg'+os.sep, '.pex'+os.sep]
for marker in markers:
if marker in filename:
parts = filename.split(marker)
@@ -91,6 +97,39 @@ def get_zip_bytes(filename):
return None
+def source_for_file(filename):
+ """Return the source file for `filename`.
+
+ Given a file name being traced, return the best guess as to the source
+ file to attribute it to.
+
+ """
+ if filename.endswith(".py"):
+ # .py files are themselves source files.
+ return filename
+
+ elif filename.endswith((".pyc", ".pyo")):
+ # Bytecode files probably have source files near them.
+ py_filename = filename[:-1]
+ if os.path.exists(py_filename):
+ # Found a .py file, use that.
+ return py_filename
+ if env.WINDOWS:
+ # On Windows, it could be a .pyw file.
+ pyw_filename = py_filename + "w"
+ if os.path.exists(pyw_filename):
+ return pyw_filename
+ # Didn't find source, but it's probably the .py file we want.
+ return py_filename
+
+ elif filename.endswith("$py.class"):
+ # Jython is easy to guess.
+ return filename[:-9] + ".py"
+
+ # No idea, just use the file name as-is.
+ return filename
+
+
class PythonFileReporter(FileReporter):
"""Report support for a Python file."""
@@ -106,13 +145,7 @@ class PythonFileReporter(FileReporter):
else:
filename = morf
- filename = files.unicode_filename(filename)
-
- # .pyc files should always refer to a .py instead.
- if filename.endswith(('.pyc', '.pyo')):
- filename = filename[:-1]
- elif filename.endswith('$py.class'): # Jython
- filename = filename[:-9] + ".py"
+ filename = source_for_file(files.unicode_filename(filename))
super(PythonFileReporter, self).__init__(files.canonical_filename(filename))
diff --git a/coverage/pytracer.py b/coverage/pytracer.py
index 23f4946..b41f405 100644
--- a/coverage/pytracer.py
+++ b/coverage/pytracer.py
@@ -3,6 +3,7 @@
"""Raw data collector for coverage.py."""
+import atexit
import dis
import sys
@@ -44,16 +45,21 @@ class PyTracer(object):
self.threading = None
self.cur_file_dict = []
- self.last_line = [0]
+ self.last_line = 0 # int, but uninitialized.
self.data_stack = []
self.last_exc_back = None
self.last_exc_firstlineno = 0
self.thread = None
self.stopped = False
+ self._activity = False
+
+ self.in_atexit = False
+ # On exit, self.in_atexit = True
+ atexit.register(setattr, self, 'in_atexit', True)
def __repr__(self):
- return "<PyTracer at 0x{0:0x}: {1} lines in {2} files>".format(
+ return "<PyTracer at {0}: {1} lines in {2} files>".format(
id(self),
sum(len(v) for v in self.data.values()),
len(self.data),
@@ -77,6 +83,7 @@ class PyTracer(object):
if event == 'call':
# Entering a new function context. Decide if we should trace
# in this file.
+ self._activity = True
self.data_stack.append((self.cur_file_dict, self.last_line))
filename = frame.f_code.co_filename
disp = self.should_trace_cache.get(filename)
@@ -94,7 +101,7 @@ class PyTracer(object):
# function calls and re-entering generators. The f_lasti field is
# -1 for calls, and a real offset for generators. Use <0 as the
# line number for calls, and the real line number for generators.
- if frame.f_lasti < 0:
+ if getattr(frame, 'f_lasti', -1) < 0:
self.last_line = -frame.f_code.co_firstlineno
else:
self.last_line = frame.f_lineno
@@ -111,8 +118,9 @@ class PyTracer(object):
if self.trace_arcs and self.cur_file_dict:
# Record an arc leaving the function, but beware that a
# "return" event might just mean yielding from a generator.
- bytecode = frame.f_code.co_code[frame.f_lasti]
- if bytecode != YIELD_VALUE:
+ # Jython seems to have an empty co_code, so just assume return.
+ code = frame.f_code.co_code
+ if (not code) or code[frame.f_lasti] != YIELD_VALUE:
first = frame.f_code.co_firstlineno
self.cur_file_dict[(self.last_line, -first)] = None
# Leaving this function, pop the filename stack.
@@ -128,10 +136,18 @@ class PyTracer(object):
Return a Python function suitable for use with sys.settrace().
"""
+ self.stopped = False
if self.threading:
- self.thread = self.threading.currentThread()
+ if self.thread is None:
+ self.thread = self.threading.currentThread()
+ else:
+ if self.thread.ident != self.threading.currentThread().ident:
+ # Re-starting from a different thread!? Don't set the trace
+ # function, but we are marked as running again, so maybe it
+ # will be ok?
+ return self._trace
+
sys.settrace(self._trace)
- self.stopped = False
return self._trace
def stop(self):
@@ -144,12 +160,27 @@ class PyTracer(object):
return
if self.warn:
- if sys.gettrace() != self._trace:
- msg = "Trace function changed, measurement is likely wrong: %r"
- self.warn(msg % (sys.gettrace(),))
+ # PyPy clears the trace function before running atexit functions,
+ # so don't warn if we are in atexit on PyPy and the trace function
+ # has changed to None.
+ tf = sys.gettrace()
+ dont_warn = (env.PYPY and env.PYPYVERSION >= (5, 4) and self.in_atexit and tf is None)
+ if (not dont_warn) and tf != self._trace:
+ self.warn(
+ "Trace function changed, measurement is likely wrong: %r" % (tf,),
+ slug="trace-changed",
+ )
sys.settrace(None)
+ def activity(self):
+ """Has there been any activity?"""
+ return self._activity
+
+ def reset_activity(self):
+ """Reset the activity() flag."""
+ self._activity = False
+
def get_stats(self):
"""Return a dictionary of statistics, or None."""
return None
diff --git a/coverage/results.py b/coverage/results.py
index 9df5d5b..81ce2a6 100644
--- a/coverage/results.py
+++ b/coverage/results.py
@@ -269,3 +269,27 @@ class Numbers(SimpleRepr):
if other == 0:
return self
return NotImplemented
+
+
+def should_fail_under(total, fail_under):
+ """Determine if a total should fail due to fail-under.
+
+ `total` is a float, the coverage measurement total. `fail_under` is the
+ fail_under setting to compare with.
+
+ Returns True if the total should fail.
+
+ """
+ # The fail_under option defaults to 0.
+ if fail_under:
+ # Total needs to be rounded, but don't want to report 100
+ # unless it is really 100.
+ if 99 < total < 100:
+ total = 99
+ else:
+ total = round(total)
+
+ if total < fail_under:
+ return True
+
+ return False
diff --git a/coverage/summary.py b/coverage/summary.py
index b0fa71a..271b648 100644
--- a/coverage/summary.py
+++ b/coverage/summary.py
@@ -8,7 +8,7 @@ import sys
from coverage import env
from coverage.report import Reporter
from coverage.results import Numbers
-from coverage.misc import NotPython, CoverageException, output_encoding
+from coverage.misc import NotPython, CoverageException, output_encoding, StopEverything
class SummaryReporter(Reporter):
@@ -25,12 +25,53 @@ class SummaryReporter(Reporter):
for native strings (bytes on Python 2, Unicode on Python 3).
"""
- file_reporters = self.find_file_reporters(morfs)
+ if outfile is None:
+ outfile = sys.stdout
+
+ def writeout(line):
+ """Write a line to the output, adding a newline."""
+ if env.PY2:
+ line = line.encode(output_encoding())
+ outfile.write(line.rstrip())
+ outfile.write("\n")
+
+ fr_analysis = []
+ skipped_count = 0
+ total = Numbers()
+
+ fmt_err = u"%s %s: %s"
+
+ for fr in self.find_file_reporters(morfs):
+ try:
+ analysis = self.coverage._analyze(fr)
+ nums = analysis.numbers
+ total += nums
+
+ if self.config.skip_covered:
+ # Don't report on 100% files.
+ no_missing_lines = (nums.n_missing == 0)
+ no_missing_branches = (nums.n_partial_branches == 0)
+ if no_missing_lines and no_missing_branches:
+ skipped_count += 1
+ continue
+ fr_analysis.append((fr, analysis))
+ except StopEverything:
+ # Don't report this on single files, it's a systemic problem.
+ raise
+ except Exception:
+ report_it = not self.config.ignore_errors
+ if report_it:
+ typ, msg = sys.exc_info()[:2]
+ # NotPython is only raised by PythonFileReporter, which has a
+ # should_be_python() method.
+ if issubclass(typ, NotPython) and not fr.should_be_python():
+ report_it = False
+ if report_it:
+ writeout(fmt_err % (fr.relative_filename(), typ.__name__, msg))
# Prepare the formatting strings, header, and column sorting.
- max_name = max([len(fr.relative_filename()) for fr in file_reporters] + [5])
+ max_name = max([len(fr.relative_filename()) for (fr, analysis) in fr_analysis] + [5])
fmt_name = u"%%- %ds " % max_name
- fmt_err = u"%s %s: %s"
fmt_skip_covered = u"\n%s file%s skipped due to complete coverage."
header = (fmt_name % "Name") + u" Stmts Miss"
@@ -50,16 +91,6 @@ class SummaryReporter(Reporter):
if self.branches:
column_order.update(dict(branch=3, brpart=4))
- if outfile is None:
- outfile = sys.stdout
-
- def writeout(line):
- """Write a line to the output, adding a newline."""
- if env.PY2:
- line = line.encode(output_encoding())
- outfile.write(line.rstrip())
- outfile.write("\n")
-
# Write the header
writeout(header)
writeout(rule)
@@ -69,22 +100,9 @@ class SummaryReporter(Reporter):
# sortable values.
lines = []
- total = Numbers()
- skipped_count = 0
-
- for fr in file_reporters:
+ for (fr, analysis) in fr_analysis:
try:
- analysis = self.coverage._analyze(fr)
nums = analysis.numbers
- total += nums
-
- if self.config.skip_covered:
- # Don't report on 100% files.
- no_missing_lines = (nums.n_missing == 0)
- no_missing_branches = (nums.n_partial_branches == 0)
- if no_missing_lines and no_missing_branches:
- skipped_count += 1
- continue
args = (fr.relative_filename(), nums.n_statements, nums.n_missing)
if self.branches:
diff --git a/coverage/version.py b/coverage/version.py
index 35dc1ec..92a3bcb 100644
--- a/coverage/version.py
+++ b/coverage/version.py
@@ -5,7 +5,7 @@
# This file is exec'ed in setup.py, don't import anything!
# Same semantics as sys.version_info.
-version_info = (4, 3, 0, 'alpha', 0)
+version_info = (4, 4, 0, 'beta', 2)
def _make_version(major, minor, micro, releaselevel, serial):
diff --git a/coverage/xmlreport.py b/coverage/xmlreport.py
index 694415f..b5a33dd 100644
--- a/coverage/xmlreport.py
+++ b/coverage/xmlreport.py
@@ -18,11 +18,7 @@ from coverage.report import Reporter
os = isolate_module(os)
-DTD_URL = (
- 'https://raw.githubusercontent.com/cobertura/web/'
- 'f0366e5e2cf18f111cbd61fc34ef720a6584ba02'
- '/htdocs/xml/coverage-03.dtd'
-)
+DTD_URL = 'https://raw.githubusercontent.com/cobertura/web/master/htdocs/xml/coverage-04.dtd'
def rate(hit, num):
@@ -114,12 +110,18 @@ class XmlReporter(Reporter):
bnum_tot += bnum
bhits_tot += bhits
+ xcoverage.setAttribute("lines-valid", str(lnum_tot))
+ xcoverage.setAttribute("lines-covered", str(lhits_tot))
xcoverage.setAttribute("line-rate", rate(lhits_tot, lnum_tot))
if self.has_arcs:
- branch_rate = rate(bhits_tot, bnum_tot)
+ xcoverage.setAttribute("branches-valid", str(bnum_tot))
+ xcoverage.setAttribute("branches-covered", str(bhits_tot))
+ xcoverage.setAttribute("branch-rate", rate(bhits_tot, bnum_tot))
else:
- branch_rate = "0"
- xcoverage.setAttribute("branch-rate", branch_rate)
+ xcoverage.setAttribute("branches-covered", "0")
+ xcoverage.setAttribute("branches-valid", "0")
+ xcoverage.setAttribute("branch-rate", "0")
+ xcoverage.setAttribute("complexity", "0")
# Use the DOM to write the output file.
out = self.xml_out.toprettyxml()
@@ -148,7 +150,7 @@ class XmlReporter(Reporter):
else:
rel_name = fr.relative_filename()
- dirname = os.path.dirname(rel_name) or "."
+ dirname = os.path.dirname(rel_name) or u"."
dirname = "/".join(dirname.split("/")[:self.config.xml_package_depth])
package_name = dirname.replace("/", ".")