Diffstat (limited to 'coverage')
-rw-r--r--   coverage/__init__.py                        48
-rw-r--r--   coverage/__main__.py                         5
-rw-r--r--   coverage/annotate.py                         8
-rw-r--r--   coverage/backward.py                        68
-rw-r--r--   coverage/bytecode.py                        10
-rw-r--r--   coverage/cmdline.py                        347
-rw-r--r--   coverage/codeunit.py                        26
-rw-r--r--   coverage/collector.py                       46
-rw-r--r--   coverage/config.py                         186
-rw-r--r--   coverage/control.py                        226
-rw-r--r--   coverage/data.py                            47
-rw-r--r--   coverage/execfile.py                        78
-rw-r--r--   coverage/files.py                          193
-rw-r--r--   coverage/fullcoverage/encodings.py          57
-rw-r--r--   coverage/html.py                           252
-rw-r--r--   coverage/htmlfiles/coverage_html.js        318
-rw-r--r--   coverage/htmlfiles/index.html               32
-rw-r--r--   coverage/htmlfiles/jquery.isonscreen.js     53
-rwxr-xr-x   coverage/htmlfiles/keybd_closed.png        bin 0 -> 264 bytes
-rwxr-xr-x   coverage/htmlfiles/keybd_open.png          bin 0 -> 267 bytes
-rw-r--r--   coverage/htmlfiles/pyfile.html              38
-rw-r--r--   coverage/htmlfiles/style.css                74
-rw-r--r--   coverage/misc.py                            70
-rw-r--r--   coverage/parser.py                         227
-rw-r--r--   coverage/phystokens.py                     102
-rw-r--r--   coverage/report.py                          43
-rw-r--r--   coverage/results.py                         51
-rw-r--r--   coverage/summary.py                         29
-rw-r--r--   coverage/tracer.c                          170
-rw-r--r--   coverage/version.py                          9
-rw-r--r--   coverage/xmlreport.py                       31
31 files changed, 2119 insertions, 725 deletions
diff --git a/coverage/__init__.py b/coverage/__init__.py
index 70231c1c..0ccc699f 100644
--- a/coverage/__init__.py
+++ b/coverage/__init__.py
@@ -5,19 +5,13 @@ http://nedbatchelder.com/code/coverage
"""
-__version__ = "3.5a1" # see detailed history in CHANGES.txt
-
-__url__ = "http://nedbatchelder.com/code/coverage"
-if max(__version__).isalpha():
- # For pre-releases, use a version-specific URL.
- __url__ += "/" + __version__
+from coverage.version import __version__, __url__
from coverage.control import coverage, process_startup
from coverage.data import CoverageData
from coverage.cmdline import main, CoverageScript
from coverage.misc import CoverageException
-
# Module-level functions. The original API to this module was based on
# functions defined directly in the module, with a singleton of the coverage()
# class. That design hampered programmability, so the current api uses
@@ -36,12 +30,34 @@ def _singleton_method(name):
called.
"""
+ # Disable pylint msg W0612, because a bunch of variables look unused, but
+ # they're accessed via locals().
+ # pylint: disable=W0612
+
def wrapper(*args, **kwargs):
"""Singleton wrapper around a coverage method."""
global _the_coverage
if not _the_coverage:
_the_coverage = coverage(auto_data=True)
return getattr(_the_coverage, name)(*args, **kwargs)
+
+ import inspect
+ meth = getattr(coverage, name)
+ args, varargs, kw, defaults = inspect.getargspec(meth)
+ argspec = inspect.formatargspec(args[1:], varargs, kw, defaults)
+ docstring = meth.__doc__
+ wrapper.__doc__ = ("""\
+ A first-use-singleton wrapper around coverage.%(name)s.
+
+ This wrapper is provided for backward compatibility with legacy code.
+ New code should use coverage.%(name)s directly.
+
+ %(name)s%(argspec)s:
+
+ %(docstring)s
+ """ % locals()
+ )
+
return wrapper
@@ -57,10 +73,26 @@ report = _singleton_method('report')
annotate = _singleton_method('annotate')
+# On Windows, we encode and decode deep enough that something goes wrong and
+# the encodings.utf_8 module is loaded and then unloaded, I don't know why.
+# Adding a reference here prevents it from being unloaded. Yuk.
+import encodings.utf_8
+
+# Because of the "from coverage.control import fooey" lines at the top of the
+# file, there's an entry for coverage.coverage in sys.modules, mapped to None.
+# This makes some inspection tools (like pydoc) unable to find the class
+# coverage.coverage. So remove that entry.
+import sys
+try:
+ del sys.modules['coverage.coverage']
+except KeyError:
+ pass
+
+
# COPYRIGHT AND LICENSE
#
# Copyright 2001 Gareth Rees. All rights reserved.
-# Copyright 2004-2010 Ned Batchelder. All rights reserved.
+# Copyright 2004-2012 Ned Batchelder. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
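
The hunk above documents the backward-compatible module-level API: each module-level
function lazily creates a shared coverage(auto_data=True) instance and delegates to the
method of the same name. A minimal usage sketch (start and stop are wrapped the same way
in lines this hunk elides; the measured function is illustrative):

    # legacy_usage.py -- module-level singleton API; new code should use the
    # coverage class directly, as the generated docstring above says.
    import coverage

    def classify(n):
        return "even" if n % 2 == 0 else "odd"

    coverage.start()       # first call creates the coverage(auto_data=True) singleton
    print(classify(3))
    coverage.stop()
    coverage.report()      # reports on this script through the same singleton
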
diff --git a/coverage/__main__.py b/coverage/__main__.py
index af5fa9f6..55e0d259 100644
--- a/coverage/__main__.py
+++ b/coverage/__main__.py
@@ -1,3 +1,4 @@
-"""Coverage.py's main entrypoint."""
+"""Coverage.py's main entry point."""
+import sys
from coverage.cmdline import main
-main()
+sys.exit(main())
diff --git a/coverage/annotate.py b/coverage/annotate.py
index a556d853..b7f32c1c 100644
--- a/coverage/annotate.py
+++ b/coverage/annotate.py
@@ -26,20 +26,20 @@ class AnnotateReporter(Reporter):
"""
- def __init__(self, coverage, ignore_errors=False):
- super(AnnotateReporter, self).__init__(coverage, ignore_errors)
+ def __init__(self, coverage, config):
+ super(AnnotateReporter, self).__init__(coverage, config)
self.directory = None
blank_re = re.compile(r"\s*(#|$)")
else_re = re.compile(r"\s*else\s*:\s*(#|$)")
- def report(self, morfs, config, directory=None):
+ def report(self, morfs, directory=None):
"""Run the report.
See `coverage.report()` for arguments.
"""
- self.report_files(self.annotate_file, morfs, config, directory)
+ self.report_files(self.annotate_file, morfs, directory)
def annotate_file(self, cu, analysis):
"""Annotate a single file.
diff --git a/coverage/backward.py b/coverage/backward.py
index 425bcc6e..6347501a 100644
--- a/coverage/backward.py
+++ b/coverage/backward.py
@@ -1,12 +1,12 @@
"""Add things to old Pythons so I can pretend they are newer."""
# This file does lots of tricky stuff, so disable a bunch of lintisms.
-# pylint: disable-msg=F0401,W0611,W0622
+# pylint: disable=F0401,W0611,W0622
# F0401: Unable to import blah
# W0611: Unused import blah
# W0622: Redefining built-in blah
-import os, sys
+import os, re, sys
# Python 2.3 doesn't have `set`
try:
@@ -49,6 +49,16 @@ try:
except NameError:
range = range
+# A function to iterate listlessly over a dict's items.
+if "iteritems" in dir({}):
+ def iitems(d):
+ """Produce the items from dict `d`."""
+ return d.iteritems()
+else:
+ def iitems(d):
+ """Produce the items from dict `d`."""
+ return d.items()
+
# Exec is a statement in Py2, a function in Py3
if sys.version_info >= (3, 0):
def exec_code_object(code, global_map):
@@ -66,8 +76,56 @@ else:
)
)
-# ConfigParser was renamed to the more-standard configparser
+# Reading Python source and interpreting the coding comment is a big deal.
+if sys.version_info >= (3, 0):
+ # Python 3.2 provides `tokenize.open`, the best way to open source files.
+ import tokenize
+ try:
+ open_source = tokenize.open # pylint: disable=E1101
+ except AttributeError:
+ from io import TextIOWrapper
+ detect_encoding = tokenize.detect_encoding # pylint: disable=E1101
+ # Copied from the 3.2 stdlib:
+ def open_source(fname):
+ """Open a file in read only mode using the encoding detected by
+ detect_encoding().
+ """
+ buffer = open(fname, 'rb')
+ encoding, _ = detect_encoding(buffer.readline)
+ buffer.seek(0)
+ text = TextIOWrapper(buffer, encoding, line_buffering=True)
+ text.mode = 'r'
+ return text
+else:
+ def open_source(fname):
+ """Open a source file the best way."""
+ return open(fname, "rU")
+
+
+# Python 3.x is picky about bytes and strings, so provide methods to
+# get them right, and make them no-ops in 2.x
+if sys.version_info >= (3, 0):
+ def to_bytes(s):
+ """Convert string `s` to bytes."""
+ return s.encode('utf8')
+
+ def to_string(b):
+ """Convert bytes `b` to a string."""
+ return b.decode('utf8')
+
+else:
+ def to_bytes(s):
+ """Convert string `s` to bytes (no-op in 2.x)."""
+ return s
+
+ def to_string(b):
+ """Convert bytes `b` to a string (no-op in 2.x)."""
+ return b
+
+# Md5 is available in different places.
try:
- import configparser
+ import hashlib
+ md5 = hashlib.md5
except ImportError:
- import ConfigParser as configparser
+ import md5
+ md5 = md5.new
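
Taken together, the shims above let the same calling code run on Python 2 and 3. A
minimal sketch (purely illustrative values) of how they are meant to be used:

    from coverage.backward import iitems, to_bytes, md5, open_source

    counts = {"a.py": 3, "b.py": 7}
    for name, n in iitems(counts):            # no intermediate list on Python 2
        print("%s: %d" % (name, n))

    digest = md5(to_bytes("def f(): pass\n")).hexdigest()
    print(digest)

    src = open_source(__file__)               # honors PEP 263 coding cookies on Python 3
    try:
        print(len(src.read()))
    finally:
        src.close()
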
diff --git a/coverage/bytecode.py b/coverage/bytecode.py
index ab522d6c..fd5c7da2 100644
--- a/coverage/bytecode.py
+++ b/coverage/bytecode.py
@@ -5,10 +5,19 @@ import opcode, sys, types
class ByteCode(object):
"""A single bytecode."""
def __init__(self):
+ # The offset of this bytecode in the code object.
self.offset = -1
+
+ # The opcode, defined in the `opcode` module.
self.op = -1
+
+ # The argument, a small integer, whose meaning depends on the opcode.
self.arg = -1
+
+ # The offset in the code object of the next bytecode.
self.next_offset = -1
+
+ # The offset to jump to.
self.jump_to = -1
@@ -18,6 +27,7 @@ class ByteCodes(object):
Returns `ByteCode` objects.
"""
+ # pylint: disable=R0924
def __init__(self, code):
self.code = code
self.offset = 0
diff --git a/coverage/cmdline.py b/coverage/cmdline.py
index e5d6bb84..cb1d7a3e 100644
--- a/coverage/cmdline.py
+++ b/coverage/cmdline.py
@@ -1,10 +1,10 @@
"""Command-line support for Coverage."""
-import optparse, re, sys, traceback
+import optparse, sys, traceback
-from coverage.backward import sorted # pylint: disable-msg=W0622
-from coverage.execfile import run_python_file
-from coverage.misc import CoverageException, ExceptionDuringRun
+from coverage.backward import sorted # pylint: disable=W0622
+from coverage.execfile import run_python_file, run_python_module
+from coverage.misc import CoverageException, ExceptionDuringRun, NoSource
class Opts(object):
@@ -20,10 +20,13 @@ class Opts(object):
help="Measure branch coverage in addition to statement coverage."
)
directory = optparse.make_option(
- '-d', '--directory', action='store',
- metavar="DIR",
+ '-d', '--directory', action='store', metavar="DIR",
help="Write the output files to DIR."
)
+ fail_under = optparse.make_option(
+ '', '--fail-under', action='store', metavar="MIN", type="int",
+ help="Exit with a status of 2 if the total coverage is less than MIN."
+ )
help = optparse.make_option(
'-h', '--help', action='store_true',
help="Get help on this command."
@@ -71,6 +74,11 @@ class Opts(object):
".coverage data file name to simplify collecting data from "
"many processes."
)
+ module = optparse.make_option(
+ '-m', '--module', action='store_true',
+ help="<pyfile> is an importable Python module, not a script path, "
+ "to be run as 'python -m' would run it."
+ )
rcfile = optparse.make_option(
'', '--rcfile', action='store',
help="Specify configuration file. Defaults to '.coveragerc'"
@@ -84,6 +92,10 @@ class Opts(object):
help="Use a simpler but slower trace method. Try this if you get "
"seemingly impossible results!"
)
+ title = optparse.make_option(
+ '', '--title', action='store', metavar="TITLE",
+ help="A text string to use as the title on the HTML."
+ )
version = optparse.make_option(
'', '--version', action='store_true',
help="Display version information and exit."
@@ -106,16 +118,19 @@ class CoverageOptionParser(optparse.OptionParser, object):
actions=[],
branch=None,
directory=None,
+ fail_under=None,
help=None,
ignore_errors=None,
include=None,
omit=None,
parallel_mode=None,
+ module=None,
pylib=None,
rcfile=True,
show_missing=None,
source=None,
timid=None,
+ title=None,
erase_first=None,
version=None,
)
@@ -267,9 +282,11 @@ CMDS = {
'html': CmdOptionParser("html",
[
Opts.directory,
+ Opts.fail_under,
Opts.ignore_errors,
Opts.omit,
Opts.include,
+ Opts.title,
] + GLOBAL_ARGS,
usage = "[options] [modules]",
description = "Create an HTML report of the coverage of the files. "
@@ -279,6 +296,7 @@ CMDS = {
'report': CmdOptionParser("report",
[
+ Opts.fail_under,
Opts.ignore_errors,
Opts.omit,
Opts.include,
@@ -294,6 +312,7 @@ CMDS = {
Opts.branch,
Opts.pylib,
Opts.parallel_mode,
+ Opts.module,
Opts.timid,
Opts.source,
Opts.omit,
@@ -307,26 +326,27 @@ CMDS = {
'xml': CmdOptionParser("xml",
[
+ Opts.fail_under,
Opts.ignore_errors,
Opts.omit,
Opts.include,
Opts.output_xml,
] + GLOBAL_ARGS,
cmd = "xml",
- defaults = {'outfile': 'coverage.xml'},
usage = "[options] [modules]",
description = "Generate an XML report of coverage results."
),
}
-OK, ERR = 0, 1
+OK, ERR, FAIL_UNDER = 0, 1, 2
class CoverageScript(object):
"""The command-line interface to Coverage."""
- def __init__(self, _covpkg=None, _run_python_file=None, _help_fn=None):
+ def __init__(self, _covpkg=None, _run_python_file=None,
+ _run_python_module=None, _help_fn=None):
# _covpkg is for dependency injection, so we can test this code.
if _covpkg:
self.covpkg = _covpkg
@@ -334,32 +354,14 @@ class CoverageScript(object):
import coverage
self.covpkg = coverage
- # _run_python_file is for dependency injection also.
+ # For dependency injection:
self.run_python_file = _run_python_file or run_python_file
-
- # _help_fn is for dependency injection.
+ self.run_python_module = _run_python_module or run_python_module
self.help_fn = _help_fn or self.help
+ self.classic = False
self.coverage = None
- def help(self, error=None, topic=None, parser=None):
- """Display an error message, or the named topic."""
- assert error or topic or parser
- if error:
- print(error)
- print("Use 'coverage help' for help.")
- elif parser:
- print(parser.format_help().strip())
- else:
- # Parse out the topic we want from HELP_TOPICS
- topic_list = re.split("(?m)^=+ (\w+) =+$", HELP_TOPICS)
- topics = dict(zip(topic_list[1::2], topic_list[2::2]))
- help_msg = topics.get(topic, '').strip()
- if help_msg:
- print(help_msg % self.covpkg.__dict__)
- else:
- print("Don't know topic %r" % topic)
-
def command_line(self, argv):
"""The bulk of the command line interface to Coverage.
@@ -369,15 +371,14 @@ class CoverageScript(object):
"""
# Collect the command-line options.
-
if not argv:
self.help_fn(topic='minimum_help')
return OK
# The command syntax we parse depends on the first argument. Classic
# syntax always starts with an option.
- classic = argv[0].startswith('-')
- if classic:
+ self.classic = argv[0].startswith('-')
+ if self.classic:
parser = ClassicOptionParser()
else:
parser = CMDS.get(argv[0])
@@ -391,58 +392,12 @@ class CoverageScript(object):
if not ok:
return ERR
- # Handle help.
- if options.help:
- if classic:
- self.help_fn(topic='help')
- else:
- self.help_fn(parser=parser)
- return OK
-
- if "help" in options.actions:
- if args:
- for a in args:
- parser = CMDS.get(a)
- if parser:
- self.help_fn(parser=parser)
- else:
- self.help_fn(topic=a)
- else:
- self.help_fn(topic='help')
- return OK
-
- # Handle version.
- if options.version:
- self.help_fn(topic='version')
+ # Handle help and version.
+ if self.do_help(options, args, parser):
return OK
# Check for conflicts and problems in the options.
- for i in ['erase', 'execute']:
- for j in ['annotate', 'html', 'report', 'combine']:
- if (i in options.actions) and (j in options.actions):
- self.help_fn("You can't specify the '%s' and '%s' "
- "options at the same time." % (i, j))
- return ERR
-
- if not options.actions:
- self.help_fn(
- "You must specify at least one of -e, -x, -c, -r, -a, or -b."
- )
- return ERR
- args_allowed = (
- 'execute' in options.actions or
- 'annotate' in options.actions or
- 'html' in options.actions or
- 'debug' in options.actions or
- 'report' in options.actions or
- 'xml' in options.actions
- )
- if not args_allowed and args:
- self.help_fn("Unexpected arguments: %s" % " ".join(args))
- return ERR
-
- if 'execute' in options.actions and not args:
- self.help_fn("Nothing to do.")
+ if not self.args_ok(options, args):
return ERR
# Listify the list options.
@@ -463,38 +418,7 @@ class CoverageScript(object):
)
if 'debug' in options.actions:
- if not args:
- self.help_fn("What information would you like: data, sys?")
- return ERR
- for info in args:
- if info == 'sys':
- print("-- sys ----------------------------------------")
- for label, info in self.coverage.sysinfo():
- if info == []:
- info = "-none-"
- if isinstance(info, list):
- print("%15s:" % label)
- for e in info:
- print("%15s %s" % ("", e))
- else:
- print("%15s: %s" % (label, info))
- elif info == 'data':
- print("-- data ---------------------------------------")
- self.coverage.load()
- print("path: %s" % self.coverage.data.filename)
- print("has_arcs: %r" % self.coverage.data.has_arcs())
- summary = self.coverage.data.summary(fullpath=True)
- if summary:
- filenames = sorted(summary.keys())
- print("\n%d files:" % len(filenames))
- for f in filenames:
- print("%s: %d lines" % (f, summary[f]))
- else:
- print("No data collected")
- else:
- self.help_fn("Don't know what you mean by %r" % info)
- return ERR
- return OK
+ return self.do_debug(args)
if 'erase' in options.actions or options.erase_first:
self.coverage.erase()
@@ -502,13 +426,7 @@ class CoverageScript(object):
self.coverage.load()
if 'execute' in options.actions:
- # Run the script.
- self.coverage.start()
- try:
- self.run_python_file(args[0], args)
- finally:
- self.coverage.stop()
- self.coverage.save()
+ self.do_execute(options, args)
if 'combine' in options.actions:
self.coverage.combine()
@@ -523,18 +441,166 @@ class CoverageScript(object):
)
if 'report' in options.actions:
- self.coverage.report(
+ total = self.coverage.report(
show_missing=options.show_missing, **report_args)
if 'annotate' in options.actions:
self.coverage.annotate(
directory=options.directory, **report_args)
if 'html' in options.actions:
- self.coverage.html_report(
- directory=options.directory, **report_args)
+ total = self.coverage.html_report(
+ directory=options.directory, title=options.title,
+ **report_args)
if 'xml' in options.actions:
outfile = options.outfile
- self.coverage.xml_report(outfile=outfile, **report_args)
+ total = self.coverage.xml_report(outfile=outfile, **report_args)
+
+ if options.fail_under is not None:
+ if total >= options.fail_under:
+ return OK
+ else:
+ return FAIL_UNDER
+ else:
+ return OK
+ def help(self, error=None, topic=None, parser=None):
+ """Display an error message, or the named topic."""
+ assert error or topic or parser
+ if error:
+ print(error)
+ print("Use 'coverage help' for help.")
+ elif parser:
+ print(parser.format_help().strip())
+ else:
+ help_msg = HELP_TOPICS.get(topic, '').strip()
+ if help_msg:
+ print(help_msg % self.covpkg.__dict__)
+ else:
+ print("Don't know topic %r" % topic)
+
+ def do_help(self, options, args, parser):
+ """Deal with help requests.
+
+ Return True if it handled the request, False if not.
+
+ """
+ # Handle help.
+ if options.help:
+ if self.classic:
+ self.help_fn(topic='help')
+ else:
+ self.help_fn(parser=parser)
+ return True
+
+ if "help" in options.actions:
+ if args:
+ for a in args:
+ parser = CMDS.get(a)
+ if parser:
+ self.help_fn(parser=parser)
+ else:
+ self.help_fn(topic=a)
+ else:
+ self.help_fn(topic='help')
+ return True
+
+ # Handle version.
+ if options.version:
+ self.help_fn(topic='version')
+ return True
+
+ return False
+
+ def args_ok(self, options, args):
+ """Check for conflicts and problems in the options.
+
+ Returns True if everything is ok, or False if not.
+
+ """
+ for i in ['erase', 'execute']:
+ for j in ['annotate', 'html', 'report', 'combine']:
+ if (i in options.actions) and (j in options.actions):
+ self.help_fn("You can't specify the '%s' and '%s' "
+ "options at the same time." % (i, j))
+ return False
+
+ if not options.actions:
+ self.help_fn(
+ "You must specify at least one of -e, -x, -c, -r, -a, or -b."
+ )
+ return False
+ args_allowed = (
+ 'execute' in options.actions or
+ 'annotate' in options.actions or
+ 'html' in options.actions or
+ 'debug' in options.actions or
+ 'report' in options.actions or
+ 'xml' in options.actions
+ )
+ if not args_allowed and args:
+ self.help_fn("Unexpected arguments: %s" % " ".join(args))
+ return False
+
+ if 'execute' in options.actions and not args:
+ self.help_fn("Nothing to do.")
+ return False
+
+ return True
+
+ def do_execute(self, options, args):
+ """Implementation of 'coverage run'."""
+
+ # Run the script.
+ self.coverage.start()
+ code_ran = True
+ try:
+ try:
+ if options.module:
+ self.run_python_module(args[0], args)
+ else:
+ self.run_python_file(args[0], args)
+ except NoSource:
+ code_ran = False
+ raise
+ finally:
+ self.coverage.stop()
+ if code_ran:
+ self.coverage.save()
+
+ def do_debug(self, args):
+ """Implementation of 'coverage debug'."""
+
+ if not args:
+ self.help_fn("What information would you like: data, sys?")
+ return ERR
+ for info in args:
+ if info == 'sys':
+ print("-- sys ----------------------------------------")
+ for label, info in self.coverage.sysinfo():
+ if info == []:
+ info = "-none-"
+ if isinstance(info, list):
+ prefix = "%15s:" % label
+ for e in info:
+ print("%16s %s" % (prefix, e))
+ prefix = ""
+ else:
+ print("%15s: %s" % (label, info))
+ elif info == 'data':
+ print("-- data ---------------------------------------")
+ self.coverage.load()
+ print("path: %s" % self.coverage.data.filename)
+ print("has_arcs: %r" % self.coverage.data.has_arcs())
+ summary = self.coverage.data.summary(fullpath=True)
+ if summary:
+ filenames = sorted(summary.keys())
+ print("\n%d files:" % len(filenames))
+ for f in filenames:
+ print("%s: %d lines" % (f, summary[f]))
+ else:
+ print("No data collected")
+ else:
+ self.help_fn("Don't know what you mean by %r" % info)
+ return ERR
return OK
@@ -552,10 +618,10 @@ def unshell_list(s):
return s.split(',')
-HELP_TOPICS = r"""
-
-== classic ====================================================================
-Coverage.py version %(__version__)s
+HELP_TOPICS = {
+# -------------------------
+'classic':
+r"""Coverage.py version %(__version__)s
Measure, collect, and report on code coverage in Python programs.
Usage:
@@ -599,8 +665,9 @@ coverage -a [-d DIR] [-i] [-o DIR,...] [FILE1 FILE2 ...]
Coverage data is saved in the file .coverage by default. Set the
COVERAGE_FILE environment variable to save it somewhere else.
-
-== help =======================================================================
+""",
+# -------------------------
+'help': """\
Coverage.py, version %(__version__)s
Measure, collect, and report on code coverage in Python programs.
@@ -619,20 +686,22 @@ Commands:
Use "coverage help <command>" for detailed help on any command.
Use "coverage help classic" for help on older command syntax.
For more information, see %(__url__)s
-
-== minimum_help ===============================================================
+""",
+# -------------------------
+'minimum_help': """\
Code coverage for Python. Use 'coverage help' for help.
-
-== version ====================================================================
+""",
+# -------------------------
+'version': """\
Coverage.py, version %(__version__)s. %(__url__)s
-
-"""
+""",
+}
def main(argv=None):
- """The main entrypoint to Coverage.
+ """The main entry point to Coverage.
- This is installed as the script entrypoint.
+ This is installed as the script entry point.
"""
if argv is None:
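
With the new --fail-under option, main() can return the FAIL_UNDER status (2) in addition
to OK (0) and ERR (1), and __main__.py now passes that value to sys.exit(). A minimal
sketch of driving it programmatically (the threshold is illustrative, and existing
.coverage data is assumed):

    import sys
    from coverage.cmdline import main

    # Equivalent to: coverage report --fail-under=80
    status = main(["report", "--fail-under=80"])
    if status == 2:        # FAIL_UNDER: total coverage is below the minimum
        sys.stderr.write("total coverage is under 80%\n")
    sys.exit(status)
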
diff --git a/coverage/codeunit.py b/coverage/codeunit.py
index ac90cb21..2b581d0d 100644
--- a/coverage/codeunit.py
+++ b/coverage/codeunit.py
@@ -2,7 +2,7 @@
import glob, os
-from coverage.backward import string_class, StringIO
+from coverage.backward import open_source, string_class, StringIO
from coverage.misc import CoverageException
@@ -52,7 +52,7 @@ class CodeUnit(object):
else:
f = morf
# .pyc files should always refer to a .py instead.
- if f.endswith('.pyc'):
+ if f.endswith('.pyc') or f.endswith('.pyo'):
f = f[:-1]
elif f.endswith('$py.class'): # jython
f = f[:-9] + ".py"
@@ -106,7 +106,7 @@ class CodeUnit(object):
"""Return an open file for reading the source of the code unit."""
if os.path.exists(self.filename):
# A regular text file: open it.
- return open(self.filename)
+ return open_source(self.filename)
# Maybe it's in a zip file?
source = self.file_locator.get_zip_data(self.filename)
@@ -117,3 +117,23 @@ class CodeUnit(object):
raise CoverageException(
"No source for code %r." % self.filename
)
+
+ def should_be_python(self):
+ """Does it seem like this file should contain Python?
+
+        This is used to decide if a file reported as part of the execution of

+ a program was really likely to have contained Python in the first
+ place.
+
+ """
+ # Get the file extension.
+ _, ext = os.path.splitext(self.filename)
+
+ # Anything named *.py* should be Python.
+ if ext.startswith('.py'):
+ return True
+ # A file with no extension should be Python.
+ if not ext:
+ return True
+ # Everything else is probably not Python.
+ return False
diff --git a/coverage/collector.py b/coverage/collector.py
index 9c40d16c..1b807b27 100644
--- a/coverage/collector.py
+++ b/coverage/collector.py
@@ -1,13 +1,24 @@
"""Raw data collector for Coverage."""
-import sys, threading
+import os, sys, threading
try:
# Use the C extension code when we can, for speed.
- from coverage.tracer import Tracer
+ from coverage.tracer import CTracer # pylint: disable=F0401,E0611
except ImportError:
# Couldn't import the C extension, maybe it isn't built.
- Tracer = None
+ if os.getenv('COVERAGE_TEST_TRACER') == 'c':
+ # During testing, we use the COVERAGE_TEST_TRACER env var to indicate
+ # that we've fiddled with the environment to test this fallback code.
+ # If we thought we had a C tracer, but couldn't import it, then exit
+ # quickly and clearly instead of dribbling confusing errors. I'm using
+ # sys.exit here instead of an exception because an exception here
+ # causes all sorts of other noise in unittest.
+ sys.stderr.write(
+ "*** COVERAGE_TEST_TRACER is 'c' but can't import CTracer!\n"
+ )
+ sys.exit(1)
+ CTracer = None
class PyTracer(object):
@@ -45,7 +56,8 @@ class PyTracer(object):
"""The trace function passed to sys.settrace."""
#print("trace event: %s %r @%d" % (
- # event, frame.f_code.co_filename, frame.f_lineno))
+ # event, frame.f_code.co_filename, frame.f_lineno),
+ # file=sys.stderr)
if self.last_exc_back:
if frame == self.last_exc_back:
@@ -173,7 +185,7 @@ class Collector(object):
else:
# Being fast: use the C Tracer if it is available, else the Python
# trace function.
- self._trace_class = Tracer or PyTracer
+ self._trace_class = CTracer or PyTracer
def __repr__(self):
return "<Collector at 0x%x>" % id(self)
@@ -232,9 +244,29 @@ class Collector(object):
if self._collectors:
self._collectors[-1].pause()
self._collectors.append(self)
- #print >>sys.stderr, "Started: %r" % self._collectors
+ #print("Started: %r" % self._collectors, file=sys.stderr)
+
+ # Check to see whether we had a fullcoverage tracer installed.
+ traces0 = []
+ if hasattr(sys, "gettrace"):
+ fn0 = sys.gettrace()
+ if fn0:
+ tracer0 = getattr(fn0, '__self__', None)
+ if tracer0:
+ traces0 = getattr(tracer0, 'traces', [])
+
# Install the tracer on this thread.
- self._start_tracer()
+ fn = self._start_tracer()
+
+ for args in traces0:
+ (frame, event, arg), lineno = args
+ try:
+ fn(frame, event, arg, lineno=lineno)
+ except TypeError:
+ raise Exception(
+ "fullcoverage must be run with the C trace function."
+ )
+
# Install our installation tracer in threading, to jump start other
# threads.
threading.settrace(self._installation_trace)
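
The import fallback above means measurement silently degrades to the pure-Python tracer
when the C extension is not built (unless COVERAGE_TEST_TRACER insists on it). A quick
diagnostic sketch using the names from this hunk:

    from coverage.collector import CTracer

    if CTracer is None:
        print("C extension unavailable; Collector will fall back to PyTracer")
    else:
        print("Collector will use the fast CTracer")
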
diff --git a/coverage/config.py b/coverage/config.py
index 1f6a879f..8f1f6710 100644
--- a/coverage/config.py
+++ b/coverage/config.py
@@ -1,7 +1,73 @@
"""Config file for coverage.py"""
-import os
-from coverage.backward import configparser # pylint: disable-msg=W0622
+import os, sys
+from coverage.backward import string_class, iitems
+
+# In py3, ConfigParser was renamed to the more-standard configparser
+try:
+ import configparser # pylint: disable=F0401
+except ImportError:
+ import ConfigParser as configparser
+
+
+class HandyConfigParser(configparser.ConfigParser):
+ """Our specialization of ConfigParser."""
+
+ def read(self, filename):
+ """Read a filename as UTF-8 configuration data."""
+ kwargs = {}
+ if sys.version_info >= (3, 2):
+ kwargs['encoding'] = "utf-8"
+ configparser.ConfigParser.read(self, filename, **kwargs)
+
+ def getlist(self, section, option):
+ """Read a list of strings.
+
+ The value of `section` and `option` is treated as a comma- and newline-
+ separated list of strings. Each value is stripped of whitespace.
+
+ Returns the list of strings.
+
+ """
+ value_list = self.get(section, option)
+ values = []
+ for value_line in value_list.split('\n'):
+ for value in value_line.split(','):
+ value = value.strip()
+ if value:
+ values.append(value)
+ return values
+
+ def getlinelist(self, section, option):
+ """Read a list of full-line strings.
+
+ The value of `section` and `option` is treated as a newline-separated
+ list of strings. Each value is stripped of whitespace.
+
+ Returns the list of strings.
+
+ """
+ value_list = self.get(section, option)
+ return list(filter(None, value_list.split('\n')))
+
+
+# The default line exclusion regexes
+DEFAULT_EXCLUDE = [
+ '(?i)# *pragma[: ]*no *cover',
+ ]
+
+# The default partial branch regexes, to be modified by the user.
+DEFAULT_PARTIAL = [
+ '(?i)# *pragma[: ]*no *branch',
+ ]
+
+# The default partial branch regexes, based on Python semantics.
+# These are any Python branching constructs that can't actually execute all
+# their branches.
+DEFAULT_PARTIAL_ALWAYS = [
+ 'while (True|1|False|0):',
+ 'if (True|1|False|0):',
+ ]
class CoverageConfig(object):
@@ -11,7 +77,6 @@ class CoverageConfig(object):
operation of coverage.py.
"""
-
def __init__(self):
"""Initialize the configuration attributes to their defaults."""
# Defaults for [run]
@@ -23,18 +88,26 @@ class CoverageConfig(object):
self.source = None
# Defaults for [report]
- self.exclude_list = ['(?i)# *pragma[: ]*no *cover']
+ self.exclude_list = DEFAULT_EXCLUDE[:]
self.ignore_errors = False
- self.omit = None
self.include = None
+ self.omit = None
+ self.partial_list = DEFAULT_PARTIAL[:]
+ self.partial_always_list = DEFAULT_PARTIAL_ALWAYS[:]
self.precision = 0
+ self.show_missing = False
# Defaults for [html]
self.html_dir = "htmlcov"
+ self.extra_css = None
+ self.html_title = "Coverage report"
# Defaults for [xml]
self.xml_output = "coverage.xml"
+ # Defaults for [paths]
+ self.paths = {}
+
def from_environment(self, env_var):
"""Read configuration from the `env_var` environment variable."""
# Timidity: for nose users, read an environment variable. This is a
@@ -44,75 +117,66 @@ class CoverageConfig(object):
if env:
self.timid = ('--timid' in env)
+ MUST_BE_LIST = ["omit", "include"]
+
def from_args(self, **kwargs):
"""Read config values from `kwargs`."""
- for k, v in kwargs.items():
+ for k, v in iitems(kwargs):
if v is not None:
+ if k in self.MUST_BE_LIST and isinstance(v, string_class):
+ v = [v]
setattr(self, k, v)
- def from_file(self, *files):
- """Read configuration from .rc files.
+ def from_file(self, filename):
+ """Read configuration from a .rc file.
- Each argument in `files` is a file name to read.
+ `filename` is a file name to read.
"""
- cp = configparser.RawConfigParser()
- cp.read(files)
+ cp = HandyConfigParser()
+ cp.read(filename)
+
+ for option_spec in self.CONFIG_FILE_OPTIONS:
+ self.set_attr_from_config_option(cp, *option_spec)
+
+ # [paths] is special
+ if cp.has_section('paths'):
+ for option in cp.options('paths'):
+ self.paths[option] = cp.getlist('paths', option)
+ CONFIG_FILE_OPTIONS = [
# [run]
- if cp.has_option('run', 'branch'):
- self.branch = cp.getboolean('run', 'branch')
- if cp.has_option('run', 'cover_pylib'):
- self.cover_pylib = cp.getboolean('run', 'cover_pylib')
- if cp.has_option('run', 'data_file'):
- self.data_file = cp.get('run', 'data_file')
- if cp.has_option('run', 'parallel'):
- self.parallel = cp.getboolean('run', 'parallel')
- if cp.has_option('run', 'timid'):
- self.timid = cp.getboolean('run', 'timid')
- if cp.has_option('run', 'source'):
- self.source = self.get_list(cp, 'run', 'source')
- if cp.has_option('run', 'omit'):
- self.omit = self.get_list(cp, 'run', 'omit')
- if cp.has_option('run', 'include'):
- self.include = self.get_list(cp, 'run', 'include')
+ ('branch', 'run:branch', 'boolean'),
+ ('cover_pylib', 'run:cover_pylib', 'boolean'),
+ ('data_file', 'run:data_file'),
+ ('include', 'run:include', 'list'),
+ ('omit', 'run:omit', 'list'),
+ ('parallel', 'run:parallel', 'boolean'),
+ ('source', 'run:source', 'list'),
+ ('timid', 'run:timid', 'boolean'),
# [report]
- if cp.has_option('report', 'exclude_lines'):
- # exclude_lines is a list of lines, leave out the blank ones.
- exclude_list = cp.get('report', 'exclude_lines')
- self.exclude_list = list(filter(None, exclude_list.split('\n')))
- if cp.has_option('report', 'ignore_errors'):
- self.ignore_errors = cp.getboolean('report', 'ignore_errors')
- if cp.has_option('report', 'omit'):
- self.omit = self.get_list(cp, 'report', 'omit')
- if cp.has_option('report', 'include'):
- self.include = self.get_list(cp, 'report', 'include')
- if cp.has_option('report', 'precision'):
- self.precision = cp.getint('report', 'precision')
+ ('exclude_list', 'report:exclude_lines', 'linelist'),
+ ('ignore_errors', 'report:ignore_errors', 'boolean'),
+ ('include', 'report:include', 'list'),
+ ('omit', 'report:omit', 'list'),
+ ('partial_list', 'report:partial_branches', 'linelist'),
+ ('partial_always_list', 'report:partial_branches_always', 'linelist'),
+ ('precision', 'report:precision', 'int'),
+ ('show_missing', 'report:show_missing', 'boolean'),
# [html]
- if cp.has_option('html', 'directory'):
- self.html_dir = cp.get('html', 'directory')
+ ('html_dir', 'html:directory'),
+ ('extra_css', 'html:extra_css'),
+ ('html_title', 'html:title'),
# [xml]
- if cp.has_option('xml', 'output'):
- self.xml_output = cp.get('xml', 'output')
-
- def get_list(self, cp, section, option):
- """Read a list of strings from the ConfigParser `cp`.
-
- The value of `section` and `option` is treated as a comma- and newline-
- separated list of strings. Each value is stripped of whitespace.
-
- Returns the list of strings.
-
- """
- value_list = cp.get(section, option)
- values = []
- for value_line in value_list.split('\n'):
- for value in value_line.split(','):
- value = value.strip()
- if value:
- values.append(value)
- return values
+ ('xml_output', 'xml:output'),
+ ]
+
+ def set_attr_from_config_option(self, cp, attr, where, type_=''):
+ """Set an attribute on self if it exists in the ConfigParser."""
+ section, option = where.split(":")
+ if cp.has_option(section, option):
+ method = getattr(cp, 'get'+type_)
+ setattr(self, attr, method(section, option))
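
The CONFIG_FILE_OPTIONS table above replaces the long chain of has_option checks: each
entry is (attribute, "section:option", optional getter suffix), and
set_attr_from_config_option dispatches to get, getboolean, getint, getlist, or
getlinelist. A minimal sketch (values are illustrative) of how the two list getters
split values:

    from coverage.config import HandyConfigParser

    cp = HandyConfigParser()
    cp.add_section("report")
    cp.set("report", "omit", "*/tests/*, */vendor/*\n*/migrations/*")
    cp.set("report", "exclude_lines", "pragma: no cover\nif __name__ ==")

    print(cp.getlist("report", "omit"))
    # ['*/tests/*', '*/vendor/*', '*/migrations/*']
    print(cp.getlinelist("report", "exclude_lines"))
    # ['pragma: no cover', 'if __name__ ==']
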
diff --git a/coverage/control.py b/coverage/control.py
index 54e6d3f9..28d084bf 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -3,21 +3,22 @@
import atexit, os, random, socket, sys
from coverage.annotate import AnnotateReporter
-from coverage.backward import string_class
+from coverage.backward import string_class, iitems
from coverage.codeunit import code_unit_factory, CodeUnit
from coverage.collector import Collector
from coverage.config import CoverageConfig
from coverage.data import CoverageData
from coverage.files import FileLocator, TreeMatcher, FnmatchMatcher
-from coverage.files import find_python_files
+from coverage.files import PathAliases, find_python_files, prep_patterns
from coverage.html import HtmlReporter
-from coverage.misc import CoverageException, bool_or_none
+from coverage.misc import CoverageException, bool_or_none, join_regex
+from coverage.misc import file_be_gone
from coverage.results import Analysis, Numbers
from coverage.summary import SummaryReporter
from coverage.xmlreport import XmlReporter
class coverage(object):
- """Programmatic access to Coverage.
+ """Programmatic access to coverage.py.
To use::
@@ -25,7 +26,7 @@ class coverage(object):
cov = coverage()
cov.start()
- #.. blah blah (run your code) blah blah ..
+ #.. call your code ..
cov.stop()
cov.html_report(directory='covhtml')
@@ -64,7 +65,8 @@ class coverage(object):
measured.
`include` and `omit` are lists of filename patterns. Files that match
- `include` will be measured, files that match `omit` will not.
+ `include` will be measured, files that match `omit` will not. Each
+ will also accept a single string argument.
"""
from coverage import __version__
@@ -104,8 +106,10 @@ class coverage(object):
self.auto_data = auto_data
self.atexit_registered = False
- self.exclude_re = ""
- self._compile_exclude()
+ # _exclude_re is a dict mapping exclusion list names to compiled
+ # regexes.
+ self._exclude_re = {}
+ self._exclude_regex_stale()
self.file_locator = FileLocator()
@@ -118,8 +122,8 @@ class coverage(object):
else:
self.source_pkgs.append(src)
- self.omit = self._abs_files(self.config.omit)
- self.include = self._abs_files(self.config.include)
+ self.omit = prep_patterns(self.config.omit)
+ self.include = prep_patterns(self.config.include)
self.collector = Collector(
self._should_trace, timid=self.config.timid,
@@ -157,7 +161,7 @@ class coverage(object):
# we've imported, and take all the different ones.
for m in (atexit, os, random, socket):
if hasattr(m, "__file__"):
- m_dir = self._canonical_dir(m.__file__)
+ m_dir = self._canonical_dir(m)
if m_dir not in self.pylib_dirs:
self.pylib_dirs.append(m_dir)
@@ -176,17 +180,9 @@ class coverage(object):
# Set the reporting precision.
Numbers.set_precision(self.config.precision)
- # When tearing down the coverage object, modules can become None.
- # Saving the modules as object attributes avoids problems, but it is
- # quite ad-hoc which modules need to be saved and which references
- # need to use the object attributes.
- self.socket = socket
- self.os = os
- self.random = random
-
- def _canonical_dir(self, f):
- """Return the canonical directory of the file `f`."""
- return os.path.split(self.file_locator.canonical_filename(f))[0]
+ def _canonical_dir(self, morf):
+ """Return the canonical directory of the module or file `morf`."""
+ return os.path.split(CodeUnit(morf, self.file_locator).filename)[0]
def _source_for_file(self, filename):
"""Return the source file for `filename`."""
@@ -207,9 +203,6 @@ class coverage(object):
should not.
"""
- if os is None:
- return False
-
if filename.startswith('<'):
# Lots of non-file execution is represented with artificial
# filenames like "<string>", "<doctest readme.txt[0]>", or
@@ -217,19 +210,11 @@ class coverage(object):
# can't do anything with the data later anyway.
return False
- if filename.endswith(".html"):
- # Jinja and maybe other templating systems compile templates into
- # Python code, but use the template filename as the filename in
- # the compiled code. Of course, those filenames are useless later
- # so don't bother collecting. TODO: How should we really separate
- # out good file extensions from bad?
- return False
-
self._check_for_packages()
# Compiled Python files have two filenames: frame.f_code.co_filename is
- # the filename at the time the .pyc was compiled. The second name
- # is __file__, which is where the .pyc was actually loaded from. Since
+ # the filename at the time the .pyc was compiled. The second name is
+ # __file__, which is where the .pyc was actually loaded from. Since
# .pyc files can be moved after compilation (for example, by being
# installed), we look for __file__ in the frame and prefer it to the
# co_filename value.
@@ -243,12 +228,16 @@ class coverage(object):
canonical = self.file_locator.canonical_filename(filename)
- # If the user specified source, then that's authoritative about what to
- # measure. If they didn't, then we have to exclude the stdlib and
- # coverage.py directories.
+ # If the user specified source or include, then that's authoritative
+ # about the outer bound of what to measure and we don't have to apply
+ # any canned exclusions. If they didn't, then we have to exclude the
+ # stdlib and coverage.py directories.
if self.source_match:
if not self.source_match.match(canonical):
return False
+ elif self.include_match:
+ if not self.include_match.match(canonical):
+ return False
else:
# If we aren't supposed to trace installed code, then check if this
# is near the Python standard library and skip it if so.
@@ -260,9 +249,7 @@ class coverage(object):
if self.cover_match and self.cover_match.match(canonical):
return False
- # Check the file against the include and omit patterns.
- if self.include_match and not self.include_match.match(canonical):
- return False
+ # Check the file against the omit pattern.
if self.omit_match and self.omit_match.match(canonical):
return False
@@ -271,7 +258,7 @@ class coverage(object):
# To log what should_trace returns, change this to "if 1:"
if 0:
_real_should_trace = _should_trace
- def _should_trace(self, filename, frame): # pylint: disable-msg=E0102
+ def _should_trace(self, filename, frame): # pylint: disable=E0102
"""A logging decorator around the real _should_trace function."""
ret = self._real_should_trace(filename, frame)
print("should_trace: %r -> %r" % (filename, ret))
@@ -282,11 +269,6 @@ class coverage(object):
self._warnings.append(msg)
sys.stderr.write("Coverage.py warning: %s\n" % msg)
- def _abs_files(self, files):
- """Return a list of absolute file names for the names in `files`."""
- files = files or []
- return [self.file_locator.abs_file(f) for f in files]
-
def _check_for_packages(self):
"""Update the source_match matcher with latest imported packages."""
# Our self.source_pkgs attribute is a list of package names we want to
@@ -306,7 +288,7 @@ class coverage(object):
try:
pkg_file = mod.__file__
except AttributeError:
- self._warn("Module %s has no python source." % pkg)
+ pkg_file = None
else:
d, f = os.path.split(pkg_file)
if f.startswith('__init__'):
@@ -315,8 +297,14 @@ class coverage(object):
else:
pkg_file = self._source_for_file(pkg_file)
pkg_file = self.file_locator.canonical_filename(pkg_file)
+ if not os.path.exists(pkg_file):
+ pkg_file = None
+
+ if pkg_file:
self.source.append(pkg_file)
self.source_match.add(pkg_file)
+ else:
+ self._warn("Module %s has no Python source." % pkg)
for pkg in found:
self.source_pkgs.remove(pkg)
@@ -335,7 +323,15 @@ class coverage(object):
self.data.read()
def start(self):
- """Start measuring code coverage."""
+ """Start measuring code coverage.
+
+ Coverage measurement actually occurs in functions called after `start`
+ is invoked. Statements in the same scope as `start` won't be measured.
+
+ Once you invoke `start`, you must also call `stop` eventually, or your
+ process might not shut down cleanly.
+
+ """
if self.run_suffix:
# Calling start() means we're running code, so use the run_suffix
# as the data_suffix when we eventually save the data.
@@ -366,7 +362,6 @@ class coverage(object):
def stop(self):
"""Stop measuring code coverage."""
self.collector.stop()
- self._harvest_data()
def erase(self):
"""Erase previously-collected coverage data.
@@ -378,30 +373,49 @@ class coverage(object):
self.collector.reset()
self.data.erase()
- def clear_exclude(self):
+ def clear_exclude(self, which='exclude'):
"""Clear the exclude list."""
- self.config.exclude_list = []
- self.exclude_re = ""
+ setattr(self.config, which + "_list", [])
+ self._exclude_regex_stale()
- def exclude(self, regex):
+ def exclude(self, regex, which='exclude'):
"""Exclude source lines from execution consideration.
- `regex` is a regular expression. Lines matching this expression are
- not considered executable when reporting code coverage. A list of
- regexes is maintained; this function adds a new regex to the list.
- Matching any of the regexes excludes a source line.
+ A number of lists of regular expressions are maintained. Each list
+ selects lines that are treated differently during reporting.
+
+ `which` determines which list is modified. The "exclude" list selects
+ lines that are not considered executable at all. The "partial" list
+ indicates lines with branches that are not taken.
+
+ `regex` is a regular expression. The regex is added to the specified
+ list. If any of the regexes in the list is found in a line, the line
+ is marked for special treatment during reporting.
"""
- self.config.exclude_list.append(regex)
- self._compile_exclude()
+ excl_list = getattr(self.config, which + "_list")
+ excl_list.append(regex)
+ self._exclude_regex_stale()
+
+ def _exclude_regex_stale(self):
+ """Drop all the compiled exclusion regexes, a list was modified."""
+ self._exclude_re.clear()
+
+ def _exclude_regex(self, which):
+ """Return a compiled regex for the given exclusion list."""
+ if which not in self._exclude_re:
+ excl_list = getattr(self.config, which + "_list")
+ self._exclude_re[which] = join_regex(excl_list)
+ return self._exclude_re[which]
- def _compile_exclude(self):
- """Build the internal usable form of the exclude list."""
- self.exclude_re = "(" + ")|(".join(self.config.exclude_list) + ")"
+ def get_exclude_list(self, which='exclude'):
+ """Return a list of excluded regex patterns.
- def get_exclude_list(self):
- """Return the list of excluded regex patterns."""
- return self.config.exclude_list
+ `which` indicates which list is desired. See `exclude` for the lists
+ that are available, and their meaning.
+
+ """
+ return getattr(self.config, which + "_list")
def save(self):
"""Save the collected coverage data to the data file."""
@@ -412,8 +426,8 @@ class coverage(object):
# `save()` at the last minute so that the pid will be correct even
# if the process forks.
data_suffix = "%s.%s.%06d" % (
- self.socket.gethostname(), self.os.getpid(),
- self.random.randint(0, 99999)
+ socket.gethostname(), os.getpid(),
+ random.randint(0, 99999)
)
self._harvest_data()
@@ -427,7 +441,14 @@ class coverage(object):
current measurements.
"""
- self.data.combine_parallel_data()
+ aliases = None
+ if self.config.paths:
+ aliases = PathAliases(self.file_locator)
+ for paths in self.config.paths.values():
+ result = paths[0]
+ for pattern in paths[1:]:
+ aliases.add(pattern, result)
+ self.data.combine_parallel_data(aliases=aliases)
def _harvest_data(self):
"""Get the collected data and reset the collector.
@@ -443,7 +464,7 @@ class coverage(object):
# If there are still entries in the source_pkgs list, then we never
# encountered those packages.
for pkg in self.source_pkgs:
- self._warn("Source module %s was never encountered." % pkg)
+ self._warn("Module %s was never imported." % pkg)
# Find out if we got any data.
summary = self.data.summary()
@@ -453,6 +474,7 @@ class coverage(object):
# Find files that were never executed at all.
for src in self.source:
for py_file in find_python_files(src):
+ py_file = self.file_locator.canonical_filename(py_file)
self.data.touch_file(py_file)
self._harvested = True
@@ -492,13 +514,14 @@ class coverage(object):
Returns an `Analysis` object.
"""
+ self._harvest_data()
if not isinstance(it, CodeUnit):
it = code_unit_factory(it, self.file_locator)[0]
return Analysis(self, it)
def report(self, morfs=None, show_missing=True, ignore_errors=None,
- file=None, # pylint: disable-msg=W0622
+ file=None, # pylint: disable=W0622
omit=None, include=None
):
"""Write a summary report to `file`.
@@ -510,14 +533,16 @@ class coverage(object):
match those patterns will be included in the report. Modules matching
`omit` will not be included in the report.
+ Returns a float, the total percentage covered.
+
"""
+ self._harvest_data()
self.config.from_args(
- ignore_errors=ignore_errors, omit=omit, include=include
- )
- reporter = SummaryReporter(
- self, show_missing, self.config.ignore_errors
+ ignore_errors=ignore_errors, omit=omit, include=include,
+ show_missing=show_missing,
)
- reporter.report(morfs, outfile=file, config=self.config)
+ reporter = SummaryReporter(self, self.config)
+ return reporter.report(morfs, outfile=file)
def annotate(self, morfs=None, directory=None, ignore_errors=None,
omit=None, include=None):
@@ -531,25 +556,39 @@ class coverage(object):
See `coverage.report()` for other arguments.
"""
+ self._harvest_data()
self.config.from_args(
ignore_errors=ignore_errors, omit=omit, include=include
)
- reporter = AnnotateReporter(self, self.config.ignore_errors)
- reporter.report(morfs, config=self.config, directory=directory)
+ reporter = AnnotateReporter(self, self.config)
+ reporter.report(morfs, directory=directory)
def html_report(self, morfs=None, directory=None, ignore_errors=None,
- omit=None, include=None):
+ omit=None, include=None, extra_css=None, title=None):
"""Generate an HTML report.
+ The HTML is written to `directory`. The file "index.html" is the
+ overview starting point, with links to more detailed pages for
+ individual modules.
+
+ `extra_css` is a path to a file of other CSS to apply on the page.
+ It will be copied into the HTML directory.
+
+ `title` is a text string (not HTML) to use as the title of the HTML
+ report.
+
See `coverage.report()` for other arguments.
+ Returns a float, the total percentage covered.
+
"""
+ self._harvest_data()
self.config.from_args(
ignore_errors=ignore_errors, omit=omit, include=include,
- html_dir=directory,
+ html_dir=directory, extra_css=extra_css, html_title=title,
)
- reporter = HtmlReporter(self, self.config.ignore_errors)
- reporter.report(morfs, config=self.config)
+ reporter = HtmlReporter(self, self.config)
+ return reporter.report(morfs)
def xml_report(self, morfs=None, outfile=None, ignore_errors=None,
omit=None, include=None):
@@ -562,12 +601,16 @@ class coverage(object):
See `coverage.report()` for other arguments.
+ Returns a float, the total percentage covered.
+
"""
+ self._harvest_data()
self.config.from_args(
ignore_errors=ignore_errors, omit=omit, include=include,
xml_output=outfile,
)
file_to_close = None
+ delete_file = False
if self.config.xml_output:
if self.config.xml_output == '-':
outfile = sys.stdout
@@ -575,11 +618,16 @@ class coverage(object):
outfile = open(self.config.xml_output, "w")
file_to_close = outfile
try:
- reporter = XmlReporter(self, self.config.ignore_errors)
- reporter.report(morfs, outfile=outfile, config=self.config)
+ reporter = XmlReporter(self, self.config)
+ return reporter.report(morfs, outfile=outfile)
+ except CoverageException:
+ delete_file = True
+ raise
finally:
if file_to_close:
file_to_close.close()
+ if delete_file:
+ file_be_gone(self.config.xml_output)
def sysinfo(self):
"""Return a list of (key, value) pairs showing internal information."""
@@ -587,6 +635,11 @@ class coverage(object):
import coverage as covmod
import platform, re
+ try:
+ implementation = platform.python_implementation()
+ except AttributeError:
+ implementation = "unknown"
+
info = [
('version', covmod.__version__),
('coverage', covmod.__file__),
@@ -596,11 +649,12 @@ class coverage(object):
('data_path', self.data.filename),
('python', sys.version.replace('\n', '')),
('platform', platform.platform()),
+ ('implementation', implementation),
('cwd', os.getcwd()),
('path', sys.path),
('environment', [
- ("%s = %s" % (k, v)) for k, v in os.environ.items()
- if re.search("^COV|^PY", k)
+ ("%s = %s" % (k, v)) for k, v in iitems(os.environ)
+ if re.search(r"^COV|^PY", k)
]),
]
return info
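
The report methods above now return the total percentage covered as a float, which pairs
with the command line's --fail-under handling. A minimal sketch of the updated
programmatic API (the measured package and the threshold are illustrative):

    from coverage import coverage

    cov = coverage()
    cov.start()
    import sampleproject.main          # illustrative code under measurement
    sampleproject.main.run()
    cov.stop()
    cov.save()

    total = cov.html_report(directory="covhtml", title="Sample project")
    if total < 80:
        raise SystemExit(2)            # mirror the command line's FAIL_UNDER status
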
diff --git a/coverage/data.py b/coverage/data.py
index 3d750c42..c86a77f2 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -2,7 +2,9 @@
import os
-from coverage.backward import pickle, sorted # pylint: disable-msg=W0622
+from coverage.backward import iitems, pickle, sorted # pylint: disable=W0622
+from coverage.files import PathAliases
+from coverage.misc import file_be_gone
class CoverageData(object):
@@ -59,10 +61,6 @@ class CoverageData(object):
#
self.arcs = {}
- self.os = os
- self.sorted = sorted
- self.pickle = pickle
-
def usefile(self, use_file=True):
"""Set whether or not to use a disk file for data."""
self.use_file = use_file
@@ -92,21 +90,21 @@ class CoverageData(object):
def erase(self):
"""Erase the data, both in this object, and from its file storage."""
if self.use_file:
- if self.filename and os.path.exists(self.filename):
- os.remove(self.filename)
+ if self.filename:
+ file_be_gone(self.filename)
self.lines = {}
self.arcs = {}
def line_data(self):
"""Return the map from filenames to lists of line numbers executed."""
return dict(
- [(f, self.sorted(lmap.keys())) for f, lmap in self.lines.items()]
+ [(f, sorted(lmap.keys())) for f, lmap in iitems(self.lines)]
)
def arc_data(self):
"""Return the map from filenames to lists of line number pairs."""
return dict(
- [(f, self.sorted(amap.keys())) for f, amap in self.arcs.items()]
+ [(f, sorted(amap.keys())) for f, amap in iitems(self.arcs)]
)
def write_file(self, filename):
@@ -126,7 +124,7 @@ class CoverageData(object):
# Write the pickle to the file.
fdata = open(filename, 'wb')
try:
- self.pickle.dump(data, fdata, 2)
+ pickle.dump(data, fdata, 2)
finally:
fdata.close()
@@ -158,33 +156,39 @@ class CoverageData(object):
# Unpack the 'lines' item.
lines = dict([
(f, dict.fromkeys(linenos, None))
- for f, linenos in data.get('lines', {}).items()
+ for f, linenos in iitems(data.get('lines', {}))
])
# Unpack the 'arcs' item.
arcs = dict([
(f, dict.fromkeys(arcpairs, None))
- for f, arcpairs in data.get('arcs', {}).items()
+ for f, arcpairs in iitems(data.get('arcs', {}))
])
except Exception:
pass
return lines, arcs
- def combine_parallel_data(self):
+ def combine_parallel_data(self, aliases=None):
"""Combine a number of data files together.
Treat `self.filename` as a file prefix, and combine the data from all
of the data files starting with that prefix plus a dot.
+ If `aliases` is provided, it's a `PathAliases` object that is used to
+ re-map paths to match the local machine's.
+
"""
+ aliases = aliases or PathAliases()
data_dir, local = os.path.split(self.filename)
localdot = local + '.'
for f in os.listdir(data_dir or '.'):
if f.startswith(localdot):
full_path = os.path.join(data_dir, f)
new_lines, new_arcs = self._read_file(full_path)
- for filename, file_data in new_lines.items():
+ for filename, file_data in iitems(new_lines):
+ filename = aliases.map(filename)
self.lines.setdefault(filename, {}).update(file_data)
- for filename, file_data in new_arcs.items():
+ for filename, file_data in iitems(new_arcs):
+ filename = aliases.map(filename)
self.arcs.setdefault(filename, {}).update(file_data)
if f != local:
os.remove(full_path)
@@ -195,7 +199,7 @@ class CoverageData(object):
`line_data` is { filename: { lineno: None, ... }, ...}
"""
- for filename, linenos in line_data.items():
+ for filename, linenos in iitems(line_data):
self.lines.setdefault(filename, {}).update(linenos)
def add_arc_data(self, arc_data):
@@ -204,7 +208,7 @@ class CoverageData(object):
`arc_data` is { filename: { (l1,l2): None, ... }, ...}
"""
- for filename, arcs in arc_data.items():
+ for filename, arcs in iitems(arc_data):
self.arcs.setdefault(filename, {}).update(arcs)
def touch_file(self, filename):
@@ -228,6 +232,11 @@ class CoverageData(object):
"""A map containing all the arcs executed in `filename`."""
return self.arcs.get(filename) or {}
+ def add_to_hash(self, filename, hasher):
+ """Contribute `filename`'s data to the Md5Hash `hasher`."""
+ hasher.update(self.executed_lines(filename))
+ hasher.update(self.executed_arcs(filename))
+
def summary(self, fullpath=False):
"""Return a dict summarizing the coverage data.
@@ -240,8 +249,8 @@ class CoverageData(object):
if fullpath:
filename_fn = lambda f: f
else:
- filename_fn = self.os.path.basename
- for filename, lines in self.lines.items():
+ filename_fn = os.path.basename
+ for filename, lines in iitems(self.lines):
summ[filename_fn(filename)] = len(lines)
return summ
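
combine_parallel_data can now remap file paths recorded elsewhere through a PathAliases
object; coverage.control builds the same object from the [paths] section of the config
file. A minimal sketch (the remapped path is illustrative):

    from coverage.data import CoverageData
    from coverage.files import PathAliases

    aliases = PathAliases()
    aliases.add("/buildbot/slave/project/build/src", "src")   # remote path -> local path

    data = CoverageData()                     # default ".coverage" file prefix
    data.combine_parallel_data(aliases=aliases)
    data.write_file(".coverage")
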
diff --git a/coverage/execfile.py b/coverage/execfile.py
index 333163f8..587c2d3c 100644
--- a/coverage/execfile.py
+++ b/coverage/execfile.py
@@ -2,7 +2,7 @@
import imp, os, sys
-from coverage.backward import exec_code_object
+from coverage.backward import exec_code_object, open_source
from coverage.misc import NoSource, ExceptionDuringRun
@@ -14,12 +14,68 @@ except KeyError:
BUILTINS = sys.modules['builtins']
-def run_python_file(filename, args):
+def rsplit1(s, sep):
+ """The same as s.rsplit(sep, 1), but works in 2.3"""
+ parts = s.split(sep)
+ return sep.join(parts[:-1]), parts[-1]
+
+
+def run_python_module(modulename, args):
+ """Run a python module, as though with ``python -m name args...``.
+
+ `modulename` is the name of the module, possibly a dot-separated name.
+ `args` is the argument array to present as sys.argv, including the first
+ element naming the module being executed.
+
+ """
+ openfile = None
+ glo, loc = globals(), locals()
+ try:
+ try:
+ # Search for the module - inside its parent package, if any - using
+ # standard import mechanics.
+ if '.' in modulename:
+ packagename, name = rsplit1(modulename, '.')
+ package = __import__(packagename, glo, loc, ['__path__'])
+ searchpath = package.__path__
+ else:
+ packagename, name = None, modulename
+ searchpath = None # "top-level search" in imp.find_module()
+ openfile, pathname, _ = imp.find_module(name, searchpath)
+
+ # Complain if this is a magic non-file module.
+ if openfile is None and pathname is None:
+ raise NoSource(
+ "module does not live in a file: %r" % modulename
+ )
+
+ # If `modulename` is actually a package, not a mere module, then we
+ # pretend to be Python 2.7 and try running its __main__.py script.
+ if openfile is None:
+ packagename = modulename
+ name = '__main__'
+ package = __import__(packagename, glo, loc, ['__path__'])
+ searchpath = package.__path__
+ openfile, pathname, _ = imp.find_module(name, searchpath)
+ except ImportError:
+ _, err, _ = sys.exc_info()
+ raise NoSource(str(err))
+ finally:
+ if openfile:
+ openfile.close()
+
+ # Finally, hand the file off to run_python_file for execution.
+ args[0] = pathname
+ run_python_file(pathname, args, package=packagename)
+
+
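
A rough calling sketch for the new entry point; "mypkg.tool" and its flag are hypothetical names, used only to show the argv convention from the docstring:

    from coverage.execfile import run_python_module

    # As with "python -m", args[0] names the module; it is rewritten to the
    # module's file path before run_python_file() takes over.
    run_python_module("mypkg.tool", ["mypkg.tool", "--verbose"])
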
+def run_python_file(filename, args, package=None):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
- element representing the file being executed.
+ element naming the file being executed. `package` is the name of the
+ enclosing package, if any.
"""
# Create a module to serve as __main__
@@ -27,24 +83,34 @@ def run_python_file(filename, args):
main_mod = imp.new_module('__main__')
sys.modules['__main__'] = main_mod
main_mod.__file__ = filename
+ if package:
+ main_mod.__package__ = package
main_mod.__builtins__ = BUILTINS
# Set sys.argv and the first path element properly.
old_argv = sys.argv
old_path0 = sys.path[0]
sys.argv = args
- sys.path[0] = os.path.dirname(filename)
+ if package:
+ sys.path[0] = ''
+ else:
+ sys.path[0] = os.path.abspath(os.path.dirname(filename))
try:
# Open the source file.
try:
- source = open(filename, 'rU').read()
+ source_file = open_source(filename)
except IOError:
raise NoSource("No file to run: %r" % filename)
+ try:
+ source = source_file.read()
+ finally:
+ source_file.close()
+
# We have the source. `compile` still needs the last line to be clean,
# so make sure it is, then compile a code object from it.
- if source[-1] != '\n':
+ if not source or source[-1] != '\n':
source += '\n'
code = compile(source, filename, "exec")
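
And the corresponding sketch for run_python_file(); the script path is hypothetical:

    from coverage.execfile import run_python_file

    # Runs the script as if it were __main__, with sys.argv and sys.path[0]
    # arranged as in the function above.
    run_python_file("scripts/build.py", ["scripts/build.py", "--quiet"])
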
diff --git a/coverage/files.py b/coverage/files.py
index 9a8ac564..40af7bf7 100644
--- a/coverage/files.py
+++ b/coverage/files.py
@@ -1,22 +1,20 @@
"""File wrangling."""
-import fnmatch, os, sys
+from coverage.backward import to_string
+from coverage.misc import CoverageException
+import fnmatch, os, os.path, re, sys
class FileLocator(object):
"""Understand how filenames work."""
def __init__(self):
# The absolute path to our current directory.
- self.relative_dir = self.abs_file(os.curdir) + os.sep
+ self.relative_dir = os.path.normcase(abs_file(os.curdir) + os.sep)
# Cache of results of calling the canonical_filename() method, to
# avoid duplicating work.
self.canonical_filename_cache = {}
- def abs_file(self, filename):
- """Return the absolute normalized form of `filename`."""
- return os.path.normcase(os.path.abspath(os.path.realpath(filename)))
-
def relative_filename(self, filename):
"""Return the relative form of `filename`.
@@ -24,8 +22,9 @@ class FileLocator(object):
`FileLocator` was constructed.
"""
- if filename.startswith(self.relative_dir):
- filename = filename.replace(self.relative_dir, "")
+ fnorm = os.path.normcase(filename)
+ if fnorm.startswith(self.relative_dir):
+ filename = filename[len(self.relative_dir):]
return filename
def canonical_filename(self, filename):
@@ -47,7 +46,7 @@ class FileLocator(object):
if os.path.exists(g):
f = g
break
- cf = self.abs_file(f)
+ cf = abs_file(f)
self.canonical_filename_cache[filename] = cf
return self.canonical_filename_cache[filename]
@@ -72,12 +71,76 @@ class FileLocator(object):
data = zi.get_data(parts[1])
except IOError:
continue
- if sys.version_info >= (3, 0):
- data = data.decode('utf8') # TODO: How to do this properly?
- return data
+ return to_string(data)
return None
+if sys.platform == 'win32':
+
+ def actual_path(path):
+ """Get the actual path of `path`, including the correct case."""
+ if path in actual_path.cache:
+ return actual_path.cache[path]
+
+ head, tail = os.path.split(path)
+ if not tail:
+ actpath = head
+ elif not head:
+ actpath = tail
+ else:
+ head = actual_path(head)
+ if head in actual_path.list_cache:
+ files = actual_path.list_cache[head]
+ else:
+ try:
+ files = os.listdir(head)
+ except OSError:
+ files = []
+ actual_path.list_cache[head] = files
+ normtail = os.path.normcase(tail)
+ for f in files:
+ if os.path.normcase(f) == normtail:
+ tail = f
+ break
+ actpath = os.path.join(head, tail)
+ actual_path.cache[path] = actpath
+ return actpath
+
+ actual_path.cache = {}
+ actual_path.list_cache = {}
+
+else:
+ def actual_path(filename):
+ """The actual path for non-Windows platforms."""
+ return filename
+
+def abs_file(filename):
+ """Return the absolute normalized form of `filename`."""
+ path = os.path.abspath(os.path.realpath(filename))
+ path = actual_path(path)
+ return path
+
+
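
A small illustration of the two helpers; the paths are hypothetical, and on non-Windows platforms actual_path() simply returns its argument:

    from coverage.files import abs_file, actual_path

    print(actual_path(r"c:\work\COVERAGE\setup.py"))    # on Windows: the on-disk casing, e.g. c:\work\coverage\setup.py
    print(abs_file("coverage/../coverage/files.py"))    # absolute, realpath-resolved form
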
+def prep_patterns(patterns):
+ """Prepare the file patterns for use in a `FnmatchMatcher`.
+
+ If a pattern starts with a wildcard, it is used as a pattern
+ as-is. If it does not start with a wildcard, then it is made
+ absolute with the current directory.
+
+ If `patterns` is None, an empty list is returned.
+
+ """
+ patterns = patterns or []
+ prepped = []
+ for p in patterns:
+ if p.startswith("*") or p.startswith("?"):
+ prepped.append(p)
+ else:
+ prepped.append(abs_file(p))
+ return prepped
+
+
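
For example (the second pattern and the resulting absolute path are hypothetical; it gets anchored to whatever the current directory is):

    from coverage.files import prep_patterns

    print(prep_patterns(["*/site-packages/*", "tests/*.py"]))
    # ['*/site-packages/*', '/home/me/proj/tests/*.py']  -- second entry made absolute
    print(prep_patterns(None))
    # []
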
class TreeMatcher(object):
"""A matcher for files in a tree."""
def __init__(self, directories):
@@ -119,14 +182,112 @@ class FnmatchMatcher(object):
return False
+def sep(s):
+ """Find the path separator used in this string, or os.sep if none."""
+ sep_match = re.search(r"[\\/]", s)
+ if sep_match:
+ the_sep = sep_match.group(0)
+ else:
+ the_sep = os.sep
+ return the_sep
+
+
+class PathAliases(object):
+ """A collection of aliases for paths.
+
+ When combining data files from remote machines, often the paths to source
+ code are different, for example, due to OS differences, or because of
+ serialized checkouts on continuous integration machines.
+
+ A `PathAliases` object tracks a list of pattern/result pairs, and can
+ map a path through those aliases to produce a unified path.
+
+ `locator` is a FileLocator that is used to canonicalize the results.
+
+ """
+ def __init__(self, locator=None):
+ self.aliases = []
+ self.locator = locator
+
+ def add(self, pattern, result):
+ """Add the `pattern`/`result` pair to the list of aliases.
+
+ `pattern` is an `fnmatch`-style pattern. `result` is a simple
+ string. When mapping paths, if a path starts with a match against
+ `pattern`, then that match is replaced with `result`. This models
+ isomorphic source trees being rooted at different places on two
+ different machines.
+
+ `pattern` can't end with a wildcard component, since that would
+ match an entire tree, and not just its root.
+
+ """
+ # The pattern can't end with a wildcard component.
+ pattern = pattern.rstrip(r"\/")
+ if pattern.endswith("*"):
+ raise CoverageException("Pattern must not end with wildcards.")
+ pattern_sep = sep(pattern)
+ pattern += pattern_sep
+
+ # Make a regex from the pattern. fnmatch always adds a \Z or $ to
+ # match the whole string, which we don't want.
+ regex_pat = fnmatch.translate(pattern).replace(r'\Z(', '(')
+ if regex_pat.endswith("$"):
+ regex_pat = regex_pat[:-1]
+ # We want */a/b.py to match on Windows too, so change slash to match
+ # either separator.
+ regex_pat = regex_pat.replace(r"\/", r"[\\/]")
+ # We want case-insensitive matching, so add that flag.
+ regex = re.compile(r"(?i)" + regex_pat)
+
+ # Normalize the result: it must end with a path separator.
+ result_sep = sep(result)
+ result = result.rstrip(r"\/") + result_sep
+ self.aliases.append((regex, result, pattern_sep, result_sep))
+
+ def map(self, path):
+ """Map `path` through the aliases.
+
+ `path` is checked against all of the patterns. The first pattern to
+ match is used to replace the root of the path with the result root.
+ Only one pattern is ever used. If no patterns match, `path` is
+ returned unchanged.
+
+ The separator style in the result is made to match that of the result
+ in the alias.
+
+ """
+ for regex, result, pattern_sep, result_sep in self.aliases:
+ m = regex.match(path)
+ if m:
+ new = path.replace(m.group(0), result)
+ if pattern_sep != result_sep:
+ new = new.replace(pattern_sep, result_sep)
+ if self.locator:
+ new = self.locator.canonical_filename(new)
+ return new
+ return path
+
+
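
A usage sketch of the alias mapping, with hypothetical CI and local paths:

    from coverage.files import PathAliases

    aliases = PathAliases()
    aliases.add("/jenkins/build/*/src", "./src")
    print(aliases.map("/jenkins/build/1234/src/module.py"))   # ./src/module.py
    print(aliases.map("/somewhere/else.py"))                  # unchanged: no pattern matched
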
def find_python_files(dirname):
- """Yield all of the importable Python files in `dirname`, recursively."""
- for dirpath, dirnames, filenames in os.walk(dirname, topdown=True):
- if '__init__.py' not in filenames:
+ """Yield all of the importable Python files in `dirname`, recursively.
+
+ To be importable, the files have to be in a directory with a __init__.py,
+ except for `dirname` itself, which isn't required to have one. The
+ assumption is that `dirname` was specified directly, so the user knows
+ best, but subdirectories are checked for a __init__.py to be sure we only
+ find the importable files.
+
+ """
+ for i, (dirpath, dirnames, filenames) in enumerate(os.walk(dirname)):
+ if i > 0 and '__init__.py' not in filenames:
# If a directory doesn't have __init__.py, then it isn't
# importable and neither are its files
del dirnames[:]
continue
for filename in filenames:
- if fnmatch.fnmatch(filename, "*.py"):
+ # We're only interested in files that look like reasonable Python
+ # files: Must end with .py, and must not have certain funny
+ # characters that probably mean they are editor junk.
+ if re.match(r"^[^.#~!$@%^&*()+=,]+\.py$", filename):
yield os.path.join(dirpath, filename)
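
A usage sketch; "myproj" is a hypothetical tree. Editor droppings such as "foo.py~" or ".#foo.py" fail the filename regex above, and subdirectories without an __init__.py are pruned:

    from coverage.files import find_python_files

    for path in find_python_files("myproj"):
        print(path)
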
diff --git a/coverage/fullcoverage/encodings.py b/coverage/fullcoverage/encodings.py
new file mode 100644
index 00000000..6a258d67
--- /dev/null
+++ b/coverage/fullcoverage/encodings.py
@@ -0,0 +1,57 @@
+"""Imposter encodings module that installs a coverage-style tracer.
+
+This is NOT the encodings module; it is an imposter that sets up tracing
+instrumentation and then replaces itself with the real encodings module.
+
+If the directory that holds this file is placed first in the PYTHONPATH when
+using "coverage" to run Python's tests, then this file will become the very
+first module imported by the internals of Python 3. It installs a
+coverage-compatible trace function that can watch Standard Library modules
+execute from the very earliest stages of Python's own boot process. This fixes
+a problem with coverage - that it starts too late to trace the coverage of many
+of the most fundamental modules in the Standard Library.
+
+"""
+
+import sys
+
+class FullCoverageTracer(object):
+ def __init__(self):
+ # `traces` is a list of trace events. Frames are tricky: the same
+ # frame object is used for a whole scope, with new line numbers
+ # written into it. So in one scope, all the frame objects are the
+ # same object, and will eventually all point to the last line
+ # executed. So we keep the line numbers alongside the frames.
+ # The list looks like:
+ #
+ # traces = [
+ # ((frame, event, arg), lineno), ...
+ # ]
+ #
+ self.traces = []
+
+ def fullcoverage_trace(self, *args):
+ frame, event, arg = args
+ self.traces.append((args, frame.f_lineno))
+ return self.fullcoverage_trace
+
+sys.settrace(FullCoverageTracer().fullcoverage_trace)
+
+# In coverage/files.py is actual_filename(), which uses glob.glob. I don't
+# understand why, but that use of glob borks everything if fullcoverage is in
+# effect. So here we make an ugly hail-mary pass to switch off glob.glob over
+# there. This means when using fullcoverage, Windows path names will not be
+# their actual case.
+
+#sys.fullcoverage = True
+
+# Finally, remove our own directory from sys.path; remove ourselves from
+# sys.modules; and re-import "encodings", which will be the real package
+# this time. Note that the delete from sys.modules dictionary has to
+# happen last, since all of the symbols in this module will become None
+# at that exact moment, including "sys".
+
+parentdir = max(filter(__file__.startswith, sys.path), key=len)
+sys.path.remove(parentdir)
+del sys.modules['encodings']
+import encodings
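
A rough sketch of how the imposter gets wired in, assuming a checkout at a hypothetical path: its directory is placed first on PYTHONPATH so that, per the docstring above, Python 3's own early "import encodings" loads this file before anything else runs:

    import os, subprocess, sys

    env = dict(os.environ)
    env["PYTHONPATH"] = (
        "/path/to/coverage/fullcoverage" + os.pathsep + env.get("PYTHONPATH", "")
    )
    # The child interpreter boots with the early trace function installed.
    subprocess.call([sys.executable, "-c", "print('traced from the start')"], env=env)
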
diff --git a/coverage/html.py b/coverage/html.py
index 76e28907..ed8920f2 100644
--- a/coverage/html.py
+++ b/coverage/html.py
@@ -1,16 +1,18 @@
"""HTML reporting for Coverage."""
-import os, re, shutil
+import os, re, shutil, sys
-from coverage import __url__, __version__ # pylint: disable-msg=W0611
-from coverage.misc import CoverageException
-from coverage.phystokens import source_token_lines
+import coverage
+from coverage.backward import pickle
+from coverage.misc import CoverageException, Hasher
+from coverage.phystokens import source_token_lines, source_encoding
from coverage.report import Reporter
+from coverage.results import Numbers
from coverage.templite import Templite
# Disable pylint msg W0612, because a bunch of variables look unused, but
# they're accessed in a Templite context via locals().
-# pylint: disable-msg=W0612
+# pylint: disable=W0612
def data_filename(fname):
"""Return the path to a data file of ours."""
@@ -18,7 +20,11 @@ def data_filename(fname):
def data(fname):
"""Return the contents of a data file of ours."""
- return open(data_filename(fname)).read()
+ data_file = open(data_filename(fname))
+ try:
+ return data_file.read()
+ finally:
+ data_file.close()
class HtmlReporter(Reporter):
@@ -28,30 +34,60 @@ class HtmlReporter(Reporter):
STATIC_FILES = [
"style.css",
"jquery-1.4.3.min.js",
- "jquery.tablesorter.min.js",
"jquery.hotkeys.js",
+ "jquery.isonscreen.js",
+ "jquery.tablesorter.min.js",
"coverage_html.js",
+ "keybd_closed.png",
+ "keybd_open.png",
]
- def __init__(self, coverage, ignore_errors=False):
- super(HtmlReporter, self).__init__(coverage, ignore_errors)
+ def __init__(self, cov, config):
+ super(HtmlReporter, self).__init__(cov, config)
self.directory = None
- self.source_tmpl = Templite(data("htmlfiles/pyfile.html"), globals())
+ self.template_globals = {
+ 'escape': escape,
+ 'title': self.config.html_title,
+ '__url__': coverage.__url__,
+ '__version__': coverage.__version__,
+ }
+ self.source_tmpl = Templite(
+ data("htmlfiles/pyfile.html"), self.template_globals
+ )
+
+ self.coverage = cov
self.files = []
- self.arcs = coverage.data.has_arcs()
+ self.arcs = self.coverage.data.has_arcs()
+ self.status = HtmlStatus()
+ self.extra_css = None
+ self.totals = Numbers()
- def report(self, morfs, config=None):
+ def report(self, morfs):
"""Generate an HTML report for `morfs`.
- `morfs` is a list of modules or filenames. `config` is a
- CoverageConfig instance.
+ `morfs` is a list of modules or filenames.
"""
- assert config.html_dir, "must provide a directory for html reporting"
+ assert self.config.html_dir, "must give a directory for html reporting"
+
+ # Read the status data.
+ self.status.read(self.config.html_dir)
+
+ # Check that this run used the same settings as the last run.
+ m = Hasher()
+ m.update(self.config)
+ these_settings = m.digest()
+ if self.status.settings_hash() != these_settings:
+ self.status.reset()
+ self.status.set_settings_hash(these_settings)
+
+ # The user may have extra CSS they want copied.
+ if self.config.extra_css:
+ self.extra_css = os.path.basename(self.config.extra_css)
# Process all the files.
- self.report_files(self.html_file, morfs, config, config.html_dir)
+ self.report_files(self.html_file, morfs, self.config.html_dir)
if not self.files:
raise CoverageException("No data to report.")
@@ -59,22 +95,73 @@ class HtmlReporter(Reporter):
# Write the index file.
self.index_file()
- # Create the once-per-directory files.
+ self.make_local_static_report_files()
+
+ return self.totals.pc_covered
+
+ def make_local_static_report_files(self):
+ """Make local instances of static files for HTML report."""
+ # The files we provide must always be copied.
for static in self.STATIC_FILES:
shutil.copyfile(
data_filename("htmlfiles/" + static),
os.path.join(self.directory, static)
)
- def html_file(self, cu, analysis):
- """Generate an HTML file for one source file."""
+ # The user may have extra CSS they want copied.
+ if self.extra_css:
+ shutil.copyfile(
+ self.config.extra_css,
+ os.path.join(self.directory, self.extra_css)
+ )
- source = cu.source_file().read()
+ def write_html(self, fname, html):
+ """Write `html` to `fname`, properly encoded."""
+ fout = open(fname, "wb")
+ try:
+ fout.write(html.encode('ascii', 'xmlcharrefreplace'))
+ finally:
+ fout.close()
+
+ def file_hash(self, source, cu):
+ """Compute a hash that changes if the file needs to be re-reported."""
+ m = Hasher()
+ m.update(source)
+ self.coverage.data.add_to_hash(cu.filename, m)
+ return m.digest()
+ def html_file(self, cu, analysis):
+ """Generate an HTML file for one source file."""
+ source_file = cu.source_file()
+ try:
+ source = source_file.read()
+ finally:
+ source_file.close()
+
+ # Find out if the file on disk is already correct.
+ flat_rootname = cu.flat_rootname()
+ this_hash = self.file_hash(source, cu)
+ that_hash = self.status.file_hash(flat_rootname)
+ if this_hash == that_hash:
+ # Nothing has changed to require the file to be reported again.
+ self.files.append(self.status.index_info(flat_rootname))
+ return
+
+ self.status.set_file_hash(flat_rootname, this_hash)
+
+ # If need be, determine the encoding of the source file. We use it
+ # later to properly write the HTML.
+ if sys.version_info < (3, 0):
+ encoding = source_encoding(source)
+ # Some UTF8 files have the dreaded UTF8 BOM. If so, junk it.
+ if encoding.startswith("utf-8") and source[:3] == "\xef\xbb\xbf":
+ source = source[3:]
+ encoding = "utf-8"
+
+ # Get the numbers for this file.
nums = analysis.numbers
missing_branch_arcs = analysis.missing_branch_arcs()
- n_par = 0 # accumulated below.
arcs = self.arcs
# These classes determine which lines are highlighted by default.
@@ -99,7 +186,6 @@ class HtmlReporter(Reporter):
line_class.append(c_mis)
elif self.arcs and lineno in missing_branch_arcs:
line_class.append(c_par)
- n_par += 1
annlines = []
for b in missing_branch_arcs[lineno]:
if b < 0:
@@ -134,33 +220,125 @@ class HtmlReporter(Reporter):
})
# Write the HTML page for this file.
- html_filename = cu.flat_rootname() + ".html"
+ html_filename = flat_rootname + ".html"
html_path = os.path.join(self.directory, html_filename)
+ extra_css = self.extra_css
+
html = spaceless(self.source_tmpl.render(locals()))
- fhtml = open(html_path, 'w')
- fhtml.write(html)
- fhtml.close()
+ if sys.version_info < (3, 0):
+ html = html.decode(encoding)
+ self.write_html(html_path, html)
# Save this file's information for the index file.
- self.files.append({
+ index_info = {
'nums': nums,
- 'par': n_par,
'html_filename': html_filename,
- 'cu': cu,
- })
+ 'name': cu.name,
+ }
+ self.files.append(index_info)
+ self.status.set_index_info(flat_rootname, index_info)
def index_file(self):
"""Write the index.html file for this report."""
- index_tmpl = Templite(data("htmlfiles/index.html"), globals())
+ index_tmpl = Templite(
+ data("htmlfiles/index.html"), self.template_globals
+ )
files = self.files
arcs = self.arcs
- totals = sum([f['nums'] for f in files])
-
- fhtml = open(os.path.join(self.directory, "index.html"), "w")
- fhtml.write(index_tmpl.render(locals()))
- fhtml.close()
+ self.totals = totals = sum([f['nums'] for f in files])
+ extra_css = self.extra_css
+
+ html = index_tmpl.render(locals())
+ if sys.version_info < (3, 0):
+ html = html.decode("utf-8")
+ self.write_html(
+ os.path.join(self.directory, "index.html"),
+ html
+ )
+
+ # Write the latest hashes for next time.
+ self.status.write(self.directory)
+
+
+class HtmlStatus(object):
+ """The status information we keep to support incremental reporting."""
+
+ STATUS_FILE = "status.dat"
+ STATUS_FORMAT = 1
+
+ def __init__(self):
+ self.reset()
+
+ def reset(self):
+ """Initialize to empty."""
+ self.settings = ''
+ self.files = {}
+
+ def read(self, directory):
+ """Read the last status in `directory`."""
+ usable = False
+ try:
+ status_file = os.path.join(directory, self.STATUS_FILE)
+ fstatus = open(status_file, "rb")
+ try:
+ status = pickle.load(fstatus)
+ finally:
+ fstatus.close()
+ except (IOError, ValueError):
+ usable = False
+ else:
+ usable = True
+ if status['format'] != self.STATUS_FORMAT:
+ usable = False
+ elif status['version'] != coverage.__version__:
+ usable = False
+
+ if usable:
+ self.files = status['files']
+ self.settings = status['settings']
+ else:
+ self.reset()
+
+ def write(self, directory):
+ """Write the current status to `directory`."""
+ status_file = os.path.join(directory, self.STATUS_FILE)
+ status = {
+ 'format': self.STATUS_FORMAT,
+ 'version': coverage.__version__,
+ 'settings': self.settings,
+ 'files': self.files,
+ }
+ fout = open(status_file, "wb")
+ try:
+ pickle.dump(status, fout)
+ finally:
+ fout.close()
+
+ def settings_hash(self):
+ """Get the hash of the coverage.py settings."""
+ return self.settings
+
+ def set_settings_hash(self, settings):
+ """Set the hash of the coverage.py settings."""
+ self.settings = settings
+
+ def file_hash(self, fname):
+ """Get the hash of `fname`'s contents."""
+ return self.files.get(fname, {}).get('hash', '')
+
+ def set_file_hash(self, fname, val):
+ """Set the hash of `fname`'s contents."""
+ self.files.setdefault(fname, {})['hash'] = val
+
+ def index_info(self, fname):
+ """Get the information for index.html for `fname`."""
+ return self.files.get(fname, {}).get('index', {})
+
+ def set_index_info(self, fname, info):
+ """Set the information for index.html for `fname`."""
+ self.files.setdefault(fname, {})['index'] = info
# Helpers for templates and generating HTML
@@ -185,5 +363,5 @@ def spaceless(html):
Get rid of some.
"""
- html = re.sub(">\s+<p ", ">\n<p ", html)
+ html = re.sub(r">\s+<p ", ">\n<p ", html)
return html
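
A small sketch of inspecting the incremental-report bookkeeping that HtmlStatus.write() leaves behind; "htmlcov" is a hypothetical output directory, and the keys are the ones in the status dict above:

    import os, pickle

    fstatus = open(os.path.join("htmlcov", "status.dat"), "rb")
    try:
        status = pickle.load(fstatus)
    finally:
        fstatus.close()

    print("format %s, written by coverage %s" % (status["format"], status["version"]))
    for fname, info in status["files"].items():
        print("%s -> %s" % (fname, info.get("index", {}).get("html_filename")))
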
diff --git a/coverage/htmlfiles/coverage_html.js b/coverage/htmlfiles/coverage_html.js
index a3519250..b24006d2 100644
--- a/coverage/htmlfiles/coverage_html.js
+++ b/coverage/htmlfiles/coverage_html.js
@@ -1,34 +1,54 @@
// Coverage.py HTML report browser code.
+/*jslint browser: true, sloppy: true, vars: true, plusplus: true, maxerr: 50, indent: 4 */
+/*global coverage: true, document, window, $ */
coverage = {};
// Find all the elements with shortkey_* class, and use them to assign a shortcut key.
-coverage.assign_shortkeys = function() {
- $("*[class*='shortkey_']").each(function(i, e) {
- console.log(i, e);
- $.each($(e).attr("class").split(" "), function(i, c) {
+coverage.assign_shortkeys = function () {
+ $("*[class*='shortkey_']").each(function (i, e) {
+ $.each($(e).attr("class").split(" "), function (i, c) {
if (/^shortkey_/.test(c)) {
- $(document).bind('keydown', c.substr(9), function() {
+ $(document).bind('keydown', c.substr(9), function () {
$(e).click();
});
}
});
});
-}
+};
+
+// Create the events for the help panel.
+coverage.wire_up_help_panel = function () {
+ $("#keyboard_icon").click(function () {
+ // Show the help panel, and position it so the keyboard icon in the
+ // panel is in the same place as the keyboard icon in the header.
+ $(".help_panel").show();
+ var koff = $("#keyboard_icon").offset();
+ var poff = $("#panel_icon").position();
+ $(".help_panel").offset({
+ top: koff.top-poff.top,
+ left: koff.left-poff.left
+ });
+ });
+ $("#panel_icon").click(function () {
+ $(".help_panel").hide();
+ });
+};
// Loaded on index.html
-coverage.index_ready = function($) {
+coverage.index_ready = function ($) {
// Look for a cookie containing previous sort settings:
- sort_list = [];
- cookie_name = "COVERAGE_INDEX_SORT";
+ var sort_list = [];
+ var cookie_name = "COVERAGE_INDEX_SORT";
+ var i;
// This almost makes it worth installing the jQuery cookie plugin:
if (document.cookie.indexOf(cookie_name) > -1) {
- cookies = document.cookie.split(";");
- for (var i=0; i < cookies.length; i++) {
- parts = cookies[i].split("=")
+ var cookies = document.cookie.split(";");
+ for (i = 0; i < cookies.length; i++) {
+ var parts = cookies[i].split("=");
- if ($.trim(parts[0]) == cookie_name && parts[1]) {
+ if ($.trim(parts[0]) === cookie_name && parts[1]) {
sort_list = eval("[[" + parts[1] + "]]");
break;
}
@@ -41,8 +61,8 @@ coverage.index_ready = function($) {
id: "persistentSort",
// Format is called by the widget before displaying:
- format: function(table) {
- if (table.config.sortList.length == 0 && sort_list.length > 0) {
+ format: function (table) {
+ if (table.config.sortList.length === 0 && sort_list.length > 0) {
// This table hasn't been sorted before - we'll use
// our stored settings:
$(table).trigger('sorton', [sort_list]);
@@ -58,11 +78,11 @@ coverage.index_ready = function($) {
// Configure our tablesorter to handle the variable number of
// columns produced depending on report options:
- var headers = {};
+ var headers = [];
var col_count = $("table.index > thead > tr > th").length;
headers[0] = { sorter: 'text' };
- for (var i = 1; i < col_count-1; i++) {
+ for (i = 1; i < col_count-1; i++) {
headers[i] = { sorter: 'digit' };
}
headers[col_count-1] = { sorter: 'percent' };
@@ -74,26 +94,44 @@ coverage.index_ready = function($) {
});
coverage.assign_shortkeys();
+ coverage.wire_up_help_panel();
// Watch for page unload events so we can save the final sort settings:
- $(window).unload(function() {
- document.cookie = cookie_name + "=" + sort_list.toString() + "; path=/"
+ $(window).unload(function () {
+ document.cookie = cookie_name + "=" + sort_list.toString() + "; path=/";
});
-}
+};
// -- pyfile stuff --
-coverage.pyfile_ready = function($) {
+coverage.pyfile_ready = function ($) {
// If we're directed to a particular line number, highlight the line.
var frag = location.hash;
- if (frag.length > 2 && frag[1] == 'n') {
+ if (frag.length > 2 && frag[1] === 'n') {
$(frag).addClass('highlight');
+ coverage.set_sel(parseInt(frag.substr(2), 10));
}
+ else {
+ coverage.set_sel(0);
+ }
+
+ $(document)
+ .bind('keydown', 'j', coverage.to_next_chunk_nicely)
+ .bind('keydown', 'k', coverage.to_prev_chunk_nicely)
+ .bind('keydown', '0', coverage.to_top)
+ .bind('keydown', '1', coverage.to_first_chunk)
+ ;
+
+ $(".button_toggle_run").click(function (evt) {coverage.toggle_lines(evt.target, "run");});
+ $(".button_toggle_exc").click(function (evt) {coverage.toggle_lines(evt.target, "exc");});
+ $(".button_toggle_mis").click(function (evt) {coverage.toggle_lines(evt.target, "mis");});
+ $(".button_toggle_par").click(function (evt) {coverage.toggle_lines(evt.target, "par");});
coverage.assign_shortkeys();
-}
+ coverage.wire_up_help_panel();
+};
-coverage.toggle_lines = function(btn, cls) {
+coverage.toggle_lines = function (btn, cls) {
btn = $(btn);
var hide = "hide_"+cls;
if (btn.hasClass(hide)) {
@@ -104,5 +142,235 @@ coverage.toggle_lines = function(btn, cls) {
$("#source ."+cls).addClass(hide);
btn.addClass(hide);
}
-}
+};
+
+// Return the nth line div.
+coverage.line_elt = function (n) {
+ return $("#t" + n);
+};
+
+// Return the nth line number div.
+coverage.num_elt = function (n) {
+ return $("#n" + n);
+};
+
+// Return the container of all the code.
+coverage.code_container = function () {
+ return $(".linenos");
+};
+
+// Set the selection. b and e are line numbers.
+coverage.set_sel = function (b, e) {
+ // The first line selected.
+ coverage.sel_begin = b;
+ // The next line not selected.
+ coverage.sel_end = (e === undefined) ? b+1 : e;
+};
+
+coverage.to_top = function () {
+ coverage.set_sel(0, 1);
+ coverage.scroll_window(0);
+};
+
+coverage.to_first_chunk = function () {
+ coverage.set_sel(0, 1);
+ coverage.to_next_chunk();
+};
+
+coverage.is_transparent = function (color) {
+ // Different browsers return different colors for "none".
+ return color === "transparent" || color === "rgba(0, 0, 0, 0)";
+};
+
+coverage.to_next_chunk = function () {
+ var c = coverage;
+
+ // Find the start of the next colored chunk.
+ var probe = c.sel_end;
+ while (true) {
+ var probe_line = c.line_elt(probe);
+ if (probe_line.length === 0) {
+ return;
+ }
+ var color = probe_line.css("background-color");
+ if (!c.is_transparent(color)) {
+ break;
+ }
+ probe++;
+ }
+
+ // There's a next chunk, `probe` points to it.
+ var begin = probe;
+
+ // Find the end of this chunk.
+ var next_color = color;
+ while (next_color === color) {
+ probe++;
+ probe_line = c.line_elt(probe);
+ next_color = probe_line.css("background-color");
+ }
+ c.set_sel(begin, probe);
+ c.show_selection();
+};
+
+coverage.to_prev_chunk = function () {
+ var c = coverage;
+
+ // Find the end of the prev colored chunk.
+ var probe = c.sel_begin-1;
+ var probe_line = c.line_elt(probe);
+ if (probe_line.length === 0) {
+ return;
+ }
+ var color = probe_line.css("background-color");
+ while (probe > 0 && c.is_transparent(color)) {
+ probe--;
+ probe_line = c.line_elt(probe);
+ if (probe_line.length === 0) {
+ return;
+ }
+ color = probe_line.css("background-color");
+ }
+
+ // There's a prev chunk, `probe` points to its last line.
+ var end = probe+1;
+
+ // Find the beginning of this chunk.
+ var prev_color = color;
+ while (prev_color === color) {
+ probe--;
+ probe_line = c.line_elt(probe);
+ prev_color = probe_line.css("background-color");
+ }
+ c.set_sel(probe+1, end);
+ c.show_selection();
+};
+
+// Return the line number of the line nearest pixel position pos
+coverage.line_at_pos = function (pos) {
+ var l1 = coverage.line_elt(1),
+ l2 = coverage.line_elt(2),
+ result;
+ if (l1.length && l2.length) {
+ var l1_top = l1.offset().top,
+ line_height = l2.offset().top - l1_top,
+ nlines = (pos - l1_top) / line_height;
+ if (nlines < 1) {
+ result = 1;
+ }
+ else {
+ result = Math.ceil(nlines);
+ }
+ }
+ else {
+ result = 1;
+ }
+ return result;
+};
+
+// Returns 0, 1, or 2: how many of the two ends of the selection are on
+// the screen right now?
+coverage.selection_ends_on_screen = function () {
+ if (coverage.sel_begin === 0) {
+ return 0;
+ }
+
+ var top = coverage.line_elt(coverage.sel_begin);
+ var next = coverage.line_elt(coverage.sel_end-1);
+
+ return (
+ (top.isOnScreen() ? 1 : 0) +
+ (next.isOnScreen() ? 1 : 0)
+ );
+};
+
+coverage.to_next_chunk_nicely = function () {
+ coverage.finish_scrolling();
+ if (coverage.selection_ends_on_screen() === 0) {
+ // The selection is entirely off the screen: select the top line on
+ // the screen.
+ var win = $(window);
+ coverage.select_line_or_chunk(coverage.line_at_pos(win.scrollTop()));
+ }
+ coverage.to_next_chunk();
+};
+
+coverage.to_prev_chunk_nicely = function () {
+ coverage.finish_scrolling();
+ if (coverage.selection_ends_on_screen() === 0) {
+ var win = $(window);
+ coverage.select_line_or_chunk(coverage.line_at_pos(win.scrollTop() + win.height()));
+ }
+ coverage.to_prev_chunk();
+};
+
+// Select line number lineno, or if it is in a colored chunk, select the
+// entire chunk
+coverage.select_line_or_chunk = function (lineno) {
+ var c = coverage;
+ var probe_line = c.line_elt(lineno);
+ if (probe_line.length === 0) {
+ return;
+ }
+ var the_color = probe_line.css("background-color");
+ if (!c.is_transparent(the_color)) {
+ // The line is in a highlighted chunk.
+ // Search backward for the first line.
+ var probe = lineno;
+ var color = the_color;
+ while (probe > 0 && color === the_color) {
+ probe--;
+ probe_line = c.line_elt(probe);
+ if (probe_line.length === 0) {
+ break;
+ }
+ color = probe_line.css("background-color");
+ }
+ var begin = probe + 1;
+
+ // Search forward for the last line.
+ probe = lineno;
+ color = the_color;
+ while (color === the_color) {
+ probe++;
+ probe_line = c.line_elt(probe);
+ color = probe_line.css("background-color");
+ }
+
+ coverage.set_sel(begin, probe);
+ }
+ else {
+ coverage.set_sel(lineno);
+ }
+};
+
+coverage.show_selection = function () {
+ var c = coverage;
+
+ // Highlight the lines in the chunk
+ c.code_container().find(".highlight").removeClass("highlight");
+ for (var probe = c.sel_begin; probe > 0 && probe < c.sel_end; probe++) {
+ c.num_elt(probe).addClass("highlight");
+ }
+
+ c.scroll_to_selection();
+};
+
+coverage.scroll_to_selection = function () {
+ // Scroll the page if the chunk isn't fully visible.
+ if (coverage.selection_ends_on_screen() < 2) {
+ // Need to move the page. The html,body trick makes it scroll in all
+ // browsers, got it from http://stackoverflow.com/questions/3042651
+ var top = coverage.line_elt(coverage.sel_begin);
+ var top_pos = parseInt(top.offset().top, 10);
+ coverage.scroll_window(top_pos - 30);
+ }
+};
+
+coverage.scroll_window = function (to_pos) {
+ $("html,body").animate({scrollTop: to_pos}, 200);
+};
+coverage.finish_scrolling = function () {
+ $("html,body").stop(true, true);
+};
diff --git a/coverage/htmlfiles/index.html b/coverage/htmlfiles/index.html
index f03c325e..c649a83c 100644
--- a/coverage/htmlfiles/index.html
+++ b/coverage/htmlfiles/index.html
@@ -2,8 +2,11 @@
<html>
<head>
<meta http-equiv='Content-Type' content='text/html; charset=utf-8'>
- <title>Coverage report</title>
+ <title>{{ title|escape }}</title>
<link rel='stylesheet' href='style.css' type='text/css'>
+ {% if extra_css %}
+ <link rel='stylesheet' href='{{ extra_css }}' type='text/css'>
+ {% endif %}
<script type='text/javascript' src='jquery-1.4.3.min.js'></script>
<script type='text/javascript' src='jquery.tablesorter.min.js'></script>
<script type='text/javascript' src='jquery.hotkeys.js'></script>
@@ -16,9 +19,28 @@
<div id='header'>
<div class='content'>
- <h1>Coverage report:
+ <h1>{{ title|escape }}:
<span class='pc_cov'>{{totals.pc_covered_str}}%</span>
</h1>
+ <img id='keyboard_icon' src='keybd_closed.png'>
+ </div>
+</div>
+
+<div class='help_panel'>
+ <img id='panel_icon' src='keybd_open.png'>
+ <p class='legend'>Hot-keys on this page</p>
+ <div>
+ <p class='keyhelp'>
+ <span class='key'>n</span>
+ <span class='key'>s</span>
+ <span class='key'>m</span>
+ <span class='key'>x</span>
+ {% if arcs %}
+ <span class='key'>b</span>
+ <span class='key'>p</span>
+ {% endif %}
+ <span class='key'>c</span> &nbsp; change column sorting
+ </p>
</div>
</div>
@@ -47,7 +69,7 @@
<td>{{totals.n_excluded}}</td>
{% if arcs %}
<td>{{totals.n_branches}}</td>
- <td>{{totals.n_missing_branches}}</td>
+ <td>{{totals.n_partial_branches}}</td>
{% endif %}
<td class='right'>{{totals.pc_covered_str}}%</td>
</tr>
@@ -55,13 +77,13 @@
<tbody>
{% for file in files %}
<tr class='file'>
- <td class='name left'><a href='{{file.html_filename}}'>{{file.cu.name}}</a></td>
+ <td class='name left'><a href='{{file.html_filename}}'>{{file.name}}</a></td>
<td>{{file.nums.n_statements}}</td>
<td>{{file.nums.n_missing}}</td>
<td>{{file.nums.n_excluded}}</td>
{% if arcs %}
<td>{{file.nums.n_branches}}</td>
- <td>{{file.nums.n_missing_branches}}</td>
+ <td>{{file.nums.n_partial_branches}}</td>
{% endif %}
<td class='right'>{{file.nums.pc_covered_str}}%</td>
</tr>
diff --git a/coverage/htmlfiles/jquery.isonscreen.js b/coverage/htmlfiles/jquery.isonscreen.js
new file mode 100644
index 00000000..0182ebd2
--- /dev/null
+++ b/coverage/htmlfiles/jquery.isonscreen.js
@@ -0,0 +1,53 @@
+/* Copyright (c) 2010
+ * @author Laurence Wheway
+ * Dual licensed under the MIT (http://www.opensource.org/licenses/mit-license.php)
+ * and GPL (http://www.opensource.org/licenses/gpl-license.php) licenses.
+ *
+ * @version 1.2.0
+ */
+(function($) {
+ jQuery.extend({
+ isOnScreen: function(box, container) {
+ // ensure numbers come in as integers (not strings) and remove 'px' if it's there
+ for(var i in box){box[i] = parseFloat(box[i])};
+ for(var i in container){container[i] = parseFloat(container[i])};
+
+ if(!container){
+ container = {
+ left: $(window).scrollLeft(),
+ top: $(window).scrollTop(),
+ width: $(window).width(),
+ height: $(window).height()
+ }
+ }
+
+ if( box.left+box.width-container.left > 0 &&
+ box.left < container.width+container.left &&
+ box.top+box.height-container.top > 0 &&
+ box.top < container.height+container.top
+ ) return true;
+ return false;
+ }
+ })
+
+
+ jQuery.fn.isOnScreen = function (container) {
+ for(var i in container){container[i] = parseFloat(container[i])};
+
+ if(!container){
+ container = {
+ left: $(window).scrollLeft(),
+ top: $(window).scrollTop(),
+ width: $(window).width(),
+ height: $(window).height()
+ }
+ }
+
+ if( $(this).offset().left+$(this).width()-container.left > 0 &&
+ $(this).offset().left < container.width+container.left &&
+ $(this).offset().top+$(this).height()-container.top > 0 &&
+ $(this).offset().top < container.height+container.top
+ ) return true;
+ return false;
+ }
+})(jQuery);
diff --git a/coverage/htmlfiles/keybd_closed.png b/coverage/htmlfiles/keybd_closed.png
new file mode 100755
index 00000000..f2b0418d
--- /dev/null
+++ b/coverage/htmlfiles/keybd_closed.png
Binary files differ
diff --git a/coverage/htmlfiles/keybd_open.png b/coverage/htmlfiles/keybd_open.png
new file mode 100755
index 00000000..a77961db
--- /dev/null
+++ b/coverage/htmlfiles/keybd_open.png
Binary files differ
diff --git a/coverage/htmlfiles/pyfile.html b/coverage/htmlfiles/pyfile.html
index d9d0e4c6..525939f8 100644
--- a/coverage/htmlfiles/pyfile.html
+++ b/coverage/htmlfiles/pyfile.html
@@ -7,8 +7,12 @@
<meta http-equiv='X-UA-Compatible' content='IE=emulateIE7' />
<title>Coverage for {{cu.name|escape}}: {{nums.pc_covered_str}}%</title>
<link rel='stylesheet' href='style.css' type='text/css'>
+ {% if extra_css %}
+ <link rel='stylesheet' href='{{ extra_css }}' type='text/css'>
+ {% endif %}
<script type='text/javascript' src='jquery-1.4.3.min.js'></script>
<script type='text/javascript' src='jquery.hotkeys.js'></script>
+ <script type='text/javascript' src='jquery.isonscreen.js'></script>
<script type='text/javascript' src='coverage_html.js'></script>
<script type='text/javascript' charset='utf-8'>
jQuery(document).ready(coverage.pyfile_ready);
@@ -21,18 +25,42 @@
<h1>Coverage for <b>{{cu.name|escape}}</b> :
<span class='pc_cov'>{{nums.pc_covered_str}}%</span>
</h1>
+ <img id='keyboard_icon' src='keybd_closed.png'>
<h2 class='stats'>
- {{nums.n_statements}} statements
- <span class='{{c_run}} shortkey_r' onclick='coverage.toggle_lines(this, "run")'>{{nums.n_executed}} run</span>
- <span class='{{c_mis}} shortkey_m' onclick='coverage.toggle_lines(this, "mis")'>{{nums.n_missing}} missing</span>
- <span class='{{c_exc}} shortkey_x' onclick='coverage.toggle_lines(this, "exc")'>{{nums.n_excluded}} excluded</span>
+ {{nums.n_statements}} statements &nbsp;
+ <span class='{{c_run}} shortkey_r button_toggle_run'>{{nums.n_executed}} run</span>
+ <span class='{{c_mis}} shortkey_m button_toggle_mis'>{{nums.n_missing}} missing</span>
+ <span class='{{c_exc}} shortkey_x button_toggle_exc'>{{nums.n_excluded}} excluded</span>
{% if arcs %}
- <span class='{{c_par}} shortkey_p' onclick='coverage.toggle_lines(this, "par")'>{{n_par}} partial</span>
+ <span class='{{c_par}} shortkey_p button_toggle_par'>{{nums.n_partial_branches}} partial</span>
{% endif %}
</h2>
</div>
</div>
+<div class='help_panel'>
+ <img id='panel_icon' src='keybd_open.png'>
+ <p class='legend'>Hot-keys on this page</p>
+ <div>
+ <p class='keyhelp'>
+ <span class='key'>r</span>
+ <span class='key'>m</span>
+ <span class='key'>x</span>
+ <span class='key'>p</span> &nbsp; toggle line displays
+ </p>
+ <p class='keyhelp'>
+ <span class='key'>j</span>
+ <span class='key'>k</span> &nbsp; next/prev highlighted chunk
+ </p>
+ <p class='keyhelp'>
+ <span class='key'>0</span> &nbsp; (zero) top of page
+ </p>
+ <p class='keyhelp'>
+ <span class='key'>1</span> &nbsp; (one) first highlighted chunk
+ </p>
+ </div>
+</div>
+
<div id='source'>
<table cellspacing='0' cellpadding='0'>
<tr>
diff --git a/coverage/htmlfiles/style.css b/coverage/htmlfiles/style.css
index 9a06a2b4..811c6401 100644
--- a/coverage/htmlfiles/style.css
+++ b/coverage/htmlfiles/style.css
@@ -24,8 +24,8 @@ html>body {
/* Set base font size to 12/16 */
p {
- font-size: .75em; /* 12/16 */
- line-height: 1.3333em; /* 16/12 */
+ font-size: .75em; /* 12/16 */
+ line-height: 1.33333333em; /* 16/12 */
}
table {
@@ -102,6 +102,76 @@ h2.stats {
border-color: #999 #ccc #ccc #999;
}
+.stats span.run {
+ background: #ddffdd;
+}
+.stats span.exc {
+ background: #eeeeee;
+}
+.stats span.mis {
+ background: #ffdddd;
+}
+.stats span.hide_run {
+ background: #eeffee;
+}
+.stats span.hide_exc {
+ background: #f5f5f5;
+}
+.stats span.hide_mis {
+ background: #ffeeee;
+}
+.stats span.par {
+ background: #ffffaa;
+}
+.stats span.hide_par {
+ background: #ffffcc;
+}
+
+/* Help panel */
+#keyboard_icon {
+ float: right;
+ cursor: pointer;
+}
+
+.help_panel {
+ position: absolute;
+ background: #ffc;
+ padding: .5em;
+ border: 1px solid #883;
+ display: none;
+}
+
+#indexfile .help_panel {
+ width: 20em; height: 4em;
+}
+
+#pyfile .help_panel {
+ width: 16em; height: 8em;
+}
+
+.help_panel .legend {
+ font-style: italic;
+ margin-bottom: 1em;
+}
+
+#panel_icon {
+ float: right;
+ cursor: pointer;
+}
+
+.keyhelp {
+ margin: .75em;
+}
+
+.keyhelp .key {
+ border: 1px solid black;
+ border-color: #888 #333 #333 #888;
+ padding: .1em .35em;
+ font-family: monospace;
+ font-weight: bold;
+ background: #eee;
+}
+
/* Source file styles */
.linenos p {
text-align: right;
diff --git a/coverage/misc.py b/coverage/misc.py
index 4218536d..3ed854a7 100644
--- a/coverage/misc.py
+++ b/coverage/misc.py
@@ -1,5 +1,14 @@
"""Miscellaneous stuff for Coverage."""
+import errno
+import inspect
+import os
+import sys
+
+from coverage.backward import md5, sorted # pylint: disable=W0622
+from coverage.backward import string_class, to_bytes
+
+
def nice_pair(pair):
"""Make a nice string representation of a pair of numbers.
@@ -68,12 +77,71 @@ def bool_or_none(b):
return bool(b)
+def join_regex(regexes):
+ """Combine a list of regexes into one that matches any of them."""
+ if len(regexes) > 1:
+ return "(" + ")|(".join(regexes) + ")"
+ elif regexes:
+ return regexes[0]
+ else:
+ return ""
+
+
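
For example:

    from coverage.misc import join_regex

    print(join_regex([r"pragma:\s*no\s*cover", r"raise NotImplementedError"]))
    # (pragma:\s*no\s*cover)|(raise NotImplementedError)
    print(join_regex([r"only-one"]))    # only-one
    print(join_regex([]))               # prints an empty line
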
+def file_be_gone(path):
+ """Remove a file, and don't get annoyed if it doesn't exist."""
+ try:
+ os.remove(path)
+ except OSError:
+ _, e, _ = sys.exc_info()
+ if e.errno != errno.ENOENT:
+ raise
+
+
+class Hasher(object):
+ """Hashes Python data into md5."""
+ def __init__(self):
+ self.md5 = md5()
+
+ def update(self, v):
+ """Add `v` to the hash, recursively if needed."""
+ self.md5.update(to_bytes(str(type(v))))
+ if isinstance(v, string_class):
+ self.md5.update(to_bytes(v))
+ elif isinstance(v, (int, float)):
+ self.update(str(v))
+ elif isinstance(v, (tuple, list)):
+ for e in v:
+ self.update(e)
+ elif isinstance(v, dict):
+ keys = v.keys()
+ for k in sorted(keys):
+ self.update(k)
+ self.update(v[k])
+ else:
+ for k in dir(v):
+ if k.startswith('__'):
+ continue
+ a = getattr(v, k)
+ if inspect.isroutine(a):
+ continue
+ self.update(k)
+ self.update(a)
+
+ def digest(self):
+ """Retrieve the digest of the hash."""
+ return self.md5.digest()
+
+
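
A usage sketch of the recursive hasher; the values are arbitrary examples:

    from coverage.misc import Hasher

    h = Hasher()
    h.update("coverage/files.py")
    h.update([1, 2, 3])
    h.update({"timid": False, "branch": True})
    print(len(h.digest()))    # 16 -- a raw md5 digest
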
class CoverageException(Exception):
"""An exception specific to Coverage."""
pass
class NoSource(CoverageException):
- """Used to indicate we couldn't find the source for a module."""
+ """We couldn't find the source for a module."""
+ pass
+
+class NotPython(CoverageException):
+ """A source file turned out not to be parsable Python."""
pass
class ExceptionDuringRun(CoverageException):
diff --git a/coverage/parser.py b/coverage/parser.py
index ae618ce5..d894e61c 100644
--- a/coverage/parser.py
+++ b/coverage/parser.py
@@ -1,10 +1,12 @@
"""Code parsing for Coverage."""
-import glob, opcode, os, re, sys, token, tokenize
+import opcode, re, sys, token, tokenize
-from coverage.backward import set, sorted, StringIO # pylint: disable-msg=W0622
+from coverage.backward import set, sorted, StringIO # pylint: disable=W0622
+from coverage.backward import open_source
from coverage.bytecode import ByteCodes, CodeObjects
-from coverage.misc import nice_pair, CoverageException, NoSource, expensive
+from coverage.misc import nice_pair, expensive, join_regex
+from coverage.misc import CoverageException, NoSource, NotPython
class CodeParser(object):
@@ -13,7 +15,7 @@ class CodeParser(object):
def __init__(self, text=None, filename=None, exclude=None):
"""
Source can be provided as `text`, the text itself, or `filename`, from
- which text will be read. Excluded lines are those that match
+ which the text will be read. Excluded lines are those that match
`exclude`, a regex.
"""
@@ -22,15 +24,20 @@ class CodeParser(object):
self.text = text
if not self.text:
try:
- sourcef = open(self.filename, 'rU')
- self.text = sourcef.read()
- sourcef.close()
+ sourcef = open_source(self.filename)
+ try:
+ self.text = sourcef.read()
+ finally:
+ sourcef.close()
except IOError:
_, err, _ = sys.exc_info()
raise NoSource(
"No source for code: %r: %s" % (self.filename, err)
)
- self.text = self.text.replace('\r\n', '\n')
+
+ # Scrap the BOM if it exists.
+ if self.text and ord(self.text[0]) == 0xfeff:
+ self.text = self.text[1:]
self.exclude = exclude
@@ -65,6 +72,21 @@ class CodeParser(object):
return self._byte_parser
byte_parser = property(_get_byte_parser)
+ def lines_matching(self, *regexes):
+ """Find the lines matching one of a list of regexes.
+
+ Returns a set of line numbers, the lines that contain a match for one
+ of the regexes in `regexes`. The entire line needn't match, just a
+ part of it.
+
+ """
+ regex_c = re.compile(join_regex(regexes))
+ matches = set()
+ for i, ltext in enumerate(self.lines):
+ if regex_c.search(ltext):
+ matches.add(i+1)
+ return matches
+
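
A small sketch of the new helper, assuming (as lines_matching() itself does) that the parser keeps the split source lines in self.lines:

    from coverage.parser import CodeParser

    source = (
        "a = 1\n"
        "if a:   # pragma: no cover\n"
        "    b = 2\n"
    )
    cp = CodeParser(text=source, filename="<sketch>", exclude=r"no\s*cover")
    print(cp.lines_matching(r"pragma", r"b ="))    # lines 2 and 3
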
def _raw_parse(self):
"""Parse the source to find the interesting facts about its lines.
@@ -73,10 +95,7 @@ class CodeParser(object):
"""
# Find lines which match an exclusion pattern.
if self.exclude:
- re_exclude = re.compile(self.exclude)
- for i, ltext in enumerate(self.lines):
- if re_exclude.search(ltext):
- self.excluded.add(i+1)
+ self.excluded = self.lines_matching(self.exclude)
# Tokenize, to find excluded suites, to find docstrings, and to find
# multi-line statements.
@@ -184,7 +203,15 @@ class CodeParser(object):
statements.
"""
- self._raw_parse()
+ try:
+ self._raw_parse()
+ except (tokenize.TokenError, IndentationError):
+ _, tokerr, _ = sys.exc_info()
+ msg, lineno = tokerr.args
+ raise NotPython(
+ "Couldn't parse '%s' as Python source: '%s' at %s" %
+ (self.filename, msg, lineno)
+ )
excluded_lines = self.first_lines(self.excluded)
ignore = excluded_lines + list(self.docstrings)
@@ -282,7 +309,7 @@ OPS_EXCEPT_BLOCKS = _opcode_set('SETUP_EXCEPT', 'SETUP_FINALLY')
OPS_POP_BLOCK = _opcode_set('POP_BLOCK')
# Opcodes that have a jump destination, but aren't really a jump.
-OPS_NO_JUMP = _opcode_set('SETUP_EXCEPT', 'SETUP_FINALLY')
+OPS_NO_JUMP = OPS_PUSH_BLOCK
# Individual opcodes we need below.
OP_BREAK_LOOP = _opcode('BREAK_LOOP')
@@ -299,12 +326,16 @@ class ByteParser(object):
def __init__(self, code=None, text=None, filename=None):
if code:
self.code = code
+ self.text = text
else:
if not text:
assert filename, "If no code or text, need a filename"
- sourcef = open(filename, 'rU')
- text = sourcef.read()
- sourcef.close()
+ sourcef = open_source(filename)
+ try:
+ text = sourcef.read()
+ finally:
+ sourcef.close()
+ self.text = text
try:
# Python 2.3 and 2.4 don't like partial last lines, so be sure
@@ -312,7 +343,7 @@ class ByteParser(object):
self.code = compile(text + '\n', filename, "exec")
except SyntaxError:
_, synerr, _ = sys.exc_info()
- raise CoverageException(
+ raise NotPython(
"Couldn't parse '%s' as Python source: '%s' at line %d" %
(filename, synerr.msg, synerr.lineno)
)
@@ -333,7 +364,8 @@ class ByteParser(object):
The iteration includes `self` as its first value.
"""
- return map(lambda c: ByteParser(code=c), CodeObjects(self.code))
+ children = CodeObjects(self.code)
+ return [ByteParser(code=c, text=self.text) for c in children]
# Getting numbers from the lnotab value changed in Py3.0.
if sys.version_info >= (3, 0):
@@ -385,18 +417,6 @@ class ByteParser(object):
stmts.add(l)
return stmts
- def _disassemble(self): # pragma: no cover
- """Disassemble code, for ad-hoc experimenting."""
-
- import dis
-
- for bp in self.child_parsers():
- print("\n%s: " % bp.code)
- dis.dis(bp.code)
- print("Bytes lines: %r" % bp._bytes_lines())
-
- print("")
-
def _split_into_chunks(self):
"""Split the code object into a list of `Chunk` objects.
@@ -509,7 +529,7 @@ class ByteParser(object):
chunks.append(chunk)
# Give all the chunks a length.
- chunks[-1].length = bc.next_offset - chunks[-1].byte
+ chunks[-1].length = bc.next_offset - chunks[-1].byte # pylint: disable=W0631,C0301
for i in range(len(chunks)-1):
chunks[i].length = chunks[i+1].byte - chunks[i].byte
@@ -556,7 +576,7 @@ class ByteParser(object):
else:
# No chunk for this byte!
raise Exception("Couldn't find chunk @ %d" % byte)
- byte_chunks[byte] = ch
+ byte_chunks[byte] = ch # pylint: disable=W0631
if ch.line:
lines.add(ch.line)
@@ -640,144 +660,3 @@ class Chunk(object):
return "<%d+%d @%d %r>" % (
self.byte, self.length, self.line, list(self.exits)
)
-
-
-class AdHocMain(object): # pragma: no cover
- """An ad-hoc main for code parsing experiments."""
-
- def main(self, args):
- """A main function for trying the code from the command line."""
-
- from optparse import OptionParser
-
- parser = OptionParser()
- parser.add_option(
- "-c", action="store_true", dest="chunks",
- help="Show basic block chunks"
- )
- parser.add_option(
- "-d", action="store_true", dest="dis",
- help="Disassemble"
- )
- parser.add_option(
- "-R", action="store_true", dest="recursive",
- help="Recurse to find source files"
- )
- parser.add_option(
- "-s", action="store_true", dest="source",
- help="Show analyzed source"
- )
- parser.add_option(
- "-t", action="store_true", dest="tokens",
- help="Show tokens"
- )
-
- options, args = parser.parse_args()
- if options.recursive:
- if args:
- root = args[0]
- else:
- root = "."
- for root, _, _ in os.walk(root):
- for f in glob.glob(root + "/*.py"):
- self.adhoc_one_file(options, f)
- else:
- self.adhoc_one_file(options, args[0])
-
- def adhoc_one_file(self, options, filename):
- """Process just one file."""
-
- if options.dis or options.chunks:
- try:
- bp = ByteParser(filename=filename)
- except CoverageException:
- _, err, _ = sys.exc_info()
- print("%s" % (err,))
- return
-
- if options.dis:
- print("Main code:")
- bp._disassemble()
-
- if options.chunks:
- chunks = bp._all_chunks()
- if options.recursive:
- print("%6d: %s" % (len(chunks), filename))
- else:
- print("Chunks: %r" % chunks)
- arcs = bp._all_arcs()
- print("Arcs: %r" % sorted(arcs))
-
- if options.source or options.tokens:
- cp = CodeParser(filename=filename, exclude=r"no\s*cover")
- cp.show_tokens = options.tokens
- cp._raw_parse()
-
- if options.source:
- if options.chunks:
- arc_width, arc_chars = self.arc_ascii_art(arcs)
- else:
- arc_width, arc_chars = 0, {}
-
- exit_counts = cp.exit_counts()
-
- for i, ltext in enumerate(cp.lines):
- lineno = i+1
- m0 = m1 = m2 = m3 = a = ' '
- if lineno in cp.statement_starts:
- m0 = '-'
- exits = exit_counts.get(lineno, 0)
- if exits > 1:
- m1 = str(exits)
- if lineno in cp.docstrings:
- m2 = '"'
- if lineno in cp.classdefs:
- m2 = 'C'
- if lineno in cp.excluded:
- m3 = 'x'
- a = arc_chars.get(lineno, '').ljust(arc_width)
- print("%4d %s%s%s%s%s %s" %
- (lineno, m0, m1, m2, m3, a, ltext)
- )
-
- def arc_ascii_art(self, arcs):
- """Draw arcs as ascii art.
-
- Returns a width of characters needed to draw all the arcs, and a
- dictionary mapping line numbers to ascii strings to draw for that line.
-
- """
- arc_chars = {}
- for lfrom, lto in sorted(arcs):
- if lfrom < 0:
- arc_chars[lto] = arc_chars.get(lto, '') + 'v'
- elif lto < 0:
- arc_chars[lfrom] = arc_chars.get(lfrom, '') + '^'
- else:
- if lfrom == lto - 1:
- # Don't show obvious arcs.
- continue
- if lfrom < lto:
- l1, l2 = lfrom, lto
- else:
- l1, l2 = lto, lfrom
- w = max([len(arc_chars.get(l, '')) for l in range(l1, l2+1)])
- for l in range(l1, l2+1):
- if l == lfrom:
- ch = '<'
- elif l == lto:
- ch = '>'
- else:
- ch = '|'
- arc_chars[l] = arc_chars.get(l, '').ljust(w) + ch
- arc_width = 0
-
- if arc_chars:
- arc_width = max([len(a) for a in arc_chars.values()])
- else:
- arc_width = 0
-
- return arc_width, arc_chars
-
-if __name__ == '__main__':
- AdHocMain().main(sys.argv[1:])
diff --git a/coverage/phystokens.py b/coverage/phystokens.py
index 60b87932..166020e1 100644
--- a/coverage/phystokens.py
+++ b/coverage/phystokens.py
@@ -1,7 +1,7 @@
"""Better tokenizing for coverage.py."""
-import keyword, re, token, tokenize
-from coverage.backward import StringIO # pylint: disable-msg=W0622
+import codecs, keyword, re, sys, token, tokenize
+from coverage.backward import StringIO # pylint: disable=W0622
def phys_tokens(toks):
"""Return all physical tokens, even line continuations.
@@ -106,3 +106,101 @@ def source_token_lines(source):
if line:
yield line
+
+def source_encoding(source):
+ """Determine the encoding for `source` (a string), according to PEP 263.
+
+ Returns a string, the name of the encoding.
+
+ """
+ # Note: this function should never be called on Python 3, since py3 has
+ # built-in tools to do this.
+ assert sys.version_info < (3, 0)
+
+ # This is mostly code adapted from Py3.2's tokenize module.
+
+ cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")
+
+ # Do this so the detect_encode code we copied will work.
+ readline = iter(source.splitlines()).next
+
+ def _get_normal_name(orig_enc):
+ """Imitates get_normal_name in tokenizer.c."""
+ # Only care about the first 12 characters.
+ enc = orig_enc[:12].lower().replace("_", "-")
+ if re.match(r"^utf-8($|-)", enc):
+ return "utf-8"
+ if re.match(r"^(latin-1|iso-8859-1|iso-latin-1)($|-)", enc):
+ return "iso-8859-1"
+ return orig_enc
+
+ # From detect_encode():
+ # It detects the encoding from the presence of a utf-8 bom or an encoding
+ # cookie as specified in pep-0263. If both a bom and a cookie are present,
+ # but disagree, a SyntaxError will be raised. If the encoding cookie is an
+ # invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
+ # 'utf-8-sig' is returned.
+
+ # If no encoding is specified, then the default will be returned. The
+ # default varied with version.
+
+ if sys.version_info <= (2, 4):
+ default = 'iso-8859-1'
+ else:
+ default = 'ascii'
+
+ bom_found = False
+ encoding = None
+
+ def read_or_stop():
+ """Get the next source line, or ''."""
+ try:
+ return readline()
+ except StopIteration:
+ return ''
+
+ def find_cookie(line):
+ """Find an encoding cookie in `line`."""
+ try:
+ line_string = line.decode('ascii')
+ except UnicodeDecodeError:
+ return None
+
+ matches = cookie_re.findall(line_string)
+ if not matches:
+ return None
+ encoding = _get_normal_name(matches[0])
+ try:
+ codec = codecs.lookup(encoding)
+ except LookupError:
+ # This behaviour mimics the Python interpreter
+ raise SyntaxError("unknown encoding: " + encoding)
+
+ if bom_found:
+ if codec.name != 'utf-8':
+ # This behaviour mimics the Python interpreter
+ raise SyntaxError('encoding problem: utf-8')
+ encoding += '-sig'
+ return encoding
+
+ first = read_or_stop()
+ if first.startswith(codecs.BOM_UTF8):
+ bom_found = True
+ first = first[3:]
+ default = 'utf-8-sig'
+ if not first:
+ return default
+
+ encoding = find_cookie(first)
+ if encoding:
+ return encoding
+
+ second = read_or_stop()
+ if not second:
+ return default
+
+ encoding = find_cookie(second)
+ if encoding:
+ return encoding
+
+ return default
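
A couple of spot checks (Python 2 only, per the assert at the top of the function):

    from coverage.phystokens import source_encoding

    print(source_encoding("# -*- coding: iso-8859-1 -*-\nx = 1\n"))   # iso-8859-1
    print(source_encoding("x = 1\n"))                                 # ascii on 2.5+
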
diff --git a/coverage/report.py b/coverage/report.py
index 0fb353a2..34f44422 100644
--- a/coverage/report.py
+++ b/coverage/report.py
@@ -2,20 +2,21 @@
import fnmatch, os
from coverage.codeunit import code_unit_factory
-from coverage.misc import CoverageException, NoSource
+from coverage.files import prep_patterns
+from coverage.misc import CoverageException, NoSource, NotPython
class Reporter(object):
"""A base class for all reporters."""
- def __init__(self, coverage, ignore_errors=False):
+ def __init__(self, coverage, config):
"""Create a reporter.
- `coverage` is the coverage instance. `ignore_errors` controls how
- skittish the reporter will be during file processing.
+ `coverage` is the coverage instance. `config` is an instance of
+ CoverageConfig, for controlling all sorts of behavior.
"""
self.coverage = coverage
- self.ignore_errors = ignore_errors
+ self.config = config
# The code units to report on. Set by find_code_units.
self.code_units = []
@@ -24,19 +25,18 @@ class Reporter(object):
# classes.
self.directory = None
- def find_code_units(self, morfs, config):
+ def find_code_units(self, morfs):
"""Find the code units we'll report on.
- `morfs` is a list of modules or filenames. `config` is a
- CoverageConfig instance.
+ `morfs` is a list of modules or filenames.
"""
morfs = morfs or self.coverage.data.measured_files()
file_locator = self.coverage.file_locator
self.code_units = code_unit_factory(morfs, file_locator)
- if config.include:
- patterns = [file_locator.abs_file(p) for p in config.include]
+ if self.config.include:
+ patterns = prep_patterns(self.config.include)
filtered = []
for cu in self.code_units:
for pattern in patterns:
@@ -45,8 +45,8 @@ class Reporter(object):
break
self.code_units = filtered
- if config.omit:
- patterns = [file_locator.abs_file(p) for p in config.omit]
+ if self.config.omit:
+ patterns = prep_patterns(self.config.omit)
filtered = []
for cu in self.code_units:
for pattern in patterns:
@@ -58,15 +58,19 @@ class Reporter(object):
self.code_units.sort()
- def report_files(self, report_fn, morfs, config, directory=None):
+ def report_files(self, report_fn, morfs, directory=None):
"""Run a reporting function on a number of morfs.
- `report_fn` is called for each relative morf in `morfs`.
+ `report_fn` is called for each relative morf in `morfs`. It is called
+ as::
- `config` is a CoverageConfig instance.
+ report_fn(code_unit, analysis)
+
+ where `code_unit` is the `CodeUnit` for the morf, and `analysis` is
+ the `Analysis` for the morf.
"""
- self.find_code_units(morfs, config)
+ self.find_code_units(morfs)
if not self.code_units:
raise CoverageException("No data to report.")
@@ -79,5 +83,10 @@ class Reporter(object):
try:
report_fn(cu, self.coverage._analyze(cu))
except NoSource:
- if not self.ignore_errors:
+ if not self.config.ignore_errors:
+ raise
+ except NotPython:
+ # Only report errors for .py files, and only if we didn't
+ # explicitly suppress those errors.
+ if cu.should_be_python() and not self.config.ignore_errors:
raise
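A sketch of how a reporter subclass is expected to use the reworked base class, now that the config travels with the reporter and report_files() calls back with a (code_unit, analysis) pair; the MyReporter and one_file names are illustrative only:

    from coverage.report import Reporter

    class MyReporter(Reporter):
        """A hypothetical reporter built on the new interface."""

        def report(self, morfs, outfile=None):
            # Include/omit filtering now comes from self.config, inside
            # find_code_units(), which report_files() calls for us.
            self.report_files(self.one_file, morfs)

        def one_file(self, cu, analysis):
            # Called once per code unit with its Analysis.
            print(cu.name, analysis.numbers.pc_covered_str)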
diff --git a/coverage/results.py b/coverage/results.py
index 85071fe3..ae22e1c3 100644
--- a/coverage/results.py
+++ b/coverage/results.py
@@ -2,8 +2,8 @@
import os
-from coverage.backward import set, sorted # pylint: disable-msg=W0622
-from coverage.misc import format_lines, NoSource
+from coverage.backward import iitems, set, sorted # pylint: disable=W0622
+from coverage.misc import format_lines, join_regex, NoSource
from coverage.parser import CodeParser
@@ -25,7 +25,7 @@ class Analysis(object):
self.parser = CodeParser(
text=source, filename=self.filename,
- exclude=self.coverage.exclude_re
+ exclude=self.coverage._exclude_regex('exclude')
)
self.statements, self.excluded = self.parser.parse_source()
@@ -35,11 +35,19 @@ class Analysis(object):
self.missing = sorted(set(self.statements) - set(exec1))
if self.coverage.data.has_arcs():
+ self.no_branch = self.parser.lines_matching(
+ join_regex(self.coverage.config.partial_list),
+ join_regex(self.coverage.config.partial_always_list)
+ )
n_branches = self.total_branches()
mba = self.missing_branch_arcs()
- n_missing_branches = sum([len(v) for v in mba.values()])
+ n_partial_branches = sum(
+ [len(v) for k,v in iitems(mba) if k not in self.missing]
+ )
+ n_missing_branches = sum([len(v) for k,v in iitems(mba)])
else:
- n_branches = n_missing_branches = 0
+ n_branches = n_partial_branches = n_missing_branches = 0
+ self.no_branch = set()
self.numbers = Numbers(
n_files=1,
@@ -47,6 +55,7 @@ class Analysis(object):
n_excluded=len(self.excluded),
n_missing=len(self.missing),
n_branches=n_branches,
+ n_partial_branches=n_partial_branches,
n_missing_branches=n_missing_branches,
)
@@ -64,7 +73,8 @@ class Analysis(object):
def arc_possibilities(self):
"""Returns a sorted list of the arcs in the code."""
- return self.parser.arcs()
+ arcs = self.parser.arcs()
+ return arcs
def arcs_executed(self):
"""Returns a sorted list of the arcs actually executed in the code."""
@@ -77,7 +87,11 @@ class Analysis(object):
"""Returns a sorted list of the arcs in the code not executed."""
possible = self.arc_possibilities()
executed = self.arcs_executed()
- missing = [p for p in possible if p not in executed]
+ missing = [
+ p for p in possible
+ if p not in executed
+ and p[0] not in self.no_branch
+ ]
return sorted(missing)
def arcs_unpredicted(self):
@@ -89,14 +103,15 @@ class Analysis(object):
# trouble, and here is where it's the least burden to remove them.
unpredicted = [
e for e in executed
- if e not in possible and e[0] != e[1]
+ if e not in possible
+ and e[0] != e[1]
]
return sorted(unpredicted)
def branch_lines(self):
"""Returns a list of line numbers that have more than one exit."""
exit_counts = self.parser.exit_counts()
- return [l1 for l1,count in exit_counts.items() if count > 1]
+ return [l1 for l1,count in iitems(exit_counts) if count > 1]
def total_branches(self):
"""How many total branches are there?"""
@@ -153,13 +168,14 @@ class Numbers(object):
_near100 = 99.0
def __init__(self, n_files=0, n_statements=0, n_excluded=0, n_missing=0,
- n_branches=0, n_missing_branches=0
+ n_branches=0, n_partial_branches=0, n_missing_branches=0
):
self.n_files = n_files
self.n_statements = n_statements
self.n_excluded = n_excluded
self.n_missing = n_missing
self.n_branches = n_branches
+ self.n_partial_branches = n_partial_branches
self.n_missing_branches = n_missing_branches
def set_precision(cls, precision):
@@ -193,8 +209,9 @@ class Numbers(object):
def _get_pc_covered_str(self):
"""Returns the percent covered, as a string, without a percent sign.
- The important thing here is that "0" only be returned when it's truly
- zero, and "100" only be returned when it's truly 100.
+ Note that "0" is only returned when the value is truly zero, and "100"
+ is only returned when the value is truly 100. Rounding can never
+ result in either "0" or "100".
"""
pc = self.pc_covered
@@ -222,12 +239,16 @@ class Numbers(object):
nums.n_excluded = self.n_excluded + other.n_excluded
nums.n_missing = self.n_missing + other.n_missing
nums.n_branches = self.n_branches + other.n_branches
- nums.n_missing_branches = (self.n_missing_branches +
- other.n_missing_branches)
+ nums.n_partial_branches = (
+ self.n_partial_branches + other.n_partial_branches
+ )
+ nums.n_missing_branches = (
+ self.n_missing_branches + other.n_missing_branches
+ )
return nums
def __radd__(self, other):
# Implementing 0+Numbers allows us to sum() a list of Numbers.
if other == 0:
return self
- raise NotImplemented
+ return NotImplemented
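The __radd__ change above (returning NotImplemented rather than raising it) is what makes 0 + Numbers fall back correctly, so per-file totals can be built with sum(), which starts from 0. A small sketch with made-up counts:

    from coverage.results import Numbers

    per_file = [
        Numbers(n_files=1, n_statements=10, n_missing=2),
        Numbers(n_files=1, n_statements=20, n_missing=0),
    ]
    # sum() computes 0 + Numbers(...) first, which dispatches to __radd__.
    total = sum(per_file)
    print(total.n_files, total.n_statements, total.n_missing)   # 2 30 2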
diff --git a/coverage/summary.py b/coverage/summary.py
index 599ae782..4b1cd14e 100644
--- a/coverage/summary.py
+++ b/coverage/summary.py
@@ -4,24 +4,23 @@ import sys
from coverage.report import Reporter
from coverage.results import Numbers
+from coverage.misc import NotPython
class SummaryReporter(Reporter):
"""A reporter for writing the summary report."""
- def __init__(self, coverage, show_missing=True, ignore_errors=False):
- super(SummaryReporter, self).__init__(coverage, ignore_errors)
- self.show_missing = show_missing
+ def __init__(self, coverage, config):
+ super(SummaryReporter, self).__init__(coverage, config)
self.branches = coverage.data.has_arcs()
- def report(self, morfs, outfile=None, config=None):
+ def report(self, morfs, outfile=None):
"""Writes a report summarizing coverage statistics per module.
- `outfile` is a file object to write the summary to. `config` is a
- CoverageConfig instance.
+ `outfile` is a file object to write the summary to.
"""
- self.find_code_units(morfs, config)
+ self.find_code_units(morfs)
# Prepare the formatting strings
max_name = max([len(cu.name) for cu in self.code_units] + [5])
@@ -30,12 +29,12 @@ class SummaryReporter(Reporter):
header = (fmt_name % "Name") + " Stmts Miss"
fmt_coverage = fmt_name + "%6d %6d"
if self.branches:
- header += " Branch BrPart"
+ header += " Branch BrMiss"
fmt_coverage += " %6d %6d"
width100 = Numbers.pc_str_width()
header += "%*s" % (width100+4, "Cover")
fmt_coverage += "%%%ds%%%%" % (width100+3,)
- if self.show_missing:
+ if self.config.show_missing:
header += " Missing"
fmt_coverage += " %s"
rule = "-" * len(header) + "\n"
@@ -59,15 +58,19 @@ class SummaryReporter(Reporter):
if self.branches:
args += (nums.n_branches, nums.n_missing_branches)
args += (nums.pc_covered_str,)
- if self.show_missing:
+ if self.config.show_missing:
args += (analysis.missing_formatted(),)
outfile.write(fmt_coverage % args)
total += nums
except KeyboardInterrupt: # pragma: no cover
raise
except:
- if not self.ignore_errors:
+ report_it = not self.config.ignore_errors
+ if report_it:
typ, msg = sys.exc_info()[:2]
+ if typ is NotPython and not cu.should_be_python():
+ report_it = False
+ if report_it:
outfile.write(fmt_err % (cu.name, typ.__name__, msg))
if total.n_files > 1:
@@ -76,6 +79,8 @@ class SummaryReporter(Reporter):
if self.branches:
args += (total.n_branches, total.n_missing_branches)
args += (total.pc_covered_str,)
- if self.show_missing:
+ if self.config.show_missing:
args += ("",)
outfile.write(fmt_coverage % args)
+
+ return total.pc_covered
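Since report() now returns total.pc_covered, callers can act on the overall percentage. An illustrative fail-under style check, assuming the public coverage.report() API passes the value through (that change is outside this file):

    import sys
    import coverage

    cov = coverage.coverage()
    cov.load()
    pct = cov.report()                  # writes the summary, returns pc_covered
    if pct < 80:
        sys.exit("Coverage %.1f%% is below the required 80%%." % pct)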
diff --git a/coverage/tracer.c b/coverage/tracer.c
index e046596a..97dd113b 100644
--- a/coverage/tracer.c
+++ b/coverage/tracer.c
@@ -27,7 +27,8 @@
#define MyText_Type PyUnicode_Type
#define MyText_Check(o) PyUnicode_Check(o)
-#define MyText_AS_STRING(o) PyBytes_AS_STRING(PyUnicode_AsASCIIString(o))
+#define MyText_AS_BYTES(o) PyUnicode_AsASCIIString(o)
+#define MyText_AS_STRING(o) PyBytes_AS_STRING(o)
#define MyInt_FromLong(l) PyLong_FromLong(l)
#define MyType_HEAD_INIT PyVarObject_HEAD_INIT(NULL, 0)
@@ -36,6 +37,7 @@
#define MyText_Type PyString_Type
#define MyText_Check(o) PyString_Check(o)
+#define MyText_AS_BYTES(o) (Py_INCREF(o), o)
#define MyText_AS_STRING(o) PyString_AS_STRING(o)
#define MyInt_FromLong(l) PyInt_FromLong(l)
@@ -56,7 +58,7 @@ typedef struct {
int last_line;
} DataStackEntry;
-/* The Tracer type. */
+/* The CTracer type. */
typedef struct {
PyObject_HEAD
@@ -115,12 +117,12 @@ typedef struct {
unsigned int errors;
} stats;
#endif /* COLLECT_STATS */
-} Tracer;
+} CTracer;
#define STACK_DELTA 100
static int
-Tracer_init(Tracer *self, PyObject *args_unused, PyObject *kwds_unused)
+CTracer_init(CTracer *self, PyObject *args_unused, PyObject *kwds_unused)
{
#if COLLECT_STATS
self->stats.calls = 0;
@@ -161,7 +163,7 @@ Tracer_init(Tracer *self, PyObject *args_unused, PyObject *kwds_unused)
}
static void
-Tracer_dealloc(Tracer *self)
+CTracer_dealloc(CTracer *self)
{
if (self->started) {
PyEval_SetTrace(NULL, NULL);
@@ -207,7 +209,9 @@ showlog(int depth, int lineno, PyObject * filename, const char * msg)
printf(" ");
}
if (filename) {
- printf(" %s", MyText_AS_STRING(filename));
+ PyObject *ascii = MyText_AS_BYTES(filename);
+ printf(" %s", MyText_AS_STRING(ascii));
+ Py_DECREF(ascii);
}
if (msg) {
printf(" %s", msg);
@@ -227,14 +231,12 @@ static const char * what_sym[] = {"CALL", "EXC ", "LINE", "RET "};
/* Record a pair of integers in self->cur_file_data. */
static int
-Tracer_record_pair(Tracer *self, int l1, int l2)
+CTracer_record_pair(CTracer *self, int l1, int l2)
{
int ret = RET_OK;
- PyObject * t = PyTuple_New(2);
+ PyObject * t = Py_BuildValue("(ii)", l1, l2);
if (t != NULL) {
- PyTuple_SET_ITEM(t, 0, MyInt_FromLong(l1));
- PyTuple_SET_ITEM(t, 1, MyInt_FromLong(l2));
if (PyDict_SetItem(self->cur_file_data, t, Py_None) < 0) {
STATS( self->stats.errors++; )
ret = RET_ERROR;
@@ -252,22 +254,29 @@ Tracer_record_pair(Tracer *self, int l1, int l2)
* The Trace Function
*/
static int
-Tracer_trace(Tracer *self, PyFrameObject *frame, int what, PyObject *arg_unused)
+CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unused)
{
int ret = RET_OK;
PyObject * filename = NULL;
PyObject * tracename = NULL;
+ #if WHAT_LOG || TRACE_LOG
+ PyObject * ascii = NULL;
+ #endif
#if WHAT_LOG
if (what <= sizeof(what_sym)/sizeof(const char *)) {
- printf("trace: %s @ %s %d\n", what_sym[what], MyText_AS_STRING(frame->f_code->co_filename), frame->f_lineno);
+ ascii = MyText_AS_BYTES(frame->f_code->co_filename);
+ printf("trace: %s @ %s %d\n", what_sym[what], MyText_AS_STRING(ascii), frame->f_lineno);
+ Py_DECREF(ascii);
}
#endif
#if TRACE_LOG
- if (strstr(MyText_AS_STRING(frame->f_code->co_filename), start_file) && frame->f_lineno == start_line) {
+ ascii = MyText_AS_BYTES(frame->f_code->co_filename);
+ if (strstr(MyText_AS_STRING(ascii), start_file) && frame->f_lineno == start_line) {
logging = 1;
}
+ Py_DECREF(ascii);
#endif
/* See below for details on missing-return detection. */
@@ -286,7 +295,7 @@ Tracer_trace(Tracer *self, PyFrameObject *frame, int what, PyObject *arg_unused)
STATS( self->stats.missed_returns++; )
if (self->depth >= 0) {
if (self->tracing_arcs && self->cur_file_data) {
- if (Tracer_record_pair(self, self->last_line, -self->last_exc_firstlineno) < 0) {
+ if (CTracer_record_pair(self, self->last_line, -self->last_exc_firstlineno) < 0) {
return RET_ERROR;
}
}
@@ -365,6 +374,9 @@ Tracer_trace(Tracer *self, PyFrameObject *frame, int what, PyObject *arg_unused)
}
}
self->cur_file_data = file_data;
+ /* Make the frame right in case settrace(gettrace()) happens. */
+ Py_INCREF(self);
+ frame->f_trace = (PyObject*)self;
SHOWLOG(self->depth, frame->f_lineno, filename, "traced");
}
else {
@@ -383,7 +395,7 @@ Tracer_trace(Tracer *self, PyFrameObject *frame, int what, PyObject *arg_unused)
if (self->depth >= 0) {
if (self->tracing_arcs && self->cur_file_data) {
int first = frame->f_code->co_firstlineno;
- if (Tracer_record_pair(self, self->last_line, -first) < 0) {
+ if (CTracer_record_pair(self, self->last_line, -first) < 0) {
return RET_ERROR;
}
}
@@ -403,7 +415,7 @@ Tracer_trace(Tracer *self, PyFrameObject *frame, int what, PyObject *arg_unused)
/* We're tracing in this frame: record something. */
if (self->tracing_arcs) {
/* Tracing arcs: key is (last_line,this_line). */
- if (Tracer_record_pair(self, self->last_line, frame->f_lineno) < 0) {
+ if (CTracer_record_pair(self, self->last_line, frame->f_lineno) < 0) {
return RET_ERROR;
}
}
@@ -455,57 +467,98 @@ Tracer_trace(Tracer *self, PyFrameObject *frame, int what, PyObject *arg_unused)
}
/*
- * A sys.settrace-compatible function that invokes our C trace function.
+ * Python has two ways to set the trace function: sys.settrace(fn), which
+ * takes a Python callable, and PyEval_SetTrace(func, obj), which takes
+ * a C function and a Python object. The way these work together is that
+ * sys.settrace(pyfn) calls PyEval_SetTrace(builtin_func, pyfn), using the
+ * Python callable as the object in PyEval_SetTrace. sys.gettrace() simply
+ * returns the Python object used as the second argument to PyEval_SetTrace,
+ * so it will return our self parameter, which means self must be callable
+ * to be usable with sys.settrace().
+ *
+ * So we make our self callable, equivalent to invoking our trace function.
+ *
+ * To help with the process of replaying stored frames, this function has an
+ * optional keyword argument:
+ *
+ * def CTracer_call(frame, event, arg, lineno=0)
+ *
+ * If provided, the lineno argument is used as the line number, and the
+ * frame's f_lineno member is ignored.
*/
static PyObject *
-Tracer_pytrace(Tracer *self, PyObject *args)
+CTracer_call(CTracer *self, PyObject *args, PyObject *kwds)
{
PyFrameObject *frame;
PyObject *what_str;
- PyObject *arg_unused;
+ PyObject *arg;
+ int lineno = 0;
int what;
+ int orig_lineno;
+ PyObject *ret = NULL;
+
static char *what_names[] = {
"call", "exception", "line", "return",
"c_call", "c_exception", "c_return",
NULL
};
- if (!PyArg_ParseTuple(args, "O!O!O:Tracer_pytrace",
- &PyFrame_Type, &frame, &MyText_Type, &what_str, &arg_unused)) {
+ #if WHAT_LOG
+ printf("pytrace\n");
+ #endif
+
+ static char *kwlist[] = {"frame", "event", "arg", "lineno", NULL};
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "O!O!O|i:Tracer_call", kwlist,
+ &PyFrame_Type, &frame, &MyText_Type, &what_str, &arg, &lineno)) {
goto done;
}
/* In Python, the what argument is a string, we need to find an int
for the C function. */
for (what = 0; what_names[what]; what++) {
- if (!strcmp(MyText_AS_STRING(what_str), what_names[what])) {
+ PyObject *ascii = MyText_AS_BYTES(what_str);
+ int should_break = !strcmp(MyText_AS_STRING(ascii), what_names[what]);
+ Py_DECREF(ascii);
+ if (should_break) {
break;
}
}
+ /* Save off the frame's lineno, and use the forced one, if provided. */
+ orig_lineno = frame->f_lineno;
+ if (lineno > 0) {
+ frame->f_lineno = lineno;
+ }
+
/* Invoke the C function, and return ourselves. */
- if (Tracer_trace(self, frame, what, arg_unused) == RET_OK) {
- return PyObject_GetAttrString((PyObject*)self, "pytrace");
+ if (CTracer_trace(self, frame, what, arg) == RET_OK) {
+ Py_INCREF(self);
+ ret = (PyObject *)self;
}
+ /* Clean up. */
+ frame->f_lineno = orig_lineno;
+
done:
- return NULL;
+ return ret;
}
static PyObject *
-Tracer_start(Tracer *self, PyObject *args_unused)
+CTracer_start(CTracer *self, PyObject *args_unused)
{
- PyEval_SetTrace((Py_tracefunc)Tracer_trace, (PyObject*)self);
+ PyEval_SetTrace((Py_tracefunc)CTracer_trace, (PyObject*)self);
self->started = 1;
self->tracing_arcs = self->arcs && PyObject_IsTrue(self->arcs);
self->last_line = -1;
/* start() returns a trace function usable with sys.settrace() */
- return PyObject_GetAttrString((PyObject*)self, "pytrace");
+ Py_INCREF(self);
+ return (PyObject *)self;
}
static PyObject *
-Tracer_stop(Tracer *self, PyObject *args_unused)
+CTracer_stop(CTracer *self, PyObject *args_unused)
{
if (self->started) {
PyEval_SetTrace(NULL, NULL);
@@ -516,7 +569,7 @@ Tracer_stop(Tracer *self, PyObject *args_unused)
}
static PyObject *
-Tracer_get_stats(Tracer *self)
+CTracer_get_stats(CTracer *self)
{
#if COLLECT_STATS
return Py_BuildValue(
@@ -538,49 +591,46 @@ Tracer_get_stats(Tracer *self)
}
static PyMemberDef
-Tracer_members[] = {
- { "should_trace", T_OBJECT, offsetof(Tracer, should_trace), 0,
+CTracer_members[] = {
+ { "should_trace", T_OBJECT, offsetof(CTracer, should_trace), 0,
PyDoc_STR("Function indicating whether to trace a file.") },
- { "warn", T_OBJECT, offsetof(Tracer, warn), 0,
+ { "warn", T_OBJECT, offsetof(CTracer, warn), 0,
PyDoc_STR("Function for issuing warnings.") },
- { "data", T_OBJECT, offsetof(Tracer, data), 0,
+ { "data", T_OBJECT, offsetof(CTracer, data), 0,
PyDoc_STR("The raw dictionary of trace data.") },
- { "should_trace_cache", T_OBJECT, offsetof(Tracer, should_trace_cache), 0,
+ { "should_trace_cache", T_OBJECT, offsetof(CTracer, should_trace_cache), 0,
PyDoc_STR("Dictionary caching should_trace results.") },
- { "arcs", T_OBJECT, offsetof(Tracer, arcs), 0,
+ { "arcs", T_OBJECT, offsetof(CTracer, arcs), 0,
PyDoc_STR("Should we trace arcs, or just lines?") },
{ NULL }
};
static PyMethodDef
-Tracer_methods[] = {
- { "pytrace", (PyCFunction) Tracer_pytrace, METH_VARARGS,
- PyDoc_STR("A trace function compatible with sys.settrace()") },
-
- { "start", (PyCFunction) Tracer_start, METH_VARARGS,
+CTracer_methods[] = {
+ { "start", (PyCFunction) CTracer_start, METH_VARARGS,
PyDoc_STR("Start the tracer") },
- { "stop", (PyCFunction) Tracer_stop, METH_VARARGS,
+ { "stop", (PyCFunction) CTracer_stop, METH_VARARGS,
PyDoc_STR("Stop the tracer") },
- { "get_stats", (PyCFunction) Tracer_get_stats, METH_VARARGS,
+ { "get_stats", (PyCFunction) CTracer_get_stats, METH_VARARGS,
PyDoc_STR("Get statistics about the tracing") },
{ NULL }
};
static PyTypeObject
-TracerType = {
+CTracerType = {
MyType_HEAD_INIT
- "coverage.Tracer", /*tp_name*/
- sizeof(Tracer), /*tp_basicsize*/
+ "coverage.CTracer", /*tp_name*/
+ sizeof(CTracer), /*tp_basicsize*/
0, /*tp_itemsize*/
- (destructor)Tracer_dealloc, /*tp_dealloc*/
+ (destructor)CTracer_dealloc, /*tp_dealloc*/
0, /*tp_print*/
0, /*tp_getattr*/
0, /*tp_setattr*/
@@ -590,28 +640,28 @@ TracerType = {
0, /*tp_as_sequence*/
0, /*tp_as_mapping*/
0, /*tp_hash */
- 0, /*tp_call*/
+ (ternaryfunc)CTracer_call, /*tp_call*/
0, /*tp_str*/
0, /*tp_getattro*/
0, /*tp_setattro*/
0, /*tp_as_buffer*/
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
- "Tracer objects", /* tp_doc */
+ "CTracer objects", /* tp_doc */
0, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
0, /* tp_iter */
0, /* tp_iternext */
- Tracer_methods, /* tp_methods */
- Tracer_members, /* tp_members */
+ CTracer_methods, /* tp_methods */
+ CTracer_members, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
- (initproc)Tracer_init, /* tp_init */
+ (initproc)CTracer_init, /* tp_init */
0, /* tp_alloc */
0, /* tp_new */
};
@@ -644,14 +694,14 @@ PyInit_tracer(void)
return NULL;
}
- TracerType.tp_new = PyType_GenericNew;
- if (PyType_Ready(&TracerType) < 0) {
+ CTracerType.tp_new = PyType_GenericNew;
+ if (PyType_Ready(&CTracerType) < 0) {
Py_DECREF(mod);
return NULL;
}
- Py_INCREF(&TracerType);
- PyModule_AddObject(mod, "Tracer", (PyObject *)&TracerType);
+ Py_INCREF(&CTracerType);
+ PyModule_AddObject(mod, "CTracer", (PyObject *)&CTracerType);
return mod;
}
@@ -668,13 +718,13 @@ inittracer(void)
return;
}
- TracerType.tp_new = PyType_GenericNew;
- if (PyType_Ready(&TracerType) < 0) {
+ CTracerType.tp_new = PyType_GenericNew;
+ if (PyType_Ready(&CTracerType) < 0) {
return;
}
- Py_INCREF(&TracerType);
- PyModule_AddObject(mod, "Tracer", (PyObject *)&TracerType);
+ Py_INCREF(&CTracerType);
+ PyModule_AddObject(mod, "CTracer", (PyObject *)&CTracerType);
}
#endif /* Py3k */
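The new tp_call slot is what keeps the settrace(gettrace()) round trip described in the comment above working: sys.gettrace() hands back the CTracer instance itself, and sys.settrace() then requires that object to be callable. A Python-level sketch of that situation:

    import sys
    import coverage

    cov = coverage.coverage()
    cov.start()

    saved = sys.gettrace()      # with the C tracer, this is the CTracer object
    sys.settrace(None)          # temporarily suspend tracing
    # ... run code that must not be traced ...
    sys.settrace(saved)         # only works because CTracer is now callable

    cov.stop()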
diff --git a/coverage/version.py b/coverage/version.py
new file mode 100644
index 00000000..cf18d62f
--- /dev/null
+++ b/coverage/version.py
@@ -0,0 +1,9 @@
+"""The version and URL for coverage.py"""
+# This file is exec'ed in setup.py, don't import anything!
+
+__version__ = "3.6b1" # see detailed history in CHANGES.txt
+
+__url__ = "http://nedbatchelder.com/code/coverage"
+if max(__version__).isalpha():
+ # For pre-releases, use a version-specific URL.
+ __url__ += "/" + __version__
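The pre-release test relies on max() over the characters of the version string: letters sort above digits and ".", so only alpha/beta versions pick the version-specific URL. A quick illustration:

    print(max("3.6b1"), max("3.6b1").isalpha())   # b True  -> version-specific URL
    print(max("3.6"), max("3.6").isalpha())       # 6 False -> plain URL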
diff --git a/coverage/xmlreport.py b/coverage/xmlreport.py
index 5eabac74..4344488d 100644
--- a/coverage/xmlreport.py
+++ b/coverage/xmlreport.py
@@ -4,7 +4,7 @@ import os, sys, time
import xml.dom.minidom
from coverage import __url__, __version__
-from coverage.backward import sorted # pylint: disable-msg=W0622
+from coverage.backward import sorted # pylint: disable=W0622
from coverage.report import Reporter
def rate(hit, num):
@@ -15,20 +15,19 @@ def rate(hit, num):
class XmlReporter(Reporter):
"""A reporter for writing Cobertura-style XML coverage results."""
- def __init__(self, coverage, ignore_errors=False):
- super(XmlReporter, self).__init__(coverage, ignore_errors)
+ def __init__(self, coverage, config):
+ super(XmlReporter, self).__init__(coverage, config)
self.packages = None
self.xml_out = None
self.arcs = coverage.data.has_arcs()
- def report(self, morfs, outfile=None, config=None):
+ def report(self, morfs, outfile=None):
"""Generate a Cobertura-compatible XML report for `morfs`.
`morfs` is a list of modules or filenames.
- `outfile` is a file object to write the XML to. `config` is a
- CoverageConfig instance.
+ `outfile` is a file object to write the XML to.
"""
# Initial setup.
@@ -54,7 +53,7 @@ class XmlReporter(Reporter):
# Call xml_file for each file in the data.
self.packages = {}
- self.report_files(self.xml_file, morfs, config)
+ self.report_files(self.xml_file, morfs)
lnum_tot, lhits_tot = 0, 0
bnum_tot, bhits_tot = 0, 0
@@ -85,14 +84,18 @@ class XmlReporter(Reporter):
# Use the DOM to write the output file.
outfile.write(self.xml_out.toprettyxml())
+ # Return the total percentage.
+ return 100.0 * (lhits_tot + bhits_tot) / (lnum_tot + bnum_tot)
+
def xml_file(self, cu, analysis):
"""Add to the XML report for a single file."""
# Create the 'lines' and 'package' XML elements, which
# are populated later. Note that a package == a directory.
- dirname, fname = os.path.split(cu.name)
- dirname = dirname or '.'
- package = self.packages.setdefault(dirname, [ {}, 0, 0, 0, 0 ])
+ package_name = cu.name.rpartition(".")[0]
+ className = cu.name
+
+ package = self.packages.setdefault(package_name, [{}, 0, 0, 0, 0])
xclass = self.xml_out.createElement("class")
@@ -100,10 +103,10 @@ class XmlReporter(Reporter):
xlines = self.xml_out.createElement("lines")
xclass.appendChild(xlines)
- className = fname.replace('.', '_')
+
xclass.setAttribute("name", className)
- ext = os.path.splitext(cu.filename)[1]
- xclass.setAttribute("filename", cu.name + ext)
+ filename = cu.file_locator.relative_filename(cu.filename)
+ xclass.setAttribute("filename", filename.replace("\\", "/"))
xclass.setAttribute("complexity", "0")
branch_stats = analysis.branch_stats()
@@ -115,7 +118,7 @@ class XmlReporter(Reporter):
# Q: can we get info about the number of times a statement is
# executed? If so, that should be recorded here.
- xline.setAttribute("hits", str(int(not line in analysis.missing)))
+ xline.setAttribute("hits", str(int(line not in analysis.missing)))
if self.arcs:
if line in branch_stats: