summaryrefslogtreecommitdiff
path: root/coverage
diff options
context:
space:
mode:
authorDanny Allen <me@dannya.com>2014-09-22 12:05:55 +0100
committerDanny Allen <me@dannya.com>2014-09-22 12:05:55 +0100
commit1b6d0d06624170fb7a17738387387b1f21357e94 (patch)
tree335402fdef527656f37d3024345c9b532592bce7 /coverage
parentc4935999f882e7317121e884629d07080f1bc776 (diff)
parentd68b95f7a0a201b2e8e830b6d4769005ef0223fa (diff)
downloadpython-coveragepy-git-1b6d0d06624170fb7a17738387387b1f21357e94.tar.gz
Merged ned/coveragepy into default
Diffstat (limited to 'coverage')
-rw-r--r--coverage/__init__.py6
-rw-r--r--coverage/backunittest.py36
-rw-r--r--coverage/backward.py25
-rw-r--r--coverage/bytecode.py1
-rw-r--r--coverage/codeunit.py53
-rw-r--r--coverage/collector.py285
-rw-r--r--coverage/config.py31
-rw-r--r--coverage/control.py186
-rw-r--r--coverage/data.py16
-rw-r--r--coverage/debug.py2
-rw-r--r--coverage/extension.py20
-rw-r--r--coverage/files.py14
-rw-r--r--coverage/html.py4
-rw-r--r--coverage/misc.py11
-rw-r--r--coverage/plugin.py108
-rw-r--r--coverage/pytracer.py163
-rw-r--r--coverage/report.py4
-rw-r--r--coverage/summary.py2
-rw-r--r--coverage/test_helpers.py258
-rw-r--r--coverage/tracer.c313
20 files changed, 1093 insertions, 445 deletions
diff --git a/coverage/__init__.py b/coverage/__init__.py
index 193b7a10..5ae32aba 100644
--- a/coverage/__init__.py
+++ b/coverage/__init__.py
@@ -7,10 +7,14 @@ http://nedbatchelder.com/code/coverage
from coverage.version import __version__, __url__
-from coverage.control import coverage, process_startup
+from coverage.control import Coverage, process_startup
from coverage.data import CoverageData
from coverage.cmdline import main, CoverageScript
from coverage.misc import CoverageException
+from coverage.plugin import CoveragePlugin
+
+# Backward compatibility.
+coverage = Coverage
# Module-level functions. The original API to this module was based on
# functions defined directly in the module, with a singleton of the coverage()
diff --git a/coverage/backunittest.py b/coverage/backunittest.py
new file mode 100644
index 00000000..b2b7ca2f
--- /dev/null
+++ b/coverage/backunittest.py
@@ -0,0 +1,36 @@
+"""Implementations of unittest features from the future."""
+
+# Use unittest2 if it's available, otherwise unittest. This gives us
+# backported features for 2.6.
+try:
+ import unittest2 as unittest # pylint: disable=F0401
+except ImportError:
+ import unittest
+
+
+def unittest_has(method):
+ """Does `unitttest.TestCase` have `method` defined?"""
+ return hasattr(unittest.TestCase, method)
+
+
+class TestCase(unittest.TestCase):
+ """Just like unittest.TestCase, but with assert methods added.
+
+ Designed to be compatible with 3.1 unittest. Methods are only defined if
+ `unittest` doesn't have them.
+
+ """
+ # pylint: disable=missing-docstring
+
+ if not unittest_has('assertCountEqual'):
+ def assertCountEqual(self, s1, s2):
+ """Assert these have the same elements, regardless of order."""
+ self.assertEqual(set(s1), set(s2))
+
+ if not unittest_has('assertRaisesRegex'):
+ def assertRaisesRegex(self, *args, **kwargs):
+ return self.assertRaisesRegexp(*args, **kwargs)
+
+ if not unittest_has('assertRegex'):
+ def assertRegex(self, *args, **kwargs):
+ return self.assertRegexpMatches(*args, **kwargs)
diff --git a/coverage/backward.py b/coverage/backward.py
index a7888a24..9597449c 100644
--- a/coverage/backward.py
+++ b/coverage/backward.py
@@ -50,22 +50,7 @@ else:
if sys.version_info >= (3, 0):
# Python 3.2 provides `tokenize.open`, the best way to open source files.
import tokenize
- try:
- open_python_source = tokenize.open # pylint: disable=E1101
- except AttributeError:
- from io import TextIOWrapper
- detect_encoding = tokenize.detect_encoding # pylint: disable=E1101
- # Copied from the 3.2 stdlib:
- def open_python_source(fname):
- """Open a file in read only mode using the encoding detected by
- detect_encoding().
- """
- buffer = open(fname, 'rb')
- encoding, _ = detect_encoding(buffer.readline)
- buffer.seek(0)
- text = TextIOWrapper(buffer, encoding, line_buffering=True)
- text.mode = 'r'
- return text
+ open_python_source = tokenize.open # pylint: disable=E1101
else:
def open_python_source(fname):
"""Open a source file the best way."""
@@ -118,14 +103,6 @@ else:
for byte in bytes_value:
yield ord(byte)
-# Md5 is available in different places.
-try:
- import hashlib
- md5 = hashlib.md5
-except ImportError:
- import md5
- md5 = md5.new
-
try:
# In Py 2.x, the builtins were in __builtin__
diff --git a/coverage/bytecode.py b/coverage/bytecode.py
index 85360638..3f62dfaf 100644
--- a/coverage/bytecode.py
+++ b/coverage/bytecode.py
@@ -29,7 +29,6 @@ class ByteCodes(object):
Returns `ByteCode` objects.
"""
- # pylint: disable=R0924
def __init__(self, code):
self.code = code
diff --git a/coverage/codeunit.py b/coverage/codeunit.py
index 35167a72..c9ab2622 100644
--- a/coverage/codeunit.py
+++ b/coverage/codeunit.py
@@ -7,17 +7,17 @@ from coverage.misc import CoverageException, NoSource
from coverage.parser import CodeParser, PythonParser
from coverage.phystokens import source_token_lines, source_encoding
-from coverage.django import DjangoTracer
-
-def code_unit_factory(morfs, file_locator, get_ext=None):
+def code_unit_factory(morfs, file_locator, get_plugin=None):
"""Construct a list of CodeUnits from polymorphic inputs.
`morfs` is a module or a filename, or a list of same.
`file_locator` is a FileLocator that can help resolve filenames.
- `get_ext` TODO
+ `get_plugin` is a function taking a filename, and returning a plugin
+ responsible for the file. It can also return None if there is no plugin
+ claiming the file.
Returns a list of CodeUnit objects.
@@ -26,15 +26,14 @@ def code_unit_factory(morfs, file_locator, get_ext=None):
if not isinstance(morfs, (list, tuple)):
morfs = [morfs]
- django_tracer = DjangoTracer()
-
code_units = []
for morf in morfs:
- ext = None
- if isinstance(morf, string_class) and get_ext:
- ext = get_ext(morf)
- if ext:
- klass = DjangoTracer # NOT REALLY! TODO
+ plugin = None
+ if isinstance(morf, string_class) and get_plugin:
+ plugin = get_plugin(morf)
+ if plugin:
+ klass = plugin.code_unit_class(morf)
+ #klass = DjangoTracer # NOT REALLY! TODO
# Hacked-in Mako support. Define COVERAGE_MAKO_PATH as a fragment of
# the path that indicates the Python file is actually a compiled Mako
# template. THIS IS TEMPORARY!
@@ -91,6 +90,8 @@ class CodeUnit(object):
self.name = n
self.modname = modname
+ self._source = None
+
def __repr__(self):
return "<CodeUnit name=%r filename=%r>" % (self.name, self.filename)
@@ -131,6 +132,11 @@ class CodeUnit(object):
return root.replace('\\', '_').replace('/', '_').replace('.', '_')
def source(self):
+ if self._source is None:
+ self._source = self.get_source()
+ return self._source
+
+ def get_source(self):
"""Return the source code, as a string."""
if os.path.exists(self.filename):
# A regular text file: open it.
@@ -147,10 +153,9 @@ class CodeUnit(object):
"No source for code '%s'." % self.filename
)
- def source_token_lines(self, source):
+ def source_token_lines(self):
"""Return the 'tokenized' text for the code."""
- # TODO: Taking source here is wrong, change it?
- for line in source.splitlines():
+ for line in self.source().splitlines():
yield [('txt', line)]
def should_be_python(self):
@@ -162,6 +167,9 @@ class CodeUnit(object):
"""
return False
+ def get_parser(self, exclude=None):
+ raise NotImplementedError
+
class PythonCodeUnit(CodeUnit):
"""Represents a Python file."""
@@ -238,11 +246,11 @@ class PythonCodeUnit(CodeUnit):
# Everything else is probably not Python.
return False
- def source_token_lines(self, source):
- return source_token_lines(source)
+ def source_token_lines(self):
+ return source_token_lines(self.source())
- def source_encoding(self, source):
- return source_encoding(source)
+ def source_encoding(self):
+ return source_encoding(self.source())
class MakoParser(CodeParser):
@@ -271,26 +279,25 @@ class MakoCodeUnit(CodeUnit):
py_source = open(self.filename).read()
self.metadata = ModuleInfo.get_module_source_metadata(py_source, full_line_map=True)
- def source(self):
+ def get_source(self):
return open(self.metadata['filename']).read()
def get_parser(self, exclude=None):
return MakoParser(self.metadata)
- def source_encoding(self, source):
- # TODO: Taking source here is wrong, change it!
+ def source_encoding(self):
return self.metadata['source_encoding']
class DjangoCodeUnit(CodeUnit):
- def source(self):
+ def get_source(self):
with open(self.filename) as f:
return f.read()
def get_parser(self, exclude=None):
return DjangoParser(self.filename)
- def source_encoding(self, source):
+ def source_encoding(self):
return "utf8"
diff --git a/coverage/collector.py b/coverage/collector.py
index 546525d2..85c8dc90 100644
--- a/coverage/collector.py
+++ b/coverage/collector.py
@@ -1,6 +1,9 @@
"""Raw data collector for Coverage."""
-import collections, os, sys, threading
+import os, sys
+
+from coverage.misc import CoverageException
+from coverage.pytracer import PyTracer
try:
# Use the C extension code when we can, for speed.
@@ -21,188 +24,6 @@ except ImportError:
CTracer = None
-class PyTracer(object):
- """Python implementation of the raw data tracer."""
-
- # Because of poor implementations of trace-function-manipulating tools,
- # the Python trace function must be kept very simple. In particular, there
- # must be only one function ever set as the trace function, both through
- # sys.settrace, and as the return value from the trace function. Put
- # another way, the trace function must always return itself. It cannot
- # swap in other functions, or return None to avoid tracing a particular
- # frame.
- #
- # The trace manipulator that introduced this restriction is DecoratorTools,
- # which sets a trace function, and then later restores the pre-existing one
- # by calling sys.settrace with a function it found in the current frame.
- #
- # Systems that use DecoratorTools (or similar trace manipulations) must use
- # PyTracer to get accurate results. The command-line --timid argument is
- # used to force the use of this tracer.
-
- def __init__(self):
- # Attributes set from the collector:
- self.data = None
- self.arcs = False
- self.should_trace = None
- self.should_trace_cache = None
- self.warn = None
- self.extensions = None
-
- self.extension = None
- self.cur_tracename = None # TODO: This is only maintained for the if0 debugging output. Get rid of it eventually.
- self.cur_file_data = None
- self.last_line = 0
- self.data_stack = []
- self.data_stacks = collections.defaultdict(list)
- self.last_exc_back = None
- self.last_exc_firstlineno = 0
- self.thread = None
- self.stopped = False
- self.coroutine_id_func = None
- self.last_coroutine = None
-
- def _trace(self, frame, event, arg_unused):
- """The trace function passed to sys.settrace."""
-
- if self.stopped:
- return
-
- if 0:
- # A lot of debugging to try to understand why gevent isn't right.
- import os.path, pprint
- def short_ident(ident):
- return "{}:{:06X}".format(ident.__class__.__name__, id(ident) & 0xFFFFFF)
-
- ident = None
- if self.coroutine_id_func:
- ident = short_ident(self.coroutine_id_func())
- sys.stdout.write("trace event: %s %s %r @%d\n" % (
- event, ident, frame.f_code.co_filename, frame.f_lineno
- ))
- pprint.pprint(
- dict(
- (
- short_ident(ident),
- [
- (os.path.basename(tn or ""), sorted((cfd or {}).keys()), ll)
- for ex, tn, cfd, ll in data_stacks
- ]
- )
- for ident, data_stacks in self.data_stacks.items()
- )
- , width=250)
- pprint.pprint(sorted((self.cur_file_data or {}).keys()), width=250)
- print("TRYING: {}".format(sorted(next((v for k,v in self.data.items() if k.endswith("try_it.py")), {}).keys())))
-
- if self.last_exc_back:
- if frame == self.last_exc_back:
- # Someone forgot a return event.
- if self.arcs and self.cur_file_data:
- pair = (self.last_line, -self.last_exc_firstlineno)
- self.cur_file_data[pair] = None
- if self.coroutine_id_func:
- self.data_stack = self.data_stacks[self.coroutine_id_func()]
- self.handler, _, self.cur_file_data, self.last_line = self.data_stack.pop()
- self.last_exc_back = None
-
- if event == 'call':
- # Entering a new function context. Decide if we should trace
- # in this file.
- if self.coroutine_id_func:
- self.data_stack = self.data_stacks[self.coroutine_id_func()]
- self.last_coroutine = self.coroutine_id_func()
- self.data_stack.append((self.extension, self.cur_tracename, self.cur_file_data, self.last_line))
- filename = frame.f_code.co_filename
- disp = self.should_trace_cache.get(filename)
- if disp is None:
- disp = self.should_trace(filename, frame)
- self.should_trace_cache[filename] = disp
- #print("called, stack is %d deep, tracename is %r" % (
- # len(self.data_stack), tracename))
- tracename = disp.filename
- if tracename and disp.extension:
- tracename = disp.extension.file_name(frame)
- if tracename:
- if tracename not in self.data:
- self.data[tracename] = {}
- if disp.extension:
- self.extensions[tracename] = disp.extension.__name__
- self.cur_tracename = tracename
- self.cur_file_data = self.data[tracename]
- self.extension = disp.extension
- else:
- self.cur_file_data = None
- # Set the last_line to -1 because the next arc will be entering a
- # code block, indicated by (-1, n).
- self.last_line = -1
- elif event == 'line':
- # Record an executed line.
- if 0 and self.coroutine_id_func:
- this_coroutine = self.coroutine_id_func()
- if self.last_coroutine != this_coroutine:
- print("mismatch: {0} != {1}".format(self.last_coroutine, this_coroutine))
- if self.extension:
- lineno_from, lineno_to = self.extension.line_number_range(frame)
- else:
- lineno_from, lineno_to = frame.f_lineno, frame.f_lineno
- if lineno_from != -1:
- if self.cur_file_data is not None:
- if self.arcs:
- #print("lin", self.last_line, frame.f_lineno)
- self.cur_file_data[(self.last_line, lineno_from)] = None
- else:
- #print("lin", frame.f_lineno)
- for lineno in range(lineno_from, lineno_to+1):
- self.cur_file_data[lineno] = None
- self.last_line = lineno_to
- elif event == 'return':
- if self.arcs and self.cur_file_data:
- first = frame.f_code.co_firstlineno
- self.cur_file_data[(self.last_line, -first)] = None
- # Leaving this function, pop the filename stack.
- if self.coroutine_id_func:
- self.data_stack = self.data_stacks[self.coroutine_id_func()]
- self.last_coroutine = self.coroutine_id_func()
- self.extension, _, self.cur_file_data, self.last_line = self.data_stack.pop()
- #print("returned, stack is %d deep" % (len(self.data_stack)))
- elif event == 'exception':
- #print("exc", self.last_line, frame.f_lineno)
- self.last_exc_back = frame.f_back
- self.last_exc_firstlineno = frame.f_code.co_firstlineno
- return self._trace
-
- def start(self):
- """Start this Tracer.
-
- Return a Python function suitable for use with sys.settrace().
-
- """
- self.thread = threading.currentThread()
- sys.settrace(self._trace)
- return self._trace
-
- def stop(self):
- """Stop this Tracer."""
- self.stopped = True
- if self.thread != threading.currentThread():
- # Called on a different thread than started us: we can't unhook
- # ourseves, but we've set the flag that we should stop, so we won't
- # do any more tracing.
- return
-
- if hasattr(sys, "gettrace") and self.warn:
- if sys.gettrace() != self._trace:
- msg = "Trace function changed, measurement is likely wrong: %r"
- self.warn(msg % (sys.gettrace(),))
- #print("Stopping tracer on %s" % threading.current_thread().ident)
- sys.settrace(None)
-
- def get_stats(self):
- """Return a dictionary of statistics, or None."""
- return None
-
-
class Collector(object):
"""Collects trace data.
@@ -224,13 +45,17 @@ class Collector(object):
# the top, and resumed when they become the top again.
_collectors = []
- def __init__(self, should_trace, timid, branch, warn, coroutine):
+ def __init__(self,
+ should_trace, check_include, timid, branch, warn, coroutine,
+ ):
"""Create a collector.
`should_trace` is a function, taking a filename, and returning a
canonicalized filename, or None depending on whether the file should
be traced or not.
+ TODO: `check_include`
+
If `timid` is true, then a slower simpler trace function will be
used. This is important for some environments where manipulation of
tracing functions make the faster more sophisticated trace function not
@@ -243,21 +68,44 @@ class Collector(object):
`warn` is a warning function, taking a single string message argument,
to be used if a warning needs to be issued.
+ TODO: `coroutine`
+
"""
self.should_trace = should_trace
+ self.check_include = check_include
self.warn = warn
self.branch = branch
- if coroutine == "greenlet":
- import greenlet
- self.coroutine_id_func = greenlet.getcurrent
- elif coroutine == "eventlet":
- import eventlet.greenthread
- self.coroutine_id_func = eventlet.greenthread.getcurrent
- elif coroutine == "gevent":
- import gevent
- self.coroutine_id_func = gevent.getcurrent
- else:
- self.coroutine_id_func = None
+ self.threading = None
+ self.coroutine = coroutine
+
+ self.coroutine_id_func = None
+
+ try:
+ if coroutine == "greenlet":
+ import greenlet
+ self.coroutine_id_func = greenlet.getcurrent
+ elif coroutine == "eventlet":
+ import eventlet.greenthread
+ self.coroutine_id_func = eventlet.greenthread.getcurrent
+ elif coroutine == "gevent":
+ import gevent
+ self.coroutine_id_func = gevent.getcurrent
+ elif coroutine == "thread" or not coroutine:
+ # It's important to import threading only if we need it. If
+ # it's imported early, and the program being measured uses
+ # gevent, then gevent's monkey-patching won't work properly.
+ import threading
+ self.threading = threading
+ else:
+ raise CoverageException(
+ "Don't understand coroutine=%s" % coroutine
+ )
+ except ImportError:
+ raise CoverageException(
+ "Couldn't trace with coroutine=%s, "
+ "the module isn't installed." % coroutine
+ )
+
self.reset()
if timid:
@@ -281,7 +129,7 @@ class Collector(object):
# or mapping filenames to dicts with linenumber pairs as keys.
self.data = {}
- self.extensions = {}
+ self.plugin_data = {}
# A cache of the results from should_trace, the decision about whether
# to trace execution in a file. A dict of filename to (filename or
@@ -299,12 +147,25 @@ class Collector(object):
tracer.should_trace = self.should_trace
tracer.should_trace_cache = self.should_trace_cache
tracer.warn = self.warn
+
if hasattr(tracer, 'coroutine_id_func'):
tracer.coroutine_id_func = self.coroutine_id_func
- if hasattr(tracer, 'extensions'):
- tracer.extensions = self.extensions
+ elif self.coroutine_id_func:
+ raise CoverageException(
+ "Can't support coroutine=%s with %s, "
+ "only threads are supported" % (
+ self.coroutine, self.tracer_name(),
+ )
+ )
+
+ if hasattr(tracer, 'plugin_data'):
+ tracer.plugin_data = self.plugin_data
+ if hasattr(tracer, 'threading'):
+ tracer.threading = self.threading
+
fn = tracer.start()
self.tracers.append(tracer)
+
return fn
# The trace function has to be set individually on each thread before
@@ -331,16 +192,14 @@ class Collector(object):
if self._collectors:
self._collectors[-1].pause()
self._collectors.append(self)
- #print("Started: %r" % self._collectors, file=sys.stderr)
# Check to see whether we had a fullcoverage tracer installed.
traces0 = []
- if hasattr(sys, "gettrace"):
- fn0 = sys.gettrace()
- if fn0:
- tracer0 = getattr(fn0, '__self__', None)
- if tracer0:
- traces0 = getattr(tracer0, 'traces', [])
+ fn0 = sys.gettrace()
+ if fn0:
+ tracer0 = getattr(fn0, '__self__', None)
+ if tracer0:
+ traces0 = getattr(tracer0, 'traces', [])
# Install the tracer on this thread.
fn = self._start_tracer()
@@ -356,11 +215,11 @@ class Collector(object):
# Install our installation tracer in threading, to jump start other
# threads.
- threading.settrace(self._installation_trace)
+ if self.threading:
+ self.threading.settrace(self._installation_trace)
def stop(self):
"""Stop collecting trace information."""
- #print >>sys.stderr, "Stopping: %r" % self._collectors
assert self._collectors
assert self._collectors[-1] is self
@@ -382,13 +241,17 @@ class Collector(object):
print("\nCoverage.py tracer stats:")
for k in sorted(stats.keys()):
print("%16s: %s" % (k, stats[k]))
- threading.settrace(None)
+ if self.threading:
+ self.threading.settrace(None)
def resume(self):
"""Resume tracing after a `pause`."""
for tracer in self.tracers:
tracer.start()
- threading.settrace(self._installation_trace)
+ if self.threading:
+ self.threading.settrace(self._installation_trace)
+ else:
+ self._start_tracer()
def get_line_data(self):
"""Return the line data collected.
@@ -420,5 +283,5 @@ class Collector(object):
else:
return {}
- def get_extension_data(self):
- return self.extensions
+ def get_plugin_data(self):
+ return self.plugin_data
diff --git a/coverage/config.py b/coverage/config.py
index 064bc1ca..c671ef75 100644
--- a/coverage/config.py
+++ b/coverage/config.py
@@ -37,6 +37,13 @@ class HandyConfigParser(configparser.RawConfigParser):
section = self.section_prefix + section
return configparser.RawConfigParser.options(self, section)
+ def get_section(self, section):
+ """Get the contents of a section, as a dictionary."""
+ d = {}
+ for opt in self.options(section):
+ d[opt] = self.get(section, opt)
+ return d
+
def get(self, section, *args, **kwargs):
"""Get a value, replacing environment variables also.
@@ -140,7 +147,7 @@ class CoverageConfig(object):
self.timid = False
self.source = None
self.debug = []
- self.extensions = []
+ self.plugins = []
# Defaults for [report]
self.exclude_list = DEFAULT_EXCLUDE[:]
@@ -163,6 +170,9 @@ class CoverageConfig(object):
# Defaults for [paths]
self.paths = {}
+ # Options for plugins
+ self.plugin_options = {}
+
def from_environment(self, env_var):
"""Read configuration from the `env_var` environment variable."""
# Timidity: for nose users, read an environment variable. This is a
@@ -172,7 +182,7 @@ class CoverageConfig(object):
if env:
self.timid = ('--timid' in env)
- MUST_BE_LIST = ["omit", "include", "debug", "extensions"]
+ MUST_BE_LIST = ["omit", "include", "debug", "plugins"]
def from_args(self, **kwargs):
"""Read config values from `kwargs`."""
@@ -200,17 +210,22 @@ class CoverageConfig(object):
self.config_files.extend(files_read)
for option_spec in self.CONFIG_FILE_OPTIONS:
- self.set_attr_from_config_option(cp, *option_spec)
+ self._set_attr_from_config_option(cp, *option_spec)
# [paths] is special
if cp.has_section('paths'):
for option in cp.options('paths'):
self.paths[option] = cp.getlist('paths', option)
+ # plugins can have options
+ for plugin in self.plugins:
+ if cp.has_section(plugin):
+ self.plugin_options[plugin] = cp.get_section(plugin)
+
return True
CONFIG_FILE_OPTIONS = [
- # These are *args for set_attr_from_config_option:
+ # These are *args for _set_attr_from_config_option:
# (attr, where, type_="")
#
# attr is the attribute to set on the CoverageConfig object.
@@ -224,7 +239,7 @@ class CoverageConfig(object):
('cover_pylib', 'run:cover_pylib', 'boolean'),
('data_file', 'run:data_file'),
('debug', 'run:debug', 'list'),
- ('extensions', 'run:extensions', 'list'),
+ ('plugins', 'run:plugins', 'list'),
('include', 'run:include', 'list'),
('omit', 'run:omit', 'list'),
('parallel', 'run:parallel', 'boolean'),
@@ -250,9 +265,13 @@ class CoverageConfig(object):
('xml_output', 'xml:output'),
]
- def set_attr_from_config_option(self, cp, attr, where, type_=''):
+ def _set_attr_from_config_option(self, cp, attr, where, type_=''):
"""Set an attribute on self if it exists in the ConfigParser."""
section, option = where.split(":")
if cp.has_option(section, option):
method = getattr(cp, 'get'+type_)
setattr(self, attr, method(section, option))
+
+ def get_plugin_options(self, plugin):
+ """Get a dictionary of options for the plugin named `plugin`."""
+ return self.plugin_options.get(plugin, {})
diff --git a/coverage/control.py b/coverage/control.py
index cb917e52..86a2ae23 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -1,6 +1,6 @@
"""Core control stuff for Coverage."""
-import atexit, os, random, socket, sys
+import atexit, os, platform, random, socket, sys
from coverage.annotate import AnnotateReporter
from coverage.backward import string_class, iitems
@@ -9,7 +9,7 @@ from coverage.collector import Collector
from coverage.config import CoverageConfig
from coverage.data import CoverageData
from coverage.debug import DebugControl
-from coverage.extension import load_extensions
+from coverage.plugin import Plugins, plugin_implements
from coverage.files import FileLocator, TreeMatcher, FnmatchMatcher
from coverage.files import PathAliases, find_python_files, prep_patterns
from coverage.html import HtmlReporter
@@ -28,14 +28,14 @@ except ImportError:
_structseq = None
-class coverage(object):
+class Coverage(object):
"""Programmatic access to coverage.py.
To use::
from coverage import coverage
- cov = coverage()
+ cov = Coverage()
cov.start()
#.. call your code ..
cov.stop()
@@ -45,7 +45,7 @@ class coverage(object):
def __init__(self, data_file=None, data_suffix=None, cover_pylib=None,
auto_data=False, timid=None, branch=None, config_file=True,
source=None, omit=None, include=None, debug=None,
- debug_file=None, coroutine=None):
+ debug_file=None, coroutine=None, plugins=None):
"""
`data_file` is the base name of the data file to use, defaulting to
".coverage". `data_suffix` is appended (with a dot) to `data_file` to
@@ -87,7 +87,9 @@ class coverage(object):
`coroutine` is a string indicating the coroutining library being used
in the measured code. Without this, coverage.py will get incorrect
results. Valid strings are "greenlet", "eventlet", or "gevent", which
- are all equivalent.
+ are all equivalent. TODO: really?
+
+ `plugins` TODO.
"""
from coverage import __version__
@@ -126,15 +128,20 @@ class coverage(object):
data_file=data_file, cover_pylib=cover_pylib, timid=timid,
branch=branch, parallel=bool_or_none(data_suffix),
source=source, omit=omit, include=include, debug=debug,
- coroutine=coroutine,
+ coroutine=coroutine, plugins=plugins,
)
# Create and configure the debugging controller.
self.debug = DebugControl(self.config.debug, debug_file or sys.stderr)
- # Load extensions
- tracer_classes = load_extensions(self.config.extensions, "tracer")
- self.tracer_extensions = [cls() for cls in tracer_classes]
+ # Load plugins
+ self.plugins = Plugins.load_plugins(self.config.plugins, self.config)
+
+ self.trace_judges = []
+ for plugin in self.plugins:
+ if plugin_implements(plugin, "trace_judge"):
+ self.trace_judges.append(plugin)
+ self.trace_judges.append(None) # The Python case.
self.auto_data = auto_data
@@ -158,8 +165,11 @@ class coverage(object):
self.include = prep_patterns(self.config.include)
self.collector = Collector(
- self._should_trace, timid=self.config.timid,
- branch=self.config.branch, warn=self._warn,
+ should_trace=self._should_trace,
+ check_include=self._tracing_check_include_omit_etc,
+ timid=self.config.timid,
+ branch=self.config.branch,
+ warn=self._warn,
coroutine=self.config.coroutine,
)
@@ -186,18 +196,16 @@ class coverage(object):
)
# The dirs for files considered "installed with the interpreter".
- self.pylib_dirs = []
+ self.pylib_dirs = set()
if not self.config.cover_pylib:
# Look at where some standard modules are located. That's the
# indication for "installed with the interpreter". In some
# environments (virtualenv, for example), these modules may be
# spread across a few locations. Look at all the candidate modules
# we've imported, and take all the different ones.
- for m in (atexit, os, random, socket, _structseq):
+ for m in (atexit, os, platform, random, socket, _structseq):
if m is not None and hasattr(m, "__file__"):
- m_dir = self._canonical_dir(m)
- if m_dir not in self.pylib_dirs:
- self.pylib_dirs.append(m_dir)
+ self.pylib_dirs.add(self._canonical_dir(m))
# To avoid tracing the coverage code itself, we skip anything located
# where we are.
@@ -247,20 +255,10 @@ class coverage(object):
"""
disp = FileDisposition(filename)
-
- if not filename:
- # Empty string is pretty useless
- return disp.nope("empty string isn't a filename")
-
- if filename.startswith('memory:'):
- return disp.nope("memory isn't traceable")
-
- if filename.startswith('<'):
- # Lots of non-file execution is represented with artificial
- # filenames like "<string>", "<doctest readme.txt[0]>", or
- # "<exec_function>". Don't ever trace these executions, since we
- # can't do anything with the data later anyway.
- return disp.nope("not a real filename")
+ def nope(disp, reason):
+ disp.trace = False
+ disp.reason = reason
+ return disp
self._check_for_packages()
@@ -274,46 +272,80 @@ class coverage(object):
if dunder_file:
filename = self._source_for_file(dunder_file)
+ if not filename:
+ # Empty string is pretty useless
+ return nope(disp, "empty string isn't a filename")
+
+ if filename.startswith('memory:'):
+ return nope(disp, "memory isn't traceable")
+
+ if filename.startswith('<'):
+ # Lots of non-file execution is represented with artificial
+ # filenames like "<string>", "<doctest readme.txt[0]>", or
+ # "<exec_function>". Don't ever trace these executions, since we
+ # can't do anything with the data later anyway.
+ return nope(disp, "not a real filename")
+
# Jython reports the .class file to the tracer, use the source file.
if filename.endswith("$py.class"):
filename = filename[:-9] + ".py"
canonical = self.file_locator.canonical_filename(filename)
+ disp.canonical_filename = canonical
+
+ # Try the plugins, see if they have an opinion about the file.
+ for plugin in self.trace_judges:
+ if plugin:
+ plugin.trace_judge(disp)
+ else:
+ disp.trace = True
+ disp.source_filename = canonical
+ if disp.trace:
+ disp.plugin = plugin
+
+ if disp.check_filters:
+ reason = self._check_include_omit_etc(disp.source_filename)
+ if reason:
+ nope(disp, reason)
+
+ return disp
+
+ return nope(disp, "no plugin found") # TODO: a test that causes this.
- # Try the extensions, see if they have an opinion about the file.
- for tracer in self.tracer_extensions:
- ext_disp = tracer.should_trace(canonical)
- if ext_disp:
- ext_disp.extension = tracer
- return ext_disp
+ def _check_include_omit_etc(self, filename):
+ """Check a filename against the include, omit, etc, rules.
+ Returns a string or None. String means, don't trace, and is the reason
+ why. None means no reason found to not trace.
+
+ """
# If the user specified source or include, then that's authoritative
# about the outer bound of what to measure and we don't have to apply
# any canned exclusions. If they didn't, then we have to exclude the
# stdlib and coverage.py directories.
if self.source_match:
- if not self.source_match.match(canonical):
- return disp.nope("falls outside the --source trees")
+ if not self.source_match.match(filename):
+ return "falls outside the --source trees"
elif self.include_match:
- if not self.include_match.match(canonical):
- return disp.nope("falls outside the --include trees")
+ if not self.include_match.match(filename):
+ return "falls outside the --include trees"
else:
# If we aren't supposed to trace installed code, then check if this
# is near the Python standard library and skip it if so.
- if self.pylib_match and self.pylib_match.match(canonical):
- return disp.nope("is in the stdlib")
+ if self.pylib_match and self.pylib_match.match(filename):
+ return "is in the stdlib"
# We exclude the coverage code itself, since a little of it will be
# measured otherwise.
- if self.cover_match and self.cover_match.match(canonical):
- return disp.nope("is part of coverage.py")
+ if self.cover_match and self.cover_match.match(filename):
+ return "is part of coverage.py"
# Check the file against the omit pattern.
- if self.omit_match and self.omit_match.match(canonical):
- return disp.nope("is inside an --omit pattern")
+ if self.omit_match and self.omit_match.match(filename):
+ return "is inside an --omit pattern"
- disp.filename = canonical
- return disp
+ # No reason found to skip this file.
+ return None
def _should_trace(self, filename, frame):
"""Decide whether to trace execution in `filename`.
@@ -326,6 +358,22 @@ class coverage(object):
self.debug.write(disp.debug_message())
return disp
+ def _tracing_check_include_omit_etc(self, filename):
+ """Check a filename against the include, omit, etc, rules, and say so.
+
+ Returns a boolean: True if the file should be traced, False if not.
+
+ """
+ reason = self._check_include_omit_etc(filename)
+ if self.debug.should('trace'):
+ if not reason:
+ msg = "Tracing %r" % (filename,)
+ else:
+ msg = "Not tracing %r: %s" % (filename, reason)
+ self.debug.write(msg)
+
+ return not reason
+
def _warn(self, msg):
"""Use `msg` as a warning."""
self._warnings.append(msg)
@@ -545,7 +593,7 @@ class coverage(object):
# TODO: seems like this parallel structure is getting kinda old...
self.data.add_line_data(self.collector.get_line_data())
self.data.add_arc_data(self.collector.get_arc_data())
- self.data.add_extension_data(self.collector.get_extension_data())
+ self.data.add_plugin_data(self.collector.get_plugin_data())
self.collector.reset()
# If there are still entries in the source_pkgs list, then we never
@@ -611,10 +659,17 @@ class coverage(object):
Returns an `Analysis` object.
"""
+ def get_plugin(filename):
+ """For code_unit_factory to use to find the plugin for a file."""
+ plugin = None
+ plugin_name = self.data.plugin_data().get(filename)
+ if plugin_name:
+ plugin = self.plugins.get(plugin_name)
+ return plugin
+
self._harvest_data()
if not isinstance(it, CodeUnit):
- get_ext = self.data.extension_data().get
- it = code_unit_factory(it, self.file_locator, get_ext)[0]
+ it = code_unit_factory(it, self.file_locator, get_plugin)[0]
return Analysis(self, it)
@@ -738,7 +793,6 @@ class coverage(object):
"""Return a list of (key, value) pairs showing internal information."""
import coverage as covmod
- import platform, re
try:
implementation = platform.python_implementation()
@@ -760,10 +814,10 @@ class coverage(object):
('executable', sys.executable),
('cwd', os.getcwd()),
('path', sys.path),
- ('environment', sorted([
+ ('environment', sorted(
("%s = %s" % (k, v)) for k, v in iitems(os.environ)
- if re.search(r"^COV|^PY", k)
- ])),
+ if k.startswith(("COV", "PY"))
+ )),
('command_line', " ".join(getattr(sys, 'argv', ['???']))),
]
if self.source_match:
@@ -784,21 +838,19 @@ class FileDisposition(object):
"""A simple object for noting a number of details of files to trace."""
def __init__(self, original_filename):
self.original_filename = original_filename
- self.filename = None
+ self.canonical_filename = original_filename
+ self.source_filename = None
+ self.check_filters = True
+ self.trace = False
self.reason = ""
- self.extension = None
-
- def nope(self, reason):
- """A helper for returning a NO answer from should_trace."""
- self.reason = reason
- return self
+ self.plugin = None
def debug_message(self):
"""Produce a debugging message explaining the outcome."""
- if not self.filename:
- msg = "Not tracing %r: %s" % (self.original_filename, self.reason)
- else:
+ if self.trace:
msg = "Tracing %r" % (self.original_filename,)
+ else:
+ msg = "Not tracing %r: %s" % (self.original_filename, self.reason)
return msg
@@ -824,7 +876,7 @@ def process_startup():
"""
cps = os.environ.get("COVERAGE_PROCESS_START")
if cps:
- cov = coverage(config_file=cps, auto_data=True)
+ cov = Coverage(config_file=cps, auto_data=True)
cov.start()
cov._warn_no_data = False
cov._warn_unimported_source = False
diff --git a/coverage/data.py b/coverage/data.py
index b78c931d..e220a364 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -21,9 +21,9 @@ class CoverageData(object):
* arcs: a dict mapping filenames to sorted lists of line number pairs:
{ 'file1': [(17,23), (17,25), (25,26)], ... }
- * extensions: a dict mapping filenames to extension names:
+ * plugins: a dict mapping filenames to plugin names:
{ 'file1': "django.coverage", ... }
- # TODO: how to handle the difference between a extension module
+ # TODO: how to handle the difference between a plugin module
# name, and the class in the module?
"""
@@ -69,13 +69,13 @@ class CoverageData(object):
#
self.arcs = {}
- # A map from canonical source file name to an extension module name:
+    # A map from canonical source file name to a plugin module name:
#
# {
# 'filename1.py': 'django.coverage',
# ...
# }
- self.extensions = {}
+ self.plugins = {}
def usefile(self, use_file=True):
"""Set whether or not to use a disk file for data."""
@@ -123,8 +123,8 @@ class CoverageData(object):
(f, sorted(amap.keys())) for f, amap in iitems(self.arcs)
)
- def extension_data(self):
- return self.extensions
+ def plugin_data(self):
+ return self.plugins
def write_file(self, filename):
"""Write the coverage data to `filename`."""
@@ -229,8 +229,8 @@ class CoverageData(object):
for filename, arcs in iitems(arc_data):
self.arcs.setdefault(filename, {}).update(arcs)
- def add_extension_data(self, extension_data):
- self.extensions.update(extension_data)
+ def add_plugin_data(self, plugin_data):
+ self.plugins.update(plugin_data)
def touch_file(self, filename):
"""Ensure that `filename` appears in the data, empty if needed."""
diff --git a/coverage/debug.py b/coverage/debug.py
index 6908383d..6e7af242 100644
--- a/coverage/debug.py
+++ b/coverage/debug.py
@@ -45,7 +45,7 @@ def info_formatter(info):
for label, data in info:
if data == []:
data = "-none-"
- if isinstance(data, (list, tuple)):
+ if isinstance(data, (list, set, tuple)):
prefix = "%*s:" % (label_len, label)
for e in data:
yield "%*s %s" % (label_len+1, prefix, e)
diff --git a/coverage/extension.py b/coverage/extension.py
deleted file mode 100644
index 8c89b88e..00000000
--- a/coverage/extension.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""Extension management for coverage.py"""
-
-def load_extensions(modules, name):
- """Load extensions from `modules`, finding them by `name`.
-
- Yields the loaded extensions.
-
- """
-
- for module in modules:
- try:
- __import__(module)
- mod = sys.modules[module]
- except ImportError:
- blah()
- continue
-
- entry = getattr(mod, name, None)
- if entry:
- yield entry
diff --git a/coverage/files.py b/coverage/files.py
index 08ce1e84..1ed7276e 100644
--- a/coverage/files.py
+++ b/coverage/files.py
@@ -147,7 +147,7 @@ def prep_patterns(patterns):
class TreeMatcher(object):
"""A matcher for files in a tree."""
def __init__(self, directories):
- self.dirs = directories[:]
+ self.dirs = list(directories)
def __repr__(self):
return "<TreeMatcher %r>" % self.dirs
@@ -177,7 +177,17 @@ class FnmatchMatcher(object):
"""A matcher for files by filename pattern."""
def __init__(self, pats):
self.pats = pats[:]
- self.re = re.compile(join_regex([fnmatch.translate(p) for p in pats]))
+ # fnmatch is platform-specific. On Windows, it does the Windows thing
+ # of treating / and \ as equivalent. But on other platforms, we need to
+ # take care of that ourselves.
+ fnpats = (fnmatch.translate(p) for p in pats)
+ fnpats = (p.replace(r"\/", r"[\\/]") for p in fnpats)
+ if sys.platform == 'win32':
+ # Windows is also case-insensitive. BTW: the regex docs say that
+ # flags like (?i) have to be at the beginning, but fnmatch puts
+            # them at the end, and having two there seems to work fine.
+ fnpats = (p + "(?i)" for p in fnpats)
+ self.re = re.compile(join_regex(fnpats))
def __repr__(self):
return "<FnmatchMatcher %r>" % self.pats
diff --git a/coverage/html.py b/coverage/html.py
index 6e21efaa..91ae2c27 100644
--- a/coverage/html.py
+++ b/coverage/html.py
@@ -165,7 +165,7 @@ class HtmlReporter(Reporter):
# If need be, determine the encoding of the source file. We use it
# later to properly write the HTML.
if sys.version_info < (3, 0):
- encoding = cu.source_encoding(source)
+ encoding = cu.source_encoding()
# Some UTF8 files have the dreaded UTF8 BOM. If so, junk it.
if encoding.startswith("utf-8") and source[:3] == "\xef\xbb\xbf":
source = source[3:]
@@ -185,7 +185,7 @@ class HtmlReporter(Reporter):
lines = []
- for lineno, line in enumerate(cu.source_token_lines(source), start=1):
+ for lineno, line in enumerate(cu.source_token_lines(), start=1):
# Figure out how to mark this line.
line_class = []
annotate_html = ""
diff --git a/coverage/misc.py b/coverage/misc.py
index 4b1dccb2..6962ae32 100644
--- a/coverage/misc.py
+++ b/coverage/misc.py
@@ -1,10 +1,10 @@
"""Miscellaneous stuff for Coverage."""
import errno
+import hashlib
import inspect
import os
-from coverage.backward import md5
from coverage.backward import string_class, to_bytes
@@ -86,12 +86,7 @@ def bool_or_none(b):
def join_regex(regexes):
"""Combine a list of regexes into one that matches any of them."""
- if len(regexes) > 1:
- return "|".join("(?:%s)" % r for r in regexes)
- elif regexes:
- return regexes[0]
- else:
- return ""
+ return "|".join("(?:%s)" % r for r in regexes)
def file_be_gone(path):
@@ -106,7 +101,7 @@ def file_be_gone(path):
class Hasher(object):
"""Hashes Python data into md5."""
def __init__(self):
- self.md5 = md5()
+ self.md5 = hashlib.md5()
def update(self, v):
"""Add `v` to the hash, recursively if needed."""
diff --git a/coverage/plugin.py b/coverage/plugin.py
new file mode 100644
index 00000000..35be41a9
--- /dev/null
+++ b/coverage/plugin.py
@@ -0,0 +1,108 @@
+"""Plugin management for coverage.py"""
+
+import sys
+
+
+class CoveragePlugin(object):
+ """Base class for coverage.py plugins."""
+ def __init__(self, options):
+ self.options = options
+
+ def trace_judge(self, disposition):
+ """Decide whether to trace this file with this plugin.
+
+ Set disposition.trace to True if this plugin should trace this file.
+ May also set other attributes in `disposition`.
+
+ """
+ return None
+
+ def source_file_name(self, filename):
+ """Return the source name for a given Python filename.
+
+ Can return None if tracing shouldn't continue.
+
+ """
+ return filename
+
+ def dynamic_source_file_name(self):
+ """Returns a callable that can return a source name for a frame.
+
+ The callable should take a filename and a frame, and return either a
+ filename or None:
+
+ def dynamic_source_filename_func(filename, frame)
+
+ Can return None if dynamic filenames aren't needed.
+
+ """
+ return None
+
+ def code_unit_class(self, morf):
+ """Return the CodeUnit class to use for a module or filename."""
+ return None
+
+
+class Plugins(object):
+ """The currently loaded collection of coverage.py plugins."""
+
+ def __init__(self):
+ self.order = []
+ self.names = {}
+
+ @classmethod
+ def load_plugins(cls, modules, config):
+ """Load plugins from `modules`.
+
+ Returns a list of loaded and configured plugins.
+
+ """
+ plugins = cls()
+
+ for module in modules:
+ __import__(module)
+ mod = sys.modules[module]
+
+ plugin_class = getattr(mod, "Plugin", None)
+ if plugin_class:
+ options = config.get_plugin_options(module)
+ plugin = plugin_class(options)
+ plugin.__name__ = module
+ plugins.order.append(plugin)
+ plugins.names[module] = plugin
+
+ return plugins
+
+ def __iter__(self):
+ return iter(self.order)
+
+ def get(self, module):
+ return self.names[module]
+
+
+def overrides(obj, method_name, base_class):
+ """Does `obj` override the `method_name` it got from `base_class`?
+
+ Determine if `obj` implements the method called `method_name`, which it
+ inherited from `base_class`.
+
+ Returns a boolean.
+
+ """
+ klass = obj.__class__
+ klass_func = getattr(klass, method_name)
+ base_func = getattr(base_class, method_name)
+
+ # Python 2/3 compatibility: Python 2 returns an instancemethod object, the
+ # function is the .im_func attribute. Python 3 returns a plain function
+ # object already.
+ if sys.version_info < (3, 0):
+ klass_func = klass_func.im_func
+ base_func = base_func.im_func
+
+ return klass_func is not base_func
+
+
+def plugin_implements(obj, method_name):
+ """Does the plugin `obj` implement `method_name`?"""
+ return overrides(obj, method_name, CoveragePlugin)
diff --git a/coverage/pytracer.py b/coverage/pytracer.py
new file mode 100644
index 00000000..7563ae11
--- /dev/null
+++ b/coverage/pytracer.py
@@ -0,0 +1,163 @@
+"""Raw data collector for Coverage."""
+
+import sys
+
+
+class PyTracer(object):
+ """Python implementation of the raw data tracer."""
+
+ # Because of poor implementations of trace-function-manipulating tools,
+ # the Python trace function must be kept very simple. In particular, there
+ # must be only one function ever set as the trace function, both through
+ # sys.settrace, and as the return value from the trace function. Put
+ # another way, the trace function must always return itself. It cannot
+ # swap in other functions, or return None to avoid tracing a particular
+ # frame.
+ #
+ # The trace manipulator that introduced this restriction is DecoratorTools,
+ # which sets a trace function, and then later restores the pre-existing one
+ # by calling sys.settrace with a function it found in the current frame.
+ #
+ # Systems that use DecoratorTools (or similar trace manipulations) must use
+ # PyTracer to get accurate results. The command-line --timid argument is
+ # used to force the use of this tracer.
+
+ def __init__(self):
+ # Attributes set from the collector:
+ self.data = None
+ self.arcs = False
+ self.should_trace = None
+ self.should_trace_cache = None
+ self.warn = None
+ self.plugin_data = None
+ # The threading module to use, if any.
+ self.threading = None
+
+ self.plugin = []
+ self.cur_file_dict = []
+ self.last_line = [0]
+
+ self.data_stack = []
+ self.last_exc_back = None
+ self.last_exc_firstlineno = 0
+ self.thread = None
+ self.stopped = False
+
+ def __repr__(self):
+ return "<PyTracer at 0x{0:0x}: {1} lines in {2} files>".format(
+ id(self),
+ sum(len(v) for v in self.data.values()),
+ len(self.data),
+ )
+
+ def _trace(self, frame, event, arg_unused):
+ """The trace function passed to sys.settrace."""
+
+ if self.stopped:
+ return
+
+ if self.last_exc_back: # TODO: bring this up to speed
+ if frame == self.last_exc_back:
+ # Someone forgot a return event.
+ if self.arcs and self.cur_file_dict:
+ pair = (self.last_line, -self.last_exc_firstlineno)
+ self.cur_file_dict[pair] = None
+ self.plugin, self.cur_file_dict, self.last_line = (
+ self.data_stack.pop()
+ )
+ self.last_exc_back = None
+
+ if event == 'call':
+ # Entering a new function context. Decide if we should trace
+ # in this file.
+ self.data_stack.append(
+ (self.plugin, self.cur_file_dict, self.last_line)
+ )
+ filename = frame.f_code.co_filename
+ disp = self.should_trace_cache.get(filename)
+ if disp is None:
+ disp = self.should_trace(filename, frame)
+ self.should_trace_cache[filename] = disp
+
+ self.plugin = None
+ self.cur_file_dict = None
+ if disp.trace:
+ tracename = disp.source_filename
+ if disp.plugin:
+ dyn_func = disp.plugin.dynamic_source_file_name()
+ if dyn_func:
+ tracename = dyn_func(tracename, frame)
+ if tracename:
+ if not self.check_include(tracename):
+ tracename = None
+ else:
+ tracename = None
+ if tracename:
+ if tracename not in self.data:
+ self.data[tracename] = {}
+ if disp.plugin:
+ self.plugin_data[tracename] = disp.plugin.__name__
+ self.cur_file_dict = self.data[tracename]
+ self.plugin = disp.plugin
+ # Set the last_line to -1 because the next arc will be entering a
+ # code block, indicated by (-1, n).
+ self.last_line = -1
+ elif event == 'line':
+ # Record an executed line.
+ if self.plugin:
+ lineno_from, lineno_to = self.plugin.line_number_range(frame)
+ else:
+ lineno_from, lineno_to = frame.f_lineno, frame.f_lineno
+ if lineno_from != -1:
+ if self.cur_file_dict is not None:
+ if self.arcs:
+ self.cur_file_dict[
+ (self.last_line, lineno_from)
+ ] = None
+ else:
+ for lineno in range(lineno_from, lineno_to+1):
+ self.cur_file_dict[lineno] = None
+ self.last_line = lineno_to
+ elif event == 'return':
+ if self.arcs and self.cur_file_dict:
+ first = frame.f_code.co_firstlineno
+ self.cur_file_dict[(self.last_line, -first)] = None
+ # Leaving this function, pop the filename stack.
+ self.plugin, self.cur_file_dict, self.last_line = (
+ self.data_stack.pop()
+ )
+ elif event == 'exception':
+ self.last_exc_back = frame.f_back
+ self.last_exc_firstlineno = frame.f_code.co_firstlineno
+ return self._trace
+
+ def start(self):
+ """Start this Tracer.
+
+ Return a Python function suitable for use with sys.settrace().
+
+ """
+ if self.threading:
+ self.thread = self.threading.currentThread()
+ sys.settrace(self._trace)
+ return self._trace
+
+ def stop(self):
+ """Stop this Tracer."""
+ self.stopped = True
+ if self.threading and self.thread != self.threading.currentThread():
+ # Called on a different thread than started us: we can't unhook
+            # ourselves, but we've set the flag that we should stop, so we won't
+ # do any more tracing.
+ return
+
+ if self.warn:
+ if sys.gettrace() != self._trace:
+ msg = "Trace function changed, measurement is likely wrong: %r"
+ self.warn(msg % (sys.gettrace(),))
+
+ sys.settrace(None)
+
+ def get_stats(self):
+ """Return a dictionary of statistics, or None."""
+ return None
diff --git a/coverage/report.py b/coverage/report.py
index 7627d1aa..b93749c8 100644
--- a/coverage/report.py
+++ b/coverage/report.py
@@ -33,8 +33,8 @@ class Reporter(object):
"""
morfs = morfs or self.coverage.data.measured_files()
file_locator = self.coverage.file_locator
- get_ext = self.coverage.data.extension_data().get
- self.code_units = code_unit_factory(morfs, file_locator, get_ext)
+ get_plugin = self.coverage.data.plugin_data().get
+ self.code_units = code_unit_factory(morfs, file_locator, get_plugin)
if self.config.include:
patterns = prep_patterns(self.config.include)
diff --git a/coverage/summary.py b/coverage/summary.py
index a6768cf9..9d31c226 100644
--- a/coverage/summary.py
+++ b/coverage/summary.py
@@ -71,7 +71,7 @@ class SummaryReporter(Reporter):
total += nums
except KeyboardInterrupt: # pragma: not covered
raise
- except:
+ except Exception:
report_it = not self.config.ignore_errors
if report_it:
typ, msg = sys.exc_info()[:2]
diff --git a/coverage/test_helpers.py b/coverage/test_helpers.py
new file mode 100644
index 00000000..efe68dcd
--- /dev/null
+++ b/coverage/test_helpers.py
@@ -0,0 +1,258 @@
+"""Mixin classes to help make good tests."""
+
+import atexit
+import collections
+import os
+import random
+import shutil
+import sys
+import tempfile
+import textwrap
+
+from coverage.backunittest import TestCase
+from coverage.backward import StringIO, to_bytes
+
+
+class Tee(object):
+ """A file-like that writes to all the file-likes it has."""
+
+ def __init__(self, *files):
+        """Make a Tee that writes to all the files in `files`."""
+ self._files = files
+ if hasattr(files[0], "encoding"):
+ self.encoding = files[0].encoding
+
+ def write(self, data):
+ """Write `data` to all the files."""
+ for f in self._files:
+ f.write(data)
+
+ if 0:
+ # Use this if you need to use a debugger, though it makes some tests
+ # fail, I'm not sure why...
+ def __getattr__(self, name):
+ return getattr(self._files[0], name)
+
+
+class ModuleAwareMixin(TestCase):
+ """A test case mixin that isolates changes to sys.modules."""
+
+ def setUp(self):
+ super(ModuleAwareMixin, self).setUp()
+
+ # Record sys.modules here so we can restore it in tearDown.
+ self.old_modules = dict(sys.modules)
+ self.addCleanup(self.cleanup_modules)
+
+ def cleanup_modules(self):
+ """Remove any new modules imported during the test run.
+
+ This lets us import the same source files for more than one test.
+
+ """
+ for m in [m for m in sys.modules if m not in self.old_modules]:
+ del sys.modules[m]
+
+
+class SysPathAwareMixin(TestCase):
+ """A test case mixin that isolates changes to sys.path."""
+
+ def setUp(self):
+ super(SysPathAwareMixin, self).setUp()
+
+ self.old_syspath = sys.path[:]
+ self.addCleanup(self.cleanup_syspath)
+
+ def cleanup_syspath(self):
+ """Restore the original sys.path."""
+ sys.path = self.old_syspath
+
+
+class EnvironmentAwareMixin(TestCase):
+ """A test case mixin that isolates changes to the environment."""
+
+ def setUp(self):
+ super(EnvironmentAwareMixin, self).setUp()
+
+ # Record environment variables that we changed with set_environ.
+ self.environ_undos = {}
+
+ self.addCleanup(self.cleanup_environ)
+
+ def set_environ(self, name, value):
+ """Set an environment variable `name` to be `value`.
+
+ The environment variable is set, and record is kept that it was set,
+ so that `tearDown` can restore its original value.
+
+ """
+ if name not in self.environ_undos:
+ self.environ_undos[name] = os.environ.get(name)
+ os.environ[name] = value
+
+ def cleanup_environ(self):
+ """Undo all the changes made by `set_environ`."""
+ for name, value in self.environ_undos.items():
+ if value is None:
+ del os.environ[name]
+ else:
+ os.environ[name] = value
+
+
+class StdStreamCapturingMixin(TestCase):
+ """A test case mixin that captures stdout and stderr."""
+
+ def setUp(self):
+ super(StdStreamCapturingMixin, self).setUp()
+
+ # Capture stdout and stderr so we can examine them in tests.
+ # nose keeps stdout from littering the screen, so we can safely Tee it,
+ # but it doesn't capture stderr, so we don't want to Tee stderr to the
+ # real stderr, since it will interfere with our nice field of dots.
+ self.old_stdout = sys.stdout
+ self.captured_stdout = StringIO()
+ sys.stdout = Tee(sys.stdout, self.captured_stdout)
+ self.old_stderr = sys.stderr
+ self.captured_stderr = StringIO()
+ sys.stderr = self.captured_stderr
+
+ self.addCleanup(self.cleanup_std_streams)
+
+ def cleanup_std_streams(self):
+ """Restore stdout and stderr."""
+ sys.stdout = self.old_stdout
+ sys.stderr = self.old_stderr
+
+ def stdout(self):
+ """Return the data written to stdout during the test."""
+ return self.captured_stdout.getvalue()
+
+ def stderr(self):
+ """Return the data written to stderr during the test."""
+ return self.captured_stderr.getvalue()
+
+
+class TempDirMixin(TestCase):
+ """A test case mixin that creates a temp directory and files in it."""
+
+ # Our own setting: most of these tests run in their own temp directory.
+ run_in_temp_dir = True
+
+ def setUp(self):
+ super(TempDirMixin, self).setUp()
+
+ if self.run_in_temp_dir:
+ # Create a temporary directory.
+ noise = str(random.random())[2:]
+ self.temp_root = os.path.join(tempfile.gettempdir(), 'test_cover')
+ self.temp_dir = os.path.join(self.temp_root, noise)
+ os.makedirs(self.temp_dir)
+ self.old_dir = os.getcwd()
+ os.chdir(self.temp_dir)
+
+ # Modules should be importable from this temp directory. We don't
+ # use '' because we make lots of different temp directories and
+ # nose's caching importer can get confused. The full path prevents
+ # problems.
+ sys.path.insert(0, os.getcwd())
+
+ class_behavior = self.class_behavior()
+ class_behavior.tests += 1
+ class_behavior.test_method_made_any_files = False
+ class_behavior.temp_dir = self.run_in_temp_dir
+
+ self.addCleanup(self.cleanup_temp_dir)
+
+ def cleanup_temp_dir(self):
+ """Clean up the temp directories we made."""
+
+ if self.run_in_temp_dir:
+ # Get rid of the temporary directory.
+ os.chdir(self.old_dir)
+ shutil.rmtree(self.temp_root)
+
+ class_behavior = self.class_behavior()
+ if class_behavior.test_method_made_any_files:
+ class_behavior.tests_making_files += 1
+
+ def make_file(self, filename, text="", newline=None):
+ """Create a file for testing.
+
+ `filename` is the relative path to the file, including directories if
+ desired, which will be created if need be. `text` is the content to
+ create in the file. If `newline` is provided, it is a string that will
+ be used as the line endings in the created file, otherwise the line
+ endings are as provided in `text`.
+
+ Returns `filename`.
+
+ """
+ # Tests that call `make_file` should be run in a temp environment.
+ assert self.run_in_temp_dir
+ self.class_behavior().test_method_made_any_files = True
+
+ text = textwrap.dedent(text)
+ if newline:
+ text = text.replace("\n", newline)
+
+ # Make sure the directories are available.
+ dirs, _ = os.path.split(filename)
+ if dirs and not os.path.exists(dirs):
+ os.makedirs(dirs)
+
+ # Create the file.
+ with open(filename, 'wb') as f:
+ f.write(to_bytes(text))
+
+ return filename
+
+ # We run some tests in temporary directories, because they may need to make
+ # files for the tests. But this is expensive, so we can change per-class
+ # whether a temp dir is used or not. It's easy to forget to set that
+ # option properly, so we track information about what the tests did, and
+ # then report at the end of the process on test classes that were set
+ # wrong.
+
+ class ClassBehavior(object):
+ """A value object to store per-class."""
+ def __init__(self):
+ self.tests = 0
+ self.temp_dir = True
+ self.tests_making_files = 0
+ self.test_method_made_any_files = False
+
+ # Map from class to info about how it ran.
+ class_behaviors = collections.defaultdict(ClassBehavior)
+
+ @classmethod
+ def report_on_class_behavior(cls):
+ """Called at process exit to report on class behavior."""
+ for test_class, behavior in cls.class_behaviors.items():
+ if behavior.temp_dir and behavior.tests_making_files == 0:
+ bad = "Inefficient"
+ elif not behavior.temp_dir and behavior.tests_making_files > 0:
+ bad = "Unsafe"
+ else:
+ bad = ""
+
+ if bad:
+ if behavior.temp_dir:
+ where = "in a temp directory"
+ else:
+ where = "without a temp directory"
+ print(
+ "%s: %s ran %d tests, %d made files %s" % (
+ bad,
+ test_class.__name__,
+ behavior.tests,
+ behavior.tests_making_files,
+ where,
+ )
+ )
+
+ def class_behavior(self):
+ """Get the ClassBehavior instance for this test."""
+ return self.class_behaviors[self.__class__]
+
+# When the process ends, find out about bad classes.
+atexit.register(TempDirMixin.report_on_class_behavior)
diff --git a/coverage/tracer.c b/coverage/tracer.c
index ca8d61c1..5bf5c462 100644
--- a/coverage/tracer.c
+++ b/coverage/tracer.c
@@ -30,6 +30,7 @@
#define MyText_AS_BYTES(o) PyUnicode_AsASCIIString(o)
#define MyText_AS_STRING(o) PyBytes_AS_STRING(o)
#define MyInt_FromLong(l) PyLong_FromLong(l)
+#define MyInt_AsLong(o) PyLong_AsLong(o)
#define MyType_HEAD_INIT PyVarObject_HEAD_INIT(NULL, 0)
@@ -40,6 +41,7 @@
#define MyText_AS_BYTES(o) (Py_INCREF(o), o)
#define MyText_AS_STRING(o) PyString_AS_STRING(o)
#define MyInt_FromLong(l) PyInt_FromLong(l)
+#define MyInt_AsLong(o) PyInt_AsLong(o)
#define MyType_HEAD_INIT PyObject_HEAD_INIT(NULL) 0,
@@ -54,10 +56,23 @@
frame.
*/
typedef struct {
- PyObject * file_data; /* PyMem_Malloc'ed, a borrowed ref. */
+ /* The current file_data dictionary. Borrowed. */
+ PyObject * file_data;
+
+ /* The line number of the last line recorded, for tracing arcs.
+ -1 means there was no previous line, as when entering a code object.
+ */
int last_line;
} DataStackEntry;
+/* A data stack is a dynamically allocated vector of DataStackEntry's. */
+typedef struct {
+ int depth; /* The index of the last-used entry in stack. */
+ int alloc; /* number of entries allocated at stack. */
+ /* The file data at each level, or NULL if not recording. */
+ DataStackEntry * stack;
+} DataStack;
+
/* The CTracer type. */
typedef struct {
@@ -66,7 +81,9 @@ typedef struct {
/* Python objects manipulated directly by the Collector class. */
PyObject * should_trace;
PyObject * warn;
+ PyObject * coroutine_id_func;
PyObject * data;
+ PyObject * plugin_data;
PyObject * should_trace_cache;
PyObject * arcs;
@@ -86,19 +103,17 @@ typedef struct {
the keys are line numbers. In both cases, the value is irrelevant
(None).
*/
- /* The index of the last-used entry in data_stack. */
- int depth;
- /* The file data at each level, or NULL if not recording. */
- DataStackEntry * data_stack;
- int data_stack_alloc; /* number of entries allocated at data_stack. */
- /* The current file_data dictionary. Borrowed. */
- PyObject * cur_file_data;
+ DataStack data_stack; /* Used if we aren't doing coroutines. */
+ PyObject * data_stack_index; /* Used if we are doing coroutines. */
+ DataStack * data_stacks;
+ int data_stacks_alloc;
+ int data_stacks_used;
- /* The line number of the last line recorded, for tracing arcs.
- -1 means there was no previous line, as when entering a code object.
- */
- int last_line;
+ DataStack * pdata_stack;
+
+ /* The current file's data stack entry, copied from the stack. */
+ DataStackEntry cur_entry;
/* The parent frame for the last exception event, to fix missing returns. */
PyFrameObject * last_exc_back;
@@ -119,9 +134,47 @@ typedef struct {
#endif /* COLLECT_STATS */
} CTracer;
+
#define STACK_DELTA 100
static int
+DataStack_init(CTracer *self, DataStack *pdata_stack)
+{
+ pdata_stack->depth = -1;
+ pdata_stack->stack = NULL;
+ pdata_stack->alloc = 0;
+ return RET_OK;
+}
+
+static void
+DataStack_dealloc(CTracer *self, DataStack *pdata_stack)
+{
+ PyMem_Free(pdata_stack->stack);
+}
+
+static int
+DataStack_grow(CTracer *self, DataStack *pdata_stack)
+{
+ pdata_stack->depth++;
+ if (pdata_stack->depth >= pdata_stack->alloc) {
+ STATS( self->stats.stack_reallocs++; )
+ /* We've outgrown our data_stack array: make it bigger. */
+ int bigger = pdata_stack->alloc + STACK_DELTA;
+ DataStackEntry * bigger_data_stack = PyMem_Realloc(pdata_stack->stack, bigger * sizeof(DataStackEntry));
+ if (bigger_data_stack == NULL) {
+ STATS( self->stats.errors++; )
+ PyErr_NoMemory();
+ pdata_stack->depth--;
+ return RET_ERROR;
+ }
+ pdata_stack->stack = bigger_data_stack;
+ pdata_stack->alloc = bigger;
+ }
+ return RET_OK;
+}
+
+
+static int
CTracer_init(CTracer *self, PyObject *args_unused, PyObject *kwds_unused)
{
#if COLLECT_STATS
@@ -138,24 +191,32 @@ CTracer_init(CTracer *self, PyObject *args_unused, PyObject *kwds_unused)
self->should_trace = NULL;
self->warn = NULL;
+ self->coroutine_id_func = NULL;
self->data = NULL;
+ self->plugin_data = NULL;
self->should_trace_cache = NULL;
self->arcs = NULL;
self->started = 0;
self->tracing_arcs = 0;
- self->depth = -1;
- self->data_stack = PyMem_Malloc(STACK_DELTA*sizeof(DataStackEntry));
- if (self->data_stack == NULL) {
+ if (DataStack_init(self, &self->data_stack)) {
+ return RET_ERROR;
+ }
+ self->data_stack_index = PyDict_New();
+ if (self->data_stack_index == NULL) {
STATS( self->stats.errors++; )
- PyErr_NoMemory();
return RET_ERROR;
}
- self->data_stack_alloc = STACK_DELTA;
- self->cur_file_data = NULL;
- self->last_line = -1;
+ self->data_stacks = NULL;
+ self->data_stacks_alloc = 0;
+ self->data_stacks_used = 0;
+
+ self->pdata_stack = &self->data_stack;
+
+ self->cur_entry.file_data = NULL;
+ self->cur_entry.last_line = -1;
self->last_exc_back = NULL;
@@ -165,16 +226,28 @@ CTracer_init(CTracer *self, PyObject *args_unused, PyObject *kwds_unused)
static void
CTracer_dealloc(CTracer *self)
{
+ int i;
+
if (self->started) {
PyEval_SetTrace(NULL, NULL);
}
Py_XDECREF(self->should_trace);
Py_XDECREF(self->warn);
+ Py_XDECREF(self->coroutine_id_func);
Py_XDECREF(self->data);
+ Py_XDECREF(self->plugin_data);
Py_XDECREF(self->should_trace_cache);
- PyMem_Free(self->data_stack);
+ DataStack_dealloc(self, &self->data_stack);
+ if (self->data_stacks) {
+ for (i = 0; i < self->data_stacks_used; i++) {
+ DataStack_dealloc(self, self->data_stacks + i);
+ }
+ PyMem_Free(self->data_stacks);
+ }
+
+ Py_XDECREF(self->data_stack_index);
Py_TYPE(self)->tp_free((PyObject*)self);
}
@@ -229,7 +302,7 @@ showlog(int depth, int lineno, PyObject * filename, const char * msg)
static const char * what_sym[] = {"CALL", "EXC ", "LINE", "RET "};
#endif
-/* Record a pair of integers in self->cur_file_data. */
+/* Record a pair of integers in self->cur_entry.file_data. */
static int
CTracer_record_pair(CTracer *self, int l1, int l2)
{
@@ -237,7 +310,7 @@ CTracer_record_pair(CTracer *self, int l1, int l2)
PyObject * t = Py_BuildValue("(ii)", l1, l2);
if (t != NULL) {
- if (PyDict_SetItem(self->cur_file_data, t, Py_None) < 0) {
+ if (PyDict_SetItem(self->cur_entry.file_data, t, Py_None) < 0) {
STATS( self->stats.errors++; )
ret = RET_ERROR;
}
@@ -250,6 +323,63 @@ CTracer_record_pair(CTracer *self, int l1, int l2)
return ret;
}
+/* Set self->pdata_stack to the proper data_stack to use. */
+static int
+CTracer_set_pdata_stack(CTracer *self)
+{
+ if (self->coroutine_id_func != Py_None) {
+ PyObject * co_obj = NULL;
+ PyObject * stack_index = NULL;
+ long the_index = 0;
+
+ co_obj = PyObject_CallObject(self->coroutine_id_func, NULL);
+ if (co_obj == NULL) {
+ return RET_ERROR;
+ }
+ stack_index = PyDict_GetItem(self->data_stack_index, co_obj);
+ if (stack_index == NULL) {
+ /* A new coroutine object. Make a new data stack. */
+ the_index = self->data_stacks_used;
+ stack_index = MyInt_FromLong(the_index);
+ if (PyDict_SetItem(self->data_stack_index, co_obj, stack_index) < 0) {
+ STATS( self->stats.errors++; )
+ Py_XDECREF(co_obj);
+ Py_XDECREF(stack_index);
+ return RET_ERROR;
+ }
+ self->data_stacks_used++;
+ if (self->data_stacks_used >= self->data_stacks_alloc) {
+ int bigger = self->data_stacks_alloc + 10;
+ DataStack * bigger_stacks = PyMem_Realloc(self->data_stacks, bigger * sizeof(DataStack));
+ if (bigger_stacks == NULL) {
+ STATS( self->stats.errors++; )
+ PyErr_NoMemory();
+ Py_XDECREF(co_obj);
+ Py_XDECREF(stack_index);
+ return RET_ERROR;
+ }
+ self->data_stacks = bigger_stacks;
+ self->data_stacks_alloc = bigger;
+ }
+ DataStack_init(self, &self->data_stacks[the_index]);
+ }
+ else {
+ Py_INCREF(stack_index);
+ the_index = MyInt_AsLong(stack_index);
+ }
+
+ self->pdata_stack = &self->data_stacks[the_index];
+
+ Py_XDECREF(co_obj);
+ Py_XDECREF(stack_index);
+ }
+ else {
+ self->pdata_stack = &self->data_stack;
+ }
+
+ return RET_OK;
+}
+
/*
* The Trace Function
*/
@@ -260,6 +390,7 @@ CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unuse
PyObject * filename = NULL;
PyObject * tracename = NULL;
PyObject * disposition = NULL;
+ PyObject * disp_trace = NULL;
#if WHAT_LOG || TRACE_LOG
PyObject * ascii = NULL;
#endif
@@ -294,16 +425,18 @@ CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unuse
we'll need to keep more of the missed frame's state.
*/
STATS( self->stats.missed_returns++; )
- if (self->depth >= 0) {
- if (self->tracing_arcs && self->cur_file_data) {
- if (CTracer_record_pair(self, self->last_line, -self->last_exc_firstlineno) < 0) {
+ if (CTracer_set_pdata_stack(self)) {
+ return RET_ERROR;
+ }
+ if (self->pdata_stack->depth >= 0) {
+ if (self->tracing_arcs && self->cur_entry.file_data) {
+ if (CTracer_record_pair(self, self->cur_entry.last_line, -self->last_exc_firstlineno) < 0) {
return RET_ERROR;
}
}
- SHOWLOG(self->depth, frame->f_lineno, frame->f_code->co_filename, "missedreturn");
- self->cur_file_data = self->data_stack[self->depth].file_data;
- self->last_line = self->data_stack[self->depth].last_line;
- self->depth--;
+ SHOWLOG(self->pdata_stack->depth, frame->f_lineno, frame->f_code->co_filename, "missedreturn");
+ self->cur_entry = self->pdata_stack->stack[self->pdata_stack->depth];
+ self->pdata_stack->depth--;
}
}
self->last_exc_back = NULL;
@@ -314,25 +447,15 @@ CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unuse
case PyTrace_CALL: /* 0 */
STATS( self->stats.calls++; )
/* Grow the stack. */
- self->depth++;
- if (self->depth >= self->data_stack_alloc) {
- STATS( self->stats.stack_reallocs++; )
- /* We've outgrown our data_stack array: make it bigger. */
- int bigger = self->data_stack_alloc + STACK_DELTA;
- DataStackEntry * bigger_data_stack = PyMem_Realloc(self->data_stack, bigger * sizeof(DataStackEntry));
- if (bigger_data_stack == NULL) {
- STATS( self->stats.errors++; )
- PyErr_NoMemory();
- self->depth--;
- return RET_ERROR;
- }
- self->data_stack = bigger_data_stack;
- self->data_stack_alloc = bigger;
+ if (CTracer_set_pdata_stack(self)) {
+ return RET_ERROR;
+ }
+ if (DataStack_grow(self, self->pdata_stack)) {
+ return RET_ERROR;
}
/* Push the current state on the stack. */
- self->data_stack[self->depth].file_data = self->cur_file_data;
- self->data_stack[self->depth].last_line = self->last_line;
+ self->pdata_stack->stack[self->pdata_stack->depth] = self->cur_entry;
/* Check if we should trace this line. */
filename = frame->f_code->co_filename;
@@ -358,15 +481,33 @@ CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unuse
Py_INCREF(disposition);
}
- /* If tracename is a string, then we're supposed to trace. */
- tracename = PyObject_GetAttrString(disposition, "filename");
- if (tracename == NULL) {
+ disp_trace = PyObject_GetAttrString(disposition, "trace");
+ if (disp_trace == NULL) {
STATS( self->stats.errors++; )
Py_DECREF(disposition);
return RET_ERROR;
}
+
+ tracename = Py_None;
+ Py_INCREF(tracename);
+
+ if (disp_trace == Py_True) {
+ /* If tracename is a string, then we're supposed to trace. */
+ tracename = PyObject_GetAttrString(disposition, "source_filename");
+ if (tracename == NULL) {
+ STATS( self->stats.errors++; )
+ Py_DECREF(disposition);
+ Py_DECREF(disp_trace);
+ return RET_ERROR;
+ }
+ }
+ Py_DECREF(disp_trace);
+
if (MyText_Check(tracename)) {
PyObject * file_data = PyDict_GetItem(self->data, tracename);
+ PyObject * disp_plugin = NULL;
+ PyObject * disp_plugin_name = NULL;
+
if (file_data == NULL) {
file_data = PyDict_New();
if (file_data == NULL) {
@@ -383,51 +524,81 @@ CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unuse
Py_DECREF(disposition);
return RET_ERROR;
}
+
+ if (self->plugin_data != NULL) {
+ /* If the disposition mentions a plugin, record that. */
+ disp_plugin = PyObject_GetAttrString(disposition, "plugin");
+ if (disp_plugin == NULL) {
+ STATS( self->stats.errors++; )
+ Py_DECREF(tracename);
+ Py_DECREF(disposition);
+ return RET_ERROR;
+ }
+ if (disp_plugin != Py_None) {
+ disp_plugin_name = PyObject_GetAttrString(disp_plugin, "__name__");
+ Py_DECREF(disp_plugin);
+ if (disp_plugin_name == NULL) {
+ STATS( self->stats.errors++; )
+ Py_DECREF(tracename);
+ Py_DECREF(disposition);
+ return RET_ERROR;
+ }
+ ret = PyDict_SetItem(self->plugin_data, tracename, disp_plugin_name);
+ Py_DECREF(disp_plugin_name);
+ if (ret < 0) {
+ Py_DECREF(tracename);
+ Py_DECREF(disposition);
+ return RET_ERROR;
+ }
+ }
+ }
}
- self->cur_file_data = file_data;
+ self->cur_entry.file_data = file_data;
/* Make the frame right in case settrace(gettrace()) happens. */
Py_INCREF(self);
frame->f_trace = (PyObject*)self;
- SHOWLOG(self->depth, frame->f_lineno, filename, "traced");
+ SHOWLOG(self->pdata_stack->depth, frame->f_lineno, filename, "traced");
}
else {
- self->cur_file_data = NULL;
- SHOWLOG(self->depth, frame->f_lineno, filename, "skipped");
+ self->cur_entry.file_data = NULL;
+ SHOWLOG(self->pdata_stack->depth, frame->f_lineno, filename, "skipped");
}
Py_DECREF(tracename);
Py_DECREF(disposition);
- self->last_line = -1;
+ self->cur_entry.last_line = -1;
break;
case PyTrace_RETURN: /* 3 */
STATS( self->stats.returns++; )
/* A near-copy of this code is above in the missing-return handler. */
- if (self->depth >= 0) {
- if (self->tracing_arcs && self->cur_file_data) {
+ if (CTracer_set_pdata_stack(self)) {
+ return RET_ERROR;
+ }
+ if (self->pdata_stack->depth >= 0) {
+ if (self->tracing_arcs && self->cur_entry.file_data) {
int first = frame->f_code->co_firstlineno;
- if (CTracer_record_pair(self, self->last_line, -first) < 0) {
+ if (CTracer_record_pair(self, self->cur_entry.last_line, -first) < 0) {
return RET_ERROR;
}
}
- SHOWLOG(self->depth, frame->f_lineno, frame->f_code->co_filename, "return");
- self->cur_file_data = self->data_stack[self->depth].file_data;
- self->last_line = self->data_stack[self->depth].last_line;
- self->depth--;
+ SHOWLOG(self->pdata_stack->depth, frame->f_lineno, frame->f_code->co_filename, "return");
+ self->cur_entry = self->pdata_stack->stack[self->pdata_stack->depth];
+ self->pdata_stack->depth--;
}
break;
case PyTrace_LINE: /* 2 */
STATS( self->stats.lines++; )
- if (self->depth >= 0) {
- SHOWLOG(self->depth, frame->f_lineno, frame->f_code->co_filename, "line");
- if (self->cur_file_data) {
+ if (self->pdata_stack->depth >= 0) {
+ SHOWLOG(self->pdata_stack->depth, frame->f_lineno, frame->f_code->co_filename, "line");
+ if (self->cur_entry.file_data) {
/* We're tracing in this frame: record something. */
if (self->tracing_arcs) {
/* Tracing arcs: key is (last_line,this_line). */
- if (CTracer_record_pair(self, self->last_line, frame->f_lineno) < 0) {
+ if (CTracer_record_pair(self, self->cur_entry.last_line, frame->f_lineno) < 0) {
return RET_ERROR;
}
}
@@ -438,7 +609,7 @@ CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unuse
STATS( self->stats.errors++; )
return RET_ERROR;
}
- ret = PyDict_SetItem(self->cur_file_data, this_line, Py_None);
+ ret = PyDict_SetItem(self->cur_entry.file_data, this_line, Py_None);
Py_DECREF(this_line);
if (ret < 0) {
STATS( self->stats.errors++; )
@@ -446,7 +617,7 @@ CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unuse
}
}
}
- self->last_line = frame->f_lineno;
+ self->cur_entry.last_line = frame->f_lineno;
}
break;
@@ -562,7 +733,7 @@ CTracer_start(CTracer *self, PyObject *args_unused)
PyEval_SetTrace((Py_tracefunc)CTracer_trace, (PyObject*)self);
self->started = 1;
self->tracing_arcs = self->arcs && PyObject_IsTrue(self->arcs);
- self->last_line = -1;
+ self->cur_entry.last_line = -1;
/* start() returns a trace function usable with sys.settrace() */
Py_INCREF(self);
@@ -594,7 +765,7 @@ CTracer_get_stats(CTracer *self)
"new_files", self->stats.new_files,
"missed_returns", self->stats.missed_returns,
"stack_reallocs", self->stats.stack_reallocs,
- "stack_alloc", self->data_stack_alloc,
+ "stack_alloc", self->pdata_stack->alloc,
"errors", self->stats.errors
);
#else
@@ -610,9 +781,15 @@ CTracer_members[] = {
{ "warn", T_OBJECT, offsetof(CTracer, warn), 0,
PyDoc_STR("Function for issuing warnings.") },
+ { "coroutine_id_func", T_OBJECT, offsetof(CTracer, coroutine_id_func), 0,
+ PyDoc_STR("Function for determining coroutine context") },
+
{ "data", T_OBJECT, offsetof(CTracer, data), 0,
PyDoc_STR("The raw dictionary of trace data.") },
+ { "plugin_data", T_OBJECT, offsetof(CTracer, plugin_data), 0,
+ PyDoc_STR("Mapping from filename to plugin name.") },
+
{ "should_trace_cache", T_OBJECT, offsetof(CTracer, should_trace_cache), 0,
PyDoc_STR("Dictionary caching should_trace results.") },