-rw-r--r--  coverage/cmdline.py   |   2
-rw-r--r--  coverage/control.py   |  20
-rw-r--r--  coverage/data.py      | 272
-rw-r--r--  tests/test_cmdline.py |  11
-rw-r--r--  tests/test_data.py    |  73
5 files changed, 173 insertions, 205 deletions
diff --git a/coverage/cmdline.py b/coverage/cmdline.py
index fba1112f..4d1d1e72 100644
--- a/coverage/cmdline.py
+++ b/coverage/cmdline.py
@@ -661,7 +661,7 @@ class CoverageScript(object):
         self.coverage.load()
         data = self.coverage.get_data()
         print(info_header("data"))
-        print("path: %s" % self.coverage._data_files.filename)
+        print("path: %s" % self.coverage.get_data().filename)
         if data:
             print("has_arcs: %r" % data.has_arcs())
             summary = data.line_counts(fullpath=True)
diff --git a/coverage/control.py b/coverage/control.py
index a5943aa8..1760ee78 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -15,7 +15,7 @@ from coverage.annotate import AnnotateReporter
 from coverage.backward import string_class, iitems
 from coverage.collector import Collector
 from coverage.config import read_coverage_config
-from coverage.data import CoverageData, CoverageDataFiles
+from coverage.data import CoverageData
 from coverage.debug import DebugControl, write_formatted_info
 from coverage.disposition import disposition_debug_msg
 from coverage.files import PathAliases, set_relative_directory, abs_file
@@ -152,7 +152,7 @@ class Coverage(object):
         self._warnings = []

         # Other instance attributes, set later.
-        self._data = self._data_files = self._collector = None
+        self._data = self._collector = None
         self._plugins = None
         self._inorout = None
         self._inorout_class = InOrOut
@@ -270,8 +270,7 @@ class Coverage(object):
         # Create the data file.  We do this at construction time so that the
         # data file will be written into the directory where the process
         # started rather than wherever the process eventually chdir'd to.
-        self._data = CoverageData(debug=self._debug)
-        self._data_files = CoverageDataFiles(
+        self._data = CoverageData(
             basename=self.config.data_file, warn=self._warn, debug=self._debug,
         )

@@ -395,7 +394,7 @@ class Coverage(object):
         """Load previously-collected coverage data from the data file."""
         self._init()
         self._collector.reset()
-        self._data_files.read(self._data)
+        self._data.read()

     def start(self):
         """Start measuring code coverage.
@@ -449,8 +448,7 @@ class Coverage(object):
         """
         self._init()
         self._collector.reset()
-        self._data.erase()
-        self._data_files.erase(parallel=self.config.parallel)
+        self._data.erase(parallel=self.config.parallel)

     def clear_exclude(self, which='exclude'):
         """Clear the exclude list."""
@@ -503,7 +501,7 @@ class Coverage(object):
         """Save the collected coverage data to the data file."""
         self._init()
         data = self.get_data()
-        self._data_files.write(data, suffix=self._data_suffix)
+        data.write(suffix=self._data_suffix)

     def combine(self, data_paths=None, strict=False):
         """Combine together a number of similarly-named coverage data files.
@@ -538,9 +536,7 @@ class Coverage(object):
                 for pattern in paths[1:]:
                     aliases.add(pattern, result)

-        self._data_files.combine_parallel_data(
-            self._data, aliases=aliases, data_paths=data_paths, strict=strict,
-        )
+        self._data.combine_parallel_data(aliases=aliases, data_paths=data_paths, strict=strict)

     def get_data(self):
         """Get the collected data.
@@ -827,7 +823,7 @@ class Coverage(object):
             ('configs_attempted', self.config.attempted_config_files),
             ('configs_read', self.config.config_files_read),
             ('config_file', self.config.config_file),
-            ('data_path', self._data_files.filename),
+            ('data_path', self._data.filename),
             ('python', sys.version.replace('\n', '')),
             ('platform', platform.platform()),
             ('implementation', platform.python_implementation()),
diff --git a/coverage/data.py b/coverage/data.py
index 9f2d1308..6d30e2ba 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -57,8 +57,7 @@ class CoverageData(object):
     names in this API are case-sensitive, even on platforms with
     case-insensitive file systems.

-    To read a coverage.py data file, use :meth:`read_file`, or
-    :meth:`read_fileobj` if you have an already-opened file.  You can then
+    To read a coverage.py data file, use :meth:`read_file`.  You can then
     access the line, arc, or file tracer data with :meth:`lines`, :meth:`arcs`,
     or :meth:`file_tracer`.  Run information is available with
     :meth:`run_infos`.
@@ -78,8 +77,7 @@ class CoverageData(object):

     To add a file without any measured data, use :meth:`touch_file`.

-    You write to a named file with :meth:`write_file`, or to an already opened
-    file with :meth:`write_fileobj`.
+    You write to a named file with :meth:`write_file`.

     You can clear the data in memory with :meth:`erase`.  Two data collections
     can be combined by using :meth:`update` on one :class:`CoverageData`,
@@ -112,13 +110,19 @@ class CoverageData(object):
     # line data is easily recovered from the arcs: it is all the first elements
     # of the pairs that are greater than zero.

-    def __init__(self, debug=None):
+    def __init__(self, basename=None, warn=None, debug=None):
         """Create a CoverageData.

+        `warn` is the warning function to use.
+
+        `basename` is the name of the file to use for storing data.
+
         `debug` is a `DebugControl` object for writing debug messages.

         """
+        self._warn = warn
         self._debug = debug
+        self.filename = os.path.abspath(basename or ".coverage")

         # A map from canonical Python source file name to a dictionary in
         # which there's an entry for each line number that has been
@@ -262,7 +266,12 @@ class CoverageData(object):

     __bool__ = __nonzero__

-    def read_fileobj(self, file_obj):
+    def read(self):
+        """Read the coverage data."""
+        if os.path.exists(self.filename):
+            self.read_file(self.filename)
+
+    def _read_fileobj(self, file_obj):
         """Read the coverage data from the given file object.

         Should only be used on an empty CoverageData object.
@@ -290,7 +299,7 @@ class CoverageData(object):
             self._debug.write("Reading data from %r" % (filename,))
         try:
             with self._open_for_reading(filename) as f:
-                self.read_fileobj(f)
+                self._read_fileobj(f)
         except Exception as exc:
             raise CoverageException(
                 "Couldn't read data from '%s': %s: %s" % (
@@ -438,7 +447,34 @@ class CoverageData(object):

         self._validate()

-    def write_fileobj(self, file_obj):
+    def write(self, suffix=None):
+        """Write the collected coverage data to a file.
+
+        `suffix` is a suffix to append to the base file name. This can be used
+        for multiple or parallel execution, so that many coverage data files
+        can exist simultaneously.  A dot will be used to join the base name and
+        the suffix.
+
+        """
+        filename = self.filename
+        if suffix is True:
+            # If data_suffix was a simple true value, then make a suffix with
+            # plenty of distinguishing information.  We do this here in
+            # `save()` at the last minute so that the pid will be correct even
+            # if the process forks.
+            extra = ""
+            if _TEST_NAME_FILE:                             # pragma: debugging
+                with open(_TEST_NAME_FILE) as f:
+                    test_name = f.read()
+                extra = "." + test_name
+            dice = random.Random(os.urandom(8)).randint(0, 999999)
+            suffix = "%s%s.%s.%06d" % (socket.gethostname(), extra, os.getpid(), dice)
+
+        if suffix:
+            filename += "." + suffix
+        self.write_file(filename)
+
+    def _write_fileobj(self, file_obj):
         """Write the coverage data to `file_obj`."""

         # Create the file data.
@@ -465,16 +501,33 @@ class CoverageData(object):
         if self._debug and self._debug.should('dataio'):
             self._debug.write("Writing data to %r" % (filename,))
         with open(filename, 'w') as fdata:
-            self.write_fileobj(fdata)
+            self._write_fileobj(fdata)
+
+    def erase(self, parallel=False):
+        """Erase the data in this object.
+
+        If `parallel` is true, then also deletes data files created from the
+        basename by parallel-mode.

-    def erase(self):
-        """Erase the data in this object."""
+        """
         self._lines = None
         self._arcs = None
         self._file_tracers = {}
         self._runs = []
         self._validate()

+        if self._debug and self._debug.should('dataio'):
+            self._debug.write("Erasing data file %r" % (self.filename,))
+        file_be_gone(self.filename)
+        if parallel:
+            data_dir, local = os.path.split(self.filename)
+            localdot = local + '.*'
+            pattern = os.path.join(os.path.abspath(data_dir), localdot)
+            for filename in glob.glob(pattern):
+                if self._debug and self._debug.should('dataio'):
+                    self._debug.write("Erasing parallel data file %r" % (filename,))
+                file_be_gone(filename)
+
     def update(self, other_data, aliases=None):
         """Update this data with data from another `CoverageData`.
@@ -535,6 +588,69 @@ class CoverageData(object):

         self._validate()

+    def combine_parallel_data(self, aliases=None, data_paths=None, strict=False):
+        """Combine a number of data files together.
+
+        Treat `self.filename` as a file prefix, and combine the data from all
+        of the data files starting with that prefix plus a dot.
+
+        If `aliases` is provided, it's a `PathAliases` object that is used to
+        re-map paths to match the local machine's.
+
+        If `data_paths` is provided, it is a list of directories or files to
+        combine.  Directories are searched for files that start with
+        `self.filename` plus dot as a prefix, and those files are combined.
+
+        If `data_paths` is not provided, then the directory portion of
+        `self.filename` is used as the directory to search for data files.
+
+        Every data file found and combined is then deleted from disk.  If a file
+        cannot be read, a warning will be issued, and the file will not be
+        deleted.
+
+        If `strict` is true, and no files are found to combine, an error is
+        raised.
+
+        """
+        # Because of the os.path.abspath in the constructor, data_dir will
+        # never be an empty string.
+        data_dir, local = os.path.split(self.filename)
+        localdot = local + '.*'
+
+        data_paths = data_paths or [data_dir]
+        files_to_combine = []
+        for p in data_paths:
+            if os.path.isfile(p):
+                files_to_combine.append(os.path.abspath(p))
+            elif os.path.isdir(p):
+                pattern = os.path.join(os.path.abspath(p), localdot)
+                files_to_combine.extend(glob.glob(pattern))
+            else:
+                raise CoverageException("Couldn't combine from non-existent path '%s'" % (p,))
+
+        if strict and not files_to_combine:
+            raise CoverageException("No data to combine")
+
+        files_combined = 0
+        for f in files_to_combine:
+            new_data = CoverageData(debug=self._debug)
+            try:
+                new_data.read_file(f)
+            except CoverageException as exc:
+                if self._warn:
+                    # The CoverageException has the file name in it, so just
+                    # use the message as the warning.
+                    self._warn(str(exc))
+            else:
+                self.update(new_data, aliases=aliases)
+                files_combined += 1
+                if self._debug and self._debug.should('dataio'):
+                    self._debug.write("Deleting combined data file %r" % (f,))
+                file_be_gone(f)
+
+        if strict and not files_combined:
+            raise CoverageException("No usable data files")
+
     ##
     ## Miscellaneous
     ##
@@ -609,140 +725,6 @@ class CoverageData(object):
         return self._arcs is not None


-class CoverageDataFiles(object):
-    """Manage the use of coverage data files."""
-
-    def __init__(self, basename=None, warn=None, debug=None):
-        """Create a CoverageDataFiles to manage data files.
-
-        `warn` is the warning function to use.
-
-        `basename` is the name of the file to use for storing data.
-
-        `debug` is a `DebugControl` object for writing debug messages.
-
-        """
-        self.warn = warn
-        self.debug = debug
-
-        # Construct the file name that will be used for data storage.
-        self.filename = os.path.abspath(basename or ".coverage")
-
-    def erase(self, parallel=False):
-        """Erase the data from the file storage.
-
-        If `parallel` is true, then also deletes data files created from the
-        basename by parallel-mode.
-
-        """
-        if self.debug and self.debug.should('dataio'):
-            self.debug.write("Erasing data file %r" % (self.filename,))
-        file_be_gone(self.filename)
-        if parallel:
-            data_dir, local = os.path.split(self.filename)
-            localdot = local + '.*'
-            pattern = os.path.join(os.path.abspath(data_dir), localdot)
-            for filename in glob.glob(pattern):
-                if self.debug and self.debug.should('dataio'):
-                    self.debug.write("Erasing parallel data file %r" % (filename,))
-                file_be_gone(filename)
-
-    def read(self, data):
-        """Read the coverage data."""
-        if os.path.exists(self.filename):
-            data.read_file(self.filename)
-
-    def write(self, data, suffix=None):
-        """Write the collected coverage data to a file.
-
-        `suffix` is a suffix to append to the base file name. This can be used
-        for multiple or parallel execution, so that many coverage data files
-        can exist simultaneously.  A dot will be used to join the base name and
-        the suffix.
-
-        """
-        filename = self.filename
-        if suffix is True:
-            # If data_suffix was a simple true value, then make a suffix with
-            # plenty of distinguishing information.  We do this here in
-            # `save()` at the last minute so that the pid will be correct even
-            # if the process forks.
-            extra = ""
-            if _TEST_NAME_FILE:                             # pragma: debugging
-                with open(_TEST_NAME_FILE) as f:
-                    test_name = f.read()
-                extra = "." + test_name
-            dice = random.Random(os.urandom(8)).randint(0, 999999)
-            suffix = "%s%s.%s.%06d" % (socket.gethostname(), extra, os.getpid(), dice)
-
-        if suffix:
-            filename += "." + suffix
-        data.write_file(filename)
-
-    def combine_parallel_data(self, data, aliases=None, data_paths=None, strict=False):
-        """Combine a number of data files together.
-
-        Treat `self.filename` as a file prefix, and combine the data from all
-        of the data files starting with that prefix plus a dot.
-
-        If `aliases` is provided, it's a `PathAliases` object that is used to
-        re-map paths to match the local machine's.
-
-        If `data_paths` is provided, it is a list of directories or files to
-        combine.  Directories are searched for files that start with
-        `self.filename` plus dot as a prefix, and those files are combined.
-
-        If `data_paths` is not provided, then the directory portion of
-        `self.filename` is used as the directory to search for data files.
-
-        Every data file found and combined is then deleted from disk.  If a file
-        cannot be read, a warning will be issued, and the file will not be
-        deleted.
-
-        If `strict` is true, and no files are found to combine, an error is
-        raised.
-
-        """
-        # Because of the os.path.abspath in the constructor, data_dir will
-        # never be an empty string.
-        data_dir, local = os.path.split(self.filename)
-        localdot = local + '.*'
-
-        data_paths = data_paths or [data_dir]
-        files_to_combine = []
-        for p in data_paths:
-            if os.path.isfile(p):
-                files_to_combine.append(os.path.abspath(p))
-            elif os.path.isdir(p):
-                pattern = os.path.join(os.path.abspath(p), localdot)
-                files_to_combine.extend(glob.glob(pattern))
-            else:
-                raise CoverageException("Couldn't combine from non-existent path '%s'" % (p,))
-
-        if strict and not files_to_combine:
-            raise CoverageException("No data to combine")
-
-        files_combined = 0
-        for f in files_to_combine:
-            new_data = CoverageData(debug=self.debug)
-            try:
-                new_data.read_file(f)
-            except CoverageException as exc:
-                if self.warn:
-                    # The CoverageException has the file name in it, so just
-                    # use the message as the warning.
-                    self.warn(str(exc))
-            else:
-                data.update(new_data, aliases=aliases)
-                files_combined += 1
-                if self.debug and self.debug.should('dataio'):
-                    self.debug.write("Deleting combined data file %r" % (f,))
-                file_be_gone(f)
-
-        if strict and not files_combined:
-            raise CoverageException("No usable data files")
-
-
 def canonicalize_json_data(data):
     """Canonicalize our JSON data so it can be compared."""
     for fname, lines in iitems(data.get('lines', {})):
diff --git a/tests/test_cmdline.py b/tests/test_cmdline.py
index b6fad76d..7fda7961 100644
--- a/tests/test_cmdline.py
+++ b/tests/test_cmdline.py
@@ -16,7 +16,7 @@ import coverage
 import coverage.cmdline

 from coverage import env
 from coverage.config import CoverageConfig
-from coverage.data import CoverageData, CoverageDataFiles
+from coverage.data import CoverageData
 from coverage.misc import ExceptionDuringRun
 from tests.coveragetest import CoverageTest, OK, ERR, command_line
@@ -605,8 +605,7 @@ class CmdLineWithFilesTest(BaseCmdLineTest):
             "file2.py": dict.fromkeys(range(1, 24)),
         })
         data.add_file_tracers({"file1.py": "a_plugin"})
-        data_files = CoverageDataFiles()
-        data_files.write(data)
+        data.write()

         self.command_line("debug data")
         self.assertMultiLineEqual(self.stdout(), textwrap.dedent("""\
@@ -617,16 +616,16 @@ class CmdLineWithFilesTest(BaseCmdLineTest):
             2 files:
             file1.py: 17 lines [a_plugin]
             file2.py: 23 lines
-            """).replace("FILENAME", data_files.filename))
+            """).replace("FILENAME", data.filename))

     def test_debug_data_with_no_data(self):
-        data_files = CoverageDataFiles()
+        data = CoverageData()
         self.command_line("debug data")
         self.assertMultiLineEqual(self.stdout(), textwrap.dedent("""\
             -- data ------------------------------------------------------
             path: FILENAME
             No data collected
-            """).replace("FILENAME", data_files.filename))
+            """).replace("FILENAME", data.filename))


 class CmdLineStdoutTest(BaseCmdLineTest):
diff --git a/tests/test_data.py b/tests/test_data.py
index 0d3172d4..3c0d602b 100644
--- a/tests/test_data.py
+++ b/tests/test_data.py
@@ -11,8 +11,7 @@ import re

 import mock

-from coverage.backward import StringIO
-from coverage.data import CoverageData, CoverageDataFiles, debug_main, canonicalize_json_data
+from coverage.data import CoverageData, debug_main, canonicalize_json_data
 from coverage.debug import DebugControlString
 from coverage.files import PathAliases, canonical_filename
 from coverage.misc import CoverageException
@@ -420,12 +419,10 @@ class CoverageDataTest(DataTestHelpers, CoverageTest):

     def test_read_and_write_are_opposites(self):
         covdata1 = CoverageData()
         covdata1.add_arcs(ARCS_3)
-        stringio = StringIO()
-        covdata1.write_fileobj(stringio)
+        covdata1.write()

-        stringio.seek(0)
         covdata2 = CoverageData()
-        covdata2.read_fileobj(stringio)
+        covdata2.read()

         self.assert_arcs3_data(covdata2)
@@ -518,27 +515,23 @@ class CoverageDataTestInTempDir(DataTestHelpers, CoverageTest):


 class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
-    """Tests of CoverageDataFiles."""
+    """Tests of CoverageData file handling."""

     no_files_in_temp_dir = True

-    def setUp(self):
-        super(CoverageDataFilesTest, self).setUp()
-        self.data_files = CoverageDataFiles()
-
     def test_reading_missing(self):
         self.assert_doesnt_exist(".coverage")
         covdata = CoverageData()
-        self.data_files.read(covdata)
+        covdata.read()
         self.assert_line_counts(covdata, {})

     def test_writing_and_reading(self):
         covdata1 = CoverageData()
         covdata1.add_lines(LINES_1)
-        self.data_files.write(covdata1)
+        covdata1.write()

         covdata2 = CoverageData()
-        self.data_files.read(covdata2)
+        covdata2.read()
         self.assert_line_counts(covdata2, SUMMARY_1)

     def test_debug_output_with_debug_option(self):
@@ -547,10 +540,10 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
         debug = DebugControlString(options=["dataio"])
         covdata1 = CoverageData(debug=debug)
         covdata1.add_lines(LINES_1)
-        self.data_files.write(covdata1)
+        covdata1.write()

         covdata2 = CoverageData(debug=debug)
-        self.data_files.read(covdata2)
+        covdata2.read()
         self.assert_line_counts(covdata2, SUMMARY_1)

         self.assertRegex(
@@ -565,10 +558,10 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
         debug = DebugControlString(options=[])
         covdata1 = CoverageData(debug=debug)
         covdata1.add_lines(LINES_1)
-        self.data_files.write(covdata1)
+        covdata1.write()

         covdata2 = CoverageData(debug=debug)
-        self.data_files.read(covdata2)
+        covdata2.read()
         self.assert_line_counts(covdata2, SUMMARY_1)

         self.assertEqual(debug.get_output(), "")
@@ -577,7 +570,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
         self.assert_doesnt_exist(".coverage.SUFFIX")
         covdata = CoverageData()
         covdata.add_lines(LINES_1)
-        self.data_files.write(covdata, suffix='SUFFIX')
+        covdata.write(suffix='SUFFIX')
         self.assert_exists(".coverage.SUFFIX")
         self.assert_doesnt_exist(".coverage")

@@ -587,7 +580,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
         # suffix=True will make a randomly named data file.
         covdata1 = CoverageData()
         covdata1.add_lines(LINES_1)
-        self.data_files.write(covdata1, suffix=True)
+        covdata1.write(suffix=True)
         self.assert_doesnt_exist(".coverage")
         data_files1 = glob.glob(".coverage.*")
         self.assertEqual(len(data_files1), 1)
@@ -595,7 +588,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
         # Another suffix=True will choose a different name.
         covdata2 = CoverageData()
         covdata2.add_lines(LINES_1)
-        self.data_files.write(covdata2, suffix=True)
+        covdata2.write(suffix=True)
         self.assert_doesnt_exist(".coverage")
         data_files2 = glob.glob(".coverage.*")
         self.assertEqual(len(data_files2), 2)
@@ -609,17 +602,17 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):

         covdata1 = CoverageData()
         covdata1.add_lines(LINES_1)
-        self.data_files.write(covdata1, suffix='1')
+        covdata1.write(suffix='1')
         self.assert_exists(".coverage.1")
         self.assert_doesnt_exist(".coverage.2")

         covdata2 = CoverageData()
         covdata2.add_lines(LINES_2)
-        self.data_files.write(covdata2, suffix='2')
+        covdata2.write(suffix='2')
         self.assert_exists(".coverage.2")

         covdata3 = CoverageData()
-        self.data_files.combine_parallel_data(covdata3)
+        covdata3.combine_parallel_data()
         self.assert_line_counts(covdata3, SUMMARY_1_2)
         self.assert_measured_files(covdata3, MEASURED_FILES_1_2)
         self.assert_doesnt_exist(".coverage.1")
@@ -628,22 +621,21 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
     def test_erasing(self):
         covdata1 = CoverageData()
         covdata1.add_lines(LINES_1)
-        self.data_files.write(covdata1)
+        covdata1.write()

         covdata1.erase()
         self.assert_line_counts(covdata1, {})
-        self.data_files.erase()

         covdata2 = CoverageData()
-        self.data_files.read(covdata2)
+        covdata2.read()
         self.assert_line_counts(covdata2, {})

     def test_erasing_parallel(self):
         self.make_file("datafile.1")
         self.make_file("datafile.2")
         self.make_file(".coverage")
-        data_files = CoverageDataFiles("datafile")
-        data_files.erase(parallel=True)
+        data = CoverageData("datafile")
+        data.erase(parallel=True)
         self.assert_doesnt_exist("datafile.1")
         self.assert_doesnt_exist("datafile.2")
         self.assert_exists(".coverage")
@@ -659,7 +651,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
         # Write with CoverageData, then read the JSON explicitly.
         covdata = CoverageData()
         covdata.add_lines(LINES_1)
-        self.data_files.write(covdata)
+        covdata.write()

         data = self.read_json_data_file(".coverage")

@@ -676,7 +668,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
         # Write with CoverageData, then read the JSON explicitly.
         covdata = CoverageData()
         covdata.add_arcs(ARCS_3)
-        self.data_files.write(covdata)
+        covdata.write()

         data = self.read_json_data_file(".coverage")

@@ -689,14 +681,13 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
         self.assertNotIn('file_tracers', data)

     def test_writing_to_other_file(self):
-        data_files = CoverageDataFiles(".otherfile")
-        covdata = CoverageData()
+        covdata = CoverageData(".otherfile")
         covdata.add_lines(LINES_1)
-        data_files.write(covdata)
+        covdata.write()
         self.assert_doesnt_exist(".coverage")
         self.assert_exists(".otherfile")

-        data_files.write(covdata, suffix="extra")
+        covdata.write(suffix="extra")
         self.assert_exists(".otherfile.extra")
         self.assert_doesnt_exist(".coverage")

@@ -710,20 +701,20 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
         covdata1.add_file_tracers({
             '/home/ned/proj/src/template.html': 'html.plugin',
         })
-        self.data_files.write(covdata1, suffix='1')
+        covdata1.write(suffix='1')

         covdata2 = CoverageData()
         covdata2.add_lines({
             r'c:\ned\test\a.py': {4: None, 5: None},
             r'c:\ned\test\sub\b.py': {3: None, 6: None},
         })
-        self.data_files.write(covdata2, suffix='2')
+        covdata2.write(suffix='2')

         covdata3 = CoverageData()
         aliases = PathAliases()
         aliases.add("/home/ned/proj/src/", "./")
         aliases.add(r"c:\ned\test", "./")
-        self.data_files.combine_parallel_data(covdata3, aliases=aliases)
+        covdata3.combine_parallel_data(aliases=aliases)

         apy = canonical_filename('./a.py')
         sub_bpy = canonical_filename('./sub/b.py')
@@ -750,7 +741,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
         covdata_xxx.write_file('.coverage.xxx')

         covdata3 = CoverageData()
-        self.data_files.combine_parallel_data(covdata3, data_paths=['cov1', 'cov2'])
+        covdata3.combine_parallel_data(data_paths=['cov1', 'cov2'])

         self.assert_line_counts(covdata3, SUMMARY_1_2)
         self.assert_measured_files(covdata3, MEASURED_FILES_1_2)
@@ -776,7 +767,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
         covdata_xxx.write_file('cov2/.coverage.xxx')

         covdata3 = CoverageData()
-        self.data_files.combine_parallel_data(covdata3, data_paths=['cov1', 'cov2/.coverage.2'])
+        covdata3.combine_parallel_data(data_paths=['cov1', 'cov2/.coverage.2'])

         self.assert_line_counts(covdata3, SUMMARY_1_2)
         self.assert_measured_files(covdata3, MEASURED_FILES_1_2)
@@ -789,4 +780,4 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
         covdata = CoverageData()
         msg = "Couldn't combine from non-existent path 'xyzzy'"
         with self.assertRaisesRegex(CoverageException, msg):
-            self.data_files.combine_parallel_data(covdata, data_paths=['xyzzy'])
+            covdata.combine_parallel_data(data_paths=['xyzzy'])
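Taken together, the commit collapses the old two-object arrangement (a CoverageData holding measurements, plus a CoverageDataFiles managing file I/O for it) into CoverageData alone: read(), write(suffix=...), erase(parallel=...), and combine_parallel_data(...) now live on the data object itself, which is why Coverage no longer keeps a _data_files attribute. Below is a minimal sketch of the consolidated API, mirroring the updated tests; it assumes a post-change checkout of coverage.py is importable and the snippet runs in an empty scratch directory (the file names and line dicts are illustrative only, not from the commit).

import glob

from coverage.data import CoverageData

# Two "parallel" runs, each writing its own suffixed data file,
# .coverage.1 and .coverage.2.
# (Before this change: CoverageDataFiles().write(covdata1, suffix='1').)
covdata1 = CoverageData()
covdata1.add_lines({"a.py": {1: None, 2: None}})
covdata1.write(suffix='1')

covdata2 = CoverageData()
covdata2.add_lines({"b.py": {3: None}})
covdata2.write(suffix='2')

# Combining is now a method on the target data object.
# (Before: CoverageDataFiles().combine_parallel_data(covdata3).)
# Each input file is deleted from disk as it is merged.
covdata3 = CoverageData()
covdata3.combine_parallel_data()
print(sorted(covdata3.line_counts()))   # expect ['a.py', 'b.py']
assert not glob.glob(".coverage.*")     # inputs were consumed

# erase(parallel=True) removes the main data file and any leftover
# suffixed siblings, folding in what CoverageDataFiles.erase() did.
covdata3.write()
covdata3.erase(parallel=True)

One design consequence: because write() and combine_parallel_data() derive file names from the basename given to the constructor, CoverageData(".otherfile") is now all it takes to redirect both writing and combining, as test_writing_to_other_file exercises.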