From c24e594796b860531521be0190fc2f922c092c0e Mon Sep 17 00:00:00 2001
From: Ned Batchelder
Date: Sun, 15 Jul 2018 11:59:33 -0400
Subject: CoverageData now also handles file operations

---
 coverage/control.py | 20 ++++++++------------
 1 file changed, 8 insertions(+), 12 deletions(-)

(limited to 'coverage/control.py')

diff --git a/coverage/control.py b/coverage/control.py
index a5943aa8..1760ee78 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -15,7 +15,7 @@ from coverage.annotate import AnnotateReporter
 from coverage.backward import string_class, iitems
 from coverage.collector import Collector
 from coverage.config import read_coverage_config
-from coverage.data import CoverageData, CoverageDataFiles
+from coverage.data import CoverageData
 from coverage.debug import DebugControl, write_formatted_info
 from coverage.disposition import disposition_debug_msg
 from coverage.files import PathAliases, set_relative_directory, abs_file
@@ -152,7 +152,7 @@ class Coverage(object):
         self._warnings = []
 
         # Other instance attributes, set later.
-        self._data = self._data_files = self._collector = None
+        self._data = self._collector = None
         self._plugins = None
         self._inorout = None
         self._inorout_class = InOrOut
@@ -270,8 +270,7 @@ class Coverage(object):
         # Create the data file. We do this at construction time so that the
         # data file will be written into the directory where the process
         # started rather than wherever the process eventually chdir'd to.
-        self._data = CoverageData(debug=self._debug)
-        self._data_files = CoverageDataFiles(
+        self._data = CoverageData(
             basename=self.config.data_file, warn=self._warn, debug=self._debug,
         )
 
@@ -395,7 +394,7 @@ class Coverage(object):
         """Load previously-collected coverage data from the data file."""
         self._init()
         self._collector.reset()
-        self._data_files.read(self._data)
+        self._data.read()
 
     def start(self):
         """Start measuring code coverage.
@@ -449,8 +448,7 @@ class Coverage(object):
         """
         self._init()
         self._collector.reset()
-        self._data.erase()
-        self._data_files.erase(parallel=self.config.parallel)
+        self._data.erase(parallel=self.config.parallel)
 
     def clear_exclude(self, which='exclude'):
         """Clear the exclude list."""
@@ -503,7 +501,7 @@ class Coverage(object):
         """Save the collected coverage data to the data file."""
         self._init()
         data = self.get_data()
-        self._data_files.write(data, suffix=self._data_suffix)
+        data.write(suffix=self._data_suffix)
 
     def combine(self, data_paths=None, strict=False):
         """Combine together a number of similarly-named coverage data files.
@@ -538,9 +536,7 @@ class Coverage(object):
             for pattern in paths[1:]:
                 aliases.add(pattern, result)
 
-        self._data_files.combine_parallel_data(
-            self._data, aliases=aliases, data_paths=data_paths, strict=strict,
-        )
+        self._data.combine_parallel_data(aliases=aliases, data_paths=data_paths, strict=strict)
 
     def get_data(self):
         """Get the collected data.
@@ -827,7 +823,7 @@ class Coverage(object):
             ('configs_attempted', self.config.attempted_config_files),
             ('configs_read', self.config.config_files_read),
             ('config_file', self.config.config_file),
-            ('data_path', self._data_files.filename),
+            ('data_path', self._data.filename),
             ('python', sys.version.replace('\n', '')),
             ('platform', platform.platform()),
             ('implementation', platform.python_implementation()),
--
cgit v1.2.1
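
Condensed into a before/after sketch (assembled from the removed and added lines above, with the surrounding method bodies elided; it is not runnable on its own), this commit moves the file operations from the separate CoverageDataFiles object onto CoverageData itself:

    # Before: in-memory data and its file handling lived in two objects.
    data = CoverageData(debug=self._debug)
    data_files = CoverageDataFiles(
        basename=self.config.data_file, warn=self._warn, debug=self._debug,
    )
    data_files.read(data)
    data_files.write(data, suffix=self._data_suffix)
    data.erase()
    data_files.erase(parallel=self.config.parallel)

    # After: one CoverageData object owns both the data and the file operations.
    data = CoverageData(
        basename=self.config.data_file, warn=self._warn, debug=self._debug,
    )
    data.read()
    data.write(suffix=self._data_suffix)
    data.erase(parallel=self.config.parallel)
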
From 2f0d57856550ef7ad248e4e6127700bdabb91e7d Mon Sep 17 00:00:00 2001
From: Ned Batchelder
Date: Sat, 4 Aug 2018 07:36:13 -0400
Subject: Pull combine_parallel_data out of CoverageData

---
 coverage/control.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

(limited to 'coverage/control.py')

diff --git a/coverage/control.py b/coverage/control.py
index 1760ee78..2f084cc2 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -15,7 +15,7 @@ from coverage.annotate import AnnotateReporter
 from coverage.backward import string_class, iitems
 from coverage.collector import Collector
 from coverage.config import read_coverage_config
-from coverage.data import CoverageData
+from coverage.data import CoverageData, combine_parallel_data
 from coverage.debug import DebugControl, write_formatted_info
 from coverage.disposition import disposition_debug_msg
 from coverage.files import PathAliases, set_relative_directory, abs_file
@@ -536,7 +536,7 @@ class Coverage(object):
             for pattern in paths[1:]:
                 aliases.add(pattern, result)
 
-        self._data.combine_parallel_data(aliases=aliases, data_paths=data_paths, strict=strict)
+        combine_parallel_data(self._data, aliases=aliases, data_paths=data_paths, strict=strict)
 
     def get_data(self):
         """Get the collected data.
--
cgit v1.2.1
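
Condensing this one the same way (a sketch taken from the diff, not standalone code): combining parallel data files is now a module-level helper in coverage.data that is handed the CoverageData object, rather than a method on it:

    from coverage.data import CoverageData, combine_parallel_data

    # Before: a method on the data object.
    self._data.combine_parallel_data(aliases=aliases, data_paths=data_paths, strict=strict)

    # After: a free function that merges parallel data files into the given object.
    combine_parallel_data(self._data, aliases=aliases, data_paths=data_paths, strict=strict)
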
From 0341a891a22f29466fd525bc5aa010c5d85bed52 Mon Sep 17 00:00:00 2001
From: Ned Batchelder
Date: Tue, 14 Aug 2018 07:32:08 -0400
Subject: Refactor initialization

We need the data file suffix when the data file is created, not when
write() is called. This required separating how different pieces were
initialized.

The old way was dumb anyway, since it (for example) created a Collector
when reporting.
---
 coverage/control.py | 192 +++++++++++++++++++++++++++++-----------------------
 1 file changed, 108 insertions(+), 84 deletions(-)

(limited to 'coverage/control.py')

diff --git a/coverage/control.py b/coverage/control.py
index 46c2ece1..c83432af 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -163,8 +163,11 @@ class Coverage(object):
         # State machine variables:
         # Have we initialized everything?
         self._inited = False
+        self._inited_for_start = False
         # Have we started collecting and not stopped it?
         self._started = False
+        # Have we written --debug output?
+        self._wrote_debug = False
 
         # If we have sub-process measurement happening automatically, then we
         # want any explicit creation of a Coverage object to mean, this process
@@ -214,73 +217,11 @@ class Coverage(object):
             # this is a bit childish. :)
             plugin.configure([self, self.config][int(time.time()) % 2])
 
-        concurrency = self.config.concurrency or []
-        if "multiprocessing" in concurrency:
-            if not patch_multiprocessing:
-                raise CoverageException(  # pragma: only jython
-                    "multiprocessing is not supported on this Python"
-                )
-            patch_multiprocessing(rcfile=self.config.config_file)
-            # Multi-processing uses parallel for the subprocesses, so also use
-            # it for the main process.
-            self.config.parallel = True
-
-        self._collector = Collector(
-            should_trace=self._should_trace,
-            check_include=self._check_include_omit_etc,
-            timid=self.config.timid,
-            branch=self.config.branch,
-            warn=self._warn,
-            concurrency=concurrency,
-        )
-
-        # Early warning if we aren't going to be able to support plugins.
-        if self._plugins.file_tracers and not self._collector.supports_plugins:
-            self._warn(
-                "Plugin file tracers (%s) aren't supported with %s" % (
-                    ", ".join(
-                        plugin._coverage_plugin_name
-                        for plugin in self._plugins.file_tracers
-                    ),
-                    self._collector.tracer_name(),
-                )
-            )
-            for plugin in self._plugins.file_tracers:
-                plugin._coverage_enabled = False
-
-        # Create the file classifying substructure.
-        self._inorout = self._inorout_class(warn=self._warn)
-        self._inorout.configure(self.config)
-        self._inorout.plugins = self._plugins
-        self._inorout.disp_class = self._collector.file_disposition_class
-
-        # Suffixes are a bit tricky. We want to use the data suffix only when
-        # collecting data, not when combining data. So we save it as
-        # `self._run_suffix` now, and promote it to `self._data_suffix` if we
-        # find that we are collecting data later.
-        if self._data_suffix_specified or self.config.parallel:
-            if not isinstance(self._data_suffix_specified, string_class):
-                # if data_suffix=True, use .machinename.pid.random
-                self._data_suffix_specified = True
-        else:
-            self._data_suffix_specified = None
-        self._data_suffix = None
-        self._run_suffix = self._data_suffix_specified
-
-        # Create the data file. We do this at construction time so that the
-        # data file will be written into the directory where the process
-        # started rather than wherever the process eventually chdir'd to.
-        self._data = CoverageData(
-            basename=self.config.data_file, warn=self._warn, debug=self._debug,
-        )
-
-        # Set the reporting precision.
-        Numbers.set_precision(self.config.precision)
-
-        atexit.register(self._atexit)
-
-        # The user may want to debug things, show info if desired.
-        self._write_startup_debug()
+    def _post_init(self):
+        """Stuff to do after everything is initialized."""
+        if not self._wrote_debug:
+            self._wrote_debug = True
+            self._write_startup_debug()
 
     def _write_startup_debug(self):
         """Write out debug info at startup if needed."""
@@ -387,9 +328,79 @@ class Coverage(object):
     def load(self):
         """Load previously-collected coverage data from the data file."""
         self._init()
-        self._collector.reset()
+        if self._collector:
+            self._collector.reset()
+        self._init_data(suffix=None)
+        self._post_init()
         self._data.read()
 
+    def _init_for_start(self):
+        """Initialization for start()"""
+        concurrency = self.config.concurrency or []
+        if "multiprocessing" in concurrency:
+            if not patch_multiprocessing:
+                raise CoverageException(  # pragma: only jython
+                    "multiprocessing is not supported on this Python"
+                )
+            patch_multiprocessing(rcfile=self.config.config_file)
+            # Multi-processing uses parallel for the subprocesses, so also use
+            # it for the main process.
+            self.config.parallel = True
+
+        self._collector = Collector(
+            should_trace=self._should_trace,
+            check_include=self._check_include_omit_etc,
+            timid=self.config.timid,
+            branch=self.config.branch,
+            warn=self._warn,
+            concurrency=concurrency,
+        )
+
+        suffix = self._data_suffix_specified
+        if suffix or self.config.parallel:
+            if not isinstance(suffix, string_class):
+                # if data_suffix=True, use .machinename.pid.random
+                suffix = True
+        else:
+            suffix = None
+
+        self._init_data(suffix)
+
+        # Early warning if we aren't going to be able to support plugins.
+        if self._plugins.file_tracers and not self._collector.supports_plugins:
+            self._warn(
+                "Plugin file tracers (%s) aren't supported with %s" % (
+                    ", ".join(
+                        plugin._coverage_plugin_name
+                        for plugin in self._plugins.file_tracers
+                    ),
+                    self._collector.tracer_name(),
+                )
+            )
+            for plugin in self._plugins.file_tracers:
+                plugin._coverage_enabled = False
+
+        # Create the file classifying substructure.
+        self._inorout = self._inorout_class(warn=self._warn)
+        self._inorout.configure(self.config)
+        self._inorout.plugins = self._plugins
+        self._inorout.disp_class = self._collector.file_disposition_class
+
+        atexit.register(self._atexit)
+
+    def _init_data(self, suffix):
+        """Create a data file if we don't have one yet."""
+        if self._data is None:
+            # Create the data file. We do this at construction time so that the
+            # data file will be written into the directory where the process
+            # started rather than wherever the process eventually chdir'd to.
+            self._data = CoverageData(
+                basename=self.config.data_file,
+                suffix=suffix,
+                warn=self._warn,
+                debug=self._debug,
+            )
+
     def start(self):
         """Start measuring code coverage.
 
@@ -402,19 +413,22 @@ class Coverage(object):
 
         """
         self._init()
-        self._inorout.warn_conflicting_settings()
+        if not self._inited_for_start:
+            self._inited_for_start = True
+            self._init_for_start()
+        self._post_init()
 
-        if self._run_suffix:
-            # Calling start() means we're running code, so use the run_suffix
-            # as the data_suffix when we eventually save the data.
-            self._data_suffix = self._run_suffix
-        if self._auto_load:
-            self.load()
+        # Issue warnings for possible problems.
+        self._inorout.warn_conflicting_settings()
 
-        # See if we think some code that would eventually be measured has already been imported.
+        # See if we think some code that would eventually be measured has
+        # already been imported.
         if self._warn_preimported_source:
             self._inorout.warn_already_imported_files()
 
+        if self._auto_load:
+            self.load()
+
         self._collector.start()
         self._started = True
 
@@ -441,7 +455,10 @@ class Coverage(object):
         """
         self._init()
-        self._collector.reset()
+        self._post_init()
+        if self._collector:
+            self._collector.reset()
+        self._init_data(suffix=None)
         self._data.erase(parallel=self.config.parallel)
 
     def clear_exclude(self, which='exclude'):
         """Clear the exclude list."""
@@ -493,9 +510,8 @@ class Coverage(object):
 
     def save(self):
         """Save the collected coverage data to the data file."""
-        self._init()
         data = self.get_data()
-        data.write(suffix=self._data_suffix)
+        data.write()
 
     def combine(self, data_paths=None, strict=False):
         """Combine together a number of similarly-named coverage data files.
@@ -520,6 +536,8 @@ class Coverage(object):
 
         """
         self._init()
+        self._init_data(suffix=None)
+        self._post_init()
         self.get_data()
 
         aliases = None
@@ -544,7 +562,7 @@ class Coverage(object):
 
         """
         self._init()
-        if self._collector.save_data(self._data):
+        if self._collector and self._collector.save_data(self._data):
             self._post_save_work()
 
         return self._data
@@ -595,7 +613,6 @@ class Coverage(object):
         coverage data.
 
         """
-        self._init()
         analysis = self._analyze(morf)
         return (
             analysis.filename,
@@ -611,6 +628,11 @@ class Coverage(object):
         Returns an `Analysis` object.
 
         """
+        # All reporting comes through here, so do reporting initialization.
+        self._init()
+        Numbers.set_precision(self.config.precision)
+        self._post_init()
+
         data = self.get_data()
         if not isinstance(it, FileReporter):
             it = self._get_file_reporter(it)
@@ -797,6 +819,7 @@ class Coverage(object):
         import coverage as covmod
 
         self._init()
+        self._post_init()
 
         def plugin_info(plugins):
             """Make an entry for the sys_info from a list of plug-ins."""
@@ -811,13 +834,13 @@ class Coverage(object):
         info = [
             ('version', covmod.__version__),
             ('coverage', covmod.__file__),
-            ('tracer', self._collector.tracer_name()),
+            ('tracer', self._collector.tracer_name() if self._collector else "-none-"),
             ('plugins.file_tracers', plugin_info(self._plugins.file_tracers)),
             ('plugins.configurers', plugin_info(self._plugins.configurers)),
             ('configs_attempted', self.config.attempted_config_files),
             ('configs_read', self.config.config_files_read),
            ('config_file', self.config.config_file),
-            ('data_path', self._data.filename),
+            ('data_path', self._data.filename if self._data else "-none-"),
             ('python', sys.version.replace('\n', '')),
             ('platform', platform.platform()),
             ('implementation', platform.python_implementation()),
@@ -832,7 +855,8 @@ class Coverage(object):
             ('command_line', " ".join(getattr(sys, 'argv', ['???']))),
         ]
 
-        info.extend(self._inorout.sys_info())
+        if self._inorout:
+            info.extend(self._inorout.sys_info())
 
         return info
--
cgit v1.2.1
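
The net effect on the data-file suffix, condensed from the hunks above (again a sketch pieced together from the removed and added lines, not runnable in isolation): the suffix is now chosen in _init_for_start() and handed to the CoverageData constructor, so save() can call write() with no arguments:

    # Before: the suffix was carried on the Coverage object and only applied at save time.
    self._data = CoverageData(
        basename=self.config.data_file, warn=self._warn, debug=self._debug,
    )
    data.write(suffix=self._data_suffix)

    # After: the suffix is fixed when the data object is created.
    self._data = CoverageData(
        basename=self.config.data_file,
        suffix=suffix,
        warn=self._warn,
        debug=self._debug,
    )
    data.write()
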
From ad58ff0db4eeb40794e3cf87c2ee9365aedc7bd6 Mon Sep 17 00:00:00 2001
From: Ned Batchelder
Date: Thu, 23 Aug 2018 20:17:50 -0400
Subject: Fix the pytest-cov test

---
 coverage/control.py | 3 +++
 1 file changed, 3 insertions(+)

(limited to 'coverage/control.py')

diff --git a/coverage/control.py b/coverage/control.py
index c83432af..4dd62e10 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -460,6 +460,7 @@ class Coverage(object):
             self._collector.reset()
         self._init_data(suffix=None)
         self._data.erase(parallel=self.config.parallel)
+        self._data = None
 
     def clear_exclude(self, which='exclude'):
         """Clear the exclude list."""
@@ -561,6 +562,8 @@ class Coverage(object):
 
         """
         self._init()
+        self._init_data(suffix=None)
+        self._post_init()
 
         if self._collector and self._collector.save_data(self._data):
             self._post_save_work()
--
cgit v1.2.1
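
For orientation, a minimal sketch of the public calls that drive the code paths touched in this series; the measured function is hypothetical, and the suffix named in the comment follows the .machinename.pid.random pattern mentioned in the patches:

    import coverage

    # One parallel-style run: the data file gets a suffix instead of plain ".coverage".
    cov = coverage.Coverage(data_suffix=True)
    cov.start()
    run_my_program()      # hypothetical code under measurement
    cov.stop()
    cov.save()            # writes .coverage.<machinename>.<pid>.<random>

    # Later, merge the per-run data files into a single .coverage file.
    cov.combine()
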