author     Ned Batchelder <ned@nedbatchelder.com>    2018-08-24 07:13:42 -0400
committer  Ned Batchelder <ned@nedbatchelder.com>    2018-08-24 07:13:42 -0400
commit     c4b2392dd51b7f976972afb00f01d4618c523cff (patch)
tree       7c77b420d4eec7ac628393663c67c0e9bc2c66f7 /coverage/control.py
parent     8a337f91e6444c027771741a56636a56389706e3 (diff)
parent     dd5b0cc88ebe4528abaa7cdf0b3fd516fb1f7e01 (diff)
download   python-coveragepy-git-c4b2392dd51b7f976972afb00f01d4618c523cff.tar.gz
Merge branch 'nedbat/data-sqlite'
Diffstat (limited to 'coverage/control.py')
-rw-r--r--  coverage/control.py  209
1 file changed, 116 insertions(+), 93 deletions(-)
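
This merge brings in the 'nedbat/data-sqlite' branch. As the control.py diff below shows, it drops CoverageDataFiles so that CoverageData owns its file name, suffix, and I/O, and it defers most setup from the constructor to start() via the new _init_for_start(), _init_data(), and _post_init() helpers. The public API is unchanged; the following is a minimal sketch of the workflow these internals serve (assumes the coverage package is installed; the variable name cov and the measured code are illustrative only, not part of this commit):

# Minimal usage sketch of the unchanged public API; `cov` and the measured
# code are illustrative stand-ins.
import coverage

cov = coverage.Coverage(data_suffix=True)  # True -> .machinename.pid.random suffix
cov.start()        # first call runs _init_for_start() and _init_data()

sum(range(10))     # stand-in for the code being measured

cov.stop()
cov.save()         # now delegates to CoverageData.write()
cov.combine()      # merges parallel data files via combine_parallel_data()
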
diff --git a/coverage/control.py b/coverage/control.py
index 03238910..4dd62e10 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -15,7 +15,7 @@ from coverage.annotate import AnnotateReporter
from coverage.backward import string_class, iitems
from coverage.collector import Collector
from coverage.config import read_coverage_config
-from coverage.data import CoverageData, CoverageDataFiles
+from coverage.data import CoverageData, combine_parallel_data
from coverage.debug import DebugControl, write_formatted_info
from coverage.disposition import disposition_debug_msg
from coverage.files import PathAliases, set_relative_directory, abs_file
@@ -152,7 +152,7 @@ class Coverage(object):
self._warnings = []
# Other instance attributes, set later.
- self._data = self._data_files = self._collector = None
+ self._data = self._collector = None
self._plugins = None
self._inorout = None
self._inorout_class = InOrOut
@@ -163,8 +163,11 @@ class Coverage(object):
# State machine variables:
# Have we initialized everything?
self._inited = False
+ self._inited_for_start = False
# Have we started collecting and not stopped it?
self._started = False
+ # Have we written --debug output?
+ self._wrote_debug = False
# If we have sub-process measurement happening automatically, then we
# want any explicit creation of a Coverage object to mean, this process
@@ -214,74 +217,11 @@ class Coverage(object):
# this is a bit childish. :)
plugin.configure([self, self.config][int(time.time()) % 2])
- concurrency = self.config.concurrency or []
- if "multiprocessing" in concurrency:
- if not patch_multiprocessing:
- raise CoverageException( # pragma: only jython
- "multiprocessing is not supported on this Python"
- )
- patch_multiprocessing(rcfile=self.config.config_file)
- # Multi-processing uses parallel for the subprocesses, so also use
- # it for the main process.
- self.config.parallel = True
-
- self._collector = Collector(
- should_trace=self._should_trace,
- check_include=self._check_include_omit_etc,
- timid=self.config.timid,
- branch=self.config.branch,
- warn=self._warn,
- concurrency=concurrency,
- )
-
- # Early warning if we aren't going to be able to support plugins.
- if self._plugins.file_tracers and not self._collector.supports_plugins:
- self._warn(
- "Plugin file tracers (%s) aren't supported with %s" % (
- ", ".join(
- plugin._coverage_plugin_name
- for plugin in self._plugins.file_tracers
- ),
- self._collector.tracer_name(),
- )
- )
- for plugin in self._plugins.file_tracers:
- plugin._coverage_enabled = False
-
- # Create the file classifying substructure.
- self._inorout = self._inorout_class(warn=self._warn)
- self._inorout.configure(self.config)
- self._inorout.plugins = self._plugins
- self._inorout.disp_class = self._collector.file_disposition_class
-
- # Suffixes are a bit tricky. We want to use the data suffix only when
- # collecting data, not when combining data. So we save it as
- # `self._run_suffix` now, and promote it to `self._data_suffix` if we
- # find that we are collecting data later.
- if self._data_suffix_specified or self.config.parallel:
- if not isinstance(self._data_suffix_specified, string_class):
- # if data_suffix=True, use .machinename.pid.random
- self._data_suffix_specified = True
- else:
- self._data_suffix_specified = None
- self._data_suffix = None
- self._run_suffix = self._data_suffix_specified
-
- # Create the data file. We do this at construction time so that the
- # data file will be written into the directory where the process
- # started rather than wherever the process eventually chdir'd to.
- self._data = CoverageData(debug=self._debug)
- self._data_files = CoverageDataFiles(
- basename=self.config.data_file, warn=self._warn, debug=self._debug,
- )
-
- # Set the reporting precision.
- Numbers.set_precision(self.config.precision)
-
- atexit.register(self._atexit)
-
- # The user may want to debug things, show info if desired.
- self._write_startup_debug()
+ def _post_init(self):
+ """Stuff to do after everything is initialized."""
+ if not self._wrote_debug:
+ self._wrote_debug = True
+ self._write_startup_debug()
def _write_startup_debug(self):
"""Write out debug info at startup if needed."""
@@ -388,8 +328,78 @@ class Coverage(object):
def load(self):
"""Load previously-collected coverage data from the data file."""
self._init()
- self._collector.reset()
- self._data_files.read(self._data)
+ if self._collector:
+ self._collector.reset()
+ self._init_data(suffix=None)
+ self._post_init()
+ self._data.read()
+
+ def _init_for_start(self):
+ """Initialization for start()"""
+ concurrency = self.config.concurrency or []
+ if "multiprocessing" in concurrency:
+ if not patch_multiprocessing:
+ raise CoverageException( # pragma: only jython
+ "multiprocessing is not supported on this Python"
+ )
+ patch_multiprocessing(rcfile=self.config.config_file)
+ # Multi-processing uses parallel for the subprocesses, so also use
+ # it for the main process.
+ self.config.parallel = True
+
+ self._collector = Collector(
+ should_trace=self._should_trace,
+ check_include=self._check_include_omit_etc,
+ timid=self.config.timid,
+ branch=self.config.branch,
+ warn=self._warn,
+ concurrency=concurrency,
+ )
+
+ suffix = self._data_suffix_specified
+ if suffix or self.config.parallel:
+ if not isinstance(suffix, string_class):
+ # if data_suffix=True, use .machinename.pid.random
+ suffix = True
+ else:
+ suffix = None
+
+ self._init_data(suffix)
+
+ # Early warning if we aren't going to be able to support plugins.
+ if self._plugins.file_tracers and not self._collector.supports_plugins:
+ self._warn(
+ "Plugin file tracers (%s) aren't supported with %s" % (
+ ", ".join(
+ plugin._coverage_plugin_name
+ for plugin in self._plugins.file_tracers
+ ),
+ self._collector.tracer_name(),
+ )
+ )
+ for plugin in self._plugins.file_tracers:
+ plugin._coverage_enabled = False
+
+ # Create the file classifying substructure.
+ self._inorout = self._inorout_class(warn=self._warn)
+ self._inorout.configure(self.config)
+ self._inorout.plugins = self._plugins
+ self._inorout.disp_class = self._collector.file_disposition_class
+
+ atexit.register(self._atexit)
+
+ def _init_data(self, suffix):
+ """Create a data file if we don't have one yet."""
+ if self._data is None:
+ # Create the data file. We do this at construction time so that the
+ # data file will be written into the directory where the process
+ # started rather than wherever the process eventually chdir'd to.
+ self._data = CoverageData(
+ basename=self.config.data_file,
+ suffix=suffix,
+ warn=self._warn,
+ debug=self._debug,
+ )
def start(self):
"""Start measuring code coverage.
@@ -403,19 +413,22 @@ class Coverage(object):
"""
self._init()
- self._inorout.warn_conflicting_settings()
+ if not self._inited_for_start:
+ self._inited_for_start = True
+ self._init_for_start()
+ self._post_init()
- if self._run_suffix:
- # Calling start() means we're running code, so use the run_suffix
- # as the data_suffix when we eventually save the data.
- self._data_suffix = self._run_suffix
- if self._auto_load:
- self.load()
+ # Issue warnings for possible problems.
+ self._inorout.warn_conflicting_settings()
- # See if we think some code that would eventually be measured has already been imported.
+ # See if we think some code that would eventually be measured has
+ # already been imported.
if self._warn_preimported_source:
self._inorout.warn_already_imported_files()
+ if self._auto_load:
+ self.load()
+
self._collector.start()
self._started = True
@@ -442,9 +455,12 @@ class Coverage(object):
"""
self._init()
- self._collector.reset()
- self._data.erase()
- self._data_files.erase(parallel=self.config.parallel)
+ self._post_init()
+ if self._collector:
+ self._collector.reset()
+ self._init_data(suffix=None)
+ self._data.erase(parallel=self.config.parallel)
+ self._data = None
def clear_exclude(self, which='exclude'):
"""Clear the exclude list."""
@@ -495,9 +511,8 @@ class Coverage(object):
def save(self):
"""Save the collected coverage data to the data file."""
- self._init()
data = self.get_data()
- self._data_files.write(data, suffix=self._data_suffix)
+ data.write()
def combine(self, data_paths=None, strict=False):
"""Combine together a number of similarly-named coverage data files.
@@ -522,6 +537,8 @@ class Coverage(object):
"""
self._init()
+ self._init_data(suffix=None)
+ self._post_init()
self.get_data()
aliases = None
@@ -532,9 +549,7 @@ class Coverage(object):
for pattern in paths[1:]:
aliases.add(pattern, result)
- self._data_files.combine_parallel_data(
- self._data, aliases=aliases, data_paths=data_paths, strict=strict,
- )
+ combine_parallel_data(self._data, aliases=aliases, data_paths=data_paths, strict=strict)
def get_data(self):
"""Get the collected data.
@@ -547,8 +562,10 @@ class Coverage(object):
"""
self._init()
+ self._init_data(suffix=None)
+ self._post_init()
- if self._collector.save_data(self._data):
+ if self._collector and self._collector.save_data(self._data):
self._post_save_work()
return self._data
@@ -599,7 +616,6 @@ class Coverage(object):
coverage data.
"""
- self._init()
analysis = self._analyze(morf)
return (
analysis.filename,
@@ -615,6 +631,11 @@ class Coverage(object):
Returns an `Analysis` object.
"""
+ # All reporting comes through here, so do reporting initialization.
+ self._init()
+ Numbers.set_precision(self.config.precision)
+ self._post_init()
+
data = self.get_data()
if not isinstance(it, FileReporter):
it = self._get_file_reporter(it)
@@ -801,6 +822,7 @@ class Coverage(object):
import coverage as covmod
self._init()
+ self._post_init()
def plugin_info(plugins):
"""Make an entry for the sys_info from a list of plug-ins."""
@@ -815,13 +837,13 @@ class Coverage(object):
info = [
('version', covmod.__version__),
('coverage', covmod.__file__),
- ('tracer', self._collector.tracer_name()),
+ ('tracer', self._collector.tracer_name() if self._collector else "-none-"),
('plugins.file_tracers', plugin_info(self._plugins.file_tracers)),
('plugins.configurers', plugin_info(self._plugins.configurers)),
('configs_attempted', self.config.attempted_config_files),
('configs_read', self.config.config_files_read),
('config_file', self.config.config_file),
- ('data_path', self._data_files.filename),
+ ('data_path', self._data.filename if self._data else "-none-"),
('python', sys.version.replace('\n', '')),
('platform', platform.platform()),
('implementation', platform.python_implementation()),
@@ -836,7 +858,8 @@ class Coverage(object):
('command_line', " ".join(getattr(sys, 'argv', ['???']))),
]
- info.extend(self._inorout.sys_info())
+ if self._inorout:
+ info.extend(self._inorout.sys_info())
return info
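
Several of the guards above (the "if self._collector:" checks, _init_data() creating the data object only when it is missing, and the "-none-" placeholders in sys_info()) share a create-on-first-use shape: collaborators stay None until an operation actually needs them, and read-only paths tolerate their absence instead of forcing creation. A generic sketch of that shape, with purely illustrative names (Holder, _resource, record, describe are not coverage.py APIs):

# Create-on-first-use sketch; all names here are illustrative, not coverage.py APIs.
class Holder(object):
    def __init__(self):
        self._resource = None              # nothing created at construction time

    def _init_resource(self):
        """Create the resource if we don't have one yet."""
        if self._resource is None:
            self._resource = {"created": True}

    def record(self, key, value):
        self._init_resource()              # writers force creation
        self._resource[key] = value

    def describe(self):
        # Readers degrade gracefully instead of creating the resource.
        return repr(self._resource) if self._resource else "-none-"

For example, Holder().describe() returns "-none-", while a Holder that has had record() called reports its contents.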