author    Ned Batchelder <ned@nedbatchelder.com>  2015-10-03 16:59:46 -0400
committer Ned Batchelder <ned@nedbatchelder.com>  2015-10-03 16:59:46 -0400
commit    a877aaf13d9f72ee317b392c86313a3003bcb3b8 (patch)
tree      22fdb568fd85c4179774fdfb4024486e37d3e1ce
parent    b27f9eab849696b46e9fbb84dcdc0f34078684d1 (diff)
download  python-coveragepy-git-a877aaf13d9f72ee317b392c86313a3003bcb3b8.tar.gz
Combine can now ignore errors.
-rw-r--r--  CHANGES.rst            |  8
-rw-r--r--  coverage/cmdline.py    | 11
-rw-r--r--  coverage/config.py     |  6
-rw-r--r--  coverage/control.py    | 13
-rw-r--r--  coverage/data.py       | 25
-rw-r--r--  doc/cmd.rst            |  5
-rw-r--r--  doc/config.rst         | 14
-rw-r--r--  tests/test_api.py      | 74
-rw-r--r--  tests/test_cmdline.py  | 14
-rw-r--r--  tests/test_config.py   |  5
-rw-r--r--  tests/test_process.py  | 36
11 files changed, 178 insertions, 33 deletions
diff --git a/CHANGES.rst b/CHANGES.rst
index 2ba0605f..349247ff 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -9,7 +9,13 @@ Change history for Coverage.py
Version 4.1
-----------
-Nothing yet.
+- When combining data files, you can now ignore errors while reading the data
+ files, using the ``--ignore-errors`` command line switch, or the ``[combine]
+ ignore_errors`` configuration file setting. I don't understand why people
+ are seeing corrupt data files, but this lets them continue combining anyway.
+ Prompted by `issue 418`_.
+
+.. _issue 418: https://bitbucket.org/ned/coveragepy/issues/418/json-parse-error
Version 4.0 --- 20 September 2015
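
A quick, hedged illustration of the behavior described in this CHANGES entry, using the API names introduced in the diff (the same load/combine/save sequence the ``combine`` command runs below):

    import coverage

    cov = coverage.Coverage()
    cov.load()
    # With ignore_errors=True, unreadable data files produce warnings
    # instead of stopping the combine; readable files are still merged.
    cov.combine(ignore_errors=True)
    cov.save()

The command-line equivalent is ``coverage combine --ignore-errors`` (or ``-i``).
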
diff --git a/coverage/cmdline.py b/coverage/cmdline.py
index 2802948e..6f5719fd 100644
--- a/coverage/cmdline.py
+++ b/coverage/cmdline.py
@@ -58,6 +58,10 @@ class Opts(object):
'-i', '--ignore-errors', action='store_true',
help="Ignore errors while reading source files.",
)
+ ignore_combine_errors = optparse.make_option(
+ '-i', '--ignore-errors', action='store_true',
+ help="Ignore errors while reading data files.",
+ )
include = optparse.make_option(
'', '--include', action='store',
metavar="PAT1,PAT2,...",
@@ -265,7 +269,10 @@ CMDS = {
),
'combine': CmdOptionParser(
- "combine", GLOBAL_ARGS,
+ "combine",
+ [
+ Opts.ignore_combine_errors,
+ ] + GLOBAL_ARGS,
usage="<path1> <path2> ... <pathN>",
description=(
"Combine data from multiple coverage files collected "
@@ -464,7 +471,7 @@ class CoverageScript(object):
elif options.action == "combine":
self.coverage.load()
data_dirs = args or None
- self.coverage.combine(data_dirs)
+ self.coverage.combine(data_dirs, ignore_errors=options.ignore_errors)
self.coverage.save()
return OK
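
For reference, a minimal, hedged optparse sketch of how the new switch travels from the command line into ``combine()`` (option attributes as in this diff; parser plumbing simplified for illustration):

    import optparse

    parser = optparse.OptionParser()
    parser.add_option(
        '-i', '--ignore-errors', action='store_true', dest='ignore_errors',
        help="Ignore errors while reading data files.",
    )
    options, args = parser.parse_args(['-i', 'datadir1'])
    # options.ignore_errors is True and args is ['datadir1']; the command
    # handler passes both straight through:
    #     self.coverage.combine(args or None, ignore_errors=options.ignore_errors)
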
diff --git a/coverage/config.py b/coverage/config.py
index 458d4903..8aff41dc 100644
--- a/coverage/config.py
+++ b/coverage/config.py
@@ -162,6 +162,9 @@ class CoverageConfig(object):
self.source = None
self.timid = False
+ # Defaults for [combine]
+ self.ignore_combine_errors = False
+
# Defaults for [report]
self.exclude_list = DEFAULT_EXCLUDE[:]
self.fail_under = 0
@@ -277,6 +280,9 @@ class CoverageConfig(object):
('source', 'run:source', 'list'),
('timid', 'run:timid', 'boolean'),
+ # [combine]
+ ('ignore_combine_errors', 'combine:ignore_errors', 'boolean'),
+
# [report]
('exclude_list', 'report:exclude_lines', 'regexlist'),
('fail_under', 'report:fail_under', 'int'),
diff --git a/coverage/control.py b/coverage/control.py
index 16b09c84..77737181 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -279,7 +279,7 @@ class Coverage(object):
# data file will be written into the directory where the process
# started rather than wherever the process eventually chdir'd to.
self.data = CoverageData(debug=self.debug)
- self.data_files = CoverageDataFiles(basename=self.config.data_file)
+ self.data_files = CoverageDataFiles(basename=self.config.data_file, warn=self._warn)
# The directories for files considered "installed with the interpreter".
self.pylib_dirs = set()
@@ -757,7 +757,7 @@ class Coverage(object):
self.get_data()
self.data_files.write(self.data, suffix=self.data_suffix)
- def combine(self, data_paths=None):
+ def combine(self, data_paths=None, ignore_errors=None):
"""Combine together a number of similarly-named coverage data files.
All coverage data files whose name starts with `data_file` (from the
@@ -776,6 +776,8 @@ class Coverage(object):
self._init()
self.get_data()
+ self.config.from_args(ignore_combine_errors=ignore_errors)
+
aliases = None
if self.config.paths:
aliases = PathAliases()
@@ -784,7 +786,12 @@ class Coverage(object):
for pattern in paths[1:]:
aliases.add(pattern, result)
- self.data_files.combine_parallel_data(self.data, aliases=aliases, data_paths=data_paths)
+ self.data_files.combine_parallel_data(
+ self.data,
+ aliases=aliases,
+ data_paths=data_paths,
+ ignore_errors=self.config.ignore_combine_errors,
+ )
def get_data(self):
"""Get the collected data and reset the collector.
diff --git a/coverage/data.py b/coverage/data.py
index c608f489..b8d9dadf 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -601,12 +601,15 @@ class CoverageData(object):
class CoverageDataFiles(object):
"""Manage the use of coverage data files."""
- def __init__(self, basename=None):
+ def __init__(self, basename=None, warn=None):
"""Create a CoverageDataFiles to manage data files.
+ `warn` is the warning function to use.
+
`basename` is the name of the file to use for storing data.
"""
+ self.warn = warn
# Construct the file name that will be used for data storage.
self.filename = os.path.abspath(basename or ".coverage")
@@ -659,7 +662,7 @@ class CoverageDataFiles(object):
filename += "." + suffix
data.write_file(filename)
- def combine_parallel_data(self, data, aliases=None, data_paths=None):
+ def combine_parallel_data(self, data, aliases=None, data_paths=None, ignore_errors=False):
"""Combine a number of data files together.
Treat `self.filename` as a file prefix, and combine the data from all
@@ -677,6 +680,9 @@ class CoverageDataFiles(object):
Every data file found and combined is then deleted from disk.
+ If `ignore_errors` is True, then files that cannot be read will cause
+ a warning, and will not be deleted.
+
"""
# Because of the os.path.abspath in the constructor, data_dir will
# never be an empty string.
@@ -696,9 +702,18 @@ class CoverageDataFiles(object):
for f in files_to_combine:
new_data = CoverageData()
- new_data.read_file(f)
- data.update(new_data, aliases=aliases)
- file_be_gone(f)
+ try:
+ new_data.read_file(f)
+ except CoverageException as exc:
+ if not ignore_errors:
+ raise
+ if self.warn:
+ # The CoverageException has the file name in it, so just
+ # use the message as the warning.
+ self.warn(str(exc))
+ else:
+ data.update(new_data, aliases=aliases)
+ file_be_gone(f)
def canonicalize_json_data(data):
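
A self-contained, hedged sketch of the error-handling pattern added to ``combine_parallel_data()`` above, with plain dict/file stand-ins for ``CoverageData`` and ``file_be_gone``:

    import os

    def combine_files(filenames, ignore_errors=False, warn=print):
        combined = {}
        for name in filenames:
            try:
                with open(name) as f:
                    payload = f.read()   # stand-in for CoverageData.read_file()
                if not payload.startswith("!coverage"):
                    raise ValueError(
                        "Doesn't seem to be a coverage.py data file: %r" % name)
            except ValueError as exc:
                if not ignore_errors:
                    raise                 # default: a bad file stops the combine
                warn(str(exc))            # ignore_errors: warn and keep the file
            else:
                combined[name] = payload  # stand-in for data.update()
                os.remove(name)           # only successfully-read files are deleted
        return combined
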
diff --git a/doc/cmd.rst b/doc/cmd.rst
index d6de9f50..e9695ef8 100644
--- a/doc/cmd.rst
+++ b/doc/cmd.rst
@@ -221,6 +221,11 @@ systems, coverage.py won't know how to combine the data. You can tell
coverage.py how the different locations correlate with a ``[paths]`` section in
your configuration file. See :ref:`config_paths` for details.
+Some techniques for collecting data can result in corrupt data files. If you
+have this problem, you can use the ``--ignore-errors`` option to turn those
+errors into warnings instead.
+
+
.. _cmd_reporting:
Reporting
diff --git a/doc/config.rst b/doc/config.rst
index 1b451b30..840e7de7 100644
--- a/doc/config.rst
+++ b/doc/config.rst
@@ -148,6 +148,20 @@ measure during execution. See :ref:`source` for details.
Try this if you get seemingly impossible results.
+.. _config_combine:
+
+[combine]
+---------
+
+Options for the ``coverage combine`` command.
+
+``ignore_errors`` (boolean, default False): ignore errors when reading the data
+files, so that corrupt data files won't prevent the other good ones from being
+combined.
+
+.. versionadded:: 4.1
+
+
.. _config_paths:
[paths]
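
A hedged end-to-end sketch of the configuration option documented above, mirroring the ``tests/test_config.py`` change later in this diff (``Coverage(config_file=...)`` and the ``ignore_combine_errors`` attribute name are taken from this change; the assertion assumes the config file is read at construction time, as those tests do):

    import coverage

    with open(".coveragerc", "w") as rc:
        rc.write("[combine]\nignore_errors = True\n")

    cov = coverage.Coverage(config_file=".coveragerc")
    assert cov.config.ignore_combine_errors is True
    # cov.combine() with no explicit argument now warns about corrupt
    # data files instead of raising.
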
diff --git a/tests/test_api.py b/tests/test_api.py
index 61cd9408..ad322224 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -251,7 +251,8 @@ class ApiTest(CoverageTest):
cov.erase()
self.assertRaises(CoverageException, cov.report)
- def test_start_stop_start_stop(self):
+ def make_code1_code2(self):
+ """Create the code1.py and code2.py files."""
self.make_file("code1.py", """\
code1 = 1
""")
@@ -259,10 +260,9 @@ class ApiTest(CoverageTest):
code2 = 1
code2 = 2
""")
- cov = coverage.Coverage()
- self.start_import_stop(cov, "code1")
- cov.save()
- self.start_import_stop(cov, "code2")
+
+ def check_code1_code2(self, cov):
+ """Check the analysis is correct for code1.py and code2.py."""
_, statements, missing, _ = cov.analysis("code1.py")
self.assertEqual(statements, [1])
self.assertEqual(missing, [])
@@ -270,16 +270,18 @@ class ApiTest(CoverageTest):
self.assertEqual(statements, [1, 2])
self.assertEqual(missing, [])
+ def test_start_stop_start_stop(self):
+ self.make_code1_code2()
+ cov = coverage.Coverage()
+ self.start_import_stop(cov, "code1")
+ cov.save()
+ self.start_import_stop(cov, "code2")
+ self.check_code1_code2(cov)
+
if 0: # expected failure
# for https://bitbucket.org/ned/coveragepy/issue/79
def test_start_save_stop(self):
- self.make_file("code1.py", """\
- code1 = 1
- """)
- self.make_file("code2.py", """\
- code2 = 1
- code2 = 2
- """)
+ self.make_code1_code2()
cov = coverage.Coverage()
cov.start()
self.import_local_file("code1")
@@ -287,13 +289,49 @@ class ApiTest(CoverageTest):
self.import_local_file("code2")
cov.stop()
- _, statements, missing, _ = cov.analysis("code1.py")
- self.assertEqual(statements, [1])
- self.assertEqual(missing, [])
- _, statements, missing, _ = cov.analysis("code2.py")
- self.assertEqual(statements, [1, 2])
- self.assertEqual(missing, [])
+ self.check_code1_code2(cov)
+
+ def make_corrupt_data_files(self):
+ """Make some good and some bad data files."""
+ self.make_code1_code2()
+ cov = coverage.Coverage(data_suffix=True)
+ self.start_import_stop(cov, "code1")
+ cov.save()
+
+ cov = coverage.Coverage(data_suffix=True)
+ self.start_import_stop(cov, "code2")
+ cov.save()
+
+ self.make_file(".coverage.foo", """La la la, this isn't coverage data!""")
+
+ def test_combining_corrupt_data(self):
+ self.make_corrupt_data_files()
+ cov = coverage.Coverage()
+
+ msg = r"Couldn't read data from '.*\.coverage\.foo'"
+ with self.assertRaisesRegex(CoverageException, msg):
+ cov.combine()
+
+ # The bad file still exists.
+ self.assert_exists(".coverage.foo")
+
+ def test_combining_corrupt_data_while_ignoring_errors(self):
+ # If you combine a corrupt data file with ignore_errors=True, then you
+ # will get a warning, and the file will remain.
+ self.make_corrupt_data_files()
+ cov = coverage.Coverage()
+ warning_regex = (
+ r"Couldn't read data from '.*\.coverage\.foo': "
+ r"CoverageException: Doesn't seem to be a coverage\.py data file"
+ )
+ with self.assert_warnings(cov, [warning_regex]):
+ cov.combine(ignore_errors=True)
+
+ # We got the results from code1 and code2 properly.
+ self.check_code1_code2(cov)
+ # The bad file still exists.
+ self.assert_exists(".coverage.foo")
class UsingModulesMixin(object):
diff --git a/tests/test_cmdline.py b/tests/test_cmdline.py
index c78e3468..932c9ef9 100644
--- a/tests/test_cmdline.py
+++ b/tests/test_cmdline.py
@@ -190,14 +190,20 @@ class CmdLineTest(BaseCmdLineTest):
self.cmd_executes("combine datadir1", """\
.coverage()
.load()
- .combine(["datadir1"])
+ .combine(["datadir1"], ignore_errors=None)
.save()
""")
# coverage combine without args
self.cmd_executes("combine", """\
.coverage()
.load()
- .combine(None)
+ .combine(None, ignore_errors=None)
+ .save()
+ """)
+ self.cmd_executes("combine -i", """\
+ .coverage()
+ .load()
+ .combine(None, ignore_errors=True)
.save()
""")
@@ -206,13 +212,13 @@ class CmdLineTest(BaseCmdLineTest):
self.cmd_executes("combine --rcfile cov.ini", """\
.coverage(config_file='cov.ini')
.load()
- .combine(None)
+ .combine(None, ignore_errors=None)
.save()
""")
self.cmd_executes("combine --rcfile cov.ini data1 data2/more", """\
.coverage(config_file='cov.ini')
.load()
- .combine(["data1", "data2/more"])
+ .combine(["data1", "data2/more"], ignore_errors=None)
.save()
""")
diff --git a/tests/test_config.py b/tests/test_config.py
index 93a7bbf6..6d6c7f80 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -235,6 +235,9 @@ class ConfigFileTest(CoverageTest):
plugins.a_plugin
plugins.another
+ [{section}combine]
+ ignore_errors = True
+
[{section}report]
; these settings affect reporting.
exclude_lines =
@@ -301,6 +304,8 @@ class ConfigFileTest(CoverageTest):
self.assertTrue(cov.config.parallel)
self.assertEqual(cov.config.concurrency, "thread")
+ self.assertTrue(cov.config.ignore_combine_errors)
+
self.assertEqual(cov.get_exclude_list(), ["if 0:", r"pragma:?\s+no cover", "another_tab"])
self.assertTrue(cov.config.ignore_errors)
self.assertEqual(cov.config.include, ["a/", "b/"])
diff --git a/tests/test_process.py b/tests/test_process.py
index 3023a18c..b41472d6 100644
--- a/tests/test_process.py
+++ b/tests/test_process.py
@@ -94,6 +94,42 @@ class ProcessTest(CoverageTest):
data.read_file(".coverage")
self.assertEqual(data.line_counts()['b_or_c.py'], 7)
+ def test_combine_parallel_data_with_a_corrupt_file(self):
+ self.make_b_or_c_py()
+ out = self.run_command("coverage run -p b_or_c.py b")
+ self.assertEqual(out, 'done\n')
+ self.assert_doesnt_exist(".coverage")
+ self.assertEqual(self.number_of_data_files(), 1)
+
+ out = self.run_command("coverage run -p b_or_c.py c")
+ self.assertEqual(out, 'done\n')
+ self.assert_doesnt_exist(".coverage")
+
+ # After two -p runs, there should be two .coverage.machine.123 files.
+ self.assertEqual(self.number_of_data_files(), 2)
+
+ # Make a bogus data file.
+ self.make_file(".coverage.bad", "This isn't a coverage data file.")
+
+ # Combine the parallel coverage data files into .coverage .
+ out = self.run_command("coverage combine -i")
+ self.assert_exists(".coverage")
+ self.assert_exists(".coverage.bad")
+ warning_regex = (
+ r"Coverage.py warning: Couldn't read data from '.*\.coverage\.bad': "
+ r"CoverageException: Doesn't seem to be a coverage\.py data file"
+ )
+ self.assertRegex(out, warning_regex)
+
+ # After combining, those two should be the only data files.
+ self.assertEqual(self.number_of_data_files(), 2)
+
+ # Read the coverage file and see that b_or_c.py has all 7 lines
+ # executed.
+ data = coverage.CoverageData()
+ data.read_file(".coverage")
+ self.assertEqual(data.line_counts()['b_or_c.py'], 7)
+
def test_combine_parallel_data_in_two_steps(self):
self.make_b_or_c_py()