-rw-r--r--  AUTHORS.txt          |  1
-rw-r--r--  coverage/cmdline.py  |  6
-rw-r--r--  coverage/control.py  |  2
-rw-r--r--  coverage/data.py     | 32
-rw-r--r--  tests/test_data.py   | 33
5 files changed, 49 insertions(+), 25 deletions(-)
diff --git a/AUTHORS.txt b/AUTHORS.txt
index 7db5d1c2..d484edf1 100644
--- a/AUTHORS.txt
+++ b/AUTHORS.txt
@@ -5,6 +5,7 @@ Other contributions have been made by:
Adi Roiban
Alex Gaynor
+Alexander Todorov
Anthony Sottile
Ben Finney
Bill Hart
diff --git a/coverage/cmdline.py b/coverage/cmdline.py
index 5d1b388d..a6ae7c3f 100644
--- a/coverage/cmdline.py
+++ b/coverage/cmdline.py
@@ -256,9 +256,9 @@ CMDS = {
description = "Combine data from multiple coverage files collected "
"with 'run -p'. The combined results are written to a single "
"file representing the union of the data. The positional "
- "arguments are files or directories or shell globs "
- "representing the data files which should be combined. "
- "By default, only data files in the current directory are combined."
+ "arguments are data files or directories containing data files. "
+ "If no paths are provided, data files in the default data file's "
+ "directory are combined."
),
'debug': CmdOptionParser("debug", GLOBAL_ARGS,
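
[Editor's note, not part of the patch: a minimal sketch of the behaviour the reworded help text describes, assuming coverage.py's console entry point coverage.cmdline.main(argv) and hypothetical cov1/cov2 paths:]

    # Sketch only, not part of this change.  Assumes coverage.cmdline.main(argv),
    # which takes the argument list without the program name, plus hypothetical
    # cov1/ and cov2/ paths.
    import coverage.cmdline

    # Equivalent to running "coverage combine cov1 cov2/.coverage.2" in a shell:
    # cov1 is a directory containing data files, cov2/.coverage.2 is a single
    # data file.  With no positional arguments, data files in the default data
    # file's directory are combined instead.
    coverage.cmdline.main(["combine", "cov1", "cov2/.coverage.2"])
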
diff --git a/coverage/control.py b/coverage/control.py
index 80bd853f..deeeae45 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -730,7 +730,7 @@ class Coverage(object):
for pattern in paths[1:]:
aliases.add(pattern, result)
- self.data_files.combine_parallel_data(self.data, aliases=aliases, data_dirs=data_dirs)
+ self.data_files.combine_parallel_data(self.data, aliases=aliases, data_paths=data_dirs)
def get_data(self):
"""Get the collected data and reset the collector.
diff --git a/coverage/data.py b/coverage/data.py
index 4e7d999e..4a5ae27a 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -591,7 +591,7 @@ class CoverageDataFiles(object):
filename += "." + suffix
data.write_file(filename)
- def combine_parallel_data(self, data, aliases=None, data_dirs=None):
+ def combine_parallel_data(self, data, aliases=None, data_paths=None):
"""Combine a number of data files together.
Treat `self.filename` as a file prefix, and combine the data from all
@@ -600,10 +600,12 @@ class CoverageDataFiles(object):
If `aliases` is provided, it's a `PathAliases` object that is used to
re-map paths to match the local machine's.
- If `data_dirs` is provided, then it combines the data files from each
- directory into a single file. If `data_dirs` is not provided, then the
- directory portion of `self.filename` is used as the directory to search
- for data files.
+ If `data_paths` is provided, it is a list of directories or files to
+ combine. Directories are searched for files that start with
+ `self.filename` plus dot as a prefix, and those files are combined.
+
+ If `data_paths` is not provided, then the directory portion of
+ `self.filename` is used as the directory to search for data files.
Every data file found and combined is then deleted from disk.
@@ -613,24 +615,18 @@ class CoverageDataFiles(object):
data_dir, local = os.path.split(self.filename)
localdot = local + '.*'
- data_dirs = data_dirs or [data_dir]
+ data_paths = data_paths or [data_dir]
files_to_combine = []
- for d in data_dirs:
- if os.path.isfile(d):
- files_to_combine.append(os.path.abspath(d))
- elif os.path.isdir(d):
- pattern = os.path.join(os.path.abspath(d), localdot)
+ for p in data_paths:
+ if os.path.isfile(p):
+ files_to_combine.append(os.path.abspath(p))
+ elif os.path.isdir(p):
+ pattern = os.path.join(os.path.abspath(p), localdot)
files_to_combine.extend(glob.glob(pattern))
else:
- files = glob.glob(d)
- if not files:
- raise CoverageException("Couldn't combine from non-existing path '%s'" % (d,))
- files_to_combine.extend(files)
-
+ raise CoverageException("Couldn't combine from non-existent path '%s'" % (p,))
for f in files_to_combine:
- if not os.path.isfile(f):
- raise CoverageException("Couldn't combine from non-existing file '%s'" % (f,))
new_data = CoverageData()
new_data.read_file(f)
data.update(new_data, aliases=aliases)
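
[Editor's note, not part of the patch: a minimal sketch of the renamed API, assuming the CoverageData() and CoverageDataFiles(basename) constructors used by the tests below; the cov1/cov2 paths are hypothetical:]

    # Sketch only.  Assumes CoverageData() and CoverageDataFiles(basename) as
    # exercised in tests/test_data.py; "cov1" and "cov2/.coverage.2" are
    # hypothetical paths.
    from coverage.data import CoverageData, CoverageDataFiles

    combined = CoverageData()
    data_files = CoverageDataFiles(".coverage")   # basename becomes self.filename

    # A directory entry is searched for ".coverage.*" data files; a file entry
    # is combined directly; any other path now raises CoverageException.
    data_files.combine_parallel_data(combined, data_paths=["cov1", "cov2/.coverage.2"])
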
diff --git a/tests/test_data.py b/tests/test_data.py
index a67a27ca..ec163950 100644
--- a/tests/test_data.py
+++ b/tests/test_data.py
@@ -676,7 +676,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
covdata_xxx.write_file('.coverage.xxx')
covdata3 = CoverageData()
- self.data_files.combine_parallel_data(covdata3, data_dirs=['cov1', 'cov2'])
+ self.data_files.combine_parallel_data(covdata3, data_paths=['cov1', 'cov2'])
self.assert_line_counts(covdata3, SUMMARY_1_2)
self.assert_measured_files(covdata3, MEASURED_FILES_1_2)
@@ -684,8 +684,35 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
self.assert_doesnt_exist("cov2/.coverage.2")
self.assert_exists(".coverage.xxx")
+ def test_combining_from_files(self):
+ covdata1 = CoverageData()
+ covdata1.set_lines(LINES_1)
+ os.makedirs('cov1')
+ covdata1.write_file('cov1/.coverage.1')
+
+ covdata2 = CoverageData()
+ covdata2.set_lines(LINES_2)
+ os.makedirs('cov2')
+ covdata2.write_file('cov2/.coverage.2')
+
+ # This data won't be included.
+ covdata_xxx = CoverageData()
+ covdata_xxx.set_arcs(ARCS_3)
+ covdata_xxx.write_file('.coverage.xxx')
+ covdata_xxx.write_file('cov2/.coverage.xxx')
+
+ covdata3 = CoverageData()
+ self.data_files.combine_parallel_data(covdata3, data_paths=['cov1', 'cov2/.coverage.2'])
+
+ self.assert_line_counts(covdata3, SUMMARY_1_2)
+ self.assert_measured_files(covdata3, MEASURED_FILES_1_2)
+ self.assert_doesnt_exist("cov1/.coverage.1")
+ self.assert_doesnt_exist("cov2/.coverage.2")
+ self.assert_exists(".coverage.xxx")
+ self.assert_exists("cov2/.coverage.xxx")
+
def test_combining_from_nonexistent_directories(self):
covdata = CoverageData()
- msg = "Couldn't combine from non-existing path 'xyzzy'"
+ msg = "Couldn't combine from non-existent path 'xyzzy'"
with self.assertRaisesRegex(CoverageException, msg):
- self.data_files.combine_parallel_data(covdata, data_dirs=['xyzzy'])
+ self.data_files.combine_parallel_data(covdata, data_paths=['xyzzy'])