| author | Ned Batchelder <ned@nedbatchelder.com> | 2015-07-30 07:34:37 -0400 |
|---|---|---|
| committer | Ned Batchelder <ned@nedbatchelder.com> | 2015-07-30 07:34:37 -0400 |
| commit | 8bdff4ef8448078d06e4d7aba2d2892b8b9b7ac7 (patch) | |
| tree | de499650c8724205dd8feacb5cac78836fae4884 /coverage/data.py | |
| parent | f57c1cf959440b2344bc37f0e8e645563f88d524 (diff) | |
Clean up from the merge of PR 62
Removed the globbing option. Added a test. Corrected parameter names and docs.
Updated the AUTHORS file.
Diffstat (limited to 'coverage/data.py')
-rw-r--r-- | coverage/data.py | 32 |
1 file changed, 14 insertions(+), 18 deletions(-)
```diff
diff --git a/coverage/data.py b/coverage/data.py
index 4e7d999e..4a5ae27a 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -591,7 +591,7 @@ class CoverageDataFiles(object):
             filename += "." + suffix
         data.write_file(filename)
 
-    def combine_parallel_data(self, data, aliases=None, data_dirs=None):
+    def combine_parallel_data(self, data, aliases=None, data_paths=None):
         """Combine a number of data files together.
 
         Treat `self.filename` as a file prefix, and combine the data from all
@@ -600,10 +600,12 @@ class CoverageDataFiles(object):
         If `aliases` is provided, it's a `PathAliases` object that is used to
         re-map paths to match the local machine's.
 
-        If `data_dirs` is provided, then it combines the data files from each
-        directory into a single file. If `data_dirs` is not provided, then the
-        directory portion of `self.filename` is used as the directory to search
-        for data files.
+        If `data_paths` is provided, it is a list of directories or files to
+        combine. Directories are searched for files that start with
+        `self.filename` plus dot as a prefix, and those files are combined.
+
+        If `data_dirs` is not provided, then the directory portion of
+        `self.filename` is used as the directory to search for data files.
 
         Every data file found and combined is then deleted from disk.
 
@@ -613,24 +615,18 @@ class CoverageDataFiles(object):
         data_dir, local = os.path.split(self.filename)
         localdot = local + '.*'
 
-        data_dirs = data_dirs or [data_dir]
+        data_paths = data_paths or [data_dir]
         files_to_combine = []
-        for d in data_dirs:
-            if os.path.isfile(d):
-                files_to_combine.append(os.path.abspath(d))
-            elif os.path.isdir(d):
-                pattern = os.path.join(os.path.abspath(d), localdot)
+        for p in data_paths:
+            if os.path.isfile(p):
+                files_to_combine.append(os.path.abspath(p))
+            elif os.path.isdir(p):
+                pattern = os.path.join(os.path.abspath(p), localdot)
                 files_to_combine.extend(glob.glob(pattern))
             else:
-                files = glob.glob(d)
-                if not files:
-                    raise CoverageException("Couldn't combine from non-existing path '%s'" % (d,))
-                files_to_combine.extend(files)
-
+                raise CoverageException("Couldn't combine from non-existent path '%s'" % (p,))
 
         for f in files_to_combine:
-            if not os.path.isfile(f):
-                raise CoverageException("Couldn't combine from non-existing file '%s'" % (f,))
             new_data = CoverageData()
             new_data.read_file(f)
             data.update(new_data, aliases=aliases)
```
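For context, here is a minimal sketch of how the renamed `data_paths` argument might be exercised through the internal API this diff modifies. It assumes the coverage.py internals of this era (`CoverageData` and `CoverageDataFiles` in `coverage/data.py`, `PathAliases` in `coverage/files.py`); the directory, file paths, and alias mapping below are hypothetical examples, not part of the commit.

```python
# A minimal sketch, assuming coverage.py's internal API at the time of this
# commit. The paths and alias mapping are hypothetical examples.
from coverage.data import CoverageData, CoverageDataFiles
from coverage.files import PathAliases

combined = CoverageData()
data_files = CoverageDataFiles()   # default basename ".coverage"

# Optionally re-map machine-specific path prefixes while combining.
aliases = PathAliases()
aliases.add("/home/ci/project", ".")

# `data_paths` may now mix directories (searched for ".coverage.*" files)
# and explicit data files. Glob patterns are no longer expanded; a path
# that is neither a file nor a directory raises CoverageException.
data_files.combine_parallel_data(
    combined,
    aliases=aliases,
    data_paths=["build/worker1", "/tmp/.coverage.web.1234"],
)

# Per the docstring, each data file that was combined is deleted from disk;
# `combined` now holds the merged measurements.
combined.write_file(".coverage")
```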