Diffstat (limited to 'coverage')
-rw-r--r--   coverage/cmdline.py |  5
-rw-r--r--   coverage/control.py |  4
-rw-r--r--   coverage/data.py    | 40
3 files changed, 30 insertions, 19 deletions
diff --git a/coverage/cmdline.py b/coverage/cmdline.py
index 2be32947..c611e037 100644
--- a/coverage/cmdline.py
+++ b/coverage/cmdline.py
@@ -249,7 +249,7 @@ CMDS = {
         ),
 
     'combine': CmdOptionParser("combine", GLOBAL_ARGS,
-        usage = " ",
+        usage = "<dir1> <dir2> ... <dirN>",
         description = "Combine data from multiple coverage files collected "
             "with 'run -p'. The combined results are written to a single "
             "file representing the union of the data."
@@ -430,7 +430,8 @@ class CoverageScript(object):
             self.do_run(options, args)
 
         if options.action == "combine":
-            self.coverage.combine()
+            data_dirs = argv if argv else None
+            self.coverage.combine(data_dirs)
             self.coverage.save()
 
         # Remaining actions are reporting, with some common options.
diff --git a/coverage/control.py b/coverage/control.py
index 563925ef..4a9ac727 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -717,7 +717,7 @@ class Coverage(object):
         self._harvest_data()
         self.data.write(suffix=data_suffix)
 
-    def combine(self):
+    def combine(self, data_dirs=None):
         """Combine together a number of similarly-named coverage data files.
 
         All coverage data files whose name starts with `data_file` (from the
@@ -733,7 +733,7 @@ class Coverage(object):
             result = paths[0]
             for pattern in paths[1:]:
                 aliases.add(pattern, result)
-        self.data.combine_parallel_data(aliases=aliases)
+        self.data.combine_parallel_data(aliases=aliases, data_dirs=data_dirs)
 
     def _harvest_data(self):
         """Get the collected data and reset the collector.
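Taken together, the cmdline.py and control.py hunks above let "combine" accept
directory arguments and pass them through as `data_dirs`. A minimal usage
sketch, assuming 4.0-era coverage.py where the Coverage class shown in
control.py is importable as coverage.Coverage; the worker directory names are
purely illustrative:

    # Command-line form enabled by the cmdline.py hunk:
    #     coverage combine build/worker1 build/worker2
    #
    # Rough API equivalent; with no directories given, combine() scans the
    # data file's own directory as before.
    import coverage

    cov = coverage.Coverage()
    cov.combine(["build/worker1", "build/worker2"])  # hypothetical paths
    cov.save()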
diff --git a/coverage/data.py b/coverage/data.py
index 2c5d3516..ed79f794 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -1,5 +1,6 @@
 """Coverage data for Coverage."""
 
+import glob
 import os
 
 from coverage.backward import iitems, pickle
@@ -190,7 +191,7 @@ class CoverageData(object):
                 pass
         return lines, arcs, plugins
 
-    def combine_parallel_data(self, aliases=None):
+    def combine_parallel_data(self, aliases=None, data_dirs=None):
         """Combine a number of data files together.
 
         Treat `self.filename` as a file prefix, and combine the data from all
@@ -199,23 +200,32 @@ class CoverageData(object):
         If `aliases` is provided, it's a `PathAliases` object that is used to
         re-map paths to match the local machine's.
 
+        If `data_dirs` is provided, then it combines the data files from each
+        directory into a single file.
+
         """
         aliases = aliases or PathAliases()
         data_dir, local = os.path.split(self.filename)
-        localdot = local + '.'
-        for f in os.listdir(data_dir or '.'):
-            if f.startswith(localdot):
-                full_path = os.path.join(data_dir, f)
-                new_lines, new_arcs, new_plugins = self._read_file(full_path)
-                for filename, file_data in iitems(new_lines):
-                    filename = aliases.map(filename)
-                    self.lines.setdefault(filename, {}).update(file_data)
-                for filename, file_data in iitems(new_arcs):
-                    filename = aliases.map(filename)
-                    self.arcs.setdefault(filename, {}).update(file_data)
-                self.plugins.update(new_plugins)
-                if f != local:
-                    os.remove(full_path)
+        localdot = local + '.*'
+
+        data_dirs = data_dirs or [data_dir] or ['.']
+        files_to_combine = []
+        for d in data_dirs:
+            pattern = os.path.join(os.path.abspath(d), localdot)
+            files_to_combine.extend(glob.glob(pattern))
+
+        for f in files_to_combine:
+            new_lines, new_arcs, new_plugins = self._read_file(f)
+            for filename, file_data in iitems(new_lines):
+                filename = aliases.map(filename)
+                self.lines.setdefault(filename, {}).update(file_data)
+            for filename, file_data in iitems(new_arcs):
+                filename = aliases.map(filename)
+                self.arcs.setdefault(filename, {}).update(file_data)
+            self.plugins.update(new_plugins)
+
+            if os.path.basename(f) != local:
+                os.remove(f)
 
     def add_line_data(self, line_data):
         """Add executed line data.
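The rewritten combine_parallel_data() discovers data files by globbing
"<data file>.*" in each requested directory instead of prefix-matching the
results of os.listdir(), and it removes each per-process file (any match whose
basename differs from the main data file) once its data has been merged. A
standalone sketch of just the discovery step, assuming the default data file
name ".coverage" and illustrative directory names:

    import glob
    import os

    # "coverage run -p" writes files like ".coverage.hostname.1234" alongside
    # the main data file.
    filename = ".coverage"                      # assumed default data file
    data_dir, local = os.path.split(filename)   # ("", ".coverage")
    localdot = local + '.*'                     # glob pattern ".coverage.*"

    data_dirs = ["build/worker1", "build/worker2"]   # hypothetical directories
    files_to_combine = []
    for d in data_dirs:
        pattern = os.path.join(os.path.abspath(d), localdot)
        files_to_combine.extend(glob.glob(pattern))

    # Absolute paths of every ".coverage.*" file found in the listed dirs.
    print(files_to_combine)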