Diffstat (limited to 'coverage')
-rw-r--r--  coverage/cmdline.py     7
-rw-r--r--  coverage/collector.py  22
-rw-r--r--  coverage/config.py      2
-rw-r--r--  coverage/control.py    21
-rw-r--r--  coverage/data.py       11
-rw-r--r--  coverage/sqldata.py   129
6 files changed, 147 insertions(+), 45 deletions(-)
diff --git a/coverage/cmdline.py b/coverage/cmdline.py
index 8f6b0a90..7137852d 100644
--- a/coverage/cmdline.py
+++ b/coverage/cmdline.py
@@ -43,6 +43,10 @@ class Opts(object):
"Valid values are: %s."
) % ", ".join(CONCURRENCY_CHOICES),
)
+ context = optparse.make_option(
+ '', '--context', action='store', metavar="LABEL",
+ help="The context label to record for this coverage run",
+ )
debug = optparse.make_option(
'', '--debug', action='store', metavar="OPTS",
help="Debug options, separated by commas. [env: COVERAGE_DEBUG]",
@@ -160,6 +164,7 @@ class CoverageOptionParser(optparse.OptionParser, object):
append=None,
branch=None,
concurrency=None,
+ context=None,
debug=None,
directory=None,
fail_under=None,
@@ -358,6 +363,7 @@ CMDS = {
Opts.append,
Opts.branch,
Opts.concurrency,
+ Opts.context,
Opts.include,
Opts.module,
Opts.omit,
@@ -482,6 +488,7 @@ class CoverageScript(object):
debug=debug,
concurrency=options.concurrency,
check_preimported=True,
+ context=options.context,
)
if options.action == "debug":
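
The new --context option is handed straight to the Coverage constructor as context=options.context, so the command-line flag and the programmatic API record the same label. A minimal sketch of the programmatic equivalent (the label and the measured statement are illustrative):

    import coverage

    cov = coverage.Coverage(context="integration")  # same effect as --context=integration
    cov.start()
    sum(range(10))          # stand-in for the measured code
    cov.stop()
    cov.save()
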
diff --git a/coverage/collector.py b/coverage/collector.py
index fa3eaaa4..e0144979 100644
--- a/coverage/collector.py
+++ b/coverage/collector.py
@@ -99,6 +99,9 @@ class Collector(object):
self.warn = warn
self.branch = branch
self.threading = None
+ self.covdata = None
+
+ self.static_context = None
self.origin = short_stack()
@@ -160,6 +163,12 @@ class Collector(object):
def __repr__(self):
return "<Collector at 0x%x: %s>" % (id(self), self.tracer_name())
+ def use_data(self, covdata, context):
+ """Use `covdata` for recording data."""
+ self.covdata = covdata
+ self.static_context = context
+ self.covdata.set_context(self.static_context)
+
def tracer_name(self):
"""Return the class name of the tracer we're using."""
return self._trace_class.__name__
@@ -378,8 +387,11 @@ class Collector(object):
except KeyError:
return self.abs_file_cache.setdefault(key, abs_file(filename))
- def save_data(self, covdata):
- """Save the collected data to a `CoverageData`.
+ def flush_data(self):
+ """Save the collected data to our associated `CoverageData`.
+
+ Data may have also been saved along the way. This forces the
+ last of the data to be saved.
Returns True if there was data to save, False if not.
"""
@@ -406,10 +418,10 @@ class Collector(object):
return dict((self.cached_abs_file(k), v) for k, v in items)
if self.branch:
- covdata.add_arcs(abs_file_dict(self.data))
+ self.covdata.add_arcs(abs_file_dict(self.data))
else:
- covdata.add_lines(abs_file_dict(self.data))
- covdata.add_file_tracers(abs_file_dict(self.file_tracers))
+ self.covdata.add_lines(abs_file_dict(self.data))
+ self.covdata.add_file_tracers(abs_file_dict(self.file_tracers))
if self.wtw:
# Just a hack, so just hack it.
diff --git a/coverage/config.py b/coverage/config.py
index 69c929b4..9a11323d 100644
--- a/coverage/config.py
+++ b/coverage/config.py
@@ -175,6 +175,7 @@ class CoverageConfig(object):
# Defaults for [run]
self.branch = False
self.concurrency = None
+ self.context = None
self.cover_pylib = False
self.data_file = ".coverage"
self.debug = []
@@ -318,6 +319,7 @@ class CoverageConfig(object):
# [run]
('branch', 'run:branch', 'boolean'),
('concurrency', 'run:concurrency', 'list'),
+ ('context', 'run:context'),
('cover_pylib', 'run:cover_pylib', 'boolean'),
('data_file', 'run:data_file'),
('debug', 'run:debug', 'list'),
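
With the new ('context', 'run:context') mapping, the label can also come from the configuration file rather than the command line. A sketch, assuming a .coveragerc sits next to the measured code (the label "nightly" is illustrative):

    # .coveragerc contents:
    #   [run]
    #   context = nightly

    import coverage
    cov = coverage.Coverage()   # run:context is picked up through the mapping above
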
diff --git a/coverage/control.py b/coverage/control.py
index 5d42af77..f7d97cf6 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -57,7 +57,7 @@ class Coverage(object):
self, data_file=None, data_suffix=None, cover_pylib=None,
auto_data=False, timid=None, branch=None, config_file=True,
source=None, omit=None, include=None, debug=None,
- concurrency=None, check_preimported=False,
+ concurrency=None, check_preimported=False, context=None,
):
"""
`data_file` is the base name of the data file to use, defaulting to
@@ -116,6 +116,8 @@ class Coverage(object):
by coverage. Importing measured files before coverage is started can
mean that code is missed.
+ `context` is a string to use as the context label for collected data.
+
.. versionadded:: 4.0
The `concurrency` parameter.
@@ -133,7 +135,7 @@ class Coverage(object):
branch=branch, parallel=bool_or_none(data_suffix),
source=source, run_omit=omit, run_include=include, debug=debug,
report_omit=omit, report_include=include,
- concurrency=concurrency,
+ concurrency=concurrency, context=context,
)
# This is injectable by tests.
@@ -333,6 +335,7 @@ class Coverage(object):
def _init_for_start(self):
"""Initialization for start()"""
+ # Construct the collector.
concurrency = self.config.concurrency or []
if "multiprocessing" in concurrency:
if not patch_multiprocessing:
@@ -363,6 +366,8 @@ class Coverage(object):
self._init_data(suffix)
+ self._collector.use_data(self._data, self.config.context)
+
# Early warning if we aren't going to be able to support plugins.
if self._plugins.file_tracers and not self._collector.supports_plugins:
self._warn(
@@ -562,7 +567,7 @@ class Coverage(object):
self._init_data(suffix=None)
self._post_init()
- if self._collector and self._collector.save_data(self._data):
+ if self._collector and self._collector.flush_data():
self._post_save_work()
return self._data
@@ -678,15 +683,11 @@ class Coverage(object):
if not morfs:
morfs = self._data.measured_files()
- # Be sure we have a list.
- if not isinstance(morfs, (list, tuple)):
+ # Be sure we have a collection.
+ if not isinstance(morfs, (list, tuple, set)):
morfs = [morfs]
- file_reporters = []
- for morf in morfs:
- file_reporter = self._get_file_reporter(morf)
- file_reporters.append(file_reporter)
-
+ file_reporters = [self._get_file_reporter(morf) for morf in morfs]
return file_reporters
def report(
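
Because measured_files() now returns a set and get_file_reporters() accepts any collection, the measured files can be fed back into reporting directly, and get_data() now flushes through Collector.flush_data(). A small sketch using only the public API (the measured statement is a stand-in):

    import coverage

    cov = coverage.Coverage()
    cov.start()
    sum(range(10))                            # stand-in for measured code
    cov.stop()

    files = cov.get_data().measured_files()   # a set, not a list
    cov.report(morfs=files)                   # collections other than lists are accepted now
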
diff --git a/coverage/data.py b/coverage/data.py
index f03e90ca..e6d56d84 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -252,8 +252,8 @@ class CoverageJsonData(object):
return self._runs
def measured_files(self):
- """A list of all files that had been measured."""
- return list(self._arcs or self._lines or {})
+ """A set of all files that had been measured."""
+ return set(self._arcs or self._lines or {})
def __nonzero__(self):
return bool(self._lines or self._arcs)
@@ -445,6 +445,11 @@ class CoverageJsonData(object):
self._validate()
+ def set_context(self, context):
+ """Set the context. Not implemented for JSON storage."""
+ if context:
+ raise CoverageException("JSON storage doesn't support contexts")
+
def write(self):
"""Write the collected coverage data to a file.
@@ -722,6 +727,8 @@ def combine_parallel_data(data, aliases=None, data_paths=None, strict=False):
files_combined = 0
for f in files_to_combine:
+ if data._debug and data._debug.should('dataio'):
+ data._debug.write("Combining data file %r" % (f,))
try:
new_data = CoverageData(f, debug=data._debug)
new_data.read()
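
The JSON backend keeps working only for the default (empty) context; any real label is rejected by the new set_context() above. A quick sketch (the default constructor is an assumption):

    from coverage.data import CoverageJsonData
    from coverage.misc import CoverageException

    data = CoverageJsonData()        # assumed: no-argument construction
    data.set_context("")             # allowed: the empty/default context is a no-op
    try:
        data.set_context("unit")     # any non-empty label raises
    except CoverageException as exc:
        print(exc)                   # JSON storage doesn't support contexts
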
diff --git a/coverage/sqldata.py b/coverage/sqldata.py
index f9598485..fb2279c9 100644
--- a/coverage/sqldata.py
+++ b/coverage/sqldata.py
@@ -12,6 +12,7 @@
# TODO: run_info
import glob
+import itertools
import os
import sqlite3
@@ -22,7 +23,11 @@ from coverage.files import PathAliases
from coverage.misc import CoverageException, file_be_gone
-SCHEMA_VERSION = 1
+# Schema versions:
+# 1: Released in 5.0a2
+# 2: Added contexts
+
+SCHEMA_VERSION = 2
SCHEMA = """
create table coverage_schema (
@@ -40,17 +45,25 @@ create table file (
unique(path)
);
+create table context (
+ id integer primary key,
+ context text,
+ unique(context)
+);
+
create table line (
file_id integer,
+ context_id integer,
lineno integer,
- unique(file_id, lineno)
+ unique(file_id, context_id, lineno)
);
create table arc (
file_id integer,
+ context_id integer,
fromno integer,
tono integer,
- unique(file_id, fromno, tono)
+ unique(file_id, context_id, fromno, tono)
);
create table tracer (
@@ -78,6 +91,8 @@ class CoverageSqliteData(SimpleReprMixin):
self._has_lines = False
self._has_arcs = False
+ self._current_context_id = None
+
def _choose_filename(self):
self.filename = self._basename
suffix = filename_suffix(self._suffix)
@@ -90,6 +105,7 @@ class CoverageSqliteData(SimpleReprMixin):
self._db = None
self._file_map = {}
self._have_used = False
+ self._current_context_id = None
def _create_db(self):
if self._debug and self._debug.should('dataio'):
@@ -97,7 +113,7 @@ class CoverageSqliteData(SimpleReprMixin):
self._db = Sqlite(self.filename, self._debug)
with self._db:
for stmt in SCHEMA.split(';'):
- stmt = stmt.strip()
+ stmt = " ".join(stmt.strip().split())
if stmt:
self._db.execute(stmt)
self._db.execute("insert into coverage_schema (version) values (?)", (SCHEMA_VERSION,))
@@ -151,6 +167,12 @@ class CoverageSqliteData(SimpleReprMixin):
__bool__ = __nonzero__
+ def dump(self): # pragma: debugging
+ """Write a dump of the database."""
+ if self._debug:
+ with self._connect() as con:
+ self._debug.write(con.dump())
+
def _file_id(self, filename, add=False):
"""Get the file id for `filename`.
@@ -164,6 +186,29 @@ class CoverageSqliteData(SimpleReprMixin):
self._file_map[filename] = cur.lastrowid
return self._file_map.get(filename)
+ def _context_id(self, context):
+ """Get the id for a context."""
+ assert context is not None
+ self._start_using()
+ with self._connect() as con:
+ row = con.execute("select id from context where context = ?", (context,)).fetchone()
+ if row is not None:
+ return row[0]
+ else:
+ return None
+
+ def set_context(self, context):
+ """Set the current context for future `add_lines` etc."""
+ self._start_using()
+ context = context or ""
+ with self._connect() as con:
+ row = con.execute("select id from context where context = ?", (context,)).fetchone()
+ if row is not None:
+ self._current_context_id = row[0]
+ else:
+ cur = con.execute("insert into context (context) values (?)", (context,))
+ self._current_context_id = cur.lastrowid
+
def add_lines(self, line_data):
"""Add measured line data.
@@ -178,12 +223,14 @@ class CoverageSqliteData(SimpleReprMixin):
))
self._start_using()
self._choose_lines_or_arcs(lines=True)
+ if self._current_context_id is None:
+ self.set_context("")
with self._connect() as con:
for filename, linenos in iitems(line_data):
file_id = self._file_id(filename, add=True)
- data = [(file_id, lineno) for lineno in linenos]
+ data = [(file_id, self._current_context_id, lineno) for lineno in linenos]
con.executemany(
- "insert or ignore into line (file_id, lineno) values (?, ?)",
+ "insert or ignore into line (file_id, context_id, lineno) values (?, ?, ?)",
data,
)
@@ -201,12 +248,14 @@ class CoverageSqliteData(SimpleReprMixin):
))
self._start_using()
self._choose_lines_or_arcs(arcs=True)
+ if self._current_context_id is None:
+ self.set_context("")
with self._connect() as con:
for filename, arcs in iitems(arc_data):
file_id = self._file_id(filename, add=True)
- data = [(file_id, fromno, tono) for fromno, tono in arcs]
+ data = [(file_id, self._current_context_id, fromno, tono) for fromno, tono in arcs]
con.executemany(
- "insert or ignore into arc (file_id, fromno, tono) values (?, ?, ?)",
+ "insert or ignore into arc (file_id, context_id, fromno, tono) values (?, ?, ?, ?)",
data,
)
@@ -276,23 +325,27 @@ class CoverageSqliteData(SimpleReprMixin):
aliases = aliases or PathAliases()
# See what we had already measured, for accurate conflict reporting.
- this_measured = set(self.measured_files())
+ this_measured = self.measured_files()
# lines
if other_data._has_lines:
- for filename in other_data.measured_files():
- lines = set(other_data.lines(filename))
- filename = aliases.map(filename)
- lines.update(self.lines(filename) or ())
- self.add_lines({filename: lines})
+ for context in other_data.measured_contexts():
+ self.set_context(context)
+ for filename in other_data.measured_files():
+ lines = set(other_data.lines(filename, context=context))
+ filename = aliases.map(filename)
+ lines.update(self.lines(filename, context=context) or ())
+ self.add_lines({filename: lines})
# arcs
if other_data._has_arcs:
- for filename in other_data.measured_files():
- arcs = set(other_data.arcs(filename))
- filename = aliases.map(filename)
- arcs.update(self.arcs(filename) or ())
- self.add_arcs({filename: arcs})
+ for context in other_data.measured_contexts():
+ self.set_context(context)
+ for filename in other_data.measured_files():
+ arcs = set(other_data.arcs(filename, context=context))
+ filename = aliases.map(filename)
+ arcs.update(self.arcs(filename, context=context) or ())
+ self.add_arcs({filename: arcs})
# file_tracers
for filename in other_data.measured_files():
@@ -353,8 +406,15 @@ class CoverageSqliteData(SimpleReprMixin):
return bool(self._has_arcs)
def measured_files(self):
- """A list of all files that had been measured."""
- return list(self._file_map)
+ """A set of all files that had been measured."""
+ return set(self._file_map)
+
+ def measured_contexts(self):
+ """A set of all contexts that have been measured."""
+ self._start_using()
+ with self._connect() as con:
+ contexts = set(row[0] for row in con.execute("select distinct(context) from context"))
+ return contexts
def file_tracer(self, filename):
"""Get the plugin name of the file tracer for a file.
@@ -374,12 +434,11 @@ class CoverageSqliteData(SimpleReprMixin):
return row[0] or ""
return "" # File was measured, but no tracer associated.
- def lines(self, filename):
+ def lines(self, filename, context=None):
self._start_using()
if self.has_arcs():
- arcs = self.arcs(filename)
+ arcs = self.arcs(filename, context=context)
if arcs is not None:
- import itertools
all_lines = itertools.chain.from_iterable(arcs)
return list(set(l for l in all_lines if l > 0))
@@ -388,18 +447,28 @@ class CoverageSqliteData(SimpleReprMixin):
if file_id is None:
return None
else:
- linenos = con.execute("select lineno from line where file_id = ?", (file_id,))
+ query = "select lineno from line where file_id = ?"
+ data = [file_id]
+ if context is not None:
+ query += " and context_id = ?"
+ data += [self._context_id(context)]
+ linenos = con.execute(query, data)
return [lineno for lineno, in linenos]
- def arcs(self, filename):
+ def arcs(self, filename, context=None):
self._start_using()
with self._connect() as con:
file_id = self._file_id(filename)
if file_id is None:
return None
else:
- arcs = con.execute("select fromno, tono from arc where file_id = ?", (file_id,))
- return [pair for pair in arcs]
+ query = "select fromno, tono from arc where file_id = ?"
+ data = [file_id]
+ if context is not None:
+ query += " and context_id = ?"
+ data += [self._context_id(context)]
+ arcs = con.execute(query, data)
+ return list(arcs)
def run_infos(self):
return [] # TODO
@@ -456,3 +525,7 @@ class Sqlite(SimpleReprMixin):
if self.debug:
self.debug.write("Executing many {!r} with {} rows".format(sql, len(data)))
return self.con.executemany(sql, data)
+
+ def dump(self): # pragma: debugging
+ """Return a multi-line string, the dump of the database."""
+ return "\n".join(self.con.iterdump())
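
Putting the SQL pieces together: set_context() selects or creates a row in the new context table, add_lines()/add_arcs() tag every row they insert with that context id, and lines()/arcs() can filter by label. A sketch of the storage API (constructor arguments, paths, and labels are assumptions, not part of this change):

    from coverage.sqldata import CoverageSqliteData

    data = CoverageSqliteData("example.coverage")   # assumed: first argument is the base filename
    data.set_context("unit")
    data.add_lines({"/src/app.py": {1, 2, 3}})
    data.set_context("integration")
    data.add_lines({"/src/app.py": {2, 3, 7}})

    print(data.measured_contexts())                            # {'unit', 'integration'}
    print(sorted(set(data.lines("/src/app.py"))))              # all contexts combined: [1, 2, 3, 7]
    print(sorted(data.lines("/src/app.py", context="unit")))   # one context only: [1, 2, 3]
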