 coverage/cmdline.py       |   2
 coverage/control.py       |  20
 coverage/data.py          | 272
 coverage/debug.py         |  17
 coverage/misc.py          |  10
 coverage/results.py       |   3
 coverage/sqldata.py       | 286
 doc/cmd.rst               |   2
 lab/gendata.py            |  40
 tests/test_cmdline.py     |  11
 tests/test_concurrency.py |   4
 tests/test_data.py        | 149
 tests/test_debug.py       |   4
 tests/test_process.py     |  46
 tests/test_summary.py     |   2
 15 files changed, 594 insertions(+), 274 deletions(-)
diff --git a/coverage/cmdline.py b/coverage/cmdline.py
index c21acda6..1b7955d3 100644
--- a/coverage/cmdline.py
+++ b/coverage/cmdline.py
@@ -659,7 +659,7 @@ class CoverageScript(object):
self.coverage.load()
data = self.coverage.get_data()
print(info_header("data"))
- print("path: %s" % self.coverage._data_files.filename)
+ print("path: %s" % self.coverage.get_data().filename)
if data:
print("has_arcs: %r" % data.has_arcs())
summary = data.line_counts(fullpath=True)
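
With `_data_files` gone, the data file's path is reachable through the public
API. A minimal sketch of the new access pattern (assumes a `.coverage` file
already exists in the current directory):

    import coverage

    cov = coverage.Coverage()
    cov.load()                          # read the data file into a CoverageData
    print(cov.get_data().filename)      # absolute path, e.g. /proj/.coverage
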
diff --git a/coverage/control.py b/coverage/control.py
index 03238910..46c2ece1 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -15,7 +15,7 @@ from coverage.annotate import AnnotateReporter
from coverage.backward import string_class, iitems
from coverage.collector import Collector
from coverage.config import read_coverage_config
-from coverage.data import CoverageData, CoverageDataFiles
+from coverage.data import CoverageData, combine_parallel_data
from coverage.debug import DebugControl, write_formatted_info
from coverage.disposition import disposition_debug_msg
from coverage.files import PathAliases, set_relative_directory, abs_file
@@ -152,7 +152,7 @@ class Coverage(object):
self._warnings = []
# Other instance attributes, set later.
- self._data = self._data_files = self._collector = None
+ self._data = self._collector = None
self._plugins = None
self._inorout = None
self._inorout_class = InOrOut
@@ -270,8 +270,7 @@ class Coverage(object):
# Create the data file. We do this at construction time so that the
# data file will be written into the directory where the process
# started rather than wherever the process eventually chdir'd to.
- self._data = CoverageData(debug=self._debug)
- self._data_files = CoverageDataFiles(
+ self._data = CoverageData(
basename=self.config.data_file, warn=self._warn, debug=self._debug,
)
@@ -389,7 +388,7 @@ class Coverage(object):
"""Load previously-collected coverage data from the data file."""
self._init()
self._collector.reset()
- self._data_files.read(self._data)
+ self._data.read()
def start(self):
"""Start measuring code coverage.
@@ -443,8 +442,7 @@ class Coverage(object):
"""
self._init()
self._collector.reset()
- self._data.erase()
- self._data_files.erase(parallel=self.config.parallel)
+ self._data.erase(parallel=self.config.parallel)
def clear_exclude(self, which='exclude'):
"""Clear the exclude list."""
@@ -497,7 +495,7 @@ class Coverage(object):
"""Save the collected coverage data to the data file."""
self._init()
data = self.get_data()
- self._data_files.write(data, suffix=self._data_suffix)
+ data.write(suffix=self._data_suffix)
def combine(self, data_paths=None, strict=False):
"""Combine together a number of similarly-named coverage data files.
@@ -532,9 +530,7 @@ class Coverage(object):
for pattern in paths[1:]:
aliases.add(pattern, result)
- self._data_files.combine_parallel_data(
- self._data, aliases=aliases, data_paths=data_paths, strict=strict,
- )
+ combine_parallel_data(self._data, aliases=aliases, data_paths=data_paths, strict=strict)
def get_data(self):
"""Get the collected data.
@@ -821,7 +817,7 @@ class Coverage(object):
('configs_attempted', self.config.attempted_config_files),
('configs_read', self.config.config_files_read),
('config_file', self.config.config_file),
- ('data_path', self._data_files.filename),
+ ('data_path', self._data.filename),
('python', sys.version.replace('\n', '')),
('platform', platform.platform()),
('implementation', platform.python_implementation()),
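
The net effect in control.py: the `Coverage` object now delegates all file
handling to its single `CoverageData` instance. A hedged sketch of the
lifecycle after this change (the comments name the delegations made in this
diff):

    import coverage

    cov = coverage.Coverage(data_file=".coverage")
    cov.start()
    # ... code under measurement ...
    cov.stop()
    cov.save()       # data.write(suffix=self._data_suffix)
    cov.combine()    # combine_parallel_data(self._data, ...)
    cov.erase()      # data.erase(parallel=self.config.parallel)
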
diff --git a/coverage/data.py b/coverage/data.py
index 9f2d1308..0b3b640b 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -22,7 +22,7 @@ from coverage.misc import CoverageException, file_be_gone, isolate_module
os = isolate_module(os)
-class CoverageData(object):
+class CoverageJsonData(object):
"""Manages collected coverage data, including file storage.
This class is the public supported API to the data coverage.py collects
@@ -57,8 +57,10 @@ class CoverageData(object):
names in this API are case-sensitive, even on platforms with
case-insensitive file systems.
- To read a coverage.py data file, use :meth:`read_file`, or
- :meth:`read_fileobj` if you have an already-opened file. You can then
+ A data file is associated with the data when the :class:`CoverageData`
+ is created.
+
+ To read a coverage.py data file, use :meth:`read`. You can then
access the line, arc, or file tracer data with :meth:`lines`, :meth:`arcs`,
or :meth:`file_tracer`. Run information is available with
:meth:`run_infos`.
@@ -69,17 +71,15 @@ class CoverageData(object):
most Python containers, you can determine if there is any data at all by
using this object as a boolean value.
-
Most data files will be created by coverage.py itself, but you can use
methods here to create data files if you like. The :meth:`add_lines`,
:meth:`add_arcs`, and :meth:`add_file_tracers` methods add data, in ways
that are convenient for coverage.py. The :meth:`add_run_info` method adds
key-value pairs to the run information.
- To add a file without any measured data, use :meth:`touch_file`.
+ To add a source file without any measured data, use :meth:`touch_file`.
- You write to a named file with :meth:`write_file`, or to an already opened
- file with :meth:`write_fileobj`.
+ Write the data to its file with :meth:`write`.
You can clear the data in memory with :meth:`erase`. Two data collections
can be combined by using :meth:`update` on one :class:`CoverageData`,
@@ -112,13 +112,19 @@ class CoverageData(object):
# line data is easily recovered from the arcs: it is all the first elements
# of the pairs that are greater than zero.
- def __init__(self, debug=None):
+ def __init__(self, basename=None, warn=None, debug=None):
"""Create a CoverageData.
+ `warn` is the warning function to use.
+
+ `basename` is the name of the file to use for storing data.
+
`debug` is a `DebugControl` object for writing debug messages.
"""
+ self._warn = warn
self._debug = debug
+ self.filename = os.path.abspath(basename or ".coverage")
# A map from canonical Python source file name to a dictionary in
# which there's an entry for each line number that has been
@@ -262,7 +268,16 @@ class CoverageData(object):
__bool__ = __nonzero__
- def read_fileobj(self, file_obj):
+ def read(self):
+ """Read the coverage data.
+
+ It is fine for the file to not exist, in which case no data is read.
+
+ """
+ if os.path.exists(self.filename):
+ self._read_file(self.filename)
+
+ def _read_fileobj(self, file_obj):
"""Read the coverage data from the given file object.
Should only be used on an empty CoverageData object.
@@ -284,13 +299,13 @@ class CoverageData(object):
self._validate()
- def read_file(self, filename):
+ def _read_file(self, filename):
"""Read the coverage data from `filename` into this object."""
if self._debug and self._debug.should('dataio'):
self._debug.write("Reading data from %r" % (filename,))
try:
with self._open_for_reading(filename) as f:
- self.read_fileobj(f)
+ self._read_fileobj(f)
except Exception as exc:
raise CoverageException(
"Couldn't read data from '%s': %s: %s" % (
@@ -438,7 +453,34 @@ class CoverageData(object):
self._validate()
- def write_fileobj(self, file_obj):
+ def write(self, suffix=None):
+ """Write the collected coverage data to a file.
+
+ `suffix` is a suffix to append to the base file name. This can be used
+ for multiple or parallel execution, so that many coverage data files
+ can exist simultaneously. A dot will be used to join the base name and
+ the suffix.
+
+ """
+ filename = self.filename
+ if suffix is True:
+ # If data_suffix was a simple true value, then make a suffix with
+ # plenty of distinguishing information. We do this here in
+ # `save()` at the last minute so that the pid will be correct even
+ # if the process forks.
+ extra = ""
+ if _TEST_NAME_FILE: # pragma: debugging
+ with open(_TEST_NAME_FILE) as f:
+ test_name = f.read()
+ extra = "." + test_name
+ dice = random.Random(os.urandom(8)).randint(0, 999999)
+ suffix = "%s%s.%s.%06d" % (socket.gethostname(), extra, os.getpid(), dice)
+
+ if suffix:
+ filename += "." + suffix
+ self._write_file(filename)
+
+ def _write_fileobj(self, file_obj):
"""Write the coverage data to `file_obj`."""
# Create the file data.
@@ -460,21 +502,38 @@ class CoverageData(object):
file_obj.write(self._GO_AWAY)
json.dump(file_data, file_obj, separators=(',', ':'))
- def write_file(self, filename):
+ def _write_file(self, filename):
"""Write the coverage data to `filename`."""
if self._debug and self._debug.should('dataio'):
self._debug.write("Writing data to %r" % (filename,))
with open(filename, 'w') as fdata:
- self.write_fileobj(fdata)
+ self._write_fileobj(fdata)
+
+ def erase(self, parallel=False):
+ """Erase the data in this object.
+
+ If `parallel` is true, then also deletes data files created from the
+ basename by parallel-mode.
- def erase(self):
- """Erase the data in this object."""
+ """
self._lines = None
self._arcs = None
self._file_tracers = {}
self._runs = []
self._validate()
+ if self._debug and self._debug.should('dataio'):
+ self._debug.write("Erasing data file %r" % (self.filename,))
+ file_be_gone(self.filename)
+ if parallel:
+ data_dir, local = os.path.split(self.filename)
+ localdot = local + '.*'
+ pattern = os.path.join(os.path.abspath(data_dir), localdot)
+ for filename in glob.glob(pattern):
+ if self._debug and self._debug.should('dataio'):
+ self._debug.write("Erasing parallel data file %r" % (filename,))
+ file_be_gone(filename)
+
def update(self, other_data, aliases=None):
"""Update this data with data from another `CoverageData`.
@@ -609,139 +668,76 @@ class CoverageData(object):
return self._arcs is not None
-class CoverageDataFiles(object):
- """Manage the use of coverage data files."""
+which = os.environ.get("COV_STORAGE", "json")
+if which == "json":
+ CoverageData = CoverageJsonData
+elif which == "sql":
+ from coverage.sqldata import CoverageSqliteData
+ CoverageData = CoverageSqliteData
- def __init__(self, basename=None, warn=None, debug=None):
- """Create a CoverageDataFiles to manage data files.
- `warn` is the warning function to use.
+def combine_parallel_data(data, aliases=None, data_paths=None, strict=False):
+ """Combine a number of data files together.
- `basename` is the name of the file to use for storing data.
+ Treat `data.filename` as a file prefix, and combine the data from all
+ of the data files starting with that prefix plus a dot.
- `debug` is a `DebugControl` object for writing debug messages.
-
- """
- self.warn = warn
- self.debug = debug
-
- # Construct the file name that will be used for data storage.
- self.filename = os.path.abspath(basename or ".coverage")
-
- def erase(self, parallel=False):
- """Erase the data from the file storage.
+ If `aliases` is provided, it's a `PathAliases` object that is used to
+ re-map paths to match the local machine's.
- If `parallel` is true, then also deletes data files created from the
- basename by parallel-mode.
-
- """
- if self.debug and self.debug.should('dataio'):
- self.debug.write("Erasing data file %r" % (self.filename,))
- file_be_gone(self.filename)
- if parallel:
- data_dir, local = os.path.split(self.filename)
- localdot = local + '.*'
- pattern = os.path.join(os.path.abspath(data_dir), localdot)
- for filename in glob.glob(pattern):
- if self.debug and self.debug.should('dataio'):
- self.debug.write("Erasing parallel data file %r" % (filename,))
- file_be_gone(filename)
+ If `data_paths` is provided, it is a list of directories or files to
+ combine. Directories are searched for files that start with
+ `data.filename` plus dot as a prefix, and those files are combined.
- def read(self, data):
- """Read the coverage data."""
- if os.path.exists(self.filename):
- data.read_file(self.filename)
+ If `data_paths` is not provided, then the directory portion of
+ `data.filename` is used as the directory to search for data files.
- def write(self, data, suffix=None):
- """Write the collected coverage data to a file.
+ Every data file found and combined is then deleted from disk. If a file
+ cannot be read, a warning will be issued, and the file will not be
+ deleted.
- `suffix` is a suffix to append to the base file name. This can be used
- for multiple or parallel execution, so that many coverage data files
- can exist simultaneously. A dot will be used to join the base name and
- the suffix.
+ If `strict` is true, and no files are found to combine, an error is
+ raised.
- """
- filename = self.filename
- if suffix is True:
- # If data_suffix was a simple true value, then make a suffix with
- # plenty of distinguishing information. We do this here in
- # `save()` at the last minute so that the pid will be correct even
- # if the process forks.
- extra = ""
- if _TEST_NAME_FILE: # pragma: debugging
- with open(_TEST_NAME_FILE) as f:
- test_name = f.read()
- extra = "." + test_name
- dice = random.Random(os.urandom(8)).randint(0, 999999)
- suffix = "%s%s.%s.%06d" % (socket.gethostname(), extra, os.getpid(), dice)
-
- if suffix:
- filename += "." + suffix
- data.write_file(filename)
-
- def combine_parallel_data(self, data, aliases=None, data_paths=None, strict=False):
- """Combine a number of data files together.
-
- Treat `self.filename` as a file prefix, and combine the data from all
- of the data files starting with that prefix plus a dot.
-
- If `aliases` is provided, it's a `PathAliases` object that is used to
- re-map paths to match the local machine's.
-
- If `data_paths` is provided, it is a list of directories or files to
- combine. Directories are searched for files that start with
- `self.filename` plus dot as a prefix, and those files are combined.
-
- If `data_paths` is not provided, then the directory portion of
- `self.filename` is used as the directory to search for data files.
-
- Every data file found and combined is then deleted from disk. If a file
- cannot be read, a warning will be issued, and the file will not be
- deleted.
-
- If `strict` is true, and no files are found to combine, an error is
- raised.
+ """
+ # Because of the os.path.abspath in the constructor, data_dir will
+ # never be an empty string.
+ data_dir, local = os.path.split(data.filename)
+ localdot = local + '.*'
+
+ data_paths = data_paths or [data_dir]
+ files_to_combine = []
+ for p in data_paths:
+ if os.path.isfile(p):
+ files_to_combine.append(os.path.abspath(p))
+ elif os.path.isdir(p):
+ pattern = os.path.join(os.path.abspath(p), localdot)
+ files_to_combine.extend(glob.glob(pattern))
+ else:
+ raise CoverageException("Couldn't combine from non-existent path '%s'" % (p,))
- """
- # Because of the os.path.abspath in the constructor, data_dir will
- # never be an empty string.
- data_dir, local = os.path.split(self.filename)
- localdot = local + '.*'
-
- data_paths = data_paths or [data_dir]
- files_to_combine = []
- for p in data_paths:
- if os.path.isfile(p):
- files_to_combine.append(os.path.abspath(p))
- elif os.path.isdir(p):
- pattern = os.path.join(os.path.abspath(p), localdot)
- files_to_combine.extend(glob.glob(pattern))
- else:
- raise CoverageException("Couldn't combine from non-existent path '%s'" % (p,))
-
- if strict and not files_to_combine:
- raise CoverageException("No data to combine")
-
- files_combined = 0
- for f in files_to_combine:
- new_data = CoverageData(debug=self.debug)
- try:
- new_data.read_file(f)
- except CoverageException as exc:
- if self.warn:
- # The CoverageException has the file name in it, so just
- # use the message as the warning.
- self.warn(str(exc))
- else:
- data.update(new_data, aliases=aliases)
- files_combined += 1
- if self.debug and self.debug.should('dataio'):
- self.debug.write("Deleting combined data file %r" % (f,))
- file_be_gone(f)
-
- if strict and not files_combined:
- raise CoverageException("No usable data files")
+ if strict and not files_to_combine:
+ raise CoverageException("No data to combine")
+ files_combined = 0
+ for f in files_to_combine:
+ try:
+ new_data = CoverageData(f, debug=data._debug)
+ new_data.read()
+ except CoverageException as exc:
+ if data._warn:
+ # The CoverageException has the file name in it, so just
+ # use the message as the warning.
+ data._warn(str(exc))
+ else:
+ data.update(new_data, aliases=aliases)
+ files_combined += 1
+ if data._debug and data._debug.should('dataio'):
+ data._debug.write("Deleting combined data file %r" % (f,))
+ file_be_gone(f)
+
+ if strict and not files_combined:
+ raise CoverageException("No usable data files")
def canonicalize_json_data(data):
"""Canonicalize our JSON data so it can be compared."""
diff --git a/coverage/debug.py b/coverage/debug.py
index d63a9070..fd27c731 100644
--- a/coverage/debug.py
+++ b/coverage/debug.py
@@ -31,6 +31,8 @@ _TEST_NAME_FILE = "" # "/tmp/covtest.txt"
class DebugControl(object):
"""Control and output for debugging."""
+ show_repr_attr = False # For SimpleRepr
+
def __init__(self, options, output):
"""Configure the options and output file for debugging."""
self.options = list(options) + FORCED_DEBUG
@@ -71,6 +73,10 @@ class DebugControl(object):
`msg` is the line to write. A newline will be appended.
"""
+ if self.should('self'):
+ caller_self = inspect.stack()[1][0].f_locals.get('self')
+ if caller_self is not None:
+ msg = "[self: {!r}] {}".format(caller_self, msg)
self.output.write(msg+"\n")
if self.should('callers'):
dump_stack_frames(out=self.output, skip=1)
@@ -167,6 +173,17 @@ def add_pid_and_tid(text):
return text
+class SimpleRepr(object):
+ """A mixin implementing a simple __repr__."""
+ def __repr__(self):
+ show_attrs = ((k, v) for k, v in self.__dict__.items() if getattr(v, "show_repr_attr", True))
+ return "<{klass} @0x{id:x} {attrs}>".format(
+ klass=self.__class__.__name__,
+ id=id(self),
+ attrs=" ".join("{}={!r}".format(k, v) for k, v in show_attrs),
+ )
+
+
def filter_text(text, filters):
"""Run `text` through a series of filters.
diff --git a/coverage/misc.py b/coverage/misc.py
index fff2a187..78ec027f 100644
--- a/coverage/misc.py
+++ b/coverage/misc.py
@@ -249,16 +249,6 @@ def _needs_to_implement(that, func_name):
)
-class SimpleRepr(object):
- """A mixin implementing a simple __repr__."""
- def __repr__(self):
- return "<{klass} @{id:x} {attrs}>".format(
- klass=self.__class__.__name__,
- id=id(self) & 0xFFFFFF,
- attrs=" ".join("{}={!r}".format(k, v) for k, v in self.__dict__.items()),
- )
-
-
class BaseCoverageException(Exception):
"""The base of all Coverage exceptions."""
pass
diff --git a/coverage/results.py b/coverage/results.py
index 7e3bd268..fb919c9b 100644
--- a/coverage/results.py
+++ b/coverage/results.py
@@ -6,7 +6,8 @@
import collections
from coverage.backward import iitems
-from coverage.misc import contract, format_lines, SimpleRepr
+from coverage.debug import SimpleRepr
+from coverage.misc import contract, format_lines
class Analysis(object):
diff --git a/coverage/sqldata.py b/coverage/sqldata.py
new file mode 100644
index 00000000..296e353e
--- /dev/null
+++ b/coverage/sqldata.py
@@ -0,0 +1,286 @@
+# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
+# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
+
+"""Sqlite coverage data."""
+
+import glob
+import os
+import sqlite3
+
+from coverage.backward import iitems
+from coverage.debug import SimpleRepr
+from coverage.misc import CoverageException, file_be_gone
+
+
+SCHEMA = """
+create table schema (
+ version integer
+);
+
+insert into schema (version) values (1);
+
+create table meta (
+ has_lines boolean,
+ has_arcs boolean
+);
+
+create table file (
+ id integer primary key,
+ path text,
+ tracer text,
+ unique(path)
+);
+
+create table line (
+ file_id integer,
+ lineno integer,
+ unique(file_id, lineno)
+);
+
+create table arc (
+ file_id integer,
+ fromno integer,
+ tono integer,
+ unique(file_id, fromno, tono)
+);
+"""
+
+# >>> struct.unpack(">i", b"\xc0\x7e\x8a\x6e") # "coverage", kind of.
+# (-1065448850,)
+APP_ID = -1065448850
+
+class CoverageSqliteData(SimpleRepr):
+ def __init__(self, basename=None, warn=None, debug=None):
+ self.filename = os.path.abspath(basename or ".coverage")
+ self._warn = warn
+ self._debug = debug
+
+ self._file_map = {}
+ self._db = None
+ # Are we in sync with the data file?
+ self._have_read = False
+
+ self._has_lines = False
+ self._has_arcs = False
+
+ def _reset(self):
+ self._file_map = {}
+ if self._db is not None:
+ self._db.close()
+ self._db = None
+ self._have_read = False
+
+ def _create_db(self):
+ if self._debug and self._debug.should('dataio'):
+ self._debug.write("Creating data file {!r}".format(self.filename))
+ self._db = Sqlite(self.filename, self._debug)
+ with self._db:
+ self._db.execute("pragma application_id = {}".format(APP_ID))
+ for stmt in SCHEMA.split(';'):
+ stmt = stmt.strip()
+ if stmt:
+ self._db.execute(stmt)
+ self._db.execute(
+ "insert into meta (has_lines, has_arcs) values (?, ?)",
+ (self._has_lines, self._has_arcs)
+ )
+
+ def _open_db(self):
+ if self._debug and self._debug.should('dataio'):
+ self._debug.write("Opening data file {!r}".format(self.filename))
+ self._db = Sqlite(self.filename, self._debug)
+ with self._db:
+ for app_id, in self._db.execute("pragma application_id"):
+ app_id = int(app_id)
+ if app_id != APP_ID:
+ raise CoverageException(
+ "File {!r} doesn't look like a coverage data file: "
+ "0x{:08x} != 0x{:08x}".format(self.filename, app_id, APP_ID)
+ )
+ for row in self._db.execute("select has_lines, has_arcs from meta"):
+ self._has_lines, self._has_arcs = row
+
+ for path, id in self._db.execute("select path, id from file"):
+ self._file_map[path] = id
+
+ def _connect(self):
+ if self._db is None:
+ if os.path.exists(self.filename):
+ self._open_db()
+ else:
+ self._create_db()
+ return self._db
+
+ def _file_id(self, filename):
+ self._start_writing()
+ if filename not in self._file_map:
+ with self._connect() as con:
+ cur = con.execute("insert into file (path) values (?)", (filename,))
+ self._file_map[filename] = cur.lastrowid
+ return self._file_map[filename]
+
+ def add_lines(self, line_data):
+ """Add measured line data.
+
+ `line_data` is a dictionary mapping file names to dictionaries::
+
+ { filename: { lineno: None, ... }, ...}
+
+ """
+ if self._debug and self._debug.should('dataop'):
+ self._debug.write("Adding lines: %d files, %d lines total" % (
+ len(line_data), sum(len(lines) for lines in line_data.values())
+ ))
+ self._start_writing()
+ self._choose_lines_or_arcs(lines=True)
+ with self._connect() as con:
+ for filename, linenos in iitems(line_data):
+ file_id = self._file_id(filename)
+ data = [(file_id, lineno) for lineno in linenos]
+ con.executemany(
+ "insert or ignore into line (file_id, lineno) values (?, ?)",
+ data,
+ )
+
+ def add_arcs(self, arc_data):
+ """Add measured arc data.
+
+ `arc_data` is a dictionary mapping file names to dictionaries::
+
+ { filename: { (l1,l2): None, ... }, ...}
+
+ """
+ if self._debug and self._debug.should('dataop'):
+ self._debug.write("Adding arcs: %d files, %d arcs total" % (
+ len(arc_data), sum(len(arcs) for arcs in arc_data.values())
+ ))
+ self._start_writing()
+ self._choose_lines_or_arcs(arcs=True)
+ with self._connect() as con:
+ for filename, arcs in iitems(arc_data):
+ file_id = self._file_id(filename)
+ data = [(file_id, fromno, tono) for fromno, tono in arcs]
+ con.executemany(
+ "insert or ignore into arc (file_id, fromno, tono) values (?, ?, ?)",
+ data,
+ )
+
+ def _choose_lines_or_arcs(self, lines=False, arcs=False):
+ if lines and self._has_arcs:
+ raise CoverageException("Can't add lines to existing arc data")
+ if arcs and self._has_lines:
+ raise CoverageException("Can't add arcs to existing line data")
+ if not self._has_arcs and not self._has_lines:
+ self._has_lines = lines
+ self._has_arcs = arcs
+ with self._connect() as con:
+ con.execute("update meta set has_lines = ?, has_arcs = ?", (lines, arcs))
+
+ def add_file_tracers(self, file_tracers):
+ """Add per-file plugin information.
+
+ `file_tracers` is { filename: plugin_name, ... }
+
+ """
+ self._start_writing()
+ with self._connect() as con:
+ data = list(iitems(file_tracers))
+ if data:
+ con.executemany(
+ "insert into file (path, tracer) values (?, ?) on duplicate key update",
+ data,
+ )
+
+ def erase(self, parallel=False):
+ """Erase the data in this object.
+
+ If `parallel` is true, then also deletes data files created from the
+ basename by parallel-mode.
+
+ """
+ self._reset()
+ if self._debug and self._debug.should('dataio'):
+ self._debug.write("Erasing data file {!r}".format(self.filename))
+ file_be_gone(self.filename)
+ if parallel:
+ data_dir, local = os.path.split(self.filename)
+ localdot = local + '.*'
+ pattern = os.path.join(os.path.abspath(data_dir), localdot)
+ for filename in glob.glob(pattern):
+ if self._debug and self._debug.should('dataio'):
+ self._debug.write("Erasing parallel data file {!r}".format(filename))
+ file_be_gone(filename)
+
+ def read(self):
+ self._connect() # TODO: doesn't look right
+ self._have_read = True
+
+ def write(self, suffix=None):
+ """Write the collected coverage data to a file."""
+ pass
+
+ def _start_writing(self):
+ if not self._have_read:
+ self.erase()
+ self._have_read = True
+
+ def has_arcs(self):
+ return self._has_arcs
+
+ def measured_files(self):
+ """A list of all files that had been measured."""
+ return list(self._file_map)
+
+ def file_tracer(self, filename):
+ """Get the plugin name of the file tracer for a file.
+
+ Returns the name of the plugin that handles this file. If the file was
+ measured, but didn't use a plugin, then "" is returned. If the file
+ was not measured, then None is returned.
+
+ """
+ return "" # TODO
+
+ def lines(self, filename):
+ with self._connect() as con:
+ file_id = self._file_id(filename)
+ return [lineno for lineno, in con.execute("select lineno from line where file_id = ?", (file_id,))]
+
+ def arcs(self, filename):
+ with self._connect() as con:
+ file_id = self._file_id(filename)
+ return [pair for pair in con.execute("select fromno, tono from arc where file_id = ?", (file_id,))]
+
+
+class Sqlite(SimpleRepr):
+ def __init__(self, filename, debug):
+ self.debug = debug if (debug and debug.should('sql')) else None
+ if self.debug:
+ self.debug.write("Connecting to {!r}".format(filename))
+ self.con = sqlite3.connect(filename)
+
+ # This pragma makes writing faster. It disables rollbacks, but we never need them.
+ self.execute("pragma journal_mode=off")
+ # This pragma makes writing faster.
+ self.execute("pragma synchronous=off")
+
+ def close(self):
+ self.con.close()
+
+ def __enter__(self):
+ self.con.__enter__()
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ return self.con.__exit__(exc_type, exc_value, traceback)
+
+ def execute(self, sql, parameters=()):
+ if self.debug:
+ tail = " with {!r}".format(parameters) if parameters else ""
+ self.debug.write("Executing {!r}{}".format(sql, tail))
+ return self.con.execute(sql, parameters)
+
+ def executemany(self, sql, data):
+ if self.debug:
+ self.debug.write("Executing many {!r} with {} rows".format(sql, len(data)))
+ return self.con.executemany(sql, data)
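
Since `write()` is currently a no-op and every `add_*` call commits through the
connection's context manager, data lands in the SQLite file as it is added. A
short sketch of driving the new class directly (the file name is arbitrary):

    from coverage.sqldata import CoverageSqliteData

    data = CoverageSqliteData("example.db")
    data.add_lines({"/src/mod.py": {1: None, 2: None, 5: None}})
    print(data.lines("/src/mod.py"))   # [1, 2, 5], order not guaranteed
    print(data.has_arcs())             # False
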
diff --git a/doc/cmd.rst b/doc/cmd.rst
index d198178f..908b2ee9 100644
--- a/doc/cmd.rst
+++ b/doc/cmd.rst
@@ -486,6 +486,8 @@ to log:
* ``process``: show process creation information, and changes in the current
directory.
+* ``self``: annotate each debug message with the object printing the message.
+
* ``sys``: before starting, dump all the system and environment information,
as with :ref:`coverage debug sys <cmd_debug>`.
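
For example, to see which object emitted each data I/O message:

    $ coverage run --debug=self,dataio my_program.py
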
diff --git a/lab/gendata.py b/lab/gendata.py
new file mode 100644
index 00000000..0e9c6b6f
--- /dev/null
+++ b/lab/gendata.py
@@ -0,0 +1,40 @@
+import random
+import time
+
+from coverage.data import CoverageJsonData
+from coverage.sqldata import CoverageSqliteData
+
+NUM_FILES = 1000
+NUM_LINES = 1000
+
+def gen_data(cdata):
+ rnd = random.Random()
+ rnd.seed(17)
+
+ def linenos(num_lines, prob):
+        return (n for n in range(num_lines) if rnd.random() < prob)
+
+ start = time.time()
+ for i in range(NUM_FILES):
+ filename = f"/src/foo/project/file{i}.py"
+ line_data = { filename: dict.fromkeys(linenos(NUM_LINES, .6)) }
+ cdata.add_lines(line_data)
+
+ cdata.write()
+ end = time.time()
+ delta = end - start
+ return delta
+
+class DummyData:
+ def add_lines(self, line_data):
+ return
+ def write(self):
+ return
+
+overhead = gen_data(DummyData())
+jtime = gen_data(CoverageJsonData("gendata.json")) - overhead
+stime = gen_data(CoverageSqliteData("gendata.db")) - overhead
+print(f"Overhead: {overhead:.3f}s")
+print(f"JSON: {jtime:.3f}s")
+print(f"SQLite: {stime:.3f}s")
+print(f"{stime / jtime:.3f}x slower")
diff --git a/tests/test_cmdline.py b/tests/test_cmdline.py
index b8a659f1..ecd4d8b3 100644
--- a/tests/test_cmdline.py
+++ b/tests/test_cmdline.py
@@ -16,7 +16,7 @@ import coverage
import coverage.cmdline
from coverage import env
from coverage.config import CoverageConfig
-from coverage.data import CoverageData, CoverageDataFiles
+from coverage.data import CoverageData
from coverage.misc import ExceptionDuringRun
from tests.coveragetest import CoverageTest, OK, ERR, command_line
@@ -589,8 +589,7 @@ class CmdLineWithFilesTest(BaseCmdLineTest):
"file2.py": dict.fromkeys(range(1, 24)),
})
data.add_file_tracers({"file1.py": "a_plugin"})
- data_files = CoverageDataFiles()
- data_files.write(data)
+ data.write()
self.command_line("debug data")
self.assertMultiLineEqual(self.stdout(), textwrap.dedent("""\
@@ -601,16 +600,16 @@ class CmdLineWithFilesTest(BaseCmdLineTest):
2 files:
file1.py: 17 lines [a_plugin]
file2.py: 23 lines
- """).replace("FILENAME", data_files.filename))
+ """).replace("FILENAME", data.filename))
def test_debug_data_with_no_data(self):
- data_files = CoverageDataFiles()
+ data = CoverageData()
self.command_line("debug data")
self.assertMultiLineEqual(self.stdout(), textwrap.dedent("""\
-- data ------------------------------------------------------
path: FILENAME
No data collected
- """).replace("FILENAME", data_files.filename))
+ """).replace("FILENAME", data.filename))
class CmdLineStdoutTest(BaseCmdLineTest):
diff --git a/tests/test_concurrency.py b/tests/test_concurrency.py
index 58529ec5..a4f700ed 100644
--- a/tests/test_concurrency.py
+++ b/tests/test_concurrency.py
@@ -235,8 +235,8 @@ class ConcurrencyTest(CoverageTest):
# Read the coverage file and see that try_it.py has all its lines
# executed.
- data = coverage.CoverageData()
- data.read_file(".coverage")
+ data = coverage.CoverageData(".coverage")
+ data.read()
# If the test fails, it's helpful to see this info:
fname = abs_file("try_it.py")
diff --git a/tests/test_data.py b/tests/test_data.py
index 59c4a5bb..68b2c375 100644
--- a/tests/test_data.py
+++ b/tests/test_data.py
@@ -11,8 +11,7 @@ import re
import mock
-from coverage.backward import StringIO
-from coverage.data import CoverageData, CoverageDataFiles, debug_main, canonicalize_json_data
+from coverage.data import CoverageData, debug_main, canonicalize_json_data, combine_parallel_data
from coverage.debug import DebugControlString
from coverage.files import PathAliases, canonical_filename
from coverage.misc import CoverageException
@@ -420,12 +419,10 @@ class CoverageDataTest(DataTestHelpers, CoverageTest):
def test_read_and_write_are_opposites(self):
covdata1 = CoverageData()
covdata1.add_arcs(ARCS_3)
- stringio = StringIO()
- covdata1.write_fileobj(stringio)
+ covdata1.write()
- stringio.seek(0)
covdata2 = CoverageData()
- covdata2.read_fileobj(stringio)
+ covdata2.read()
self.assert_arcs3_data(covdata2)
@@ -433,59 +430,58 @@ class CoverageDataTestInTempDir(DataTestHelpers, CoverageTest):
"""Tests of CoverageData that need a temporary directory to make files."""
def test_read_write_lines(self):
- covdata1 = CoverageData()
+ covdata1 = CoverageData("lines.dat")
covdata1.add_lines(LINES_1)
- covdata1.write_file("lines.dat")
+ covdata1.write()
- covdata2 = CoverageData()
- covdata2.read_file("lines.dat")
+ covdata2 = CoverageData("lines.dat")
+ covdata2.read()
self.assert_lines1_data(covdata2)
def test_read_write_arcs(self):
- covdata1 = CoverageData()
+ covdata1 = CoverageData("arcs.dat")
covdata1.add_arcs(ARCS_3)
- covdata1.write_file("arcs.dat")
+ covdata1.write()
- covdata2 = CoverageData()
- covdata2.read_file("arcs.dat")
+ covdata2 = CoverageData("arcs.dat")
+ covdata2.read()
self.assert_arcs3_data(covdata2)
def test_read_errors(self):
- covdata = CoverageData()
+ msg = r"Couldn't read data from '.*[/\\]{0}': \S+"
- msg = r"Couldn't read data from '{0}': \S+"
self.make_file("xyzzy.dat", "xyzzy")
with self.assertRaisesRegex(CoverageException, msg.format("xyzzy.dat")):
- covdata.read_file("xyzzy.dat")
+ covdata = CoverageData("xyzzy.dat")
+ covdata.read()
+ self.assertFalse(covdata)
self.make_file("empty.dat", "")
with self.assertRaisesRegex(CoverageException, msg.format("empty.dat")):
- covdata.read_file("empty.dat")
-
- with self.assertRaisesRegex(CoverageException, msg.format("nonexistent.dat")):
- covdata.read_file("nonexistent.dat")
+ covdata = CoverageData("empty.dat")
+ covdata.read()
+ self.assertFalse(covdata)
self.make_file("misleading.dat", CoverageData._GO_AWAY + " this isn't JSON")
with self.assertRaisesRegex(CoverageException, msg.format("misleading.dat")):
- covdata.read_file("misleading.dat")
-
- # After all that, no data should be in our CoverageData.
+ covdata = CoverageData("misleading.dat")
+ covdata.read()
self.assertFalse(covdata)
def test_debug_main(self):
- covdata1 = CoverageData()
+ covdata1 = CoverageData(".coverage")
covdata1.add_lines(LINES_1)
- covdata1.write_file(".coverage")
+ covdata1.write()
debug_main([])
- covdata2 = CoverageData()
+ covdata2 = CoverageData("arcs.dat")
covdata2.add_arcs(ARCS_3)
covdata2.add_file_tracers({"y.py": "magic_plugin"})
covdata2.add_run_info(version="v3.14", chunks=["z", "a"])
- covdata2.write_file("arcs.dat")
+ covdata2.write()
- covdata3 = CoverageData()
- covdata3.write_file("empty.dat")
+ covdata3 = CoverageData("empty.dat")
+ covdata3.write()
debug_main(["arcs.dat", "empty.dat"])
expected = {
@@ -518,27 +514,23 @@ class CoverageDataTestInTempDir(DataTestHelpers, CoverageTest):
class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
- """Tests of CoverageDataFiles."""
+ """Tests of CoverageData file handling."""
no_files_in_temp_dir = True
- def setUp(self):
- super(CoverageDataFilesTest, self).setUp()
- self.data_files = CoverageDataFiles()
-
def test_reading_missing(self):
self.assert_doesnt_exist(".coverage")
covdata = CoverageData()
- self.data_files.read(covdata)
+ covdata.read()
self.assert_line_counts(covdata, {})
def test_writing_and_reading(self):
covdata1 = CoverageData()
covdata1.add_lines(LINES_1)
- self.data_files.write(covdata1)
+ covdata1.write()
covdata2 = CoverageData()
- self.data_files.read(covdata2)
+ covdata2.read()
self.assert_line_counts(covdata2, SUMMARY_1)
def test_debug_output_with_debug_option(self):
@@ -547,10 +539,10 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
debug = DebugControlString(options=["dataio"])
covdata1 = CoverageData(debug=debug)
covdata1.add_lines(LINES_1)
- self.data_files.write(covdata1)
+ covdata1.write()
covdata2 = CoverageData(debug=debug)
- self.data_files.read(covdata2)
+ covdata2.read()
self.assert_line_counts(covdata2, SUMMARY_1)
self.assertRegex(
@@ -565,10 +557,10 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
debug = DebugControlString(options=[])
covdata1 = CoverageData(debug=debug)
covdata1.add_lines(LINES_1)
- self.data_files.write(covdata1)
+ covdata1.write()
covdata2 = CoverageData(debug=debug)
- self.data_files.read(covdata2)
+ covdata2.read()
self.assert_line_counts(covdata2, SUMMARY_1)
self.assertEqual(debug.get_output(), "")
@@ -577,7 +569,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
self.assert_doesnt_exist(".coverage.SUFFIX")
covdata = CoverageData()
covdata.add_lines(LINES_1)
- self.data_files.write(covdata, suffix='SUFFIX')
+ covdata.write(suffix='SUFFIX')
self.assert_exists(".coverage.SUFFIX")
self.assert_doesnt_exist(".coverage")
@@ -587,7 +579,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
# suffix=True will make a randomly named data file.
covdata1 = CoverageData()
covdata1.add_lines(LINES_1)
- self.data_files.write(covdata1, suffix=True)
+ covdata1.write(suffix=True)
self.assert_doesnt_exist(".coverage")
data_files1 = glob.glob(".coverage.*")
self.assertEqual(len(data_files1), 1)
@@ -595,7 +587,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
# Another suffix=True will choose a different name.
covdata2 = CoverageData()
covdata2.add_lines(LINES_1)
- self.data_files.write(covdata2, suffix=True)
+ covdata2.write(suffix=True)
self.assert_doesnt_exist(".coverage")
data_files2 = glob.glob(".coverage.*")
self.assertEqual(len(data_files2), 2)
@@ -608,18 +600,18 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
covdata1 = CoverageData()
covdata1.add_lines(LINES_1)
- self.data_files.write(covdata1, suffix='1')
+ covdata1.write(suffix='1')
self.assert_exists(".coverage.1")
self.assert_file_count(".coverage.*", 1)
covdata2 = CoverageData()
covdata2.add_lines(LINES_2)
- self.data_files.write(covdata2, suffix='2')
+ covdata2.write(suffix='2')
self.assert_exists(".coverage.2")
self.assert_file_count(".coverage.*", 2)
covdata3 = CoverageData()
- self.data_files.combine_parallel_data(covdata3)
+ combine_parallel_data(covdata3)
self.assert_line_counts(covdata3, SUMMARY_1_2)
self.assert_measured_files(covdata3, MEASURED_FILES_1_2)
self.assert_file_count(".coverage.*", 0)
@@ -627,22 +619,21 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
def test_erasing(self):
covdata1 = CoverageData()
covdata1.add_lines(LINES_1)
- self.data_files.write(covdata1)
+ covdata1.write()
covdata1.erase()
self.assert_line_counts(covdata1, {})
- self.data_files.erase()
covdata2 = CoverageData()
- self.data_files.read(covdata2)
+ covdata2.read()
self.assert_line_counts(covdata2, {})
def test_erasing_parallel(self):
self.make_file("datafile.1")
self.make_file("datafile.2")
self.make_file(".coverage")
- data_files = CoverageDataFiles("datafile")
- data_files.erase(parallel=True)
+ data = CoverageData("datafile")
+ data.erase(parallel=True)
self.assert_file_count("datafile.*", 0)
self.assert_exists(".coverage")
@@ -657,7 +648,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
# Write with CoverageData, then read the JSON explicitly.
covdata = CoverageData()
covdata.add_lines(LINES_1)
- self.data_files.write(covdata)
+ covdata.write()
data = self.read_json_data_file(".coverage")
@@ -674,7 +665,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
# Write with CoverageData, then read the JSON explicitly.
covdata = CoverageData()
covdata.add_arcs(ARCS_3)
- self.data_files.write(covdata)
+ covdata.write()
data = self.read_json_data_file(".coverage")
@@ -687,14 +678,13 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
self.assertNotIn('file_tracers', data)
def test_writing_to_other_file(self):
- data_files = CoverageDataFiles(".otherfile")
- covdata = CoverageData()
+ covdata = CoverageData(".otherfile")
covdata.add_lines(LINES_1)
- data_files.write(covdata)
+ covdata.write()
self.assert_doesnt_exist(".coverage")
self.assert_exists(".otherfile")
- data_files.write(covdata, suffix="extra")
+ covdata.write(suffix="extra")
self.assert_exists(".otherfile.extra")
self.assert_doesnt_exist(".coverage")
@@ -708,14 +698,14 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
covdata1.add_file_tracers({
'/home/ned/proj/src/template.html': 'html.plugin',
})
- self.data_files.write(covdata1, suffix='1')
+ covdata1.write(suffix='1')
covdata2 = CoverageData()
covdata2.add_lines({
r'c:\ned\test\a.py': {4: None, 5: None},
r'c:\ned\test\sub\b.py': {3: None, 6: None},
})
- self.data_files.write(covdata2, suffix='2')
+ covdata2.write(suffix='2')
self.assert_file_count(".coverage.*", 2)
@@ -723,7 +713,7 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
aliases = PathAliases()
aliases.add("/home/ned/proj/src/", "./")
aliases.add(r"c:\ned\test", "./")
- self.data_files.combine_parallel_data(covdata3, aliases=aliases)
+ combine_parallel_data(covdata3, aliases=aliases)
self.assert_file_count(".coverage.*", 0)
# covdata3 hasn't been written yet. Should this file exist or not?
#self.assert_exists(".coverage")
@@ -737,23 +727,23 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
self.assertEqual(covdata3.file_tracer(template_html), 'html.plugin')
def test_combining_from_different_directories(self):
- covdata1 = CoverageData()
+ covdata1 = CoverageData('cov1/.coverage.1')
covdata1.add_lines(LINES_1)
os.makedirs('cov1')
- covdata1.write_file('cov1/.coverage.1')
+ covdata1.write()
- covdata2 = CoverageData()
+ covdata2 = CoverageData('cov2/.coverage.2')
covdata2.add_lines(LINES_2)
os.makedirs('cov2')
- covdata2.write_file('cov2/.coverage.2')
+ covdata2.write()
# This data won't be included.
- covdata_xxx = CoverageData()
+ covdata_xxx = CoverageData('.coverage.xxx')
covdata_xxx.add_arcs(ARCS_3)
- covdata_xxx.write_file('.coverage.xxx')
+ covdata_xxx.write()
covdata3 = CoverageData()
- self.data_files.combine_parallel_data(covdata3, data_paths=['cov1', 'cov2'])
+ combine_parallel_data(covdata3, data_paths=['cov1', 'cov2'])
self.assert_line_counts(covdata3, SUMMARY_1_2)
self.assert_measured_files(covdata3, MEASURED_FILES_1_2)
@@ -762,24 +752,27 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
self.assert_exists(".coverage.xxx")
def test_combining_from_files(self):
- covdata1 = CoverageData()
+ covdata1 = CoverageData('cov1/.coverage.1')
covdata1.add_lines(LINES_1)
os.makedirs('cov1')
- covdata1.write_file('cov1/.coverage.1')
+ covdata1.write()
- covdata2 = CoverageData()
+ covdata2 = CoverageData('cov2/.coverage.2')
covdata2.add_lines(LINES_2)
os.makedirs('cov2')
- covdata2.write_file('cov2/.coverage.2')
+ covdata2.write()
# This data won't be included.
- covdata_xxx = CoverageData()
+ covdata_xxx = CoverageData('.coverage.xxx')
covdata_xxx.add_arcs(ARCS_3)
- covdata_xxx.write_file('.coverage.xxx')
- covdata_xxx.write_file('cov2/.coverage.xxx')
+ covdata_xxx.write()
+
+ covdata_2xxx = CoverageData('cov2/.coverage.xxx')
+ covdata_2xxx.add_arcs(ARCS_3)
+ covdata_2xxx.write()
covdata3 = CoverageData()
- self.data_files.combine_parallel_data(covdata3, data_paths=['cov1', 'cov2/.coverage.2'])
+ combine_parallel_data(covdata3, data_paths=['cov1', 'cov2/.coverage.2'])
self.assert_line_counts(covdata3, SUMMARY_1_2)
self.assert_measured_files(covdata3, MEASURED_FILES_1_2)
@@ -792,4 +785,4 @@ class CoverageDataFilesTest(DataTestHelpers, CoverageTest):
covdata = CoverageData()
msg = "Couldn't combine from non-existent path 'xyzzy'"
with self.assertRaisesRegex(CoverageException, msg):
- self.data_files.combine_parallel_data(covdata, data_paths=['xyzzy'])
+ combine_parallel_data(covdata, data_paths=['xyzzy'])
diff --git a/tests/test_debug.py b/tests/test_debug.py
index 2699ca61..c46e3dae 100644
--- a/tests/test_debug.py
+++ b/tests/test_debug.py
@@ -136,10 +136,10 @@ class DebugTraceTest(CoverageTest):
self.assertEqual(len(real_messages), len(frames))
# The last message should be "Writing data", and the last frame should
- # be write_file in data.py.
+ # be _write_file in data.py.
self.assertRegex(real_messages[-1], r"^\s*\d+\.\w{4}: Writing data")
last_line = out_lines.splitlines()[-1]
- self.assertRegex(last_line, r"\s+write_file : .*coverage[/\\]data.py @\d+$")
+ self.assertRegex(last_line, r"\s+_write_file : .*coverage[/\\]data.py @\d+$")
def test_debug_config(self):
out_lines = self.f1_debug_output(["config"])
diff --git a/tests/test_process.py b/tests/test_process.py
index 1d277149..ede86691 100644
--- a/tests/test_process.py
+++ b/tests/test_process.py
@@ -90,7 +90,7 @@ class ProcessTest(CoverageTest):
# Read the coverage file and see that b_or_c.py has all 8 lines
# executed.
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
self.assertEqual(data.line_counts()['b_or_c.py'], 8)
# Running combine again should fail, because there are no parallel data
@@ -101,7 +101,7 @@ class ProcessTest(CoverageTest):
# And the originally combined data is still there.
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
self.assertEqual(data.line_counts()['b_or_c.py'], 8)
def test_combine_parallel_data_with_a_corrupt_file(self):
@@ -137,7 +137,7 @@ class ProcessTest(CoverageTest):
# Read the coverage file and see that b_or_c.py has all 8 lines
# executed.
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
self.assertEqual(data.line_counts()['b_or_c.py'], 8)
def test_combine_no_usable_files(self):
@@ -172,7 +172,7 @@ class ProcessTest(CoverageTest):
# Read the coverage file and see that b_or_c.py has 6 lines
# executed (we only did b, not c).
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
self.assertEqual(data.line_counts()['b_or_c.py'], 6)
def test_combine_parallel_data_in_two_steps(self):
@@ -203,7 +203,7 @@ class ProcessTest(CoverageTest):
# Read the coverage file and see that b_or_c.py has all 8 lines
# executed.
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
self.assertEqual(data.line_counts()['b_or_c.py'], 8)
def test_combine_parallel_data_no_append(self):
@@ -235,7 +235,7 @@ class ProcessTest(CoverageTest):
# Read the coverage file and see that b_or_c.py has only 7 lines
# because we didn't keep the data from running b.
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
self.assertEqual(data.line_counts()['b_or_c.py'], 7)
def test_append_data(self):
@@ -254,7 +254,7 @@ class ProcessTest(CoverageTest):
# Read the coverage file and see that b_or_c.py has all 8 lines
# executed.
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
self.assertEqual(data.line_counts()['b_or_c.py'], 8)
def test_append_data_with_different_file(self):
@@ -277,8 +277,8 @@ class ProcessTest(CoverageTest):
# Read the coverage file and see that b_or_c.py has all 8 lines
# executed.
- data = coverage.CoverageData()
- data.read_file(".mycovdata")
+ data = coverage.CoverageData(".mycovdata")
+ data.read()
self.assertEqual(data.line_counts()['b_or_c.py'], 8)
def test_append_can_create_a_data_file(self):
@@ -292,7 +292,7 @@ class ProcessTest(CoverageTest):
# Read the coverage file and see that b_or_c.py has only 6 lines
# executed.
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
self.assertEqual(data.line_counts()['b_or_c.py'], 6)
def test_combine_with_rc(self):
@@ -325,7 +325,7 @@ class ProcessTest(CoverageTest):
# Read the coverage file and see that b_or_c.py has all 8 lines
# executed.
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
self.assertEqual(data.line_counts()['b_or_c.py'], 8)
# Reporting should still work even with the .rc file
@@ -379,7 +379,7 @@ class ProcessTest(CoverageTest):
# Read the coverage data file and see that the two different x.py
# files have been combined together.
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
summary = data.line_counts(fullpath=True)
self.assertEqual(len(summary), 1)
actual = os.path.normcase(os.path.abspath(list(summary.keys())[0]))
@@ -543,7 +543,7 @@ class ProcessTest(CoverageTest):
self.assert_file_count(".coverage.*", 0)
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
self.assertEqual(data.line_counts()['fork.py'], 9)
def test_warnings_during_reporting(self):
@@ -649,8 +649,8 @@ class ProcessTest(CoverageTest):
self.make_file("simple.py", """print('hello')""")
self.run_command("coverage run simple.py")
- data = CoverageData()
- data.read_file("mydata.dat")
+ data = CoverageData("mydata.dat")
+ data.read()
infos = data.run_infos()
self.assertEqual(len(infos), 1)
expected = u"These are musical notes: ♫𝅗𝅥♩"
@@ -680,7 +680,7 @@ class ProcessTest(CoverageTest):
out = self.run_command("python -m coverage run -L getenv.py")
self.assertEqual(out, "FOOEY == BOO\n")
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
# The actual number of executed lines in os.py when it's
# imported is 120 or so. Just running os.getenv executes
# about 5.
@@ -910,7 +910,7 @@ class ExcepthookTest(CoverageTest):
# Read the coverage file and see that excepthook.py has 7 lines
# executed.
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
self.assertEqual(data.line_counts()['excepthook.py'], 7)
def test_excepthook_exit(self):
@@ -1239,9 +1239,9 @@ class ProcessStartupTest(ProcessCoverageMixin, CoverageTest):
# An existing data file should not be read when a subprocess gets
# measured automatically. Create the data file here with bogus data in
# it.
- data = coverage.CoverageData()
+ data = coverage.CoverageData(".mycovdata")
data.add_lines({os.path.abspath('sub.py'): dict.fromkeys(range(100))})
- data.write_file(".mycovdata")
+ data.write()
self.make_file("coverage.ini", """\
[run]
@@ -1255,8 +1255,8 @@ class ProcessStartupTest(ProcessCoverageMixin, CoverageTest):
# Read the data from .coverage
self.assert_exists(".mycovdata")
- data = coverage.CoverageData()
- data.read_file(".mycovdata")
+ data = coverage.CoverageData(".mycovdata")
+ data.read()
self.assertEqual(data.line_counts()['sub.py'], 3)
def test_subprocess_with_pth_files_and_parallel(self): # pragma: no metacov
@@ -1280,7 +1280,7 @@ class ProcessStartupTest(ProcessCoverageMixin, CoverageTest):
# assert that the combined .coverage data file is correct
self.assert_exists(".coverage")
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
self.assertEqual(data.line_counts()['sub.py'], 3)
# assert that there are *no* extra data files left over after a combine
@@ -1370,7 +1370,7 @@ class ProcessStartupWithSourceTest(ProcessCoverageMixin, CoverageTest):
# Read the data from .coverage
self.assert_exists(".coverage")
data = coverage.CoverageData()
- data.read_file(".coverage")
+ data.read()
summary = data.line_counts()
print(summary)
self.assertEqual(summary[source + '.py'], 3)
diff --git a/tests/test_summary.py b/tests/test_summary.py
index b2895370..980fd3d4 100644
--- a/tests/test_summary.py
+++ b/tests/test_summary.py
@@ -161,7 +161,7 @@ class SummaryTest(UsingModulesMixin, CoverageTest):
# Read the data written, to see that the right files have been omitted from running.
covdata = CoverageData()
- covdata.read_file(".coverage")
+ covdata.read()
files = [os.path.basename(p) for p in covdata.measured_files()]
self.assertIn("covmod1.py", files)
self.assertNotIn("covmodzip1.py", files)