-rw-r--r--   coverage/codeunit.py    |  72
-rw-r--r--   coverage/collector.py   |  61
-rw-r--r--   coverage/config.py      |  21
-rw-r--r--   coverage/control.py     |  85
-rw-r--r--   coverage/data.py        |  19
-rw-r--r--   coverage/django.py      |  61
-rw-r--r--   coverage/extension.py   |  20
-rw-r--r--   coverage/parser.py      |   2
-rw-r--r--   coverage/report.py      |   3
-rw-r--r--   coverage/tracer.c       |  24
-rw-r--r--   doc/config.rst          |   2
11 files changed, 292 insertions(+), 78 deletions(-)
diff --git a/coverage/codeunit.py b/coverage/codeunit.py
index 9282687d..35167a72 100644
--- a/coverage/codeunit.py
+++ b/coverage/codeunit.py
@@ -7,14 +7,18 @@ from coverage.misc import CoverageException, NoSource
from coverage.parser import CodeParser, PythonParser
from coverage.phystokens import source_token_lines, source_encoding
+from coverage.django import DjangoTracer
-def code_unit_factory(morfs, file_locator):
+
+def code_unit_factory(morfs, file_locator, get_ext=None):
"""Construct a list of CodeUnits from polymorphic inputs.
`morfs` is a module or a filename, or a list of same.
`file_locator` is a FileLocator that can help resolve filenames.
+ `get_ext` is a function that maps a filename to the name of the extension that measured it, or None.
+
Returns a list of CodeUnit objects.
"""
@@ -22,19 +26,28 @@ def code_unit_factory(morfs, file_locator):
if not isinstance(morfs, (list, tuple)):
morfs = [morfs]
+ django_tracer = DjangoTracer()
+
code_units = []
for morf in morfs:
- # Hacked-in Mako support. Define COVERAGE_MAKO_PATH as a fragment of
- # the path that indicates the Python file is actually a compiled Mako
- # template. THIS IS TEMPORARY!
- MAKO_PATH = os.environ.get('COVERAGE_MAKO_PATH')
- if MAKO_PATH and isinstance(morf, string_class) and MAKO_PATH in morf:
- # Super hack! Do mako both ways!
- if 0:
- cu = PythonCodeUnit(morf, file_locator)
- cu.name += '_fako'
- code_units.append(cu)
- klass = MakoCodeUnit
+ ext = None
+ if isinstance(morf, string_class) and get_ext:
+ ext = get_ext(morf)
+ if ext:
+ klass = DjangoTracer # NOT REALLY! TODO
+ # Hacked-in Mako support. Define COVERAGE_MAKO_PATH as a fragment of
+ # the path that indicates the Python file is actually a compiled Mako
+ # template. THIS IS TEMPORARY!
+ #MAKO_PATH = os.environ.get('COVERAGE_MAKO_PATH')
+ #if MAKO_PATH and isinstance(morf, string_class) and MAKO_PATH in morf:
+ # # Super hack! Do mako both ways!
+ # if 0:
+ # cu = PythonCodeUnit(morf, file_locator)
+ # cu.name += '_fako'
+ # code_units.append(cu)
+ # klass = MakoCodeUnit
+ #elif isinstance(morf, string_class) and morf.endswith(".html"):
+ # klass = DjangoCodeUnit
else:
klass = PythonCodeUnit
code_units.append(klass(morf, file_locator))
@@ -134,6 +147,12 @@ class CodeUnit(object):
"No source for code '%s'." % self.filename
)
+ def source_token_lines(self, source):
+ """Return the 'tokenized' text for the code."""
+ # TODO: Taking source here is wrong, change it?
+ for line in source.splitlines():
+ yield [('txt', line)]
+
def should_be_python(self):
"""Does it seem like this file should contain Python?
@@ -258,12 +277,29 @@ class MakoCodeUnit(CodeUnit):
def get_parser(self, exclude=None):
return MakoParser(self.metadata)
- def source_token_lines(self, source):
- """Return the 'tokenized' text for the code."""
- # TODO: Taking source here is wrong, change it?
- for line in source.splitlines():
- yield [('txt', line)]
-
def source_encoding(self, source):
# TODO: Taking source here is wrong, change it!
return self.metadata['source_encoding']
+
+
+class DjangoCodeUnit(CodeUnit):
+ def source(self):
+ with open(self.filename) as f:
+ return f.read()
+
+ def get_parser(self, exclude=None):
+ return DjangoParser(self.filename)
+
+ def source_encoding(self, source):
+ return "utf8"
+
+
+class DjangoParser(CodeParser):
+ def __init__(self, filename):
+ self.filename = filename
+
+ def parse_source(self):
+ with open(self.filename) as f:
+ source = f.read()
+ executable = set(range(1, len(source.splitlines())+1))
+ return executable, set()
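
The new DjangoParser above treats every physical line of a template as executable and excludes nothing. A minimal, self-contained sketch of that behavior (the template text is made up):

    # Every line of the template counts as executable; nothing is excluded.
    source = "{% if user %}\nHello, {{ user.name }}\n{% endif %}\n"
    executable = set(range(1, len(source.splitlines()) + 1))
    print(executable)   # {1, 2, 3}
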
diff --git a/coverage/collector.py b/coverage/collector.py
index 94af5df5..1c530c7a 100644
--- a/coverage/collector.py
+++ b/coverage/collector.py
@@ -41,17 +41,21 @@ class PyTracer(object):
# used to force the use of this tracer.
def __init__(self):
+ # Attributes set from the collector:
self.data = None
+ self.arcs = False
self.should_trace = None
self.should_trace_cache = None
self.warn = None
+ self.extensions = None
+
+ self.extension = None
self.cur_file_data = None
self.last_line = 0
self.data_stack = []
self.data_stacks = collections.defaultdict(list)
self.last_exc_back = None
self.last_exc_firstlineno = 0
- self.arcs = False
self.thread = None
self.stopped = False
self.coroutine_id_func = None
@@ -76,7 +80,7 @@ class PyTracer(object):
self.cur_file_data[pair] = None
if self.coroutine_id_func:
self.data_stack = self.data_stacks[self.coroutine_id_func()]
- self.cur_file_data, self.last_line = self.data_stack.pop()
+ self.extension, self.cur_file_data, self.last_line = self.data_stack.pop()
self.last_exc_back = None
if event == 'call':
@@ -85,19 +89,24 @@ class PyTracer(object):
if self.coroutine_id_func:
self.data_stack = self.data_stacks[self.coroutine_id_func()]
self.last_coroutine = self.coroutine_id_func()
- self.data_stack.append((self.cur_file_data, self.last_line))
+ self.data_stack.append((self.extension, self.cur_file_data, self.last_line))
filename = frame.f_code.co_filename
- if filename not in self.should_trace_cache:
- tracename = self.should_trace(filename, frame)
- self.should_trace_cache[filename] = tracename
- else:
- tracename = self.should_trace_cache[filename]
+ disp = self.should_trace_cache.get(filename)
+ if disp is None:
+ disp = self.should_trace(filename, frame)
+ self.should_trace_cache[filename] = disp
#print("called, stack is %d deep, tracename is %r" % (
# len(self.data_stack), tracename))
+ tracename = disp.filename
+ if tracename and disp.extension:
+ tracename = disp.extension.file_name(frame)
if tracename:
if tracename not in self.data:
self.data[tracename] = {}
+ if disp.extension:
+ self.extensions[tracename] = disp.extension.__name__
self.cur_file_data = self.data[tracename]
+ self.extension = disp.extension
else:
self.cur_file_data = None
# Set the last_line to -1 because the next arc will be entering a
@@ -107,14 +116,20 @@ class PyTracer(object):
# Record an executed line.
#if self.coroutine_id_func:
# assert self.last_coroutine == self.coroutine_id_func()
- if self.cur_file_data is not None:
- if self.arcs:
- #print("lin", self.last_line, frame.f_lineno)
- self.cur_file_data[(self.last_line, frame.f_lineno)] = None
- else:
- #print("lin", frame.f_lineno)
- self.cur_file_data[frame.f_lineno] = None
- self.last_line = frame.f_lineno
+ if self.extension:
+ lineno_from, lineno_to = self.extension.line_number_range(frame)
+ else:
+ lineno_from, lineno_to = frame.f_lineno, frame.f_lineno
+ if lineno_from != -1:
+ if self.cur_file_data is not None:
+ if self.arcs:
+ #print("lin", self.last_line, frame.f_lineno)
+ self.cur_file_data[(self.last_line, lineno_from)] = None
+ else:
+ #print("lin", frame.f_lineno)
+ for lineno in range(lineno_from, lineno_to+1):
+ self.cur_file_data[lineno] = None
+ self.last_line = lineno_to
elif event == 'return':
if self.arcs and self.cur_file_data:
first = frame.f_code.co_firstlineno
@@ -123,7 +138,7 @@ class PyTracer(object):
if self.coroutine_id_func:
self.data_stack = self.data_stacks[self.coroutine_id_func()]
self.last_coroutine = self.coroutine_id_func()
- self.cur_file_data, self.last_line = self.data_stack.pop()
+ self.extension, self.cur_file_data, self.last_line = self.data_stack.pop()
#print("returned, stack is %d deep" % (len(self.data_stack)))
elif event == 'exception':
#print("exc", self.last_line, frame.f_lineno)
@@ -240,6 +255,8 @@ class Collector(object):
# or mapping filenames to dicts with linenumber pairs as keys.
self.data = {}
+ self.extensions = {}
+
# A cache of the results from should_trace, the decision about whether
# to trace execution in a file. A dict of filename to (filename or
# None).
@@ -258,6 +275,8 @@ class Collector(object):
tracer.warn = self.warn
if hasattr(tracer, 'coroutine_id_func'):
tracer.coroutine_id_func = self.coroutine_id_func
+ if hasattr(tracer, 'extensions'):
+ tracer.extensions = self.extensions
fn = tracer.start()
self.tracers.append(tracer)
return fn
@@ -356,10 +375,7 @@ class Collector(object):
# to show line data.
line_data = {}
for f, arcs in self.data.items():
- line_data[f] = ldf = {}
- for l1, _ in list(arcs.keys()):
- if l1:
- ldf[l1] = None
+ line_data[f] = dict((l1, None) for l1, _ in arcs.keys() if l1)
return line_data
else:
return self.data
@@ -377,3 +393,6 @@ class Collector(object):
return self.data
else:
return {}
+
+ def get_extension_data(self):
+ return self.extensions
diff --git a/coverage/config.py b/coverage/config.py
index 60ec3f41..e5e35856 100644
--- a/coverage/config.py
+++ b/coverage/config.py
@@ -21,6 +21,15 @@ class HandyConfigParser(configparser.RawConfigParser):
return configparser.RawConfigParser.read(self, filename, **kwargs)
def get(self, *args, **kwargs):
+ """Get a value, replacing environment variables also.
+
+ The arguments are the same as `RawConfigParser.get`, but in the found
+ value, ``$WORD`` or ``${WORD}`` are replaced by the value of the
+ environment variable ``WORD``.
+
+ Returns the finished value.
+
+ """
v = configparser.RawConfigParser.get(self, *args, **kwargs)
def dollar_replace(m):
"""Called for each $replacement."""
@@ -113,6 +122,7 @@ class CoverageConfig(object):
self.timid = False
self.source = None
self.debug = []
+ self.extensions = []
# Defaults for [report]
self.exclude_list = DEFAULT_EXCLUDE[:]
@@ -144,7 +154,7 @@ class CoverageConfig(object):
if env:
self.timid = ('--timid' in env)
- MUST_BE_LIST = ["omit", "include", "debug"]
+ MUST_BE_LIST = ["omit", "include", "debug", "extensions"]
def from_args(self, **kwargs):
"""Read config values from `kwargs`."""
@@ -176,12 +186,21 @@ class CoverageConfig(object):
self.paths[option] = cp.getlist('paths', option)
CONFIG_FILE_OPTIONS = [
+ # These are *args for set_attr_from_config_option:
+ # (attr, where, type_="")
+ #
+ # attr is the attribute to set on the CoverageConfig object.
+ # where is the section:name to read from the configuration file.
+ # type_ is the optional type to apply, by using .getTYPE to read the
+ # configuration value from the file.
+
# [run]
('branch', 'run:branch', 'boolean'),
('coroutine', 'run:coroutine'),
('cover_pylib', 'run:cover_pylib', 'boolean'),
('data_file', 'run:data_file'),
('debug', 'run:debug', 'list'),
+ ('extensions', 'run:extensions', 'list'),
('include', 'run:include', 'list'),
('omit', 'run:omit', 'list'),
('parallel', 'run:parallel', 'boolean'),
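
The new `extensions` option is read from the `[run]` section as a list, like `omit` and `debug`. A hedged sketch of a configuration file using it; the extension module name is hypothetical, since the patch does not yet ship a documented extension:

    [run]
    branch = true
    extensions =
        mypackage.coverage_tracer
    omit =
        */vendor/*
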
diff --git a/coverage/control.py b/coverage/control.py
index 14f9b80e..19b68ca0 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -9,6 +9,7 @@ from coverage.collector import Collector
from coverage.config import CoverageConfig
from coverage.data import CoverageData
from coverage.debug import DebugControl
+from coverage.extension import load_extensions
from coverage.files import FileLocator, TreeMatcher, FnmatchMatcher
from coverage.files import PathAliases, find_python_files, prep_patterns
from coverage.html import HtmlReporter
@@ -18,6 +19,7 @@ from coverage.results import Analysis, Numbers
from coverage.summary import SummaryReporter
from coverage.xmlreport import XmlReporter
+
# Pypy has some unusual stuff in the "stdlib". Consider those locations
# when deciding where the stdlib is.
try:
@@ -125,6 +127,10 @@ class coverage(object):
# Create and configure the debugging controller.
self.debug = DebugControl(self.config.debug, debug_file or sys.stderr)
+ # Load extensions
+ tracer_classes = load_extensions(self.config.extensions, "tracer")
+ self.tracer_extensions = [cls() for cls in tracer_classes]
+
self.auto_data = auto_data
# _exclude_re is a dict mapping exclusion list names to compiled
@@ -232,32 +238,24 @@ class coverage(object):
This function is called from the trace function. As each new file name
is encountered, this function determines whether it is traced or not.
- Returns a pair of values: the first indicates whether the file should
- be traced: it's a canonicalized filename if it should be traced, None
- if it should not. The second value is a string, the resason for the
- decision.
+ Returns a FileDisposition object.
"""
+ disp = FileDisposition(filename)
+
if not filename:
# Empty string is pretty useless
- return None, "empty string isn't a filename"
+ return disp.nope("empty string isn't a filename")
if filename.startswith('memory:'):
- if 0:
- import dis, sys, StringIO
- _stdout = sys.stdout
- sys.stdout = new_stdout = StringIO.StringIO()
- dis.dis(frame.f_code)
- sys.stdout = _stdout
- return None, new_stdout.getvalue()
- return None, "memory isn't traceable"
+ return disp.nope("memory isn't traceable")
if filename.startswith('<'):
# Lots of non-file execution is represented with artificial
# filenames like "<string>", "<doctest readme.txt[0]>", or
# "<exec_function>". Don't ever trace these executions, since we
# can't do anything with the data later anyway.
- return None, "not a real filename"
+ return disp.nope("not a real filename")
self._check_for_packages()
@@ -277,47 +275,51 @@ class coverage(object):
canonical = self.file_locator.canonical_filename(filename)
+ # Try the extensions, see if they have an opinion about the file.
+ for tracer in self.tracer_extensions:
+ ext_disp = tracer.should_trace(canonical)
+ if ext_disp:
+ ext_disp.extension = tracer
+ return ext_disp
+
# If the user specified source or include, then that's authoritative
# about the outer bound of what to measure and we don't have to apply
# any canned exclusions. If they didn't, then we have to exclude the
# stdlib and coverage.py directories.
if self.source_match:
if not self.source_match.match(canonical):
- return None, "falls outside the --source trees"
+ return disp.nope("falls outside the --source trees")
elif self.include_match:
if not self.include_match.match(canonical):
- return None, "falls outside the --include trees"
+ return disp.nope("falls outside the --include trees")
else:
# If we aren't supposed to trace installed code, then check if this
# is near the Python standard library and skip it if so.
if self.pylib_match and self.pylib_match.match(canonical):
- return None, "is in the stdlib"
+ return disp.nope("is in the stdlib")
# We exclude the coverage code itself, since a little of it will be
# measured otherwise.
if self.cover_match and self.cover_match.match(canonical):
- return None, "is part of coverage.py"
+ return disp.nope("is part of coverage.py")
# Check the file against the omit pattern.
if self.omit_match and self.omit_match.match(canonical):
- return None, "is inside an --omit pattern"
+ return disp.nope("is inside an --omit pattern")
- return canonical, "because we love you"
+ disp.filename = canonical
+ return disp
def _should_trace(self, filename, frame):
"""Decide whether to trace execution in `filename`.
- Calls `_should_trace_with_reason`, and returns just the decision.
+ Calls `_should_trace_with_reason`, and returns the FileDisposition.
"""
- canonical, reason = self._should_trace_with_reason(filename, frame)
+ disp = self._should_trace_with_reason(filename, frame)
if self.debug.should('trace'):
- if not canonical:
- msg = "Not tracing %r: %s" % (filename, reason)
- else:
- msg = "Tracing %r" % (filename,)
- self.debug.write(msg)
- return canonical
+ self.debug.write(disp.debug_message())
+ return disp
def _warn(self, msg):
"""Use `msg` as a warning."""
@@ -535,8 +537,10 @@ class coverage(object):
if not self._measured:
return
+ # TODO: seems like this parallel structure is getting kinda old...
self.data.add_line_data(self.collector.get_line_data())
self.data.add_arc_data(self.collector.get_arc_data())
+ self.data.add_extension_data(self.collector.get_extension_data())
self.collector.reset()
# If there are still entries in the source_pkgs list, then we never
@@ -604,7 +608,8 @@ class coverage(object):
"""
self._harvest_data()
if not isinstance(it, CodeUnit):
- it = code_unit_factory(it, self.file_locator)[0]
+ get_ext = self.data.extension_data().get
+ it = code_unit_factory(it, self.file_locator, get_ext)[0]
return Analysis(self, it)
@@ -770,6 +775,28 @@ class coverage(object):
return info
+class FileDisposition(object):
+ """A simple object for noting a number of details of files to trace."""
+ def __init__(self, original_filename):
+ self.original_filename = original_filename
+ self.filename = None
+ self.reason = ""
+ self.extension = None
+
+ def nope(self, reason):
+ """A helper for returning a NO answer from should_trace."""
+ self.reason = reason
+ return self
+
+ def debug_message(self):
+ """Produce a debugging message explaining the outcome."""
+ if not self.filename:
+ msg = "Not tracing %r: %s" % (self.original_filename, self.reason)
+ else:
+ msg = "Tracing %r" % (self.original_filename,)
+ return msg
+
+
def process_startup():
"""Call this at Python startup to perhaps measure coverage.
diff --git a/coverage/data.py b/coverage/data.py
index 042b6405..b78c931d 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -21,6 +21,11 @@ class CoverageData(object):
* arcs: a dict mapping filenames to sorted lists of line number pairs:
{ 'file1': [(17,23), (17,25), (25,26)], ... }
+ * extensions: a dict mapping filenames to extension names:
+ { 'file1': "django.coverage", ... }
+ # TODO: how to handle the difference between an extension module
+ # name, and the class in the module?
+
"""
def __init__(self, basename=None, collector=None, debug=None):
@@ -64,6 +69,14 @@ class CoverageData(object):
#
self.arcs = {}
+ # A map from canonical source file name to an extension module name:
+ #
+ # {
+ # 'filename1.py': 'django.coverage',
+ # ...
+ # }
+ self.extensions = {}
+
def usefile(self, use_file=True):
"""Set whether or not to use a disk file for data."""
self.use_file = use_file
@@ -110,6 +123,9 @@ class CoverageData(object):
(f, sorted(amap.keys())) for f, amap in iitems(self.arcs)
)
+ def extension_data(self):
+ return self.extensions
+
def write_file(self, filename):
"""Write the coverage data to `filename`."""
@@ -213,6 +229,9 @@ class CoverageData(object):
for filename, arcs in iitems(arc_data):
self.arcs.setdefault(filename, {}).update(arcs)
+ def add_extension_data(self, extension_data):
+ self.extensions.update(extension_data)
+
def touch_file(self, filename):
"""Ensure that `filename` appears in the data, empty if needed."""
self.lines.setdefault(filename, {})
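
The extension map follows the same add/read pattern as line and arc data. A quick sketch of the round trip that feeds `get_ext` in control.py and report.py, assuming the patched CoverageData (the filenames are invented; the extension name mirrors the docstring example above):

    from coverage.data import CoverageData

    data = CoverageData()
    data.usefile(False)     # keep everything in memory for this sketch
    data.add_extension_data({"templates/home.html": "django.coverage"})
    get_ext = data.extension_data().get
    print(get_ext("templates/home.html"))   # django.coverage
    print(get_ext("myapp/views.py"))        # None
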
diff --git a/coverage/django.py b/coverage/django.py
new file mode 100644
index 00000000..00f2ed54
--- /dev/null
+++ b/coverage/django.py
@@ -0,0 +1,61 @@
+import sys
+
+
+ALL_TEMPLATE_MAP = {}
+
+def get_line_map(filename):
+ if filename not in ALL_TEMPLATE_MAP:
+ with open(filename) as template_file:
+ template_source = template_file.read()
+ line_lengths = [len(l) for l in template_source.splitlines(True)]
+ ALL_TEMPLATE_MAP[filename] = list(running_sum(line_lengths))
+ return ALL_TEMPLATE_MAP[filename]
+
+def get_line_number(line_map, offset):
+ for lineno, line_offset in enumerate(line_map, start=1):
+ if line_offset >= offset:
+ return lineno
+ return -1
+
+class DjangoTracer(object):
+ def should_trace(self, canonical):
+ return "/django/template/" in canonical
+
+ def source(self, frame):
+ if frame.f_code.co_name != 'render':
+ return None
+ that = frame.f_locals['self']
+ return getattr(that, "source", None)
+
+ def file_name(self, frame):
+ source = self.source(frame)
+ if not source:
+ return None
+ return source[0].name.encode(sys.getfilesystemencoding())
+
+ def line_number_range(self, frame):
+ source = self.source(frame)
+ if not source:
+ return -1, -1
+ filename = source[0].name
+ line_map = get_line_map(filename)
+ start = get_line_number(line_map, source[1][0])
+ end = get_line_number(line_map, source[1][1])
+ if start < 0 or end < 0:
+ return -1, -1
+ return start, end
+
+def running_sum(seq):
+ total = 0
+ for num in seq:
+ total += num
+ yield total
+
+def ppp(obj):
+ ret = []
+ import inspect
+ for name, value in inspect.getmembers(obj):
+ if not callable(value):
+ ret.append("%s=%r" % (name, value))
+ attrs = ", ".join(ret)
+ return "%s: %s" % (obj.__class__, attrs)
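
get_line_map and get_line_number together map a character offset in a template back to a 1-based line number: running_sum turns per-line lengths into cumulative end offsets, and get_line_number returns the first line whose end offset reaches the requested offset. A worked example with made-up template text:

    template_source = "{% if user %}\nHello\n{% endif %}\n"
    line_lengths = [len(l) for l in template_source.splitlines(True)]   # [14, 6, 12]

    line_map = []        # cumulative offsets, as running_sum() would yield them
    total = 0
    for length in line_lengths:
        total += length
        line_map.append(total)              # [14, 20, 32]

    def get_line_number(line_map, offset):
        for lineno, line_offset in enumerate(line_map, start=1):
            if line_offset >= offset:
                return lineno
        return -1

    print(get_line_number(line_map, 0))     # 1
    print(get_line_number(line_map, 15))    # 2, falls inside "Hello\n"
    print(get_line_number(line_map, 99))    # -1, past the end of the template
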
diff --git a/coverage/extension.py b/coverage/extension.py
new file mode 100644
index 00000000..8c89b88e
--- /dev/null
+++ b/coverage/extension.py
@@ -0,0 +1,20 @@
+"""Extension management for coverage.py"""
+
+def load_extensions(modules, name):
+ """Load extensions from `modules`, finding them by `name`.
+
+ Yields the loaded extensions.
+
+ """
+
+ for module in modules:
+ try:
+ __import__(module)
+ mod = sys.modules[module]
+ except ImportError:
+ blah()
+ continue
+
+ entry = getattr(mod, name, None)
+ if entry:
+ yield entry
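
As written, load_extensions uses `sys` without importing it and calls an undefined `blah()` when an import fails. A corrected, self-contained sketch of what the function appears to intend; skipping unimportable modules is an assumption based on the `continue` that follows the placeholder:

    import sys

    def load_extensions(modules, name):
        """Load extensions from `modules`, finding them by `name`.

        Yields the loaded extensions.

        """
        for module in modules:
            try:
                __import__(module)
                mod = sys.modules[module]
            except ImportError:
                # Assumed behavior: skip modules that cannot be imported.
                continue

            entry = getattr(mod, name, None)
            if entry:
                yield entry
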
diff --git a/coverage/parser.py b/coverage/parser.py
index 5bb15466..c5e95baa 100644
--- a/coverage/parser.py
+++ b/coverage/parser.py
@@ -15,7 +15,7 @@ class CodeParser(object):
Base class for any code parser.
"""
def translate_lines(self, lines):
- return lines
+ return set(lines)
def translate_arcs(self, arcs):
return arcs
diff --git a/coverage/report.py b/coverage/report.py
index 03e9122c..7627d1aa 100644
--- a/coverage/report.py
+++ b/coverage/report.py
@@ -33,7 +33,8 @@ class Reporter(object):
"""
morfs = morfs or self.coverage.data.measured_files()
file_locator = self.coverage.file_locator
- self.code_units = code_unit_factory(morfs, file_locator)
+ get_ext = self.coverage.data.extension_data().get
+ self.code_units = code_unit_factory(morfs, file_locator, get_ext)
if self.config.include:
patterns = prep_patterns(self.config.include)
diff --git a/coverage/tracer.c b/coverage/tracer.c
index 97dd113b..ca8d61c1 100644
--- a/coverage/tracer.c
+++ b/coverage/tracer.c
@@ -259,6 +259,7 @@ CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unuse
int ret = RET_OK;
PyObject * filename = NULL;
PyObject * tracename = NULL;
+ PyObject * disposition = NULL;
#if WHAT_LOG || TRACE_LOG
PyObject * ascii = NULL;
#endif
@@ -335,41 +336,51 @@ CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unuse
/* Check if we should trace this line. */
filename = frame->f_code->co_filename;
- tracename = PyDict_GetItem(self->should_trace_cache, filename);
- if (tracename == NULL) {
+ disposition = PyDict_GetItem(self->should_trace_cache, filename);
+ if (disposition == NULL) {
STATS( self->stats.new_files++; )
/* We've never considered this file before. */
/* Ask should_trace about it. */
PyObject * args = Py_BuildValue("(OO)", filename, frame);
- tracename = PyObject_Call(self->should_trace, args, NULL);
+ disposition = PyObject_Call(self->should_trace, args, NULL);
Py_DECREF(args);
- if (tracename == NULL) {
+ if (disposition == NULL) {
/* An error occurred inside should_trace. */
STATS( self->stats.errors++; )
return RET_ERROR;
}
- if (PyDict_SetItem(self->should_trace_cache, filename, tracename) < 0) {
+ if (PyDict_SetItem(self->should_trace_cache, filename, disposition) < 0) {
STATS( self->stats.errors++; )
return RET_ERROR;
}
}
else {
- Py_INCREF(tracename);
+ Py_INCREF(disposition);
}
/* If tracename is a string, then we're supposed to trace. */
+ tracename = PyObject_GetAttrString(disposition, "filename");
+ if (tracename == NULL) {
+ STATS( self->stats.errors++; )
+ Py_DECREF(disposition);
+ return RET_ERROR;
+ }
if (MyText_Check(tracename)) {
PyObject * file_data = PyDict_GetItem(self->data, tracename);
if (file_data == NULL) {
file_data = PyDict_New();
if (file_data == NULL) {
STATS( self->stats.errors++; )
+ Py_DECREF(tracename);
+ Py_DECREF(disposition);
return RET_ERROR;
}
ret = PyDict_SetItem(self->data, tracename, file_data);
Py_DECREF(file_data);
if (ret < 0) {
STATS( self->stats.errors++; )
+ Py_DECREF(tracename);
+ Py_DECREF(disposition);
return RET_ERROR;
}
}
@@ -385,6 +396,7 @@ CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unuse
}
Py_DECREF(tracename);
+ Py_DECREF(disposition);
self->last_line = -1;
break;
diff --git a/doc/config.rst b/doc/config.rst
index 7ff82021..882fc777 100644
--- a/doc/config.rst
+++ b/doc/config.rst
@@ -41,7 +41,7 @@ Boolean values can be specified as ``on``, ``off``, ``true``, ``false``, ``1``,
or ``0`` and are case-insensitive.
Environment variables can be substituted in by using dollar signs: ``$WORD``
-``${WORD}`` will be replaced with the value of ``WORD`` in the environment.
+or ``${WORD}`` will be replaced with the value of ``WORD`` in the environment.
A dollar sign can be inserted with ``$$``. Missing environment variables
will result in empty strings with no error.
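
For example, a made-up configuration relying on the substitution described above, with a hypothetical BUILD_DIR set in the environment (only options that appear elsewhere in this change are used):

    [run]
    data_file = ${BUILD_DIR}/.coverage
    omit =
        $BUILD_DIR/vendor/*
    # A literal dollar sign would be written as $$.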