-rw-r--r--  coverage/parser.py     230
-rw-r--r--  coverage/python.py       4
-rw-r--r--  coverage/results.py      5
-rw-r--r--  lab/parser.py           25
-rw-r--r--  tests/coveragetest.py    1
-rw-r--r--  tests/test_arcs.py     178
6 files changed, 431 insertions, 12 deletions
diff --git a/coverage/parser.py b/coverage/parser.py
index 7b8a60f..fb2cf95 100644
--- a/coverage/parser.py
+++ b/coverage/parser.py
@@ -3,6 +3,7 @@
"""Code parsing for coverage.py."""
+import ast
import collections
import dis
import re
@@ -260,6 +261,18 @@ class PythonParser(object):
                    self._all_arcs.add((fl1, fl2))
        return self._all_arcs
+    def ast_arcs(self):
+        aaa = AstArcAnalyzer(self.text)
+        arcs = aaa.collect_arcs()
+
+        arcs_ = set()
+        for l1, l2 in arcs:
+            fl1 = self.first_line(l1)
+            fl2 = self.first_line(l2)
+            if fl1 != fl2:
+                arcs_.add((fl1, fl2))
+        return arcs_
+
    def exit_counts(self):
        """Get a count of exits from each line.
@@ -288,6 +301,168 @@ class PythonParser(object):
        return exit_counts
+class AstArcAnalyzer(object):
+    def __init__(self, text):
+        self.root_node = ast.parse(text)
+        ast_dump(self.root_node)
+
+        self.arcs = None
+        # References to the nearest enclosing thing of its kind.
+        self.function_start = None
+        self.loop_start = None
+
+        # Break-exits from a loop
+        self.break_exits = None
+
+    def line_for_node(self, node):
+        """What is the right line number to use for this node?"""
+        node_name = node.__class__.__name__
+        if node_name == "Assign":
+            return node.value.lineno
+        elif node_name == "comprehension":
+            # TODO: is this how to get the line number for a comprehension?
+            return node.target.lineno
+        else:
+            return node.lineno
+
+    def collect_arcs(self):
+        self.arcs = set()
+        self.add_arcs_for_code_objects(self.root_node)
+        return self.arcs
+
+    def add_arcs(self, node):
+        """Add the arcs for `node`.
+
+        Returns a set of line numbers: the exits from this node to the next.
+        """
+        node_name = node.__class__.__name__
+        #print("Adding arcs for {}".format(node_name))
+
+        handler = getattr(self, "handle_" + node_name, self.handle_default)
+        return handler(node)
+
+    def add_body_arcs(self, body, from_line):
+        prev_lines = set([from_line])
+        for body_node in body:
+            lineno = self.line_for_node(body_node)
+            for prev_lineno in prev_lines:
+                self.arcs.add((prev_lineno, lineno))
+            prev_lines = self.add_arcs(body_node)
+        return prev_lines
+
+    def is_constant_expr(self, node):
+        """Is this a compile-time constant?"""
+        node_name = node.__class__.__name__
+        return node_name in ["NameConstant", "Num"]
+
+    # tests to write:
+    # TODO: while EXPR:
+    # TODO: while False:
+    # TODO: multi-target assignment with computed targets
+    # TODO: listcomps hidden deep in other expressions
+    # TODO: listcomps hidden in lists: x = [[i for i in range(10)]]
+    # TODO: multi-line listcomps
+    # TODO: nested function definitions
+
+    def handle_Break(self, node):
+        here = self.line_for_node(node)
+        # TODO: what if self.break_exits is None?
+        self.break_exits.add(here)
+        return set([])
+
+    def handle_Continue(self, node):
+        here = self.line_for_node(node)
+        # TODO: what if self.loop_start is None?
+        self.arcs.add((here, self.loop_start))
+        return set([])
+
+    def handle_For(self, node):
+        start = self.line_for_node(node.iter)
+        loop_state = self.loop_start, self.break_exits
+        self.loop_start = start
+        self.break_exits = set()
+        exits = self.add_body_arcs(node.body, from_line=start)
+        for exit in exits:
+            self.arcs.add((exit, start))
+        exits = self.break_exits
+        self.loop_start, self.break_exits = loop_state
+        if node.orelse:
+            else_start = self.line_for_node(node.orelse[0])
+            self.arcs.add((start, else_start))
+            else_exits = self.add_body_arcs(node.orelse, from_line=start)
+            exits |= else_exits
+        else:
+            # no else clause: exit from the for line.
+            exits.add(start)
+        return exits
+
+    def handle_FunctionDef(self, node):
+        start = self.line_for_node(node)
+        # the body is handled in add_arcs_for_code_objects.
+        exits = set([start])
+        return exits
+
+    def handle_If(self, node):
+        start = self.line_for_node(node.test)
+        exits = self.add_body_arcs(node.body, from_line=start)
+        exits |= self.add_body_arcs(node.orelse, from_line=start)
+        return exits
+
+    def handle_Module(self, node):
+        raise Exception("TODO: this shouldn't happen")
+
+    def handle_Return(self, node):
+        here = self.line_for_node(node)
+        # TODO: what if self.function_start is None?
+        self.arcs.add((here, -self.function_start))
+        return set([])
+
+    def handle_While(self, node):
+        constant_test = self.is_constant_expr(node.test)
+        start = to_top = self.line_for_node(node.test)
+        if constant_test:
+            to_top = self.line_for_node(node.body[0])
+        loop_state = self.loop_start, self.break_exits
+        self.loop_start = start
+        self.break_exits = set()
+        exits = self.add_body_arcs(node.body, from_line=start)
+        for exit in exits:
+            self.arcs.add((exit, to_top))
+        exits = self.break_exits
+        self.loop_start, self.break_exits = loop_state
+        # TODO: orelse
+        return exits
+
+    def handle_default(self, node):
+        node_name = node.__class__.__name__
+        if node_name not in ["Assign", "Assert", "AugAssign", "Expr"]:
+            print("*** Unhandled: {}".format(node))
+        return set([self.line_for_node(node)])
+
+    def add_arcs_for_code_objects(self, root_node):
+        for node in ast.walk(root_node):
+            node_name = node.__class__.__name__
+            if node_name == "Module":
+                start = self.line_for_node(node.body[0])
+                exits = self.add_body_arcs(node.body, from_line=-1)
+                for exit in exits:
+                    self.arcs.add((exit, -start))
+            elif node_name == "FunctionDef":
+                start = self.line_for_node(node)
+                self.function_start = start
+                func_exits = self.add_body_arcs(node.body, from_line=-1)
+                for exit in func_exits:
+                    self.arcs.add((exit, -start))
+                self.function_start = None
+            elif node_name == "comprehension":
+                start = self.line_for_node(node)
+                self.arcs.add((-1, start))
+                self.arcs.add((start, -start))
+                # TODO: guaranteed this won't work for multi-line comps.
+
+
+
+
## Opcodes that guide the ByteParser.
def _opcode(name):
@@ -321,7 +496,7 @@ OPS_CHUNK_BEGIN = _opcode_set('JUMP_ABSOLUTE', 'JUMP_FORWARD')
# Opcodes that push a block on the block stack.
OPS_PUSH_BLOCK = _opcode_set(
-    'SETUP_LOOP', 'SETUP_EXCEPT', 'SETUP_FINALLY', 'SETUP_WITH'
+    'SETUP_LOOP', 'SETUP_EXCEPT', 'SETUP_FINALLY', 'SETUP_WITH', 'SETUP_ASYNC_WITH',
)
# Block types for exception handling.
@@ -330,6 +505,8 @@ OPS_EXCEPT_BLOCKS = _opcode_set('SETUP_EXCEPT', 'SETUP_FINALLY')
# Opcodes that pop a block from the block stack.
OPS_POP_BLOCK = _opcode_set('POP_BLOCK')
+OPS_GET_AITER = _opcode_set('GET_AITER')
+
# Opcodes that have a jump destination, but aren't really a jump.
OPS_NO_JUMP = OPS_PUSH_BLOCK
@@ -449,6 +626,8 @@ class ByteParser(object):
        # is a count of how many ignores are left.
        ignore_branch = 0
+        ignore_pop_block = 0
+
        # We have to handle the last two bytecodes specially.
        ult = penult = None
@@ -507,7 +686,10 @@ class ByteParser(object):
                    block_stack.append((bc.op, bc.jump_to))
                if bc.op in OPS_POP_BLOCK:
                    # The opcode pops a block from the block stack.
-                    block_stack.pop()
+                    if ignore_pop_block:
+                        ignore_pop_block -= 1
+                    else:
+                        block_stack.pop()
                if bc.op in OPS_CHUNK_END:
                    # This opcode forces the end of the chunk.
                    if bc.op == OP_BREAK_LOOP:
@@ -527,6 +709,15 @@ class ByteParser(object):
                    # branch, so that except's don't count as branches.
                    ignore_branch += 1
+                if bc.op in OPS_GET_AITER:
+                    # GET_AITER is weird: First, it seems to generate one more
+                    # POP_BLOCK than SETUP_*, so we have to prepare to ignore one
+                    # of the POP_BLOCKS. Second, we don't have a clear branch to
+                    # the exit of the loop, so we peek into the block stack to find
+                    # it.
+                    ignore_pop_block += 1
+                    chunk.exits.add(block_stack[-1][1])
+
                penult = ult
                ult = bc
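
The GET_AITER oddity described in the comment above is easiest to check by eye. A small standalone sketch for dumping the bytecode of an `async for` loop (Python 3.5+ assumed; the function name and snippet are illustrative, and the opcode stream differs across CPython versions):

    # Hedged sketch: disassemble an "async for" to look at the SETUP_LOOP /
    # GET_AITER / POP_BLOCK interplay the comment above is about.
    import dis

    src = (
        "async def f(xs):\n"
        "    async for x in xs:\n"
        "        print(x)\n"
    )
    module_code = compile(src, "<async-for>", "exec")
    for const in module_code.co_consts:
        if hasattr(const, "co_code"):   # the code object compiled for f()
            dis.dis(const)
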
@@ -686,3 +877,38 @@ class Chunk(object):
"v" if self.entrance else "",
list(self.exits),
)
+
+
+SKIP_FIELDS = ["ctx"]
+
+def ast_dump(node, depth=0):
+    indent = " " * depth
+    lineno = getattr(node, "lineno", None)
+    if lineno is not None:
+        linemark = " @ {0}".format(lineno)
+    else:
+        linemark = ""
+    print("{0}<{1}{2}".format(indent, node.__class__.__name__, linemark))
+
+    indent += " "
+    for field_name, value in ast.iter_fields(node):
+        if field_name in SKIP_FIELDS:
+            continue
+        prefix = "{0}{1}:".format(indent, field_name)
+        if value is None:
+            print("{0} None".format(prefix))
+        elif isinstance(value, (str, int)):
+            print("{0} {1!r}".format(prefix, value))
+        elif isinstance(value, list):
+            if value == []:
+                print("{0} []".format(prefix))
+            else:
+                print("{0} [".format(prefix))
+                for n in value:
+                    ast_dump(n, depth + 8)
+                print("{0}]".format(indent))
+        else:
+            print(prefix)
+            ast_dump(value, depth + 8)
+
+    print("{0}>".format(" " * depth))
diff --git a/coverage/python.py b/coverage/python.py
index 5e56382..bf19cb2 100644
--- a/coverage/python.py
+++ b/coverage/python.py
@@ -160,6 +160,10 @@ class PythonFileReporter(FileReporter):
        return self.parser.arcs()
    @expensive
+    def ast_arcs(self):
+        return self.parser.ast_arcs()
+
+    @expensive
    def exit_counts(self):
        return self.parser.exit_counts()
diff --git a/coverage/results.py b/coverage/results.py
index 9627373..b80d504 100644
--- a/coverage/results.py
+++ b/coverage/results.py
@@ -26,6 +26,7 @@ class Analysis(object):
        if self.data.has_arcs():
            self._arc_possibilities = sorted(self.file_reporter.arcs())
+            self._ast_arc_possibilities = sorted(self.file_reporter.ast_arcs())
            self.exit_counts = self.file_reporter.exit_counts()
            self.no_branch = self.file_reporter.no_branch_lines()
            n_branches = self.total_branches()
@@ -36,6 +37,7 @@ class Analysis(object):
            n_missing_branches = sum(len(v) for k,v in iitems(mba))
        else:
            self._arc_possibilities = []
+            self._ast_arc_possibilities = []
            self.exit_counts = {}
            self.no_branch = set()
            n_branches = n_partial_branches = n_missing_branches = 0
@@ -66,6 +68,9 @@ class Analysis(object):
"""Returns a sorted list of the arcs in the code."""
return self._arc_possibilities
+ def ast_arc_possibilities(self):
+ return self._ast_arc_possibilities
+
def arcs_executed(self):
"""Returns a sorted list of the arcs actually executed in the code."""
executed = self.data.arcs(self.filename) or []
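
With the patch applied, the AST arcs can be compared against the byte-code arcs for a measured file through Analysis, which is roughly what the new check_coverage assertion below does. A rough standalone sketch (`mymod` is a hypothetical module, and _analyze() is a private hook, not public API):

    # Hedged sketch: branch measurement must be on, otherwise Analysis skips
    # the arc computations entirely (see the has_arcs() check above).
    import coverage

    cov = coverage.Coverage(branch=True)
    cov.start()
    import mymod                      # hypothetical module being measured
    cov.stop()

    analysis = cov._analyze("mymod.py")
    print(analysis.arc_possibilities())      # from the existing ByteParser
    print(analysis.ast_arc_possibilities())  # from the new AstArcAnalyzer
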
diff --git a/lab/parser.py b/lab/parser.py
index 1a679e8..9a06425 100644
--- a/lab/parser.py
+++ b/lab/parser.py
@@ -82,7 +82,7 @@ class ParserMain(object):
        if options.dis:
            print("Main code:")
-            self.disassemble(bp, histogram=options.histogram)
+            self.disassemble(bp, chunks=options.chunks, histogram=options.histogram)
        arcs = bp._all_arcs()
        if options.chunks:
@@ -123,15 +123,20 @@ class ParserMain(object):
                    m2 = 'C'
                if lineno in cp.raw_excluded:
                    m3 = 'x'
-                a = arc_chars[lineno].ljust(arc_width)
+
+                if arc_chars:
+                    a = arc_chars[lineno].ljust(arc_width)
+                else:
+                    a = ""
+
                print("%4d %s%s%s%s%s %s" % (lineno, m0, m1, m2, m3, a, ltext))
-    def disassemble(self, byte_parser, histogram=False):
+    def disassemble(self, byte_parser, chunks=False, histogram=False):
        """Disassemble code, for ad-hoc experimenting."""
        for bp in byte_parser.child_parsers():
-            chunks = bp._split_into_chunks()
-            chunkd = dict((chunk.byte, chunk) for chunk in chunks)
+            if chunks:
+                chunkd = dict((chunk.byte, chunk) for chunk in bp._split_into_chunks())
            if bp.text:
                srclines = bp.text.splitlines()
            else:
@@ -151,11 +156,11 @@ class ParserMain(object):
                elif disline.offset > 0:
                    print("")
                line = disgen.format_dis_line(disline)
-                chunk = chunkd.get(disline.offset)
-                if chunk:
-                    chunkstr = ":: %r" % chunk
-                else:
-                    chunkstr = ""
+                chunkstr = ""
+                if chunks:
+                    chunk = chunkd.get(disline.offset)
+                    if chunk:
+                        chunkstr = ":: %r" % chunk
                print("%-70s%s" % (line, chunkstr))
            print("")
diff --git a/tests/coveragetest.py b/tests/coveragetest.py
index 3468b79..f3911e3 100644
--- a/tests/coveragetest.py
+++ b/tests/coveragetest.py
@@ -239,6 +239,7 @@ class CoverageTest(
        if arcs is not None:
            self.assert_equal_args(analysis.arc_possibilities(), arcs, "Possible arcs differ")
+            self.assert_equal_args(analysis.ast_arc_possibilities(), arcs, "Possible ast arcs differ")
        if arcs_missing is not None:
            self.assert_equal_args(
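
The arcz strings in the tests below are the test suite's compact arc notation: each space-separated pair names one arc, '.' stands for -1, digits are lines 1-9, letters A-Z are lines 10 and up, and a '-' before the second character negates it (an exit from the code object starting at that line). The real decoder is CoverageTest.arcz_to_arcs in tests/coveragetest.py; a rough stand-in for reading the tests:

    # Illustrative decoder only; the project's own helper handles more cases.
    def arcz_to_arcs(arcz):
        def num(ch):
            if ch == ".":
                return -1
            if ch.isdigit():
                return int(ch)
            return ord(ch) - ord("A") + 10   # 'A' -> 10, 'B' -> 11, ...

        arcs = []
        for pair in arcz.split():
            if len(pair) == 2:
                arcs.append((num(pair[0]), num(pair[1])))
            else:                            # e.g. "6-3": line 6 to exit of line 3
                arcs.append((num(pair[0]), -num(pair[2])))
        return sorted(arcs)

    print(arcz_to_arcs(".1 13 38 8C CD DE E."))
    # [(-1, 1), (1, 3), (3, 8), (8, 12), (12, 13), (13, 14), (14, -1)]
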
diff --git a/tests/test_arcs.py b/tests/test_arcs.py
index df303d8..f136b75 100644
--- a/tests/test_arcs.py
+++ b/tests/test_arcs.py
@@ -3,6 +3,10 @@
"""Tests for coverage.py's arc measurement."""
+import collections
+from itertools import cycle, product
+import re
+
from tests.coveragetest import CoverageTest
import coverage
@@ -715,6 +719,180 @@ class MiscArcTest(CoverageTest):
        )
+class AsyncTest(CoverageTest):
+    def setUp(self):
+        if env.PYVERSION < (3, 5):
+            self.skip("No point testing 3.5 syntax below 3.5")
+        super(AsyncTest, self).setUp()
+
+    def test_async(self):
+        self.check_coverage("""\
+            import asyncio
+
+            async def compute(x, y):
+                print("Compute %s + %s ..." % (x, y))
+                await asyncio.sleep(0.001)
+                return x + y
+
+            async def print_sum(x, y):
+                result = await compute(x, y)
+                print("%s + %s = %s" % (x, y, result))
+
+            loop = asyncio.get_event_loop()
+            loop.run_until_complete(print_sum(1, 2))
+            loop.close()
+            """,
+            arcz=
+                ".1 13 38 8C CD DE E. "
+                ".4 45 56 6-3 "
+                ".9 9A A-8",
+            arcz_missing="",
+        )
+        self.assertEqual(self.stdout(), "Compute 1 + 2 ...\n1 + 2 = 3\n")
+
+    def test_async_for(self):
+        self.check_coverage("""\
+            import asyncio
+
+            class AsyncIteratorWrapper: # 3
+                def __init__(self, obj): # 4
+                    self._it = iter(obj)
+
+                async def __aiter__(self): # 7
+                    return self
+
+                async def __anext__(self): # A
+                    try:
+                        return next(self._it)
+                    except StopIteration:
+                        raise StopAsyncIteration
+
+            async def doit(): # G
+                async for letter in AsyncIteratorWrapper("abc"):
+                    print(letter)
+                print(".")
+
+            loop = asyncio.get_event_loop() # L
+            loop.run_until_complete(doit())
+            loop.close()
+            """,
+            arcz=
+                ".1 13 3G GL LM MN N. " # module main line
+                ".3 34 47 7A A-3 " # class definition
+                ".H HI IH HJ J-G " # doit
+                ".5 5-4 " # __init__
+                ".8 8-7 " # __aiter__
+                ".B BC C-A DE ", # __anext__
+            arcz_missing="",
+        )
+        self.assertEqual(self.stdout(), "a\nb\nc\n.\n")
+
+    def test_async_for2(self):
+        self.check_coverage("""\
+            async def go1():
+                async for x2 in y2:
+                    try:
+                        async for x4 in y4:
+                            if a5:
+                                break
+                        else:
+                            x8 = 1
+                    except:
+                        x10 = 1
+                    x11 = 1
+                x12 = 1
+            """,
+            arcz=".1 1. .2 23 2C 34 45 56 6B",
+        )
+
+    def test_async_with(self):
+        self.check_coverage("""\
+            async def go():
+                async with x:
+                    pass
+            """,
+            arcz=".1 1. .2 23 3.",
+        )
+
+    def test_async_it(self):
+        self.check_coverage("""\
+            async def func():
+                for x in g2:
+                    x = 3
+                else:
+                    x = 5
+            """,
+            arcz=".1 1. .2 23 32 25 5.",
+        )
+        self.check_coverage("""\
+            async def func():
+                async for x in g2:
+                    x = 3
+                else:
+                    x = 5
+            """,
+            arcz=".1 1. .2 23 32 25 5.",
+        )
+
+    def xxxx_async_is_same_flow(self):
+        SOURCE = """\
+            async def func():
+                for x in g2:
+                    try:
+                        x = g4
+                    finally:
+                        x = g6
+                    try:
+                        with g8 as x:
+                            x = g9
+                            continue
+                    finally:
+                        x = g12
+                    for x in g13:
+                        continue
+                    else:
+                        break
+                    while g17:
+                        x = g18
+                        continue
+                    else:
+                        x = g21
+                    for x in g22:
+                        x = g23
+                        continue
+            """
+
+        parts = re.split(r"(for |with )", SOURCE)
+        nchoices = len(parts) // 2
+
+        def only(s):
+            return [s]
+
+        def maybe_async(s):
+            return [s, "async "+s]
+
+        all_all_arcs = collections.defaultdict(list)
+        choices = [f(x) for f, x in zip(cycle([only, maybe_async]), parts)]
+        for i, result in enumerate(product(*choices)):
+            source = "".join(result)
+            self.make_file("async.py", source)
+            cov = coverage.Coverage(branch=True)
+            self.start_import_stop(cov, "async")
+            analysis = cov._analyze("async.py")
+            all_all_arcs[tuple(analysis.arc_possibilities())].append((i, source))
+
+        import pprint
+        pprint.pprint(list(all_all_arcs.keys()))
+        for arcs, numbers in all_all_arcs.items():
+            print(" ".join("{0:0{1}b}".format(x[0], nchoices) for x in numbers))
+            print(" {}".format(arcs))
+            for i, source in numbers:
+                print("-" * 80)
+                print(source)
+
+        assert len(all_all_arcs) == 1
+
+
class ExcludeTest(CoverageTest):
"""Tests of exclusions to indicate known partial branches."""