Diffstat (limited to 'coverage/parser.py')
-rw-r--r-- | coverage/parser.py | 50
1 files changed, 27 insertions, 23 deletions
diff --git a/coverage/parser.py b/coverage/parser.py
index 497ddeb4..014b4ab5 100644
--- a/coverage/parser.py
+++ b/coverage/parser.py
@@ -38,7 +38,7 @@ class PythonParser(object):
             except IOError as err:
                 raise NoSource(
                     "No source for code: '%s': %s" % (self.filename, err)
-                    )
+                )
 
         self.exclude = exclude
 
@@ -62,8 +62,9 @@ class PythonParser(object):
         # The line numbers that start statements.
         self.statement_starts = set()
 
-        # Lazily-created ByteParser
+        # Lazily-created ByteParser and arc data.
        self._byte_parser = None
+        self._all_arcs = None
 
     @property
     def byte_parser(self):
@@ -112,7 +113,7 @@ class PythonParser(object):
                 print("%10s %5s %-20r %r" % (
                     tokenize.tok_name.get(toktype, toktype),
                     nice_pair((slineno, elineno)), ttext, ltext
-                    ))
+                ))
             if toktype == token.INDENT:
                 indent += 1
             elif toktype == token.DEDENT:
@@ -191,6 +192,7 @@ class PythonParser(object):
             for (a, b) in arcs
         ]
 
+    @expensive
     def parse_source(self):
         """Parse source text to find executable lines, excluded lines, etc.
 
@@ -203,12 +205,16 @@ class PythonParser(object):
         """
         try:
             self._raw_parse()
-        except (tokenize.TokenError, IndentationError) as tokerr:
-            msg, lineno = tokerr.args   # pylint: disable=unpacking-non-sequence
+        except (tokenize.TokenError, IndentationError) as err:
+            if hasattr(err, "lineno"):
+                lineno = err.lineno         # IndentationError
+            else:
+                lineno = err.args[1][0]     # TokenError
             raise NotPython(
-                "Couldn't parse '%s' as Python source: '%s' at %s" %
-                (self.filename, msg, lineno)
+                "Couldn't parse '%s' as Python source: '%s' at line %d" % (
+                    self.filename, err.args[0], lineno
                 )
+            )
 
         excluded_lines = self.first_lines(self.excluded)
         ignore = set()
@@ -220,23 +226,22 @@ class PythonParser(object):
 
         return lines, excluded_lines
 
-    @expensive
     def arcs(self):
         """Get information about the arcs available in the code.
 
-        Returns a sorted list of line number pairs.  Line numbers have been
+        Returns a list of line number pairs.  Line numbers have been
         normalized to the first line of multi-line statements.
 
         """
-        all_arcs = []
-        for l1, l2 in self.byte_parser._all_arcs():
-            fl1 = self.first_line(l1)
-            fl2 = self.first_line(l2)
-            if fl1 != fl2:
-                all_arcs.append((fl1, fl2))
-        return sorted(all_arcs)
+        if self._all_arcs is None:
+            self._all_arcs = []
+            for l1, l2 in self.byte_parser._all_arcs():
+                fl1 = self.first_line(l1)
+                fl2 = self.first_line(l2)
+                if fl1 != fl2:
+                    self._all_arcs.append((fl1, fl2))
+        return self._all_arcs
 
-    @expensive
     def exit_counts(self):
         """Get a mapping from line numbers to count of exits from that line.
 
@@ -290,7 +295,7 @@ OPS_CODE_END = _opcode_set('RETURN_VALUE')
 OPS_CHUNK_END = _opcode_set(
     'JUMP_ABSOLUTE', 'JUMP_FORWARD', 'RETURN_VALUE', 'RAISE_VARARGS',
     'BREAK_LOOP', 'CONTINUE_LOOP',
-    )
+)
 
 # Opcodes that unconditionally begin a new code chunk.  By starting new chunks
 # with unconditional jump instructions, we neatly deal with jumps to jumps
@@ -300,7 +305,7 @@ OPS_CHUNK_BEGIN = _opcode_set('JUMP_ABSOLUTE', 'JUMP_FORWARD')
 # Opcodes that push a block on the block stack.
 OPS_PUSH_BLOCK = _opcode_set(
     'SETUP_LOOP', 'SETUP_EXCEPT', 'SETUP_FINALLY', 'SETUP_WITH'
-    )
+)
 
 # Block types for exception handling.
 OPS_EXCEPT_BLOCKS = _opcode_set('SETUP_EXCEPT', 'SETUP_FINALLY')
@@ -343,10 +348,9 @@ class ByteParser(object):
         for attr in ['co_lnotab', 'co_firstlineno', 'co_consts', 'co_code']:
             if not hasattr(self.code, attr):
                 raise CoverageException(
-                    "This implementation of Python doesn't support code "
-                    "analysis.\n"
+                    "This implementation of Python doesn't support code analysis.\n"
                     "Run coverage.py under CPython for this command."
-                    )
+                )
 
     def child_parsers(self):
         """Iterate over all the code objects nested within this one.
@@ -664,4 +668,4 @@ class Chunk(object):
             "!" if self.first else "",
             "v" if self.entrance else "",
             list(self.exits),
-            )
+        )
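
A note on the parse_source() hunk above: IndentationError (a SyntaxError subclass) exposes the failing line number as a .lineno attribute, while tokenize.TokenError only packs a (lineno, column) pair into its second argument, which is why the new code branches on hasattr(err, "lineno"). The following is a minimal standalone sketch of that extraction, not coverage.py code; the helper names and sample inputs are illustrative, and newer CPython versions may report some of these failures as other SyntaxError subclasses, so the sketch catches SyntaxError broadly.

import io
import tokenize

def failure_lineno(err):
    # IndentationError/SyntaxError carry a .lineno attribute directly;
    # tokenize.TokenError stores only ('message', (lineno, column)) in args.
    if hasattr(err, "lineno"):
        return err.lineno
    return err.args[1][0]

def describe_bad_source(text):
    try:
        for _ in tokenize.generate_tokens(io.StringIO(text).readline):
            pass
    except (tokenize.TokenError, SyntaxError) as err:
        return "not Python: %r at line %s" % (err.args[0], failure_lineno(err))
    return "tokenized OK"

# Unterminated multi-line statement: historically a TokenError.
print(describe_bad_source("x = (1,\n"))
# Dedent that matches no outer level: an IndentationError with .lineno set.
print(describe_bad_source("if 1:\n        a = 1\n    b = 2\n"))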
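
The arcs() hunk drops the @expensive decorator in favour of caching the computed arcs in self._all_arcs on first use (note the docstring change: the result is no longer sorted). Below is a reduced sketch of that memoization pattern with illustrative names, not the real parser API.

class ArcCache(object):
    """Lazy, cached arc normalization, mirroring the pattern in PythonParser.arcs()."""

    def __init__(self, raw_arcs, first_line):
        self._raw_arcs = raw_arcs      # (l1, l2) pairs, e.g. from a byte parser
        self._first_line = first_line  # maps a line to its statement's first line
        self._all_arcs = None          # filled in on the first call to arcs()

    def arcs(self):
        if self._all_arcs is None:
            self._all_arcs = []
            for l1, l2 in self._raw_arcs:
                fl1 = self._first_line(l1)
                fl2 = self._first_line(l2)
                if fl1 != fl2:
                    self._all_arcs.append((fl1, fl2))
        return self._all_arcs

cache = ArcCache([(1, 2), (2, 2), (2, 5)], first_line=lambda line: line)
print(cache.arcs())                   # [(1, 2), (2, 5)]: the (2, 2) self-arc collapses away
print(cache.arcs() is cache.arcs())   # True: later calls reuse the cached list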