diff options
author | Dmitry Shachnev <mitya57@gmail.com> | 2014-01-22 18:34:51 +0400 |
---|---|---|
committer | Dmitry Shachnev <mitya57@gmail.com> | 2014-01-22 18:34:51 +0400 |
commit | 953b33d3f721e58ab48490d33c141df1e4dd25c1 (patch) | |
tree | 26bce4c2ef75b4ebe8422685d8962aa07978bad2 /sphinx/pycode/__init__.py | |
parent | 317930a7fbd49b50fb8a144161a698fcafeab91a (diff) | |
parent | 5f13479408785818ee8b85d4172314ea5578fde3 (diff) | |
download | sphinx-git-953b33d3f721e58ab48490d33c141df1e4dd25c1.tar.gz |
Merge
Diffstat (limited to 'sphinx/pycode/__init__.py')
-rw-r--r-- | sphinx/pycode/__init__.py | 11 |
1 file changed, 6 insertions, 5 deletions
diff --git a/sphinx/pycode/__init__.py b/sphinx/pycode/__init__.py index 54e79da6b..ca5e8b095 100644 --- a/sphinx/pycode/__init__.py +++ b/sphinx/pycode/__init__.py @@ -8,6 +8,7 @@ :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. """ +from __future__ import print_function import sys from os import path @@ -182,7 +183,7 @@ class ModuleAnalyzer(object): return cls.cache['file', filename] try: fileobj = open(filename, 'rb') - except Exception, err: + except Exception as err: raise PycodeError('error opening %r' % filename, err) obj = cls(fileobj, modname, filename) cls.cache['file', filename] = obj @@ -202,7 +203,7 @@ class ModuleAnalyzer(object): obj = cls.for_string(source, modname) else: obj = cls.for_file(source, modname) - except PycodeError, err: + except PycodeError as err: cls.cache['module', modname] = err raise cls.cache['module', modname] = obj @@ -245,7 +246,7 @@ class ModuleAnalyzer(object): return try: self.tokens = list(tokenize.generate_tokens(self.source.readline)) - except tokenize.TokenError, err: + except tokenize.TokenError as err: raise PycodeError('tokenizing failed', err) self.source.close() @@ -256,7 +257,7 @@ class ModuleAnalyzer(object): self.tokenize() try: self.parsetree = pydriver.parse_tokens(self.tokens) - except parse.ParseError, err: + except parse.ParseError as err: raise PycodeError('parsing failed', err) def find_attr_docs(self, scope=''): @@ -344,4 +345,4 @@ if __name__ == '__main__': pprint.pprint(ma.find_tags()) x3 = time.time() #print nodes.nice_repr(ma.parsetree, number2name) - print "tokenizing %.4f, parsing %.4f, finding %.4f" % (x1-x0, x2-x1, x3-x2) + print("tokenizing %.4f, parsing %.4f, finding %.4f" % (x1-x0, x2-x1, x3-x2)) |