Diffstat (limited to 'sphinx/pycode/pgen2/tokenize.py')
-rw-r--r--    sphinx/pycode/pgen2/tokenize.py    6
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/sphinx/pycode/pgen2/tokenize.py b/sphinx/pycode/pgen2/tokenize.py
index 7ad9f012c..93fd6578f 100644
--- a/sphinx/pycode/pgen2/tokenize.py
+++ b/sphinx/pycode/pgen2/tokenize.py
@@ -25,6 +25,8 @@ are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""
+from __future__ import print_function
+
__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = \
'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro'
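
The only behavioural change in this first hunk is the future import. A minimal sketch of why it matters (not part of the patch): under Python 2, print is a statement, so a parenthesized call with several arguments prints a tuple repr; with the future import, print is a function and the same call syntax behaves identically on Python 2 and Python 3.

    from __future__ import print_function

    # With the import, this prints "1 0" on both Python 2 and Python 3.
    # Without it, Python 2 would print the tuple repr "(1, 0)".
    print(1, 0)
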
@@ -146,8 +148,8 @@ class StopTokenizing(Exception): pass
def printtoken(type, token, scell, ecell, line): # for testing
srow, scol = scell
erow, ecol = ecell
- print "%d,%d-%d,%d:\t%s\t%s" % \
- (srow, scol, erow, ecol, tok_name[type], repr(token))
+ print("%d,%d-%d,%d:\t%s\t%s" %
+ (srow, scol, erow, ecol, tok_name[type], repr(token)))
def tokenize(readline, tokeneater=printtoken):
"""