author    Eric Smith <eric@trueblade.com>    2008-03-17 19:43:40 +0000
committer Eric Smith <eric@trueblade.com>    2008-03-17 19:43:40 +0000
commit    0aed07ad80795bd5856ed60e7edcadeb353cf5a0 (patch)
tree      55268a52e9e0686949dea35b218275817b2a1bbe /Lib/test
parent    6f778cfb5ba0291ed29c51fbdd6e40760097fd36 (diff)
download  cpython-git-0aed07ad80795bd5856ed60e7edcadeb353cf5a0.tar.gz
Added PEP 3127 support to tokenize (with tests); added PEP 3127 to NEWS.
Diffstat (limited to 'Lib/test')
-rw-r--r--  Lib/test/test_tokenize.py | 12
1 file changed, 10 insertions, 2 deletions
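PEP 3127 adds the 0b binary and 0o octal prefixes to Python's integer literal
syntax; this commit teaches the pure-Python tokenize module to recognize them
(the legacy 0-prefixed octal form is still accepted on the branch this commit
targets). A minimal sketch of the literal forms involved, using only the
new-style prefixes since old-style 0123 octals are a SyntaxError in Python 3:

    # PEP 3127 integer literal forms (new-style prefixes only; the legacy
    # 01234567 form exercised in the diff below is Python 2-only).
    print(0b10)    # binary literal, new in PEP 3127  -> 2
    print(0o123)   # explicit octal literal           -> 83
    print(0xff)    # hex literal, unchanged by the PEP -> 255
    print(0b10 <= 255, 0o123 <= 255)  # comparisons like the new doctests -> True True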
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index d6cfb65d33..cbfafa8d14 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -4,7 +4,7 @@ Tests for the tokenize module.
>>> import glob, random, sys
The tests can be really simple. Given a small fragment of source
-code, print out a table with thokens. The ENDMARK is omitted for
+code, print out a table with tokens. The ENDMARK is omitted for
brevity.
>>> dump_tokens("1 + 1")
@@ -106,7 +106,7 @@ Some error-handling code
... "else: print 'Loaded'\\n")
True
-Balancing contunuation
+Balancing continuation
>>> roundtrip("a = (3,4, \\n"
... "5,6)\\n"
@@ -126,6 +126,14 @@ Ordinary integers and binary operators
NUMBER '0xff' (1, 0) (1, 4)
OP '<=' (1, 5) (1, 7)
NUMBER '255' (1, 8) (1, 11)
+ >>> dump_tokens("0b10 <= 255")
+ NUMBER '0b10' (1, 0) (1, 4)
+ OP '<=' (1, 5) (1, 7)
+ NUMBER '255' (1, 8) (1, 11)
+ >>> dump_tokens("0o123 <= 0123")
+ NUMBER '0o123' (1, 0) (1, 5)
+ OP '<=' (1, 6) (1, 8)
+ NUMBER '0123' (1, 9) (1, 13)
>>> dump_tokens("01234567 > ~0x15")
NUMBER '01234567' (1, 0) (1, 8)
OP '>' (1, 9) (1, 10)
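For context, dump_tokens is a helper defined elsewhere in test_tokenize.py and
is not part of this diff. A minimal sketch of an equivalent helper, assuming a
current Python 3 tokenize module and the hypothetical name dump_tokens_sketch,
might look like this:

    import io
    import tokenize

    def dump_tokens_sketch(source):
        # Print "NAME 'text' (srow, scol) (erow, ecol)" per token, roughly
        # the format the doctests above expect (ENDMARKER omitted).
        for tok in tokenize.generate_tokens(io.StringIO(source).readline):
            if tok.type == tokenize.ENDMARKER:
                continue
            print("%-10s %-14r %s %s" %
                  (tokenize.tok_name[tok.type], tok.string, tok.start, tok.end))

    dump_tokens_sketch("0b10 <= 255")
    # Expected to print NUMBER '0b10', OP '<=', and NUMBER '255' with the
    # positions shown in the new doctests, plus a trailing NEWLINE token on
    # current interpreters.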