Diffstat (limited to 'Lib/test/test_tokenize.py')
-rw-r--r--  Lib/test/test_tokenize.py  12
1 file changed, 10 insertions, 2 deletions
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index d6cfb65d33..cbfafa8d14 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -4,7 +4,7 @@ Tests for the tokenize module.
>>> import glob, random, sys
The tests can be really simple. Given a small fragment of source
-code, print out a table with thokens. The ENDMARK is omitted for
+code, print out a table with tokens. The ENDMARK is omitted for
brevity.
>>> dump_tokens("1 + 1")
@@ -106,7 +106,7 @@ Some error-handling code
... "else: print 'Loaded'\\n")
True
-Balancing contunuation
+Balancing continuation
>>> roundtrip("a = (3,4, \\n"
... "5,6)\\n"
@@ -126,6 +126,14 @@ Ordinary integers and binary operators
NUMBER '0xff' (1, 0) (1, 4)
OP '<=' (1, 5) (1, 7)
NUMBER '255' (1, 8) (1, 11)
+ >>> dump_tokens("0b10 <= 255")
+ NUMBER '0b10' (1, 0) (1, 4)
+ OP '<=' (1, 5) (1, 7)
+ NUMBER '255' (1, 8) (1, 11)
+ >>> dump_tokens("0o123 <= 0123")
+ NUMBER '0o123' (1, 0) (1, 5)
+ OP '<=' (1, 6) (1, 8)
+ NUMBER '0123' (1, 9) (1, 13)
>>> dump_tokens("01234567 > ~0x15")
NUMBER '01234567' (1, 0) (1, 8)
OP '>' (1, 9) (1, 10)
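
Note: dump_tokens is a helper defined inside test_tokenize.py itself; this commit only fixes two docstring typos and extends the doctest with binary (0b10) and explicit-octal (0o123, alongside the legacy 0123) literal cases. The following standalone sketch, which is not part of the patch, shows one way to produce similar token tables with the standard tokenize module. It assumes a hypothetical helper name (dump_tokens_demo); also note that legacy octals such as 0123 are only accepted on the Python 2 line, so that particular example tokenizes differently on Python 3.

    import io
    import token
    import tokenize

    def dump_tokens_demo(source):
        # Tokenize a source fragment and print a table of
        # (token name, token string, start, end), roughly in the
        # layout used by the doctest above.
        for tok in tokenize.generate_tokens(io.StringIO(source).readline):
            # ENDMARKER (and the trailing NEWLINE) are omitted for brevity,
            # as in the doctest.
            if tok.type in (token.ENDMARKER, token.NEWLINE):
                continue
            print("%-10s %-14r %s %s" % (token.tok_name[tok.type],
                                         tok.string, tok.start, tok.end))

    dump_tokens_demo("0b10 <= 255")
    # NUMBER     '0b10'         (1, 0) (1, 4)
    # OP         '<='           (1, 5) (1, 7)
    # NUMBER     '255'          (1, 8) (1, 11)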