summaryrefslogtreecommitdiff
path: root/test/test_readers/test_python/showtok
diff options
context:
space:
mode:
authorwiemann <wiemann@929543f6-e4f2-0310-98a6-ba3bd3dd1d04>2006-01-09 20:44:25 +0000
committerwiemann <wiemann@929543f6-e4f2-0310-98a6-ba3bd3dd1d04>2006-01-09 20:44:25 +0000
commitd77fdfef70e08114f57cbef5d91707df8717ea9f (patch)
tree49444e3486c0c333cb7b33dfa721296c08ee4ece /test/test_readers/test_python/showtok
parent53cd16ca6ca5f638cbe5956988e88f9339e355cf (diff)
parent3993c4097756e9885bcfbd07cb1cc1e4e95e50e4 (diff)
downloaddocutils-0.4.tar.gz
Release 0.4: tagging released revisiondocutils-0.4
git-svn-id: http://svn.code.sf.net/p/docutils/code/tags/docutils-0.4@4268 929543f6-e4f2-0310-98a6-ba3bd3dd1d04
Diffstat (limited to 'test/test_readers/test_python/showtok')
-rwxr-xr-xtest/test_readers/test_python/showtok40
1 files changed, 40 insertions, 0 deletions
diff --git a/test/test_readers/test_python/showtok b/test/test_readers/test_python/showtok
new file mode 100755
index 000000000..efd250ce1
--- /dev/null
+++ b/test/test_readers/test_python/showtok
@@ -0,0 +1,40 @@
+#! /usr/bin/env python
+
+
+"""
+This is a tool for exploring token lists generated by
+``tokenize.generate_tokens()`` from test data in
+docutils/test/test_readers/test_python/test_parser or stdin.
+
+Usage::
+
+ showtok <key> <index>
+
+ showtok < <module.py>
+
+Where ``<key>`` is the key to the ``totest`` dictionary, and ``<index>`` is
+the index of the list ``totest[key]``. If no arguments are given, stdin is
+used for input.
+"""
+
+import sys
+import tokenize
+import pprint
+from token import tok_name
+import test_parser
+
def name_tokens(tokens):
    """Annotate a token list in place.

    Each element of `tokens` is a tuple produced by
    ``tokenize.generate_tokens()`` whose first item is the numeric token
    type.  Replace element ``i`` with ``(symbolic_name, original_tuple)``,
    where ``symbolic_name`` comes from ``token.tok_name`` (e.g. 'NAME',
    'OP'), so the pretty-printed output is human-readable.

    Mutates `tokens`; returns None.
    """
    # enumerate instead of range(len(...)): same traversal, idiomatic.
    for i, tup in enumerate(tokens):
        tokens[i] = (tok_name[tup[0]], tup)
+
+if len(sys.argv) > 1:
+ key, caseno = sys.argv[1:]
+ print 'totest["%s"][%s][0]:\n' % (key, caseno)
+ input_text = test_parser.totest[key][int(caseno)][0]
+else:
+ input_text = sys.stdin.read()
+print input_text
+tokens = list(tokenize.generate_tokens(iter(input_text.splitlines(1)).next))
+name_tokens(tokens)
+pprint.pprint(tokens)