diff options
Diffstat (limited to 'test/test_readers/test_python/showtok')
-rwxr-xr-x | test/test_readers/test_python/showtok | 40 |
1 file changed, 40 insertions, 0 deletions
#! /usr/bin/env python

"""
This is a tool for exploring token lists generated by
``tokenize.generate_tokens()`` from test data in
docutils/test/test_readers/test_python/test_parser or stdin.

Usage::

    showtok <key> <index>

    showtok < <module.py>

Where ``<key>`` is the key to the ``totest`` dictionary, and ``<index>`` is
the index of the list ``totest[key]``.  If no arguments are given, stdin is
used for input.
"""

import io
import pprint
import sys
import tokenize
from token import tok_name


def name_tokens(tokens):
    """
    Replace each token tuple in `tokens`, in place, with a
    ``(token_name, original_tuple)`` pair, where ``token_name`` is the
    symbolic name (e.g. ``'NAME'``, ``'OP'``) of the token's type code.
    """
    # enumerate instead of range(len(...)): we need both index (to assign
    # back in place) and the value.
    for i, tok in enumerate(tokens):
        tokens[i] = (tok_name[tok[0]], tok)


def main():
    """Read source text (from test data or stdin), tokenize, and dump it."""
    if len(sys.argv) > 1:
        # Imported lazily: only needed when pulling input from the test
        # data, and keeps this module importable without the test suite.
        import test_parser
        key, caseno = sys.argv[1:]
        print('totest["%s"][%s][0]:\n' % (key, caseno))
        input_text = test_parser.totest[key][int(caseno)][0]
    else:
        input_text = sys.stdin.read()
    print(input_text)
    # generate_tokens() wants a readline-style callable; StringIO.readline
    # replaces the Python 2 ``iter(...splitlines(1)).next`` idiom.
    tokens = list(tokenize.generate_tokens(io.StringIO(input_text).readline))
    name_tokens(tokens)
    pprint.pprint(tokens)


if __name__ == '__main__':
    main()