path: root/docutils/test/test_readers/test_python/showtok
#! /usr/bin/env python


"""
This is a tool for exploring token lists generated by
``tokenize.generate_tokens()`` from test data in
docutils/test/test_readers/test_python/test_parser or stdin.

Usage::

    showtok <key> <index>

    showtok < <module.py>

Where ``<key>`` is a key into the ``totest`` dictionary and ``<index>`` is an
index into the list ``totest[key]``.  If no arguments are given, stdin is
used for input.
"""

import sys
import io
import tokenize
import pprint
from token import tok_name
import test_parser

def name_tokens(tokens):
    """Replace each raw token tuple with a (symbolic name, tuple) pair, in place."""
    for i, tup in enumerate(tokens):
        tokens[i] = (tok_name[tup[0]], tup)

if len(sys.argv) > 1:
    # Look up the test case text in test_parser's ``totest`` dictionary.
    key, caseno = sys.argv[1:]
    print('totest["%s"][%s][0]:\n' % (key, caseno))
    input_text = test_parser.totest[key][int(caseno)][0]
else:
    # No arguments: tokenize whatever arrives on stdin.
    input_text = sys.stdin.read()
print(input_text)
# generate_tokens() expects a readline-style callable, so wrap the text
# in an in-memory file-like buffer.
tokens = list(tokenize.generate_tokens(io.StringIO(input_text).readline))
name_tokens(tokens)
pprint.pprint(tokens)
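
# Usage sketch (illustrative; ``some_module.py`` is a placeholder filename):
#
#     python showtok < some_module.py
#
# This prints the module source followed by the pprint'ed list of named tokens.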