diff options
author | gbrandl <devnull@localhost> | 2006-12-10 12:07:57 +0100 |
---|---|---|
committer | gbrandl <devnull@localhost> | 2006-12-10 12:07:57 +0100 |
commit | e0e37bfe227c281f00de6a07e3384b3f53b0055e (patch) | |
tree | e632816ff30fdd9ef3d3dfbb90c537d9ac23822f /scripts/find_error.py | |
parent | 74d7585b086181a8cd62205938970b80fe7aa4df (diff) | |
download | pygments-e0e37bfe227c281f00de6a07e3384b3f53b0055e.tar.gz |
[svn] Add Groff lexer by Tim Hatch, find_error script, CSS lexer improvement.
Add analyse_text methods to some lexers.
Diffstat (limited to 'scripts/find_error.py')
-rw-r--r-- | scripts/find_error.py | 45 |
1 files changed, 45 insertions, 0 deletions
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
    Lexing error finder
    ~~~~~~~~~~~~~~~~~~~

    For the source files given on the command line, display
    the text where Error tokens are being generated, along
    with some context.

    :copyright: 2006 by Tim Hatch <tim@timhatch.com>.
    :license: BSD, see LICENSE for more details.
"""

import sys

from pygments.lexers import get_lexer_for_filename, get_lexer_by_name
from pygments.token import Error


def main(fn):
    """Lex the file *fn* and report the first Error token found.

    Lexer selection: first by filename extension; if that fails, the
    part of the basename before the first underscore is tried as a
    lexer alias (matches the example-file naming convention, e.g.
    ``perl_perl5db``).

    Prints the offending token plus up to five preceding tokens of
    context, then returns.  Raises AssertionError when no lexer can
    be determined for *fn*.
    """
    try:
        lx = get_lexer_for_filename(fn)
    except ValueError:
        try:
            name, rest = fn.split("_", 1)
            lx = get_lexer_by_name(name)
        except ValueError:
            raise AssertionError('no lexer found for file %r' % fn)
    # Open with an explicit encoding and universal newlines instead of
    # the old file(fn, 'U').read() + .decode('latin1') pair; the context
    # manager also fixes the leaked file handle.
    with open(fn, 'r', encoding='latin1', newline=None) as fp:
        text = fp.read()
    # Normalize to exactly one trailing newline, as the lexers expect.
    text = text.strip('\n') + '\n'
    ntext = []
    # 'ttype' rather than 'type' to avoid shadowing the builtin.
    for ttype, val in lx.get_tokens(text):
        if ttype == Error:
            print("Error parsing", fn)
            print("\n".join(' ' + repr(x) for x in ntext[-5:]))
            print(repr(val) + "<<<")
            return
        ntext.append((ttype, val))


if __name__ == "__main__":
    for f in sys.argv[1:]:
        main(f)