diff options
author | Tim Hatch <tim@timhatch.com> | 2014-10-06 20:38:15 -0700 |
---|---|---|
committer | Tim Hatch <tim@timhatch.com> | 2014-10-06 20:38:15 -0700 |
commit | 053be80c14dace5f47d36507e7389f8b664e3898 (patch) | |
tree | 2a7a097db68466b7bd1d56bd6e50e045cd2f21e0 /scripts | |
parent | 335ae1e0f099b523ed362734300a8bed1cc54a7e (diff) | |
download | pygments-053be80c14dace5f47d36507e7389f8b664e3898.tar.gz |
Change how debug_lexer.py finds its lexers.
Previously it was way too easy to end up with None as the lexer, which later
fails. This considers aliases, same as the -l argument to pygmentize does.
Diffstat (limited to 'scripts')
-rwxr-xr-x | scripts/debug_lexer.py | 19 |
1 file changed, 12 insertions, 7 deletions
diff --git a/scripts/debug_lexer.py b/scripts/debug_lexer.py index 6164e09a..4d92ebb3 100755 --- a/scripts/debug_lexer.py +++ b/scripts/debug_lexer.py @@ -24,7 +24,7 @@ if os.path.isdir(os.path.join(srcpath, 'pygments')): from pygments.lexer import RegexLexer, ProfilingRegexLexer, ProfilingRegexLexerMeta -from pygments.lexers import find_lexer_class, find_lexer_class_for_filename +from pygments.lexers import get_lexer_by_name, find_lexer_class_for_filename from pygments.token import Error, Text, _TokenType from pygments.cmdline import _parse_options @@ -89,11 +89,10 @@ class DebuggingRegexLexer(RegexLexer): def main(fn, lexer=None, options={}): if lexer is not None: - lxcls = find_lexer_class(lexer) + lxcls = get_lexer_by_name(lexer).__class__ else: - try: - lxcls = find_lexer_class_for_filename(os.path.basename(fn)) - except ValueError: + lxcls = find_lexer_class_for_filename(os.path.basename(fn)) + if lxcls is None: try: name, rest = fn.split('_', 1) lxcls = find_lexer_class(name) @@ -114,6 +113,12 @@ def main(fn, lexer=None, options={}): elif lxcls.__bases__ == (DebuggingRegexLexer,): # already debugged before debug_lexer = True + else: + # HACK: ExtendedRegexLexer subclasses will only partially work here. + print(lxcls.__bases__) + lxcls.__bases__ = (DebuggingRegexLexer,) + debug_lexer = True + lx = lxcls(**options) lno = 1 text = open(fn, 'rb').read().decode('utf-8') @@ -122,7 +127,7 @@ def main(fn, lexer=None, options={}): states = [] def show_token(tok, state): - reprs = map(repr, tok) + reprs = list(map(repr, tok)) print(' ' + reprs[1] + ' ' + ' ' * (29-len(reprs[1])) + reprs[0], end=' ') if debug_lexer: print(' ' + ' ' * (29-len(reprs[0])) + repr(state), end=' ') @@ -157,7 +162,7 @@ def main(fn, lexer=None, options={}): else: states.append(None) if showall: - for tok, state in map(None, tokens, states): + for tok, state in zip(tokens, states): show_token(tok, state) return 0 |