diff options
author | mitsuhiko <devnull@localhost> | 2008-07-04 15:36:19 +0200 |
---|---|---|
committer | mitsuhiko <devnull@localhost> | 2008-07-04 15:36:19 +0200 |
commit | 610fb310f3341090f85f941c732fa3ccd377a69e (patch) | |
tree | b075e138056910a8fbea2d39b52e0fbf2edeab3d | |
parent | b8a6ca53e649a5de0801f27220f279720e603856 (diff) | |
download | pygments-610fb310f3341090f85f941c732fa3ccd377a69e.tar.gz |
The raw token stream lexer is no longer associated with `*.raw`. This lexer is not commonly used and doesn't provide silent error handling, which can be a problem for applications directly interfacing with Pygments, such as Review Board:
http://groups.google.com/group/reviewboard/browse_thread/thread/179ec0a2fe968915/5aeef65110046f29
-rw-r--r-- | CHANGES | 4 | ||||
-rw-r--r-- | pygments/lexers/_mapping.py | 2 | ||||
-rw-r--r-- | pygments/lexers/special.py | 8 |
3 files changed, 11 insertions, 3 deletions
@@ -21,6 +21,10 @@ Version 0.11 - Support roman/sans/mono style defs and use them in the LaTeX formatter. +- the raw token formatter is no longer registered to ``*.raw`` + and it's documented that tokenization with this lexer may + raise exceptions. + Version 0.10 ------------ diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index c8fdfa43..3fa0c150 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -103,7 +103,7 @@ LEXERS = { 'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), 'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript'), ('text/x-python', 'application/x-python')), 'PythonTracebackLexer': ('pygments.lexers.agile', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)), - 'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), ('*.raw',), ('application/x-pygments-tokens',)), + 'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)), 'RedcodeLexer': ('pygments.lexers.other', 'Redcode', ('redcode',), ('*.cw',), ()), 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)), 'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst',)), diff --git a/pygments/lexers/special.py b/pygments/lexers/special.py index 8d66fc67..b7b5843e 100644 --- a/pygments/lexers/special.py +++ b/pygments/lexers/special.py @@ -39,7 +39,9 @@ line_re = re.compile('.*?\n') class RawTokenLexer(Lexer): """ - Recreate a token stream formatted with the `RawTokenFormatter`. + Recreate a token stream formatted with the `RawTokenFormatter`. This + lexer raises exceptions during parsing if the token stream in the + file is malformed. 
Additional options accepted: @@ -49,7 +51,7 @@ class RawTokenLexer(Lexer): """ name = 'Raw token data' aliases = ['raw'] - filenames = ['*.raw'] + filenames = [] mimetypes = ['application/x-pygments-tokens'] def __init__(self, **options): @@ -86,6 +88,8 @@ class RawTokenLexer(Lexer): ttype = Token ttypes = ttypestr.split('.')[1:] for ttype_ in ttypes: + if not ttype_ or not ttype_[0].isupper(): + raise ValueError('malformed token name') ttype = getattr(ttype, ttype_) _ttype_cache[ttypestr] = ttype val = val[2:-2].decode('unicode-escape') |