| | | |
|---|---|---|
| author | Jharrod LaFon <jharrod.lafon@gmail.com> | 2014-04-14 14:01:51 -0400 |
| committer | Jharrod LaFon <jharrod.lafon@gmail.com> | 2014-04-14 14:01:51 -0400 |
| commit | acd5cf2113bb179731a94984bb826528a31fcb06 (patch) | |
| tree | 66da33b35d68d2dc3ecbc6cfb9b401ac6351a6a9 /tests/test_basic_api.py | |
| parent | a88ed45d9bdc2e158fe7d69be8e01f798ded7b8e (diff) | |
| parent | 5d57fe78405ac06a306f5ed2dd1b630a909cbdfb (diff) | |
| download | pygments-acd5cf2113bb179731a94984bb826528a31fcb06.tar.gz | |
Merged head
Diffstat (limited to 'tests/test_basic_api.py')
-rw-r--r-- | tests/test_basic_api.py | 33 |
1 files changed, 21 insertions, 12 deletions
```diff
diff --git a/tests/test_basic_api.py b/tests/test_basic_api.py
index 1c6933a7..be7a4747 100644
--- a/tests/test_basic_api.py
+++ b/tests/test_basic_api.py
@@ -7,7 +7,8 @@
     :license: BSD, see LICENSE for details.
 """
 
-import os
+from __future__ import print_function
+
 import random
 import unittest
 
@@ -15,7 +16,7 @@ from pygments import lexers, formatters, filters, format
 from pygments.token import _TokenType, Text
 from pygments.lexer import RegexLexer
 from pygments.formatters.img import FontNotFound
-from pygments.util import BytesIO, StringIO, bytes, b
+from pygments.util import text_type, StringIO, xrange, ClassNotFound
 
 import support
 
@@ -28,7 +29,7 @@ test_content = ''.join(test_content) + '\n'
 
 def test_lexer_import_all():
     # instantiate every lexer, to see if the token type defs are correct
-    for x in lexers.LEXERS.keys():
+    for x in lexers.LEXERS:
         c = getattr(lexers, x)()
 
 
@@ -71,8 +72,8 @@ def test_lexer_classes():
             assert isinstance(token, tuple)
             assert isinstance(token[0], _TokenType)
             if isinstance(token[1], str):
-                print repr(token[1])
-            assert isinstance(token[1], unicode)
+                print(repr(token[1]))
+            assert isinstance(token[1], text_type)
             txt += token[1]
         assert txt == test_content, "%s lexer roundtrip failed: %r != %r" % \
             (cls.name, test_content, txt)
@@ -99,7 +100,8 @@ def test_lexer_options():
                 'SqliteConsoleLexer', 'MatlabSessionLexer', 'ErlangShellLexer',
                 'BashSessionLexer', 'LiterateHaskellLexer', 'LiterateAgdaLexer',
                 'PostgresConsoleLexer', 'ElixirConsoleLexer', 'JuliaConsoleLexer',
-                'RobotFrameworkLexer', 'DylanConsoleLexer', 'ShellSessionLexer'):
+                'RobotFrameworkLexer', 'DylanConsoleLexer', 'ShellSessionLexer',
+                'LiterateIdrisLexer'):
             inst = cls(ensurenl=False)
             ensure(inst.get_tokens('a\nb'), 'a\nb')
             inst = cls(ensurenl=False, stripall=True)
@@ -127,7 +129,7 @@ def test_get_lexers():
              ]:
         yield verify, func, args
 
-    for cls, (_, lname, aliases, _, mimetypes) in lexers.LEXERS.iteritems():
+    for cls, (_, lname, aliases, _, mimetypes) in lexers.LEXERS.items():
         assert cls == lexers.find_lexer_class(lname).__name__
 
         for alias in aliases:
@@ -136,6 +138,13 @@ def test_get_lexers():
         for mimetype in mimetypes:
             assert cls == lexers.get_lexer_for_mimetype(mimetype).__class__.__name__
 
+    try:
+        lexers.get_lexer_by_name(None)
+    except ClassNotFound:
+        pass
+    else:
+        raise Exception
+
 
 def test_formatter_public_api():
     ts = list(lexers.PythonLexer().get_tokens("def f(): pass"))
@@ -162,7 +171,7 @@ def test_formatter_public_api():
             pass
         inst.format(ts, out)
 
-    for formatter, info in formatters.FORMATTERS.iteritems():
+    for formatter, info in formatters.FORMATTERS.items():
         yield verify, formatter, info
 
 def test_formatter_encodings():
@@ -172,7 +181,7 @@ def test_formatter_encodings():
     fmt = HtmlFormatter()
     tokens = [(Text, u"ä")]
     out = format(tokens, fmt)
-    assert type(out) is unicode
+    assert type(out) is text_type
     assert u"ä" in out
 
     # encoding option
@@ -201,7 +210,7 @@ def test_formatter_unicode_handling():
         if formatter.name != 'Raw tokens':
             out = format(tokens, inst)
             if formatter.unicodeoutput:
-                assert type(out) is unicode
+                assert type(out) is text_type
 
             inst = formatter(encoding='utf-8')
             out = format(tokens, inst)
@@ -213,7 +222,7 @@ def test_formatter_unicode_handling():
             out = format(tokens, inst)
             assert type(out) is bytes, '%s: %r' % (formatter, out)
 
-    for formatter, info in formatters.FORMATTERS.iteritems():
+    for formatter, info in formatters.FORMATTERS.items():
         yield verify, formatter
 
@@ -241,7 +250,7 @@ class FiltersTest(unittest.TestCase):
             'whitespace': {'spaces': True, 'tabs': True, 'newlines': True},
             'highlight': {'names': ['isinstance', 'lexers', 'x']},
         }
-        for x in filters.FILTERS.keys():
+        for x in filters.FILTERS:
             lx = lexers.PythonLexer()
             lx.add_filter(x, **filter_args.get(x, {}))
             fp = open(TESTFILE, 'rb')
```
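The diff follows the usual single-source Python 2/3 porting pattern: `print` becomes a function via `__future__`, `unicode` is replaced by a `text_type` alias imported from `pygments.util`, and dictionary iteration uses plain iteration or `.items()` instead of the Python-2-only `.keys()`/`.iteritems()` calls. Below is a minimal sketch of that compatibility-alias idea; the module layout, the `describe` helper, and the `registry` dict are illustrative assumptions, not the actual `pygments.util` source.

```python
# Sketch of a Python 2/3 compatibility shim (illustrative only; the real
# pygments.util module may differ in names and structure).
from __future__ import print_function

import sys

if sys.version_info[0] < 3:
    text_type = unicode            # noqa: F821 - only defined on Python 2
    string_types = (str, unicode)  # noqa: F821
else:
    text_type = str
    string_types = (str,)


def describe(value):
    # Runs unchanged on both interpreters: print is a function, and
    # text_type names whichever native unicode string type exists.
    if isinstance(value, text_type):
        print(repr(value))
    return isinstance(value, string_types)


# Plain dict iteration and .items() are portable, unlike the
# .iterkeys()/.iteritems() methods removed in this diff.
registry = {'PythonLexer': ('python', 'py'), 'CLexer': ('c',)}
for name in registry:
    describe(name)
for name, aliases in registry.items():
    describe(name)
```

The new negative check in `test_get_lexers` follows the same framework-neutral style: it calls `lexers.get_lexer_by_name(None)`, treats `ClassNotFound` as the expected outcome, and raises in the `else` branch if no exception occurred, without depending on any particular test runner's `raises` helper.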