diff options
Diffstat (limited to 'tests/test_regexlexer.py')
-rw-r--r-- | tests/test_regexlexer.py | 68 |
1 file changed, 34 insertions(+), 34 deletions(-)
diff --git a/tests/test_regexlexer.py b/tests/test_regexlexer.py index 683d6def..4e832361 100644 --- a/tests/test_regexlexer.py +++ b/tests/test_regexlexer.py @@ -3,15 +3,19 @@ Pygments regex lexer tests ~~~~~~~~~~~~~~~~~~~~~~~~~~ - :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -import unittest +import pytest from pygments.token import Text -from pygments.lexer import RegexLexer -from pygments.lexer import default +from pygments.lexer import RegexLexer, default + + +@pytest.fixture(scope='module') +def lexer(): + yield MyLexer() class MyLexer(RegexLexer): @@ -34,33 +38,29 @@ class MyLexer(RegexLexer): } -class TupleTransTest(unittest.TestCase): - def test(self): - lx = MyLexer() - toks = list(lx.get_tokens_unprocessed('abcde')) - self.assertEqual(toks, [ - (0, Text.Root, 'a'), (1, Text.Rag, 'b'), (2, Text.Rag, 'c'), - (3, Text.Beer, 'd'), (4, Text.Root, 'e')]) - - def test_multiline(self): - lx = MyLexer() - toks = list(lx.get_tokens_unprocessed('a\ne')) - self.assertEqual(toks, [ - (0, Text.Root, 'a'), (1, Text, u'\n'), (2, Text.Root, 'e')]) - - def test_default(self): - lx = MyLexer() - toks = list(lx.get_tokens_unprocessed('d')) - self.assertEqual(toks, [(0, Text.Beer, 'd')]) - - -class PopEmptyTest(unittest.TestCase): - def test_regular(self): - lx = MyLexer() - toks = list(lx.get_tokens_unprocessed('#e')) - self.assertEqual(toks, [(0, Text.Root, '#'), (1, Text.Root, 'e')]) - - def test_tuple(self): - lx = MyLexer() - toks = list(lx.get_tokens_unprocessed('@e')) - self.assertEqual(toks, [(0, Text.Root, '@'), (1, Text.Root, 'e')]) +def test_tuple(lexer): + toks = list(lexer.get_tokens_unprocessed('abcde')) + assert toks == [ + (0, Text.Root, 'a'), (1, Text.Rag, 'b'), (2, Text.Rag, 'c'), + (3, Text.Beer, 'd'), (4, Text.Root, 'e')] + + +def test_multiline(lexer): + toks = list(lexer.get_tokens_unprocessed('a\ne')) + assert toks == [ 
+ (0, Text.Root, 'a'), (1, Text, u'\n'), (2, Text.Root, 'e')] + + +def test_default(lexer): + toks = list(lexer.get_tokens_unprocessed('d')) + assert toks == [(0, Text.Beer, 'd')] + + +def test_pop_empty_regular(lexer): + toks = list(lexer.get_tokens_unprocessed('#e')) + assert toks == [(0, Text.Root, '#'), (1, Text.Root, 'e')] + + +def test_pop_empty_tuple(lexer): + toks = list(lexer.get_tokens_unprocessed('@e')) + assert toks == [(0, Text.Root, '@'), (1, Text.Root, 'e')] |