diff options
Diffstat (limited to 'tests/test_qbasiclexer.py')
-rw-r--r-- | tests/test_qbasiclexer.py | 54 |
1 file changed, 27 insertions(+), 27 deletions(-)
# -*- coding: utf-8 -*-
"""
    Tests for QBasic
    ~~~~~~~~~~~~~~~~

    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import pytest

from pygments.token import Token
from pygments.lexers.basic import QBasicLexer


@pytest.fixture(scope='module')
def lexer():
    # Module-scoped: one QBasicLexer is shared by every test in this file,
    # since lexers are stateless between get_tokens() calls.
    yield QBasicLexer()


def test_keywords_with_dollar(lexer):
    """Built-ins ending in '$' (e.g. RIGHT$) must lex as one reserved word,
    not as a name followed by a stray '$' operator/error token."""
    fragment = u'DIM x\nx = RIGHT$("abc", 1)\n'
    expected = [
        (Token.Keyword.Declaration, u'DIM'),
        (Token.Text.Whitespace, u' '),
        (Token.Name.Variable.Global, u'x'),
        (Token.Text, u'\n'),
        (Token.Name.Variable.Global, u'x'),
        (Token.Text.Whitespace, u' '),
        (Token.Operator, u'='),
        (Token.Text.Whitespace, u' '),
        (Token.Keyword.Reserved, u'RIGHT$'),
        (Token.Punctuation, u'('),
        (Token.Literal.String.Double, u'"abc"'),
        (Token.Punctuation, u','),
        (Token.Text.Whitespace, u' '),
        (Token.Literal.Number.Integer.Long, u'1'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == expected