diff options
author | Tim Hatch <tim@timhatch.com> | 2014-04-29 17:03:52 -0700 |
---|---|---|
committer | Tim Hatch <tim@timhatch.com> | 2014-04-29 17:03:52 -0700 |
commit | 84525915f44875a4d179e2effefd72682c95bc7b (patch) | |
tree | 3bb74648952b6e3bf4e0f57304e77d327dc9fc8e /tests/test_qbasiclexer.py | |
parent | 9aed3827db61f0365257bd73a2176b114b51c044 (diff) | |
parent | 84120e282ef8d812f98fe6d66963fa39d2a71f98 (diff) | |
download | pygments-84525915f44875a4d179e2effefd72682c95bc7b.tar.gz |
Merged in jaingaurav2/pygments-main-926 (pull request #336)
Reorder regex expressions so that C keywords are matched before a label
Diffstat (limited to 'tests/test_qbasiclexer.py')
-rw-r--r-- | tests/test_qbasiclexer.py | 43 |
1 file changed, 43 insertions, 0 deletions
# -*- coding: utf-8 -*-
"""
    Tests for QBasic
    ~~~~~~~~~~~~~~~~

    :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

# NOTE(review): dropped `import glob` and `import os` from the original —
# this is the complete file (new, +43 lines) and neither name is used.
import unittest

from pygments.token import Token
from pygments.lexers.qbasic import QBasicLexer


class QBasicTest(unittest.TestCase):
    """Regression tests for the QBasic lexer."""

    def setUp(self):
        # Fresh lexer per test; unlimited diff output so a token-list
        # mismatch is shown in full by assertEqual.
        self.lexer = QBasicLexer()
        self.maxDiff = None

    def testKeywordsWithDollar(self):
        """Keywords ending in '$' (e.g. RIGHT$) lex as one reserved token.

        Guards against the lexer splitting 'RIGHT$' into an identifier
        plus a stray '$' character.
        """
        fragment = u'DIM x\nx = RIGHT$("abc", 1)\n'
        expected = [
            (Token.Keyword.Declaration, u'DIM'),
            (Token.Text.Whitespace, u' '),
            (Token.Name.Variable.Global, u'x'),
            (Token.Text, u'\n'),
            (Token.Name.Variable.Global, u'x'),
            (Token.Text.Whitespace, u' '),
            (Token.Operator, u'='),
            (Token.Text.Whitespace, u' '),
            (Token.Keyword.Reserved, u'RIGHT$'),
            (Token.Punctuation, u'('),
            (Token.Literal.String.Double, u'"abc"'),
            (Token.Punctuation, u','),
            (Token.Text.Whitespace, u' '),
            (Token.Literal.Number.Integer.Long, u'1'),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))