diff options
author | Georg Brandl <georg@python.org> | 2014-10-08 09:21:15 +0200 |
---|---|---|
committer | Georg Brandl <georg@python.org> | 2014-10-08 09:21:15 +0200 |
commit | 444fb6fd9b3492040a36fcca672fee8175f8d603 (patch) | |
tree | 2bd411ca78decf276b95dc6b1788e594b2e35287 /tests/test_shell.py | |
parent | 491fec23ef01687906f5d71ee718522cd2917926 (diff) | |
parent | c1bfe4eed3805d3556bffa3c6b9cc2d3f6976205 (diff) | |
download | pygments-444fb6fd9b3492040a36fcca672fee8175f8d603.tar.gz |
Merged in leodemoura/pygments-main (pull request #399)
Diffstat (limited to 'tests/test_shell.py')
-rw-r--r-- | tests/test_shell.py | 63 |
1 file changed, 63 insertions, 0 deletions
# -*- coding: utf-8 -*-
"""
    Basic Shell Tests
    ~~~~~~~~~~~~~~~~~

    Token-level regression tests for the Bash lexer, covering parameter
    expansion with pattern substitution and ANSI-C quoted strings.

    :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import unittest

from pygments.token import Token
from pygments.lexers import BashLexer


class BashTest(unittest.TestCase):
    """Check that BashLexer tokenizes tricky quoting constructs correctly."""

    def setUp(self):
        # A fresh lexer per test; unlimited diff output for token lists.
        self.lexer = BashLexer()
        self.maxDiff = None

    def testCurlyNoEscapeAndQuotes(self):
        """Double-quoted pattern inside ``${a//[...]/}`` substitution."""
        source = u'echo "${a//["b"]/}"\n'
        expected = [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.Literal.String.Double, u'"'),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Double, u'"b"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(list(self.lexer.get_tokens(source)), expected)

    def testCurlyWithEscape(self):
        """Backslash-escaped quote inside an unquoted ``${a//[...]/}``."""
        source = u'echo ${a//[\\"]/}\n'
        expected = [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Escape, u'\\"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(list(self.lexer.get_tokens(source)), expected)

    def testParsedSingle(self):
        """ANSI-C quoting ``$'...'`` with an escaped single quote inside."""
        source = u"a=$'abc\\''\n"
        expected = [
            (Token.Name.Variable, u'a'),
            (Token.Operator, u'='),
            (Token.Literal.String.Single, u"$'abc\\''"),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(list(self.lexer.get_tokens(source)), expected)