summary refs log tree commit diff
path: root/tests/test_shell.py
diff options
context:
space:
mode:
Diffstat (limited to 'tests/test_shell.py')
-rw-r--r--  tests/test_shell.py  57
1 file changed, 55 insertions(+), 2 deletions(-)
diff --git a/tests/test_shell.py b/tests/test_shell.py
index 4eb5a15a..e283793e 100644
--- a/tests/test_shell.py
+++ b/tests/test_shell.py
@@ -3,14 +3,14 @@
Basic Shell Tests
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import unittest
from pygments.token import Token
-from pygments.lexers import BashLexer
+from pygments.lexers import BashLexer, BashSessionLexer
class BashTest(unittest.TestCase):
@@ -87,3 +87,56 @@ class BashTest(unittest.TestCase):
(Token.Text, u'\n'),
]
self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testArrayNums(self):
+ fragment = u'a=(1 2 3)\n'
+ tokens = [
+ (Token.Name.Variable, u'a'),
+ (Token.Operator, u'='),
+ (Token.Operator, u'('),
+ (Token.Literal.Number, u'1'),
+ (Token.Text, u' '),
+ (Token.Literal.Number, u'2'),
+ (Token.Text, u' '),
+ (Token.Literal.Number, u'3'),
+ (Token.Operator, u')'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testEndOfLineNums(self):
+ fragment = u'a=1\nb=2 # comment\n'
+ tokens = [
+ (Token.Name.Variable, u'a'),
+ (Token.Operator, u'='),
+ (Token.Literal.Number, u'1'),
+ (Token.Text, u'\n'),
+ (Token.Name.Variable, u'b'),
+ (Token.Operator, u'='),
+ (Token.Literal.Number, u'2'),
+ (Token.Text, u' '),
+ (Token.Comment.Single, u'# comment\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+class BashSessionTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = BashSessionLexer()
+ self.maxDiff = None
+
+ def testNeedsName(self):
+ fragment = u'$ echo \\\nhi\nhi\n'
+ tokens = [
+ (Token.Text, u''),
+ (Token.Generic.Prompt, u'$'),
+ (Token.Text, u' '),
+ (Token.Name.Builtin, u'echo'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Escape, u'\\\n'),
+ (Token.Text, u'hi'),
+ (Token.Text, u'\n'),
+ (Token.Generic.Output, u'hi\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+