author     Tim Hatch <tim@timhatch.com>    2016-06-01 00:38:50 -0700
committer  Tim Hatch <tim@timhatch.com>    2016-06-01 00:38:50 -0700
commit     0ed2df2cf1625f22d0ee8cfad7d0c711a2ee9fb6 (patch)
tree       87eee8a2295a38268757f35ac0067016adf54248
parent     df5dbfe0e3723440990d6c6ab0706f7a3773fdc9 (diff)
download   pygments-0ed2df2cf1625f22d0ee8cfad7d0c711a2ee9fb6.tar.gz
Improve number highlighting in bash.
Fixes #1244
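
To illustrate why the pattern change fixes the reported cases, here is a minimal standalone sketch (not part of the commit) comparing the old and new regexes from the diff below against the same inputs the new tests use:

import re

# Old rule: digits are a Number only when followed by a space or end of input.
OLD = re.compile(r'\d+(?= |\Z)')
# New rule: digits are a Number when followed by any word boundary.
NEW = re.compile(r'\d+\b')

for sample in [u'a=(1 2 3)\n', u'a=1\nb=2 # comment\n']:
    print(repr(sample))
    print('  old:', OLD.findall(sample))   # misses '3' before ')' and '1' before '\n'
    print('  new:', NEW.findall(sample))   # finds every number
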
-rw-r--r--  pygments/lexers/shell.py  |  2
-rw-r--r--  tests/test_shell.py       | 32
2 files changed, 33 insertions, 1 deletion
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
index c2f54fc2..b9368ade 100644
--- a/pygments/lexers/shell.py
+++ b/pygments/lexers/shell.py
@@ -83,7 +83,7 @@ class BashLexer(RegexLexer):
             (r'&', Punctuation),
             (r'\|', Punctuation),
             (r'\s+', Text),
-            (r'\d+(?= |\Z)', Number),
+            (r'\d+\b', Number),
             (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
             (r'<', Text),
         ],
diff --git a/tests/test_shell.py b/tests/test_shell.py
index 4eb5a15a..76065caa 100644
--- a/tests/test_shell.py
+++ b/tests/test_shell.py
@@ -87,3 +87,35 @@ class BashTest(unittest.TestCase):
             (Token.Text, u'\n'),
         ]
         self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+    def testArrayNums(self):
+        fragment = u'a=(1 2 3)\n'
+        tokens = [
+            (Token.Name.Variable, u'a'),
+            (Token.Operator, u'='),
+            (Token.Operator, u'('),
+            (Token.Literal.Number, u'1'),
+            (Token.Text, u' '),
+            (Token.Literal.Number, u'2'),
+            (Token.Text, u' '),
+            (Token.Literal.Number, u'3'),
+            (Token.Operator, u')'),
+            (Token.Text, u'\n'),
+        ]
+        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+    def testEndOfLineNums(self):
+        fragment = u'a=1\nb=2 # comment\n'
+        tokens = [
+            (Token.Name.Variable, u'a'),
+            (Token.Operator, u'='),
+            (Token.Literal.Number, u'1'),
+            (Token.Text, u'\n'),
+            (Token.Name.Variable, u'b'),
+            (Token.Operator, u'='),
+            (Token.Literal.Number, u'2'),
+            (Token.Text, u' '),
+            (Token.Comment.Single, u'# comment\n'),
+        ]
+        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
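
As a quick sanity check (not part of the commit), the patched lexer can be exercised directly; with the new rule, every digit in a bash array assignment should come back as a Number token:

from pygments.lexers.shell import BashLexer
from pygments.token import Number

lexer = BashLexer()
# Tokenize the same fragment the new testArrayNums case uses.
tokens = list(lexer.get_tokens(u'a=(1 2 3)\n'))
numbers = [value for token, value in tokens if token in Number]
print(numbers)  # expected with the patched lexer: ['1', '2', '3']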