diff options
Author:    Tim Hatch <tim@timhatch.com>  2016-05-31 21:46:37 -0700
Committer: Tim Hatch <tim@timhatch.com>  2016-05-31 21:46:37 -0700
Commit:    d352bbdf4d2f6265d4ee934799775b044eb47678 (patch)
Tree:      41072489f9b3c62d21ef8dcfe5ee93af91818e5f
Parent:    b2eb5cbe37ece98b456c6d2ae167e5d04378a985 (diff)
Download:  pygments-d352bbdf4d2f6265d4ee934799775b044eb47678.tar.gz
Allow open c-style comments.
Fixes #1114
Allow open c-style comments.
Fixes #1114
 pygments/lexers/c_cpp.py |  6 ++--
 tests/test_cpp.py        | 32 ++++
 2 files changed, 36 insertions(+), 2 deletions(-)
diff --git a/pygments/lexers/c_cpp.py b/pygments/lexers/c_cpp.py index a8d75c0a..2a2419d4 100644 --- a/pygments/lexers/c_cpp.py +++ b/pygments/lexers/c_cpp.py @@ -46,8 +46,10 @@ class CFamilyLexer(RegexLexer): (r'\n', Text), (r'\s+', Text), (r'\\\n', Text), # line continuation - (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single), - (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single), + (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline), + # Open until EOF, so no ending delimeter + (r'/(\\\n)?[*][\w\W]*', Comment.Multiline), ], 'statements': [ (r'(L?)(")', bygroups(String.Affix, String), 'string'), diff --git a/tests/test_cpp.py b/tests/test_cpp.py new file mode 100644 index 00000000..e1b94a8e --- /dev/null +++ b/tests/test_cpp.py @@ -0,0 +1,32 @@ +""" + CPP Tests + ~~~~~~~~~ + + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import unittest + +from pygments.lexers import CppLexer +from pygments.token import Token + + +class CppTest(unittest.TestCase): + def setUp(self): + self.lexer = CppLexer() + + def testGoodComment(self): + fragment = u'/* foo */\n' + tokens = [ + (Token.Comment.Multiline, u'/* foo */'), + (Token.Text, u'\n'), + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + + def testOpenComment(self): + fragment = u'/* foo\n' + tokens = [ + (Token.Comment.Multiline, u'/* foo\n'), + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) |