summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGaurav Jain <gaurav@gauravjain.org>2014-04-29 22:45:58 -0400
committerGaurav Jain <gaurav@gauravjain.org>2014-04-29 22:45:58 -0400
commit93c7b130ad8712bff29c98ce619914cbc803f3f3 (patch)
tree2e06c26ea25ddd0bc0af1ab149cbba626362abeb
parent56d9659a5bccd4010f2fdc7becb8389aff363f83 (diff)
downloadpygments-93c7b130ad8712bff29c98ce619914cbc803f3f3.tar.gz
Use default state transition to eliminate unnecessary empty tokens
-rw-r--r--pygments/lexers/compiled.py4
-rw-r--r--tests/test_clexer.py22
-rw-r--r--tests/test_objectiveclexer.py10
3 files changed, 3 insertions, 33 deletions
diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
index 2e111deb..d70cf38f 100644
--- a/pygments/lexers/compiled.py
+++ b/pygments/lexers/compiled.py
@@ -13,7 +13,7 @@ import re
from string import Template
from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
- this, combined, inherit, do_insertions
+ this, combined, inherit, do_insertions, default
from pygments.util import get_bool_opt, get_list_opt
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error, Literal, Generic
@@ -107,7 +107,7 @@ class CFamilyLexer(RegexLexer):
r'(' + _ws + r')?(;)',
bygroups(using(this), Name.Function, using(this), using(this),
Punctuation)),
- ('', Text, 'statement'),
+ default('statement'),
],
'statement' : [
include('whitespace'),
diff --git a/tests/test_clexer.py b/tests/test_clexer.py
index 5d251d2e..e359c44e 100644
--- a/tests/test_clexer.py
+++ b/tests/test_clexer.py
@@ -28,7 +28,7 @@ class CLexerTest(unittest.TestCase):
Number.Float, Number.Float], code.split()):
wanted.append(item)
wanted.append((Text, ' '))
- wanted = [(Text, '')] + wanted[:-1] + [(Text, '\n')]
+ wanted = wanted[:-1] + [(Text, '\n')]
self.assertEqual(list(self.lexer.get_tokens(code)), wanted)
def testSwitch(self):
@@ -44,15 +44,12 @@ class CLexerTest(unittest.TestCase):
}
'''
expected = [
- (Token.Text, u''),
(Token.Keyword.Type, u'int'),
(Token.Text, u' '),
(Token.Name.Function, u'main'),
- (Token.Text, u''),
(Token.Punctuation, u'('),
(Token.Punctuation, u')'),
(Token.Text, u'\n'),
- (Token.Text, u''),
(Token.Punctuation, u'{'),
(Token.Text, u'\n'),
(Token.Text, u' '),
@@ -83,7 +80,6 @@ class CLexerTest(unittest.TestCase):
(Token.Text, u'\n'),
(Token.Punctuation, u'}'),
(Token.Text, u'\n'),
- (Token.Text, u''),
]
self.assertEqual(expected, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
@@ -100,15 +96,12 @@ class CLexerTest(unittest.TestCase):
}
'''
expected = [
- (Token.Text, u''),
(Token.Keyword.Type, u'int'),
(Token.Text, u' '),
(Token.Name.Function, u'main'),
- (Token.Text, u''),
(Token.Punctuation, u'('),
(Token.Punctuation, u')'),
(Token.Text, u'\n'),
- (Token.Text, u''),
(Token.Punctuation, u'{'),
(Token.Text, u'\n'),
(Token.Text, u' '),
@@ -141,7 +134,6 @@ class CLexerTest(unittest.TestCase):
(Token.Text, u'\n'),
(Token.Punctuation, u'}'),
(Token.Text, u'\n'),
- (Token.Text, u''),
]
self.assertEqual(expected, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
@@ -154,15 +146,12 @@ class CLexerTest(unittest.TestCase):
}
'''
expected = [
- (Token.Text, u''),
(Token.Keyword.Type, u'int'),
(Token.Text, u' '),
(Token.Name.Function, u'main'),
- (Token.Text, u''),
(Token.Punctuation, u'('),
(Token.Punctuation, u')'),
(Token.Text, u'\n'),
- (Token.Text, u''),
(Token.Punctuation, u'{'),
(Token.Text, u'\n'),
(Token.Name.Label, u'foo'),
@@ -176,7 +165,6 @@ class CLexerTest(unittest.TestCase):
(Token.Text, u'\n'),
(Token.Punctuation, u'}'),
(Token.Text, u'\n'),
- (Token.Text, u''),
]
self.assertEqual(expected, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
@@ -189,15 +177,12 @@ class CLexerTest(unittest.TestCase):
}
'''
expected = [
- (Token.Text, u''),
(Token.Keyword.Type, u'int'),
(Token.Text, u' '),
(Token.Name.Function, u'main'),
- (Token.Text, u''),
(Token.Punctuation, u'('),
(Token.Punctuation, u')'),
(Token.Text, u'\n'),
- (Token.Text, u''),
(Token.Punctuation, u'{'),
(Token.Text, u'\n'),
(Token.Name.Label, u'foo'),
@@ -212,7 +197,6 @@ class CLexerTest(unittest.TestCase):
(Token.Text, u'\n'),
(Token.Punctuation, u'}'),
(Token.Text, u'\n'),
- (Token.Text, u''),
]
self.assertEqual(expected, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
@@ -225,15 +209,12 @@ class CLexerTest(unittest.TestCase):
}
'''
expected = [
- (Token.Text, u''),
(Token.Keyword.Type, u'int'),
(Token.Text, u' '),
(Token.Name.Function, u'main'),
- (Token.Text, u''),
(Token.Punctuation, u'('),
(Token.Punctuation, u')'),
(Token.Text, u'\n'),
- (Token.Text, u''),
(Token.Punctuation, u'{'),
(Token.Text, u'\n'),
(Token.Name.Label, u'foo'),
@@ -251,6 +232,5 @@ class CLexerTest(unittest.TestCase):
(Token.Text, u'\n'),
(Token.Punctuation, u'}'),
(Token.Text, u'\n'),
- (Token.Text, u''),
]
self.assertEqual(expected, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
diff --git a/tests/test_objectiveclexer.py b/tests/test_objectiveclexer.py
index 46fdb6d2..7339f6f7 100644
--- a/tests/test_objectiveclexer.py
+++ b/tests/test_objectiveclexer.py
@@ -22,20 +22,17 @@ class ObjectiveCLexerTest(unittest.TestCase):
def testLiteralNumberInt(self):
fragment = u'@(1);\n'
expected = [
- (Token.Text, u''),
(Token.Literal, u'@('),
(Token.Literal.Number.Integer, u'1'),
(Token.Literal, u')'),
(Token.Punctuation, u';'),
(Token.Text, u'\n'),
- (Token.Text, u''),
]
self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
def testLiteralNumberExpression(self):
fragment = u'@(1+2);\n'
expected = [
- (Token.Text, u''),
(Token.Literal, u'@('),
(Token.Literal.Number.Integer, u'1'),
(Token.Operator, u'+'),
@@ -43,14 +40,12 @@ class ObjectiveCLexerTest(unittest.TestCase):
(Token.Literal, u')'),
(Token.Punctuation, u';'),
(Token.Text, u'\n'),
- (Token.Text, u''),
]
self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
def testLiteralNumberNestedExpression(self):
fragment = u'@(1+(2+3));\n'
expected = [
- (Token.Text, u''),
(Token.Literal, u'@('),
(Token.Literal.Number.Integer, u'1'),
(Token.Operator, u'+'),
@@ -62,30 +57,25 @@ class ObjectiveCLexerTest(unittest.TestCase):
(Token.Literal, u')'),
(Token.Punctuation, u';'),
(Token.Text, u'\n'),
- (Token.Text, u''),
]
self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
def testLiteralNumberBool(self):
fragment = u'@NO;\n'
expected = [
- (Token.Text, u''),
(Token.Literal.Number, u'@NO'),
(Token.Punctuation, u';'),
(Token.Text, u'\n'),
- (Token.Text, u''),
]
self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
def testLieralNumberBoolExpression(self):
fragment = u'@(YES);\n'
expected = [
- (Token.Text, u''),
(Token.Literal, u'@('),
(Token.Name.Builtin, u'YES'),
(Token.Literal, u')'),
(Token.Punctuation, u';'),
(Token.Text, u'\n'),
- (Token.Text, u''),
]
self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))