Diffstat (limited to 'tests/test_perllexer.py')
-rw-r--r--   tests/test_perllexer.py   35
1 file changed, 22 insertions, 13 deletions
diff --git a/tests/test_perllexer.py b/tests/test_perllexer.py
index 15d06af3..21bf749c 100644
--- a/tests/test_perllexer.py
+++ b/tests/test_perllexer.py
@@ -39,10 +39,10 @@ def assert_single_token(lexer, s, token):
def assert_tokens(lexer, strings, expected_tokens):
"""Show that a given string generates the expected tokens."""
tokens = list(lexer.get_tokens_unprocessed(''.join(strings)))
- assert len(tokens) == len(expected_tokens)
- for index, s in enumerate(strings):
- assert s == tokens[index][2]
- assert expected_tokens[index] == tokens[index][1]
+ parsed_strings = [t[2] for t in tokens]
+ assert parsed_strings == strings
+ parsed_tokens = [t[1] for t in tokens]
+ assert parsed_tokens == expected_tokens
def assert_fast_tokenization(lexer, s):
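Note on the hunk above: the rewritten helper compares whole lists instead of walking indices, so a mismatch in either the split strings or the token types shows up as a single element-wise assertion diff in pytest, and a length mismatch no longer aborts before the offending token is visible. A minimal standalone sketch of the new helper, assuming Pygments is installed and that its Perl lexer already emits Text.Whitespace for the blank, as the updated expectations in the next hunk require:

    from pygments.lexers import PerlLexer
    from pygments.token import Keyword, Name, Text

    def assert_tokens(lexer, strings, expected_tokens):
        """Show that a given string generates the expected tokens."""
        # get_tokens_unprocessed() yields (index, tokentype, value) tuples.
        tokens = list(lexer.get_tokens_unprocessed(''.join(strings)))
        # Comparing whole lists gives a readable element-wise failure
        # report instead of stopping at the first mismatching index.
        parsed_strings = [t[2] for t in tokens]
        assert parsed_strings == strings
        parsed_tokens = [t[1] for t in tokens]
        assert parsed_tokens == expected_tokens

    # Example use, mirroring one of the updated test cases below:
    assert_tokens(PerlLexer(), ['package', ' ', 'Foo'],
                  [Keyword, Text.Whitespace, Name.Namespace])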
@@ -161,21 +161,30 @@ def test_substitution_with_parenthesis(lexer):
# Namespaces/modules
def test_package_statement(lexer):
- assert_tokens(lexer, ['package', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
- assert_tokens(lexer, ['package', ' ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
+ assert_tokens(lexer, ['package', ' ', 'Foo'],
+ [Keyword, Text.Whitespace, Name.Namespace])
+ assert_tokens(lexer, ['package', ' ', 'Foo::Bar'],
+ [Keyword, Text.Whitespace, Name.Namespace])
def test_use_statement(lexer):
- assert_tokens(lexer, ['use', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
- assert_tokens(lexer, ['use', ' ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
+ assert_tokens(lexer, ['use', ' ', 'Foo'],
+ [Keyword, Text.Whitespace, Name.Namespace])
+ assert_tokens(lexer, ['use', ' ', 'Foo::Bar'],
+ [Keyword, Text.Whitespace, Name.Namespace])
def test_no_statement(lexer):
- assert_tokens(lexer, ['no', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
- assert_tokens(lexer, ['no', ' ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
+ assert_tokens(lexer, ['no', ' ', 'Foo'],
+ [Keyword, Text.Whitespace, Name.Namespace])
+ assert_tokens(lexer, ['no', ' ', 'Foo::Bar'],
+ [Keyword, Text.Whitespace, Name.Namespace])
def test_require_statement(lexer):
- assert_tokens(lexer, ['require', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
- assert_tokens(lexer, ['require', ' ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
- assert_tokens(lexer, ['require', ' ', '"Foo/Bar.pm"'], [Keyword, Text, String])
+ assert_tokens(lexer, ['require', ' ', 'Foo'],
+ [Keyword, Text.Whitespace, Name.Namespace])
+ assert_tokens(lexer, ['require', ' ', 'Foo::Bar'],
+ [Keyword, Text.Whitespace, Name.Namespace])
+ assert_tokens(lexer, ['require', ' ', '"Foo/Bar.pm"'],
+ [Keyword, Text.Whitespace, String])
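Note on the second hunk: the only substantive change is that the expected token for the separating blank moves from Text to Text.Whitespace. In Pygments, Whitespace is defined as an alias of Token.Text.Whitespace, and subtokens fall back to their parent's style, so styles keyed on Text still apply. A quick interactive check of the new expectation; the output shown is what the updated tests assert, assuming a Pygments version whose Perl lexer emits the whitespace subtoken:

    >>> from pygments.lexers import PerlLexer
    >>> list(PerlLexer().get_tokens_unprocessed('package Foo'))
    [(0, Token.Keyword, 'package'), (7, Token.Text.Whitespace, ' '), (8, Token.Name.Namespace, 'Foo')]
    >>> from pygments.token import Text, Whitespace
    >>> Whitespace is Text.Whitespace
    True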