author     Oleh Prypin <oleh@pryp.in>    2021-01-18 20:39:02 +0100
committer  GitHub <noreply@github.com>   2021-01-18 20:39:02 +0100
commit     f0445be718da83541ea3401aad882f3937147263 (patch)
tree       26bd361a410d8bea33ce259321fad63e7f3c61af /tests/test_kotlin.py
parent     423c44a451db7e5f63147b1c1519661d745fc43a (diff)
download   pygments-git-f0445be718da83541ea3401aad882f3937147263.tar.gz
Replace tests that assert on token output with auto-updatable samples (#1649)
Diffstat (limited to 'tests/test_kotlin.py')
-rw-r--r--  tests/test_kotlin.py  132
1 file changed, 0 insertions, 132 deletions
diff --git a/tests/test_kotlin.py b/tests/test_kotlin.py
deleted file mode 100644
index 8d67f83d..00000000
--- a/tests/test_kotlin.py
+++ /dev/null
@@ -1,132 +0,0 @@
-"""
-    Basic JavaLexer Test
-    ~~~~~~~~~~~~~~~~~~~~
-
-    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import pytest
-
-from pygments.token import Text, Name, Keyword, Punctuation, String
-from pygments.lexers import KotlinLexer
-
-
-@pytest.fixture(scope='module')
-def lexer():
-    yield KotlinLexer()
-
-
-def test_can_cope_with_backtick_names_in_functions(lexer):
-    fragment = 'fun `wo bble`'
-    tokens = [
-        (Keyword, 'fun'),
-        (Text, ' '),
-        (Name.Function, '`wo bble`'),
-        (Text, '\n')
-    ]
-    assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_can_cope_with_commas_and_dashes_in_backtick_Names(lexer):
-    fragment = 'fun `wo,-bble`'
-    tokens = [
-        (Keyword, 'fun'),
-        (Text, ' '),
-        (Name.Function, '`wo,-bble`'),
-        (Text, '\n')
-    ]
-    assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_can_cope_with_destructuring(lexer):
-    fragment = 'val (a, b) = '
-    tokens = [
-        (Keyword, 'val'),
-        (Text, ' '),
-        (Punctuation, '('),
-        (Name.Property, 'a'),
-        (Punctuation, ','),
-        (Text, ' '),
-        (Name.Property, 'b'),
-        (Punctuation, ')'),
-        (Text, ' '),
-        (Punctuation, '='),
-        (Text, ' '),
-        (Text, '\n')
-    ]
-    assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_can_cope_generics_in_destructuring(lexer):
-    fragment = 'val (a: List<Something>, b: Set<Wobble>) ='
-    tokens = [
-        (Keyword, 'val'),
-        (Text, ' '),
-        (Punctuation, '('),
-        (Name.Property, 'a'),
-        (Punctuation, ':'),
-        (Text, ' '),
-        (Name.Property, 'List'),
-        (Punctuation, '<'),
-        (Name, 'Something'),
-        (Punctuation, '>'),
-        (Punctuation, ','),
-        (Text, ' '),
-        (Name.Property, 'b'),
-        (Punctuation, ':'),
-        (Text, ' '),
-        (Name.Property, 'Set'),
-        (Punctuation, '<'),
-        (Name, 'Wobble'),
-        (Punctuation, '>'),
-        (Punctuation, ')'),
-        (Text, ' '),
-        (Punctuation, '='),
-        (Text, '\n')
-    ]
-    assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_can_cope_with_generics(lexer):
-    fragment = 'inline fun <reified T : ContractState> VaultService.queryBy(): Vault.Page<T> {'
-    tokens = [
-        (Keyword, 'inline fun'),
-        (Text, ' '),
-        (Punctuation, '<'),
-        (Keyword, 'reified'),
-        (Text, ' '),
-        (Name, 'T'),
-        (Text, ' '),
-        (Punctuation, ':'),
-        (Text, ' '),
-        (Name, 'ContractState'),
-        (Punctuation, '>'),
-        (Text, ' '),
-        (Name.Class, 'VaultService'),
-        (Punctuation, '.'),
-        (Name.Function, 'queryBy'),
-        (Punctuation, '('),
-        (Punctuation, ')'),
-        (Punctuation, ':'),
-        (Text, ' '),
-        (Name, 'Vault'),
-        (Punctuation, '.'),
-        (Name, 'Page'),
-        (Punctuation, '<'),
-        (Name, 'T'),
-        (Punctuation, '>'),
-        (Text, ' '),
-        (Punctuation, '{'),
-        (Text, '\n')
-    ]
-    assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_should_cope_with_multiline_comments(lexer):
-    fragment = '"""\nthis\nis\na\ncomment"""'
-    tokens = [
-        (String, '"""\nthis\nis\na\ncomment"""'),
-        (Text, '\n')
-    ]
-    assert list(lexer.get_tokens(fragment)) == tokens
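
For reference, the tests deleted above assert on hand-written token lists; per the commit message, #1649 replaces them with sample files whose expected token output can be regenerated automatically when a lexer changes. The sketch below illustrates that golden-sample pattern with pytest. The snippets/kotlin.txt path, the lex_to_text helper and the --update-goldens option name are assumptions made for illustration, not the actual plugin the commit adds.

# A minimal sketch of the golden-sample pattern described by the commit
# message, not the actual test machinery added in #1649. The snippet path,
# the token-formatting helper and the --update-goldens option are
# illustrative assumptions.
from pathlib import Path

import pytest

from pygments.lexers import KotlinLexer

# Hypothetical location of the stored sample ("golden") output.
SNIPPET = Path(__file__).parent / 'snippets' / 'kotlin.txt'


def lex_to_text(source):
    # Render the token stream in a stable, line-oriented form that is
    # easy to diff and to regenerate.
    lexer = KotlinLexer()
    return ''.join(f'{str(token):<30} {value!r}\n'
                   for token, value in lexer.get_tokens(source))


def test_kotlin_backtick_names(request):
    actual = lex_to_text('fun `wo bble`\n')
    # When an update flag is passed (assumed here to be --update-goldens),
    # rewrite the stored sample instead of asserting against it.
    if request.config.getoption('--update-goldens', default=False):
        SNIPPET.parent.mkdir(parents=True, exist_ok=True)
        SNIPPET.write_text(actual)
        return
    if not SNIPPET.exists():
        pytest.skip('no stored sample yet; run with the update flag first')
    assert actual == SNIPPET.read_text()

The point of the pattern is that a lexer change only requires re-running the suite with the update flag and reviewing the resulting diff of the sample files, rather than editing token tuples by hand as in the deleted tests.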