author    Paweł Fertyk <pfertyk@users.noreply.github.com>  2020-06-01 14:48:05 +0200
committer GitHub <noreply@github.com>  2020-06-01 14:48:05 +0200
commit    e5dc231aa0d780395436e55c621e22dacfaf97de (patch)
tree      55adfa89291b5695b24652d86f9a85730032a14d /tests
parent    231919b4ec7d6d0cb23940d414dd03d262d6a048 (diff)
download  pygments-git-e5dc231aa0d780395436e55c621e22dacfaf97de.tar.gz
Add GDScript lexer (#1457)

* Added GDScript lexer
* Fix regular expressions in GDScript lexer
* Update GDScript lexer with the current version from Godot docs
* Add tests for GDScript lexer
* Update authors
* Add an example file for GDScript
* Implement analyze_text for GAP and GDScript
* Fix example file name in tests
* Update license

Co-authored-by: Daniel J. Ramirez <djrmuv@gmail.com>
Diffstat (limited to 'tests')
-rw-r--r--  tests/examplefiles/gdscript_example.gd   77
-rw-r--r--  tests/test_gdscript.py                  167
-rw-r--r--  tests/test_lexers_other.py               16
3 files changed, 258 insertions(+), 2 deletions(-)
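The commit message mentions implementing analyze_text for GAP and GDScript. In Pygments, analyze_text(text) returns a score between 0.0 and 1.0 that guess_lexer() uses to rank candidate lexers. A minimal sketch of such a hook, with illustrative patterns and weights assumed here rather than taken from the committed implementation:

import re

def analyze_text(text):
    # Return a score in [0.0, 1.0]; guess_lexer() picks the lexer
    # whose analyze_text() reports the highest confidence.
    score = 0.0
    # 'func NAME(' is a strong GDScript signal (weight is illustrative)
    if re.search(r'\bfunc\s+\w+\s*\(', text):
        score += 0.4
    # an 'extends NAME' line is another common GDScript hint
    if re.search(r'^\s*extends\s+\w+', text, re.MULTILINE):
        score += 0.3
    return min(score, 1.0)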
diff --git a/tests/examplefiles/gdscript_example.gd b/tests/examplefiles/gdscript_example.gd
new file mode 100644
index 00000000..d1043fc3
--- /dev/null
+++ b/tests/examplefiles/gdscript_example.gd
@@ -0,0 +1,77 @@
+# A file is a class!
+
+# Inheritance
+
+extends BaseClass
+
+# (optional) class definition with a custom icon
+
+class_name MyClass, "res://path/to/optional/icon.svg"
+
+
+# Member variables
+
+var a = 5
+var s = "Hello"
+var arr = [1, 2, 3]
+var dict = {"key": "value", 2: 3}
+var typed_var: int
+var inferred_type := "String"
+
+# Constants
+
+const ANSWER = 42
+const THE_NAME = "Charly"
+
+# Enums
+
+enum {UNIT_NEUTRAL, UNIT_ENEMY, UNIT_ALLY}
+enum Named {THING_1, THING_2, ANOTHER_THING = -1}
+
+# Built-in vector types
+
+var v2 = Vector2(1, 2)
+var v3 = Vector3(1, 2, 3)
+
+
+# Function
+
+func some_function(param1, param2):
+ var local_var = 5
+
+ if param1 < local_var:
+ print(param1)
+ elif param2 > 5:
+ print(param2)
+ else:
+ print("Fail!")
+
+ for i in range(20):
+ print(i)
+
+ while param2 != 0:
+ param2 -= 1
+
+ var local_var2 = param1 + 3
+ return local_var2
+
+
+# Functions override functions with the same name on the base/parent class.
+# If you still want to call them, use '.' (like 'super' in other languages).
+
+func something(p1, p2):
+ .something(p1, p2)
+
+
+# Inner class
+
+class Something:
+ var a = 10
+
+
+# Constructor
+
+func _init():
+ print("Constructed!")
+ var lv = Something.new()
+ print(lv.a)
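With the example file in place, the new lexer can be exercised directly through the standard Pygments API; a usage sketch (the path assumes a checkout of the repository):

from pygments import highlight
from pygments.lexers import GDScriptLexer
from pygments.formatters import TerminalFormatter

# Render the GDScript example file with ANSI colors in a terminal
with open("tests/examplefiles/gdscript_example.gd") as f:
    print(highlight(f.read(), GDScriptLexer(), TerminalFormatter()))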
diff --git a/tests/test_gdscript.py b/tests/test_gdscript.py
new file mode 100644
index 00000000..d52d8ff8
--- /dev/null
+++ b/tests/test_gdscript.py
@@ -0,0 +1,167 @@
+# -*- coding: utf-8 -*-
+"""
+ GDScript Tests
+ ~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import pytest
+
+from pygments.lexers import GDScriptLexer
+from pygments.token import Token
+
+
+@pytest.fixture(scope="module")
+def lexer():
+ yield GDScriptLexer()
+
+
+def test_variable_declaration_and_assignment(lexer):
+ fragment = "var abc = 5.4"
+ tokens = [
+ (Token.Keyword, "var"),
+ (Token.Text, " "),
+ (Token.Name, "abc"),
+ (Token.Text, " "),
+ (Token.Operator, "="),
+ (Token.Text, " "),
+ (Token.Number.Float, "5.4"),
+ (Token.Text, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_simple_function(lexer):
+ fragment = "func abc(arg):\n\tprint(\"Hello, World!\")"
+ tokens = [
+ (Token.Keyword, "func"),
+ (Token.Text, " "),
+ (Token.Name, "abc"),
+ (Token.Punctuation, "("),
+ (Token.Name, "arg"),
+ (Token.Punctuation, ")"),
+ (Token.Punctuation, ":"),
+ (Token.Text, "\n"),
+ (Token.Text, "\t"),
+ (Token.Name.Builtin, "print"),
+ (Token.Punctuation, "("),
+ (Token.Literal.String.Double, "\""),
+ (Token.Literal.String.Double, "Hello, World!"),
+ (Token.Literal.String.Double, "\""),
+ (Token.Punctuation, ")"),
+ (Token.Text, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_function_with_types(lexer):
+ fragment = "func abc(arg: String) -> void:\n\tprint(\"Hello\", arg)"
+ tokens = [
+ (Token.Keyword, "func"),
+ (Token.Text, " "),
+ (Token.Name, "abc"),
+ (Token.Punctuation, "("),
+ (Token.Name, "arg"),
+ (Token.Punctuation, ":"),
+ (Token.Text, " "),
+ (Token.Name.Builtin.Type, "String"),
+ (Token.Punctuation, ")"),
+ (Token.Text, " "),
+ (Token.Operator, "-"),
+ (Token.Operator, ">"),
+ (Token.Text, " "),
+ (Token.Name, "void"),
+ (Token.Punctuation, ":"),
+ (Token.Text, "\n"),
+ (Token.Text, "\t"),
+ (Token.Name.Builtin, "print"),
+ (Token.Punctuation, "("),
+ (Token.Literal.String.Double, "\""),
+ (Token.Literal.String.Double, "Hello"),
+ (Token.Literal.String.Double, "\""),
+ (Token.Punctuation, ","),
+ (Token.Text, " "),
+ (Token.Name, "arg"),
+ (Token.Punctuation, ")"),
+ (Token.Text, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_signal(lexer):
+ fragment = "signal sig (arg1, arg2)"
+ tokens = [
+ (Token.Keyword, "signal"),
+ (Token.Text, " "),
+ (Token.Name, "sig"),
+ (Token.Text, " "),
+ (Token.Punctuation, "("),
+ (Token.Name, "arg1"),
+ (Token.Punctuation, ","),
+ (Token.Text, " "),
+ (Token.Name, "arg2"),
+ (Token.Punctuation, ")"),
+ (Token.Text, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_export_array(lexer):
+ fragment = "export (Array, AudioStream) var streams"
+ tokens = [
+ (Token.Keyword, "export"),
+ (Token.Text, " "),
+ (Token.Punctuation, "("),
+ (Token.Name.Builtin.Type, "Array"),
+ (Token.Punctuation, ","),
+ (Token.Text, " "),
+ (Token.Name, "AudioStream"),
+ (Token.Punctuation, ")"),
+ (Token.Text, " "),
+ (Token.Keyword, "var"),
+ (Token.Text, " "),
+ (Token.Name, "streams"),
+ (Token.Text, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_inner_class(lexer):
+ fragment = "class InnerClass:\n\tvar a = 5"
+ tokens = [
+ (Token.Keyword, "class"),
+ (Token.Text, " "),
+ (Token.Name, "InnerClass"),
+ (Token.Punctuation, ":"),
+ (Token.Text, "\n"),
+ (Token.Text, "\t"),
+ (Token.Keyword, "var"),
+ (Token.Text, " "),
+ (Token.Name, "a"),
+ (Token.Text, " "),
+ (Token.Operator, "="),
+ (Token.Text, " "),
+ (Token.Literal.Number.Integer, "5"),
+ (Token.Text, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_comment(lexer):
+ fragment = "# Comment"
+ tokens = [
+ (Token.Comment.Single, "# Comment"),
+ (Token.Text, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_multiline_string(lexer):
+ fragment = '"""\nMultiline\n"""'
+ tokens = [
+ (Token.Literal.String.Doc, '"""\nMultiline\n"""'),
+ (Token.Text, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
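Each test above follows the same pattern: run a fragment through get_tokens() and compare the resulting (token type, value) pairs against an expected list. A quick way to derive such expected lists interactively:

from pygments.lexers import GDScriptLexer

# Print the (token type, value) pairs the lexer emits for a fragment.
# get_tokens() normalizes the input to end with "\n", which is why
# every expected list in the tests ends with (Token.Text, "\n").
for token_type, value in GDScriptLexer().get_tokens("var x := 1.5"):
    print(token_type, repr(value))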
diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py
index 3e8d3fc1..70ffba17 100644
--- a/tests/test_lexers_other.py
+++ b/tests/test_lexers_other.py
@@ -12,8 +12,9 @@ import os
import pytest
-from pygments.lexers import guess_lexer
-from pygments.lexers.scripting import EasytrieveLexer, JclLexer, RexxLexer
+from pygments.lexers import (
+ EasytrieveLexer, GAPLexer, GDScriptLexer, JclLexer, RexxLexer, guess_lexer
+)
def _example_file_path(filename):
@@ -68,3 +69,14 @@ def test_rexx_can_guess_from_text():
parse value greeting "hello" name "!"
say name''')
assert val > 0.2
+
+
+@pytest.mark.parametrize("file_path, lexer", [
+ ("gdscript_example.gd", GDScriptLexer),
+ ("example.gd", GAPLexer),
+])
+def test_chooses_correct_lexer_for_example_files(file_path, lexer):
+ with open(_example_file_path(file_path), "rb") as fp:
+ text = fp.read().decode("utf-8")
+ guessed_lexer = guess_lexer(text)
+ assert guessed_lexer.name == lexer.name
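The parametrized test exercises content-based guessing: GAP and GDScript both claim the .gd extension, so guess_lexer() must fall back on each lexer's analyze_text() score. A sketch of the same mechanism in isolation (the expected name assumes GDScript scores highest for this fragment, as the test above asserts for the example file):

from pygments.lexers import guess_lexer

lexer = guess_lexer('extends Node\n\nfunc _ready():\n\tprint("hello")')
print(lexer.name)  # expected: "GDScript"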