summaryrefslogtreecommitdiff
path: root/tests
diff options
context:
space:
mode:
authorThomas Aglassinger <roskakori@users.sourceforge.net>2016-03-21 08:29:44 +0100
committerThomas Aglassinger <roskakori@users.sourceforge.net>2016-03-21 08:29:44 +0100
commit1d5aed09f1f7fcfa27c73d03ffc1a61e686658da (patch)
treee58d5ca42db23b95a639b942d46423d2c76d934d /tests
parenta4a3df4ac8c645d890df8fc264733045108a3f53 (diff)
downloadpygments-1d5aed09f1f7fcfa27c73d03ffc1a61e686658da.tar.gz
Added lexer for Transact-SQL as used by Microsoft SQL Server and Sybase.
Diffstat (limited to 'tests')
-rw-r--r--tests/examplefiles/test_transact-sql.txt68
-rw-r--r--tests/test_sql.py76
2 files changed, 144 insertions, 0 deletions
diff --git a/tests/examplefiles/test_transact-sql.txt b/tests/examplefiles/test_transact-sql.txt
new file mode 100644
index 00000000..90e06c34
--- /dev/null
+++ b/tests/examplefiles/test_transact-sql.txt
@@ -0,0 +1,68 @@
+-- Example Transact-SQL file.
+--
+-- We cannot use "*.sql" as file suffix because then the automatic tests for
+-- the ANSI SQL lexer would attempt to read it and in turn detect errors in
+-- it which would cause a test case to fail.
+
+-- Single line comment
+/* A comment
+ * spanning two lines. */
+ /* An indented comment
+ * spanning multiple
+ * lines. */
+/* A /* nested */ comment. */
+
+select
+ left(emp.firstname, 1) + '.' + [emp.surname] as "Name",
+ dep.name as [Department]
+into
+ #temp_employee
+from
+ employee as emp
+ inner join department as dep on
+ dep.ident_code = emp.department_id
+where
+ emp.date_of_birth >= '1990-01-01';
+go
+
+declare @TextToFind nvarchar(100) = N'some
+text across
+multiple lines';
+
+set @TextToFind = 'hello' + ' world';
+set @TextToFind += '!';
+
+delete from
+ [server].[database].[schema].[table]
+where
+ [Text] = @TextToFind and author Not LIKE '%some%';
+
+goto overthere;
+overthere:
+
+select
+ 123 as "int 1",
+ +123 as "int 2",
+ -123 as "int 3",
+ 0x20 as "hex int",
+ 123.45 as "float 1",
+ -1.23e45 as "float 2",
+ +1.23E+45 as "float 3",
+ -1.23e-45 as "float 4";
+
+Select @@Error, $PARTITion.RangePF1(10);
+
+select top 3 Ähnliches from Müll;
+
+-- Example transaction
+BEGIN TRAN
+
+BEGIN TRY
+ INSERT INTO #temp_employee(Name, Department) VALUES ('L. Miller', 'Sales')
+ iNsErT inTO #temp_employee(Name, Department) VaLuEs ('M. Webster', 'Helpdesk')
+ COMMIT TRAN
+END TRY
+BEGIN CATCH
+ print 'cannot perform transaction; rolling back';
+ ROLLBACK TRAN
+END CATCH
diff --git a/tests/test_sql.py b/tests/test_sql.py
new file mode 100644
index 00000000..37a81ff8
--- /dev/null
+++ b/tests/test_sql.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+"""
+ Pygments SQL lexers tests
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+import io
+import os.path
+import unittest
+
+from pygments.lexers.sql import TransactSqlLexer
+from pygments.token import Comment, Error, Name, Number, Whitespace
+
+
class TransactSqlLexerTest(unittest.TestCase):
    """Tests for ``pygments.lexers.sql.TransactSqlLexer``.

    Covers single tokens (numbers, names), comment nesting, and a full
    example file that must lex without producing any ``Error`` token.
    """

    def setUp(self):
        # A fresh lexer per test keeps tests independent of lexer state.
        self.lexer = TransactSqlLexer()

    def _assertAreTokensOfType(self, examples, expected_token_type):
        """Assert each whitespace-separated example in ``examples`` lexes to
        exactly one non-whitespace token of ``expected_token_type``.

        :param examples: string of examples separated by whitespace
        :param expected_token_type: the token type every example must yield
        """
        for test_number, example in enumerate(examples.split(), 1):
            token_count = 0
            for token_type, token_value in self.lexer.get_tokens(example):
                if token_type != Whitespace:
                    token_count += 1
                    # Fixed message: was the ungrammatical
                    # 'is be %s but must be %s'.
                    self.assertEqual(
                        token_type, expected_token_type,
                        'token_type #%d for %s is %s but must be %s' %
                        (test_number, token_value, token_type,
                         expected_token_type))
            self.assertEqual(
                token_count, 1,
                '%s must yield exactly 1 token instead of %d' %
                (example, token_count))

    def _assertTokensMatch(self, text, expected_tokens_without_trailing_newline):
        """Assert ``text`` lexes exactly to the expected (type, value) tuples.

        ``Lexer.get_tokens`` guarantees a trailing newline token, which is
        stripped here so callers need not repeat it in every expectation.
        """
        actual_tokens = tuple(self.lexer.get_tokens(text))
        if (len(actual_tokens) >= 1) and (actual_tokens[-1] == (Whitespace, '\n')):
            actual_tokens = tuple(actual_tokens[:-1])
        self.assertEqual(
            expected_tokens_without_trailing_newline, actual_tokens,
            'text must yield expected tokens: %s' % text)

    def test_can_lex_float(self):
        self._assertAreTokensOfType(
            '1.2 1.2e3 1.2e+3 1.2e-3 1e2', Number.Float)
        self._assertTokensMatch(
            '1e2.1e2',
            ((Number.Float, '1e2'), (Number.Float, '.1e2'))
        )

    def test_can_lex_names(self):
        # Includes non-ASCII identifiers and #temp / ##global temp tables.
        self._assertAreTokensOfType(
            u'thingy thingy123 _thingy _ _123 Ähnliches Müll #temp1 ##temp2', Name)

    def test_can_lex_comments(self):
        self._assertTokensMatch('--\n', ((Comment.Single, '--\n'),))
        self._assertTokensMatch('/**/', (
            (Comment.Multiline, '/*'), (Comment.Multiline, '*/')
        ))
        # T-SQL multiline comments nest; both closers must be Multiline.
        self._assertTokensMatch('/*/**/*/', (
            (Comment.Multiline, '/*'),
            (Comment.Multiline, '/*'),
            (Comment.Multiline, '*/'),
            (Comment.Multiline, '*/'),
        ))

    def test_can_lex_example_file(self):
        tests_path = os.path.dirname(__file__)
        example_path = os.path.join(tests_path, 'examplefiles', 'test_transact-sql.txt')

        # The example file must lex without any Error token; the fixture
        # deliberately avoids the *.sql suffix so other SQL lexers skip it.
        with io.open(example_path, 'r', encoding='utf-8') as example_file:
            example_code = example_file.read()
        for token_type, token_value in self.lexer.get_tokens(example_code):
            self.assertNotEqual(Error, token_type, 'token_value=%r' % token_value)