author:    Thomas Aglassinger <roskakori@users.sourceforge.net>  2016-03-21 08:29:44 +0100
committer: Thomas Aglassinger <roskakori@users.sourceforge.net>  2016-03-21 08:29:44 +0100
commit:    1d5aed09f1f7fcfa27c73d03ffc1a61e686658da (patch)
tree:      e58d5ca42db23b95a639b942d46423d2c76d934d
parent:    a4a3df4ac8c645d890df8fc264733045108a3f53 (diff)
download:  pygments-1d5aed09f1f7fcfa27c73d03ffc1a61e686658da.tar.gz
Added lexer for Transact-SQL as used by Microsoft SQL Server and Sybase.
-rw-r--r-- | AUTHORS                                  |    2
-rw-r--r-- | pygments/lexers/_mapping.py              |    1
-rw-r--r-- | pygments/lexers/_tsql_builtins.py        | 1006
-rw-r--r-- | pygments/lexers/sql.py                   |   57
-rw-r--r-- | tests/examplefiles/test_transact-sql.txt |   68
-rw-r--r-- | tests/test_sql.py                        |   76
6 files changed, 1207 insertions, 3 deletions
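
For context before the diff itself: the new TransactSqlLexer added below plugs into the standard Pygments API. A minimal usage sketch, assuming this commit is installed; the sample T-SQL string and the choice of TerminalFormatter are illustrative and not part of the commit:

    # Minimal usage sketch: highlight a T-SQL snippet with the new lexer.
    # The sample query below is invented for illustration only.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.sql import TransactSqlLexer

    sample = "SELECT TOP 3 surname FROM employee WHERE date_of_birth >= '1990-01-01';"
    print(highlight(sample, TransactSqlLexer(), TerminalFormatter()))

    # The new entry in _mapping.py also makes the lexer reachable by alias:
    from pygments.lexers import get_lexer_by_name
    assert get_lexer_by_name('tsql').name == 'Transact-SQL'

Because the lexer registers the same `*.sql` filename pattern as the ANSI SqlLexer, looking it up by its `tsql` alias is the reliable route; the example file below is deliberately named `test_transact-sql.txt` for the same reason.
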
@@ -7,7 +7,7 @@ Other contributors, listed alphabetically, are:
 * Sam Aaron -- Ioke lexer
 * Ali Afshar -- image formatter
-* Thomas Aglassinger -- Easytrieve, JCL and Rexx lexers
+* Thomas Aglassinger -- Easytrieve, JCL, Rexx and Transact-SQL lexers
 * Muthiah Annamalai -- Ezhil lexer
 * Kumar Appaiah -- Debian control lexer
 * Andreas Amann -- AppleScript lexer
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index dba6d69a..e34adac2 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -393,6 +393,7 @@ LEXERS = {
     'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
     'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)),
     'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
+    'TransactSqlLexer': ('pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)),
     'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
     'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
     'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
diff --git a/pygments/lexers/_tsql_builtins.py b/pygments/lexers/_tsql_builtins.py
new file mode 100644
index 00000000..7e2f4a3e
--- /dev/null
+++ b/pygments/lexers/_tsql_builtins.py
@@ -0,0 +1,1006 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers._tsql_builtins
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    This file contains the names of keywords, operators, types and functions
+    for Transact-SQL used by ``pygments.lexers.sql.TransactSqlLexer``. These
+    lists describe SQL Server 2012 and have been manually collected from:
+
+    * https://msdn.microsoft.com/en-us/library/ms189822.aspx
+    * https://msdn.microsoft.com/en-us/library/ms174318.aspx
+    * https://msdn.microsoft.com/en-us/library/ms187752.aspx
+
+    :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+""" + +OPERATORS = ( + '!<', + '!=', + '!>', + '<', + '<=', + '<>', + '=', + '>', + '>=', + '+', + '+=' + '-', + '-=', + '*', + '*=', + '/', + '/=', + '%', + '%=', + '&', + '&=', + '|', + '|=', + '^', + '^=', + '~', + '::', +) + +OPERATOR_WORDS = ( + 'all', + 'and', + 'any', + 'between', + 'except', + 'exists', + 'in', + 'intersect', + 'like', + 'not', + 'or', + 'some', + 'union', +) + +_KEYWORDS_SERVER = ( + 'add', + 'all', + 'alter', + 'and', + 'any', + 'as', + 'asc', + 'authorization', + 'backup', + 'begin', + 'between', + 'break', + 'browse', + 'bulk', + 'by', + 'cascade', + 'case', + 'catch', + 'check', + 'checkpoint', + 'close', + 'clustered', + 'coalesce', + 'collate', + 'column', + 'commit', + 'compute', + 'constraint', + 'contains', + 'containstable', + 'continue', + 'convert', + 'create', + 'cross', + 'current', + 'current_date', + 'current_time', + 'current_timestamp', + 'current_user', + 'cursor', + 'database', + 'dbcc', + 'deallocate', + 'declare', + 'default', + 'delete', + 'deny', + 'desc', + 'disk', + 'distinct', + 'distributed', + 'double', + 'drop', + 'dump', + 'else', + 'end', + 'errlvl', + 'escape', + 'except', + 'exec', + 'execute', + 'exists', + 'exit', + 'external', + 'fetch', + 'file', + 'fillfactor', + 'for', + 'foreign', + 'freetext', + 'freetexttable', + 'from', + 'full', + 'function', + 'goto', + 'grant', + 'group', + 'having', + 'holdlock', + 'identity', + 'identity_insert', + 'identitycol', + 'if', + 'in', + 'index', + 'inner', + 'insert', + 'intersect', + 'into', + 'is', + 'join', + 'key', + 'kill', + 'left', + 'like', + 'lineno', + 'load', + 'merge', + 'national', + 'nocheck', + 'nonclustered', + 'not', + 'null', + 'nullif', + 'of', + 'off', + 'offsets', + 'on', + 'open', + 'opendatasource', + 'openquery', + 'openrowset', + 'openxml', + 'option', + 'or', + 'order', + 'outer', + 'over', + 'percent', + 'pivot', + 'plan', + 'precision', + 'primary', + 'print', + 'proc', + 'procedure', + 'public', + 'raiserror', + 'read', + 'readtext', + 'reconfigure', + 'references', + 'replication', + 'restore', + 'restrict', + 'return', + 'revert', + 'revoke', + 'right', + 'rollback', + 'rowcount', + 'rowguidcol', + 'rule', + 'save', + 'schema', + 'securityaudit', + 'select', + 'semantickeyphrasetable', + 'semanticsimilaritydetailstable', + 'semanticsimilaritytable', + 'session_user', + 'set', + 'setuser', + 'shutdown', + 'some', + 'statistics', + 'system_user', + 'table', + 'tablesample', + 'textsize', + 'then', + 'throw', + 'to', + 'top', + 'tran', + 'transaction', + 'trigger', + 'truncate', + 'try', + 'try_convert', + 'tsequal', + 'union', + 'unique', + 'unpivot', + 'update', + 'updatetext', + 'use', + 'user', + 'values', + 'varying', + 'view', + 'waitfor', + 'when', + 'where', + 'while', + 'with', + 'within', + 'writetext', +) + +_KEYWORDS_FUTURE = ( + 'absolute', + 'action', + 'admin', + 'after', + 'aggregate', + 'alias', + 'allocate', + 'are', + 'array', + 'asensitive', + 'assertion', + 'asymmetric', + 'at', + 'atomic', + 'before', + 'binary', + 'bit', + 'blob', + 'boolean', + 'both', + 'breadth', + 'call', + 'called', + 'cardinality', + 'cascaded', + 'cast', + 'catalog', + 'char', + 'character', + 'class', + 'clob', + 'collation', + 'collect', + 'completion', + 'condition', + 'connect', + 'connection', + 'constraints', + 'constructor', + 'corr', + 'corresponding', + 'covar_pop', + 'covar_samp', + 'cube', + 'cume_dist', + 'current_catalog', + 'current_default_transform_group', + 'current_path', + 'current_role', + 'current_schema', + 'current_transform_group_for_type', 
+    'cycle', 'data', 'date', 'day', 'dec', 'decimal', 'deferrable',
+    'deferred', 'depth', 'deref', 'describe', 'descriptor', 'destroy',
+    'destructor', 'deterministic', 'diagnostics', 'dictionary', 'disconnect',
+    'domain', 'dynamic', 'each', 'element', 'end-exec', 'equals', 'every',
+    'exception', 'false', 'filter', 'first', 'float', 'found', 'free',
+    'fulltexttable', 'fusion', 'general', 'get', 'global', 'go', 'grouping',
+    'hold', 'host', 'hour', 'ignore', 'immediate', 'indicator', 'initialize',
+    'initially', 'inout', 'input', 'int', 'integer', 'intersection',
+    'interval', 'isolation', 'iterate', 'language', 'large', 'last',
+    'lateral', 'leading', 'less', 'level', 'like_regex', 'limit', 'ln',
+    'local', 'localtime', 'localtimestamp', 'locator', 'map', 'match',
+    'member', 'method', 'minute', 'mod', 'modifies', 'modify', 'module',
+    'month', 'multiset', 'names', 'natural', 'nchar', 'nclob', 'new', 'next',
+    'no', 'none', 'normalize', 'numeric', 'object', 'occurrences_regex',
+    'old', 'only', 'operation', 'ordinality', 'out', 'output', 'overlay',
+    'pad', 'parameter', 'parameters', 'partial', 'partition', 'path',
+    'percent_rank', 'percentile_cont', 'percentile_disc', 'position_regex',
+    'postfix', 'prefix', 'preorder', 'prepare', 'preserve', 'prior',
+    'privileges', 'range', 'reads', 'real', 'recursive', 'ref',
+    'referencing', 'regr_avgx', 'regr_avgy', 'regr_count', 'regr_intercept',
+    'regr_r2', 'regr_slope', 'regr_sxx', 'regr_sxy', 'regr_syy', 'relative',
+    'release', 'result', 'returns', 'role', 'rollup', 'routine', 'row',
+    'rows', 'savepoint', 'scope', 'scroll', 'search', 'second', 'section',
+    'sensitive', 'sequence', 'session', 'sets', 'similar', 'size',
+    'smallint', 'space', 'specific', 'specifictype', 'sql', 'sqlexception',
+    'sqlstate', 'sqlwarning', 'start', 'state', 'statement', 'static',
+    'stddev_pop', 'stddev_samp', 'structure', 'submultiset',
+    'substring_regex', 'symmetric', 'system', 'temporary', 'terminate',
+    'than', 'time', 'timestamp', 'timezone_hour', 'timezone_minute',
+    'trailing', 'translate_regex', 'translation', 'treat', 'true', 'uescape',
+    'under', 'unknown', 'unnest', 'usage', 'using', 'value', 'var_pop',
+    'var_samp', 'varchar', 'variable', 'whenever', 'width_bucket', 'window',
+    'within', 'without', 'work', 'write', 'xmlagg', 'xmlattributes',
+    'xmlbinary', 'xmlcast', 'xmlcomment', 'xmlconcat', 'xmldocument',
+    'xmlelement', 'xmlexists', 'xmlforest', 'xmliterate', 'xmlnamespaces',
+    'xmlparse', 'xmlpi', 'xmlquery', 'xmlserialize', 'xmltable', 'xmltext',
+    'xmlvalidate', 'year', 'zone',
+)
+
+_KEYWORDS_ODBC = (
+    'absolute', 'action', 'ada', 'add', 'all', 'allocate', 'alter', 'and',
+    'any', 'are', 'as', 'asc', 'assertion', 'at', 'authorization', 'avg',
+    'begin', 'between', 'bit', 'bit_length', 'both', 'by', 'cascade',
+    'cascaded', 'case', 'cast', 'catalog', 'char', 'char_length',
+    'character', 'character_length', 'check', 'close', 'coalesce', 'collate',
+    'collation', 'column', 'commit', 'connect', 'connection', 'constraint',
+    'constraints', 'continue', 'convert', 'corresponding', 'count', 'create',
+    'cross', 'current', 'current_date', 'current_time', 'current_timestamp',
+    'current_user', 'cursor', 'date', 'day', 'deallocate', 'dec', 'decimal',
+    'declare', 'default', 'deferrable', 'deferred', 'delete', 'desc',
+    'describe', 'descriptor', 'diagnostics', 'disconnect', 'distinct',
+    'domain', 'double', 'drop', 'else', 'end', 'end-exec', 'escape',
+    'except', 'exception', 'exec', 'execute', 'exists', 'external',
+    'extract', 'false', 'fetch', 'first', 'float', 'for', 'foreign',
+    'fortran', 'found', 'from', 'full', 'get', 'global', 'go', 'goto',
+    'grant', 'group', 'having', 'hour', 'identity', 'immediate', 'in',
+    'include', 'index', 'indicator', 'initially', 'inner', 'input',
+    'insensitive', 'insert', 'int', 'integer', 'intersect', 'interval',
+    'into', 'is', 'isolation', 'join', 'key', 'language', 'last', 'leading',
+    'left', 'level', 'like', 'local', 'lower', 'match', 'max', 'min',
+    'minute', 'module', 'month', 'names', 'national', 'natural', 'nchar',
+    'next', 'no', 'none', 'not', 'null', 'nullif', 'numeric', 'octet_length',
+    'of', 'on', 'only', 'open', 'option', 'or', 'order', 'outer', 'output',
+    'overlaps', 'pad', 'partial', 'pascal', 'position', 'precision',
+    'prepare', 'preserve', 'primary', 'prior', 'privileges', 'procedure',
+    'public', 'read', 'real', 'references', 'relative', 'restrict', 'revoke',
+    'right', 'rollback', 'rows', 'schema', 'scroll', 'second', 'section',
+    'select', 'session', 'session_user', 'set', 'size', 'smallint', 'some',
+    'space', 'sql', 'sqlca', 'sqlcode', 'sqlerror', 'sqlstate', 'sqlwarning',
+    'substring', 'sum', 'system_user', 'table', 'temporary', 'then', 'time',
+    'timestamp', 'timezone_hour', 'timezone_minute', 'to', 'trailing',
+    'transaction', 'translate', 'translation', 'trim', 'true', 'union',
+    'unique', 'unknown', 'update', 'upper', 'usage', 'user', 'using',
+    'value', 'values', 'varchar', 'varying', 'view', 'when', 'whenever',
+    'where', 'with', 'work', 'write', 'year', 'zone',
+)
+
+KEYWORDS = sorted(set(_KEYWORDS_FUTURE + _KEYWORDS_ODBC + _KEYWORDS_SERVER))
+
+TYPES = (
+    'bigint', 'binary', 'bit', 'char', 'cursor', 'date', 'datetime',
+    'datetime2', 'datetimeoffset', 'decimal', 'float', 'hierarchyid',
+    'image', 'int', 'money', 'nchar', 'ntext', 'numeric', 'nvarchar',
+    'real', 'smalldatetime', 'smallint', 'smallmoney', 'sql_variant',
+    'table', 'text', 'time', 'timestamp', 'tinyint', 'uniqueidentifier',
+    'varbinary', 'varchar', 'xml',
+)
+
+FUNCTIONS = (
+    '$partition', 'abs', 'acos', 'app_name', 'applock_mode', 'applock_test',
+    'ascii', 'asin', 'assemblyproperty', 'atan', 'atn2', 'avg',
+    'binary_checksum', 'cast', 'ceiling', 'certencoded', 'certprivatekey',
+    'char', 'charindex', 'checksum', 'checksum_agg', 'choose', 'col_length',
+    'col_name', 'columnproperty', 'compress', 'concat', 'connectionproperty',
+    'context_info', 'convert', 'cos', 'cot', 'count', 'count_big',
+    'current_request_id', 'current_timestamp', 'current_transaction_id',
+    'current_user', 'cursor_status', 'database_principal_id',
+    'databasepropertyex', 'dateadd', 'datediff', 'datediff_big',
+    'datefromparts', 'datename',
+    'datepart', 'datetime2fromparts', 'datetimefromparts',
+    'datetimeoffsetfromparts', 'day', 'db_id', 'db_name', 'decompress',
+    'degrees', 'dense_rank', 'difference', 'eomonth', 'error_line',
+    'error_message', 'error_number', 'error_procedure', 'error_severity',
+    'error_state', 'exp', 'file_id', 'file_idex', 'file_name',
+    'filegroup_id', 'filegroup_name', 'filegroupproperty', 'fileproperty',
+    'floor', 'format', 'formatmessage', 'fulltextcatalogproperty',
+    'fulltextserviceproperty', 'get_filestream_transaction_context',
+    'getansinull', 'getdate', 'getutcdate', 'grouping', 'grouping_id',
+    'has_perms_by_name', 'host_id', 'host_name', 'iif', 'index_col',
+    'indexkey_property', 'indexproperty', 'is_member', 'is_rolemember',
+    'is_srvrolemember', 'isdate', 'isjson', 'isnull', 'isnumeric',
+    'json_modify', 'json_query', 'json_value', 'left', 'len', 'log',
+    'log10', 'lower', 'ltrim', 'max', 'min', 'min_active_rowversion',
+    'month', 'nchar', 'newid', 'newsequentialid', 'ntile',
+    'object_definition', 'object_id', 'object_name', 'object_schema_name',
+    'objectproperty', 'objectpropertyex', 'opendatasource', 'openjson',
+    'openquery', 'openrowset', 'openxml', 'original_db_name',
+    'original_login', 'parse', 'parsename', 'patindex', 'permissions', 'pi',
+    'power', 'pwdcompare', 'pwdencrypt', 'quotename', 'radians', 'rand',
+    'rank', 'replace', 'replicate', 'reverse', 'right', 'round',
+    'row_number', 'rowcount_big', 'rtrim', 'schema_id', 'schema_name',
+    'scope_identity', 'serverproperty', 'session_context', 'session_user',
+    'sign', 'sin', 'smalldatetimefromparts', 'soundex', 'sp_helplanguage',
+    'space', 'sqrt', 'square', 'stats_date', 'stdev', 'stdevp', 'str',
+    'string_escape', 'string_split', 'stuff', 'substring', 'sum',
+    'suser_id', 'suser_name', 'suser_sid', 'suser_sname', 'switchoffset',
+    'sysdatetime', 'sysdatetimeoffset', 'system_user', 'sysutcdatetime',
+    'tan', 'textptr', 'textvalid', 'timefromparts', 'todatetimeoffset',
+    'try_cast', 'try_convert', 'try_parse', 'type_id', 'type_name',
+    'typeproperty', 'unicode', 'upper', 'user_id', 'user_name', 'var',
+    'varp', 'xact_state', 'year',
+)
diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py
index 7c06226b..f6539c36 100644
--- a/pygments/lexers/sql.py
+++ b/pygments/lexers/sql.py
@@ -41,17 +41,19 @@
 import re
 
 from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
-from pygments.token import Punctuation, \
+from pygments.token import Punctuation, Whitespace, Error, \
     Text, Comment, Operator, Keyword, Name, String, Number, Generic
 from pygments.lexers import get_lexer_by_name, ClassNotFound
 from pygments.util import iteritems
 
 from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
     PSEUDO_TYPES, PLPGSQL_KEYWORDS
+from pygments.lexers import _tsql_builtins
 
 
 __all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
-           'SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer', 'RqlLexer']
+           'SqlLexer', 'TransactSqlLexer', 'MySqlLexer',
+           'SqliteConsoleLexer', 'RqlLexer']
 
 line_re = re.compile('.*?\n')
 
@@ -479,6 +481,57 @@ class SqlLexer(RegexLexer):
     }
 
 
+class TransactSqlLexer(RegexLexer):
+    """
+    Transact-SQL (T-SQL) is Microsoft's and Sybase's proprietary extension to
+    SQL.
+
+    The list of keywords includes ODBC and keywords reserved for future use.
+ """ + + name = 'Transact-SQL' + aliases = ['tsql', 't-sql'] + filenames = ['*.sql'] + mimetypes = ['text/x-tsql'] + + flags = re.IGNORECASE | re.UNICODE + tokens = { + 'root': [ + (r'\s+', Whitespace), + (r'--.*?\n', Comment.Single), + (r'/\*', Comment.Multiline, 'multiline-comments'), + (r'(-=|-)', Operator), # HACK: For some reason these operators do not work as part of OPERATORS. + (words(_tsql_builtins.OPERATORS), Operator), + (words(_tsql_builtins.OPERATOR_WORDS, suffix=r'\b'), Operator.Word), + (words(_tsql_builtins.TYPES, suffix=r'\b'), Name.Class), + (words(_tsql_builtins.FUNCTIONS, suffix=r'\b'), Name.Function), + (r'(goto)(\s+)(\w+\b)', bygroups(Keyword, Whitespace, Name.Label)), + (words(_tsql_builtins.KEYWORDS, suffix=r'\b'), Keyword), + (r'(\[)([^]]+)(\])', bygroups(Operator, Name, Operator)), + (r'0x[0-9a-f]+', Number.Hex), + (r'[0-9]*\.?[0-9]+(e[+-]?[0-9]+)?', Number.Float), + (r'[0-9]+', Number.Integer), + (r"'(''|[^'])*'", String.Single), + (r'"(""|[^"])*"', String.Symbol), + (r'[;(),.]', Punctuation), + # Below we use \w even for the first "real" character because + # tokens starting with a digit have already been recognized + # as Number above. + (r'@@\w+', Name.Builtin), + (r'@\w+', Name.Variable), + (r'(\w+)(:)', bygroups(Name.Label, Punctuation)), + (r'#?#?\w+', Name), + (r'\?', Name.Variable.Magic), # Parameter for prepared statements + ], + 'multiline-comments': [ + (r'/\*', Comment.Multiline, 'multiline-comments'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[^/*]+', Comment.Multiline), + (r'[/*]', Comment.Multiline) + ] + } + + class MySqlLexer(RegexLexer): """ Special lexer for MySQL. diff --git a/tests/examplefiles/test_transact-sql.txt b/tests/examplefiles/test_transact-sql.txt new file mode 100644 index 00000000..90e06c34 --- /dev/null +++ b/tests/examplefiles/test_transact-sql.txt @@ -0,0 +1,68 @@ +-- Example Transact-SQL file. +-- +-- We cannot use "*.sql" as file suffix because then the automatic tests for +-- the ANSI SQL lexer would attempt to read it and in turn detect errors in +-- it which would cause a test case to fail. + +-- Single line comment +/* A comment + * spawning two lines. */ + /* An indented comment + * spawning multiple + * lines. */ +/* A /* nested */ comment. */ + +select + left(emp.firstname, 1) + '.' + [emp.surname] as "Name", + dep.name as [Department] +into + #temp_employee +from + employee as emp + inner join department as dep on + dep.ident_code = emp.department_id +where + emp.date_of_birth >= '1990-01-01'; +go + +declare @TextToFind nvarchar(100) = N'some +text across +multiple lines'; + +set @TextToFind = 'hello' + ' world'; +set @TextTiFind += '!'; + +delete from + [server].[database].[schema].[table] +where + [Text] = @TextToFind and author Not LIKE '%some%'; + +goto overthere; +overthere: + +select + 123 as "int 1", + +123 as "int 2", + -123 as "int 3", + 0x20 as "hex int", + 123.45 as "float 1", + -1.23e45 as "float 2" + +1.23E+45 as "float 3", + -1.23e-45 as "float 4"; + +Select @@Error, $PARTITion.RangePF1(10); + +select top 3 Ähnliches from Müll; + +-- Example transaction +BEGIN TRAN + +BEGIN TRY + INSERT INTO #temp_employe(Name, Department) VALUES ('L. Miller', 'Sales') + iNsErT inTO #temp_employe(Name, Department) VaLuEs ('M. 
+    COMMIT TRAN
+END TRY
+BEGIN CATCH
+    print 'cannot perform transaction; rolling back';
+    ROLLBACK TRAN
+END CATCH
diff --git a/tests/test_sql.py b/tests/test_sql.py
new file mode 100644
index 00000000..37a81ff8
--- /dev/null
+++ b/tests/test_sql.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+"""
+    Pygments SQL lexers tests
+    ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+import io
+import os.path
+import unittest
+
+from pygments.lexers.sql import TransactSqlLexer
+from pygments.token import Comment, Error, Name, Number, Whitespace
+
+
+class TransactSqlLexerTest(unittest.TestCase):
+
+    def setUp(self):
+        self.lexer = TransactSqlLexer()
+
+    def _assertAreTokensOfType(self, examples, expected_token_type):
+        for test_number, example in enumerate(examples.split(), 1):
+            token_count = 0
+            for token_type, token_value in self.lexer.get_tokens(example):
+                if token_type != Whitespace:
+                    token_count += 1
+                    self.assertEqual(
+                        token_type, expected_token_type,
+                        'token_type #%d for %s is %s but must be %s' %
+                        (test_number, token_value, token_type, expected_token_type))
+            self.assertEqual(
+                token_count, 1,
+                '%s must yield exactly 1 token instead of %d' %
+                (example, token_count))
+
+    def _assertTokensMatch(self, text, expected_tokens_without_trailing_newline):
+        actual_tokens = tuple(self.lexer.get_tokens(text))
+        if (len(actual_tokens) >= 1) and (actual_tokens[-1] == (Whitespace, '\n')):
+            actual_tokens = tuple(actual_tokens[:-1])
+        self.assertEqual(
+            expected_tokens_without_trailing_newline, actual_tokens,
+            'text must yield expected tokens: %s' % text)
+
+    def test_can_lex_float(self):
+        self._assertAreTokensOfType(
+            '1.2 1.2e3 1.2e+3 1.2e-3 1e2', Number.Float)
+        self._assertTokensMatch(
+            '1e2.1e2',
+            ((Number.Float, '1e2'), (Number.Float, '.1e2')))
+
+    def test_can_lex_names(self):
+        self._assertAreTokensOfType(
+            u'thingy thingy123 _thingy _ _123 Ähnliches Müll #temp1 ##temp2',
+            Name)
+
+    def test_can_lex_comments(self):
+        self._assertTokensMatch('--\n', ((Comment.Single, '--\n'),))
+        self._assertTokensMatch('/**/', (
+            (Comment.Multiline, '/*'), (Comment.Multiline, '*/')))
+        self._assertTokensMatch('/*/**/*/', (
+            (Comment.Multiline, '/*'),
+            (Comment.Multiline, '/*'),
+            (Comment.Multiline, '*/'),
+            (Comment.Multiline, '*/'),
+        ))
+
+    def test_can_lex_example_file(self):
+        tests_path = os.path.dirname(__file__)
+        example_path = os.path.join(
+            tests_path, 'examplefiles', 'test_transact-sql.txt')
+        with io.open(example_path, 'r', encoding='utf-8') as example_file:
+            example_code = example_file.read()
+        for token_type, token_value in self.lexer.get_tokens(example_code):
+            self.assertNotEqual(
+                Error, token_type, 'token_value=%r' % token_value)
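
The unit tests above assert token types rather than rendered output; the same kind of check can be done interactively by iterating `get_tokens()`. A small sketch, with an input string invented for illustration:

    # Sketch: inspect the raw token stream of the new lexer.
    from pygments.lexers.sql import TransactSqlLexer

    lexer = TransactSqlLexer()
    for token_type, value in lexer.get_tokens("SET @TextToFind = 'hello'; SELECT @@ERROR;"):
        print(token_type, repr(value))

    # Per the rules added to sql.py above, '@TextToFind' is reported as
    # Name.Variable, the system function '@@ERROR' as Name.Builtin, and the
    # quoted literal as String.Single.
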