author | Georg Brandl <georg@python.org> | 2012-02-05 10:14:52 +0100
committer | Georg Brandl <georg@python.org> | 2012-02-05 10:14:52 +0100
commit | 5d5fed57921597f1021773533cef2a7ef53835a3 (patch)
tree | 94730f3cc994dd63714b6e713400097e6126d00c
parent | f1bfaccd15f7f178421feb897272062d67cbe681 (diff)
download | pygments-5d5fed57921597f1021773533cef2a7ef53835a3.tar.gz
Module reorganizing: move sql stuff together, create new "shell" module.
-rw-r--r-- | pygments/lexers/_mapping.py | 18
-rw-r--r-- | pygments/lexers/compiled.py | 4
-rw-r--r-- | pygments/lexers/other.py | 507
-rw-r--r-- | pygments/lexers/postgres.py | 326
-rw-r--r-- | pygments/lexers/shell.py | 273
-rw-r--r-- | pygments/lexers/sql.py | 556
6 files changed, 854 insertions(+), 830 deletions(-)
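The mapping table below is the user-visible pivot of this change: alias lookups resolve through pygments/lexers/_mapping.py, so repointing its entries is what makes the new "shell" and "sql" modules take effect. A minimal smoke test one might run against this revision (not part of the commit; aliases taken from the mapping entries in the diff, expected module names assume this commit is checked out):

    # Hypothetical check: aliases should now resolve to the new modules
    # via pygments/lexers/_mapping.py.
    from pygments.lexers import get_lexer_by_name

    for alias in ('bash', 'console', 'bat', 'tcsh',
                  'sql', 'mysql', 'sqlite3', 'postgresql', 'plpgsql', 'psql'):
        lexer = get_lexer_by_name(alias)
        print('%-12s -> %s.%s' % (alias, type(lexer).__module__,
                                  type(lexer).__name__))
    # Shell aliases should print pygments.lexers.shell,
    # SQL aliases pygments.lexers.sql.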
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index 7cdd1ed0..fb4e0703 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -34,9 +34,9 @@ LEXERS = {
     'AwkLexer': ('pygments.lexers.other', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
     'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
     'BaseMakefileLexer': ('pygments.lexers.text', 'Base Makefile', ('basemake',), (), ()),
-    'BashLexer': ('pygments.lexers.other', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass'), ('application/x-sh', 'application/x-shellscript')),
-    'BashSessionLexer': ('pygments.lexers.other', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)),
-    'BatchLexer': ('pygments.lexers.other', 'Batchfile', ('bat',), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
+    'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass'), ('application/x-sh', 'application/x-shellscript')),
+    'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)),
+    'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat',), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
     'BefungeLexer': ('pygments.lexers.other', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
     'BlitzMaxLexer': ('pygments.lexers.compiled', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
     'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
@@ -148,7 +148,7 @@ LEXERS = {
     'MoonScriptLexer': ('pygments.lexers.agile', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
     'MuPADLexer': ('pygments.lexers.math', 'MuPAD', ('mupad',), ('*.mu',), ()),
     'MxmlLexer': ('pygments.lexers.web', 'MXML', ('mxml',), ('*.mxml',), ()),
-    'MySqlLexer': ('pygments.lexers.other', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
+    'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
     'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
     'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
     'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
@@ -163,7 +163,6 @@ LEXERS = {
     'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
     'ObjectiveCLexer': ('pygments.lexers.compiled', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m',), ('text/x-objective-c',)),
     'ObjectiveJLexer': ('pygments.lexers.web', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
-    'OcamlLexer': ('pygments.lexers.compiled', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
     'OcamlLexer': ('pygments.lexers.functional', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
     'OctaveLexer': ('pygments.lexers.math', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
     'OocLexer': ('pygments.lexers.compiled', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
@@ -172,9 +171,12 @@ LEXERS = {
     'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm'), ('text/x-perl', 'application/x-perl')),
     'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]'), ('text/x-php',)),
     'PlPgsqlLexer': ('pygments.lexers.postgres', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
+    'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
     'PostScriptLexer': ('pygments.lexers.other', 'PostScript', ('postscript',), ('*.ps', '*.eps'), ('application/postscript',)),
     'PostgresConsoleLexer': ('pygments.lexers.postgres', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
+    'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
     'PostgresLexer': ('pygments.lexers.postgres', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
+    'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
     'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
     'PrologLexer': ('pygments.lexers.compiled', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
     'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties',), ('*.properties',), ('text/x-java-properties',)),
@@ -212,13 +214,13 @@ LEXERS = {
     'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
     'SnobolLexer': ('pygments.lexers.other', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
     'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list'), ('sources.list',), ()),
-    'SqlLexer': ('pygments.lexers.other', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
-    'SqliteConsoleLexer': ('pygments.lexers.other', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
+    'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
+    'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
     'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
     'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
     'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('sv',), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
     'TclLexer': ('pygments.lexers.agile', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
-    'TcshLexer': ('pygments.lexers.other', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
+    'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
     'TexLexer': ('pygments.lexers.text', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
     'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
     'UrbiscriptLexer': ('pygments.lexers.other', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
index c10f7252..90503888 100644
--- a/pygments/lexers/compiled.py
+++ b/pygments/lexers/compiled.py
@@ -22,8 +22,8 @@ from pygments.token import \
 # backwards compatibility
 from pygments.lexers.functional import OcamlLexer
 
-__all__ = ['CLexer', 'CppLexer', 'DLexer', 'DelphiLexer', 'ECLexer', 'JavaLexer',
-           'ScalaLexer', 'DylanLexer', 'OcamlLexer', 'ObjectiveCLexer',
+__all__ = ['CLexer', 'CppLexer', 'DLexer', 'DelphiLexer', 'ECLexer',
+           'JavaLexer', 'ScalaLexer', 'DylanLexer', 'ObjectiveCLexer',
            'FortranLexer', 'GLShaderLexer', 'PrologLexer', 'CythonLexer',
            'ValaLexer', 'OocLexer', 'GoLexer', 'FelixLexer', 'AdaLexer',
            'Modula2Lexer', 'BlitzMaxLexer', 'NimrodLexer', 'GosuLexer',
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
index c40029a2..462b5483 100644
--- a/pygments/lexers/other.py
+++ b/pygments/lexers/other.py
@@ -11,213 +11,25 @@
 
 import re
 
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
-     this, do_insertions, combined, ExtendedRegexLexer
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+     this, combined, ExtendedRegexLexer
 from pygments.token import Error, Punctuation, Literal, Token, \
      Text, Comment, Operator, Keyword, Name, String, Number, Generic
-from pygments.util import shebang_matches
 from pygments.lexers.web import HtmlLexer
 
-__all__ = ['SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer', 'BrainfuckLexer',
-           'BashLexer', 'BatchLexer', 'BefungeLexer', 'RedcodeLexer',
-           'MOOCodeLexer', 'SmalltalkLexer', 'TcshLexer', 'LogtalkLexer',
-           'GnuplotLexer', 'PovrayLexer', 'AppleScriptLexer',
-           'BashSessionLexer', 'ModelicaLexer', 'RebolLexer', 'ABAPLexer',
-           'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer',
-           'PostScriptLexer', 'AutohotkeyLexer', 'GoodDataCLLexer',
-           'MaqlLexer', 'ProtoBufLexer', 'HybrisLexer', 'AwkLexer',
-           'Cfengine3Lexer', 'SnobolLexer', 'ECLLexer', 'UrbiscriptLexer',
-           'OpenEdgeLexer']
+# backwards compatibility
+from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
+from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
+     TcshLexer
 
-line_re = re.compile('.*?\n')
-
-
-class SqlLexer(RegexLexer):
-    """
-    Lexer for Structured Query Language. Currently, this lexer does
-    not recognize any special syntax except ANSI SQL.
- """ - - name = 'SQL' - aliases = ['sql'] - filenames = ['*.sql'] - mimetypes = ['text/x-sql'] - - flags = re.IGNORECASE - tokens = { - 'root': [ - (r'\s+', Text), - (r'--.*?\n', Comment.Single), - (r'/\*', Comment.Multiline, 'multiline-comments'), - (r'(ABORT|ABS|ABSOLUTE|ACCESS|ADA|ADD|ADMIN|AFTER|AGGREGATE|' - r'ALIAS|ALL|ALLOCATE|ALTER|ANALYSE|ANALYZE|AND|ANY|ARE|AS|' - r'ASC|ASENSITIVE|ASSERTION|ASSIGNMENT|ASYMMETRIC|AT|ATOMIC|' - r'AUTHORIZATION|AVG|BACKWARD|BEFORE|BEGIN|BETWEEN|BITVAR|' - r'BIT_LENGTH|BOTH|BREADTH|BY|C|CACHE|CALL|CALLED|CARDINALITY|' - r'CASCADE|CASCADED|CASE|CAST|CATALOG|CATALOG_NAME|CHAIN|' - r'CHARACTERISTICS|CHARACTER_LENGTH|CHARACTER_SET_CATALOG|' - r'CHARACTER_SET_NAME|CHARACTER_SET_SCHEMA|CHAR_LENGTH|CHECK|' - r'CHECKED|CHECKPOINT|CLASS|CLASS_ORIGIN|CLOB|CLOSE|CLUSTER|' - r'COALSECE|COBOL|COLLATE|COLLATION|COLLATION_CATALOG|' - r'COLLATION_NAME|COLLATION_SCHEMA|COLUMN|COLUMN_NAME|' - r'COMMAND_FUNCTION|COMMAND_FUNCTION_CODE|COMMENT|COMMIT|' - r'COMMITTED|COMPLETION|CONDITION_NUMBER|CONNECT|CONNECTION|' - r'CONNECTION_NAME|CONSTRAINT|CONSTRAINTS|CONSTRAINT_CATALOG|' - r'CONSTRAINT_NAME|CONSTRAINT_SCHEMA|CONSTRUCTOR|CONTAINS|' - r'CONTINUE|CONVERSION|CONVERT|COPY|CORRESPONTING|COUNT|' - r'CREATE|CREATEDB|CREATEUSER|CROSS|CUBE|CURRENT|CURRENT_DATE|' - r'CURRENT_PATH|CURRENT_ROLE|CURRENT_TIME|CURRENT_TIMESTAMP|' - r'CURRENT_USER|CURSOR|CURSOR_NAME|CYCLE|DATA|DATABASE|' - r'DATETIME_INTERVAL_CODE|DATETIME_INTERVAL_PRECISION|DAY|' - r'DEALLOCATE|DECLARE|DEFAULT|DEFAULTS|DEFERRABLE|DEFERRED|' - r'DEFINED|DEFINER|DELETE|DELIMITER|DELIMITERS|DEREF|DESC|' - r'DESCRIBE|DESCRIPTOR|DESTROY|DESTRUCTOR|DETERMINISTIC|' - r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|' - r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|' - r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|' - r'EXCEPT|ESCEPTION|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|' - r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|' - r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|' - r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|' - r'GROUP|GROUPING|HANDLER|HAVING|HIERARCHY|HOLD|HOST|IDENTITY|' - r'IGNORE|ILIKE|IMMEDIATE|IMMUTABLE|IMPLEMENTATION|IMPLICIT|IN|' - r'INCLUDING|INCREMENT|INDEX|INDITCATOR|INFIX|INHERITS|INITIALIZE|' - r'INITIALLY|INNER|INOUT|INPUT|INSENSITIVE|INSERT|INSTANTIABLE|' - r'INSTEAD|INTERSECT|INTO|INVOKER|IS|ISNULL|ISOLATION|ITERATE|JOIN|' - r'KEY|KEY_MEMBER|KEY_TYPE|LANCOMPILER|LANGUAGE|LARGE|LAST|' - r'LATERAL|LEADING|LEFT|LENGTH|LESS|LEVEL|LIKE|LIMIT|LISTEN|LOAD|' - r'LOCAL|LOCALTIME|LOCALTIMESTAMP|LOCATION|LOCATOR|LOCK|LOWER|' - r'MAP|MATCH|MAX|MAXVALUE|MESSAGE_LENGTH|MESSAGE_OCTET_LENGTH|' - r'MESSAGE_TEXT|METHOD|MIN|MINUTE|MINVALUE|MOD|MODE|MODIFIES|' - r'MODIFY|MONTH|MORE|MOVE|MUMPS|NAMES|NATIONAL|NATURAL|NCHAR|' - r'NCLOB|NEW|NEXT|NO|NOCREATEDB|NOCREATEUSER|NONE|NOT|NOTHING|' - r'NOTIFY|NOTNULL|NULL|NULLABLE|NULLIF|OBJECT|OCTET_LENGTH|OF|OFF|' - r'OFFSET|OIDS|OLD|ON|ONLY|OPEN|OPERATION|OPERATOR|OPTION|OPTIONS|' - r'OR|ORDER|ORDINALITY|OUT|OUTER|OUTPUT|OVERLAPS|OVERLAY|OVERRIDING|' - r'OWNER|PAD|PARAMETER|PARAMETERS|PARAMETER_MODE|PARAMATER_NAME|' - r'PARAMATER_ORDINAL_POSITION|PARAMETER_SPECIFIC_CATALOG|' - r'PARAMETER_SPECIFIC_NAME|PARAMATER_SPECIFIC_SCHEMA|PARTIAL|' - r'PASCAL|PENDANT|PLACING|PLI|POSITION|POSTFIX|PRECISION|PREFIX|' - r'PREORDER|PREPARE|PRESERVE|PRIMARY|PRIOR|PRIVILEGES|PROCEDURAL|' - r'PROCEDURE|PUBLIC|READ|READS|RECHECK|RECURSIVE|REF|REFERENCES|' - 
r'REFERENCING|REINDEX|RELATIVE|RENAME|REPEATABLE|REPLACE|RESET|' - r'RESTART|RESTRICT|RESULT|RETURN|RETURNED_LENGTH|' - r'RETURNED_OCTET_LENGTH|RETURNED_SQLSTATE|RETURNS|REVOKE|RIGHT|' - r'ROLE|ROLLBACK|ROLLUP|ROUTINE|ROUTINE_CATALOG|ROUTINE_NAME|' - r'ROUTINE_SCHEMA|ROW|ROWS|ROW_COUNT|RULE|SAVE_POINT|SCALE|SCHEMA|' - r'SCHEMA_NAME|SCOPE|SCROLL|SEARCH|SECOND|SECURITY|SELECT|SELF|' - r'SENSITIVE|SERIALIZABLE|SERVER_NAME|SESSION|SESSION_USER|SET|' - r'SETOF|SETS|SHARE|SHOW|SIMILAR|SIMPLE|SIZE|SOME|SOURCE|SPACE|' - r'SPECIFIC|SPECIFICTYPE|SPECIFIC_NAME|SQL|SQLCODE|SQLERROR|' - r'SQLEXCEPTION|SQLSTATE|SQLWARNINIG|STABLE|START|STATE|STATEMENT|' - r'STATIC|STATISTICS|STDIN|STDOUT|STORAGE|STRICT|STRUCTURE|STYPE|' - r'SUBCLASS_ORIGIN|SUBLIST|SUBSTRING|SUM|SYMMETRIC|SYSID|SYSTEM|' - r'SYSTEM_USER|TABLE|TABLE_NAME| TEMP|TEMPLATE|TEMPORARY|TERMINATE|' - r'THAN|THEN|TIMESTAMP|TIMEZONE_HOUR|TIMEZONE_MINUTE|TO|TOAST|' - r'TRAILING|TRANSATION|TRANSACTIONS_COMMITTED|' - r'TRANSACTIONS_ROLLED_BACK|TRANSATION_ACTIVE|TRANSFORM|' - r'TRANSFORMS|TRANSLATE|TRANSLATION|TREAT|TRIGGER|TRIGGER_CATALOG|' - r'TRIGGER_NAME|TRIGGER_SCHEMA|TRIM|TRUE|TRUNCATE|TRUSTED|TYPE|' - r'UNCOMMITTED|UNDER|UNENCRYPTED|UNION|UNIQUE|UNKNOWN|UNLISTEN|' - r'UNNAMED|UNNEST|UNTIL|UPDATE|UPPER|USAGE|USER|' - r'USER_DEFINED_TYPE_CATALOG|USER_DEFINED_TYPE_NAME|' - r'USER_DEFINED_TYPE_SCHEMA|USING|VACUUM|VALID|VALIDATOR|VALUES|' - r'VARIABLE|VERBOSE|VERSION|VIEW|VOLATILE|WHEN|WHENEVER|WHERE|' - r'WITH|WITHOUT|WORK|WRITE|YEAR|ZONE)\b', Keyword), - (r'(ARRAY|BIGINT|BINARY|BIT|BLOB|BOOLEAN|CHAR|CHARACTER|DATE|' - r'DEC|DECIMAL|FLOAT|INT|INTEGER|INTERVAL|NUMBER|NUMERIC|REAL|' - r'SERIAL|SMALLINT|VARCHAR|VARYING|INT8|SERIAL8|TEXT)\b', - Name.Builtin), - (r'[+*/<>=~!@#%^&|`?^-]', Operator), - (r'[0-9]+', Number.Integer), - # TODO: Backslash escapes? - (r"'(''|[^'])*'", String.Single), - (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL - (r'[a-zA-Z_][a-zA-Z0-9_]*', Name), - (r'[;:()\[\],\.]', Punctuation) - ], - 'multiline-comments': [ - (r'/\*', Comment.Multiline, 'multiline-comments'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[^/\*]+', Comment.Multiline), - (r'[/*]', Comment.Multiline) - ] - } - - -class MySqlLexer(RegexLexer): - """ - Special lexer for MySQL. 
- """ - - name = 'MySQL' - aliases = ['mysql'] - mimetypes = ['text/x-mysql'] - - flags = re.IGNORECASE - tokens = { - 'root': [ - (r'\s+', Text), - (r'(#|--\s+).*?\n', Comment.Single), - (r'/\*', Comment.Multiline, 'multiline-comments'), - (r'[0-9]+', Number.Integer), - (r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float), - # TODO: add backslash escapes - (r"'(''|[^'])*'", String.Single), - (r'"(""|[^"])*"', String.Double), - (r"`(``|[^`])*`", String.Symbol), - (r'[+*/<>=~!@#%^&|`?^-]', Operator), - (r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|' - r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|' - r'tinyblob|mediumblob|longblob|blob|float|double|double\s+' - r'precision|real|numeric|dec|decimal|timestamp|year|char|' - r'varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?', - bygroups(Keyword.Type, Text, Punctuation)), - (r'\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|' - r'bigint|binary|blob|both|by|call|cascade|case|change|char|' - r'character|check|collate|column|condition|constraint|continue|' - r'convert|create|cross|current_date|current_time|' - r'current_timestamp|current_user|cursor|database|databases|' - r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|' - r'declare|default|delayed|delete|desc|describe|deterministic|' - r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|' - r'enclosed|escaped|exists|exit|explain|fetch|float|float4|float8' - r'|for|force|foreign|from|fulltext|grant|group|having|' - r'high_priority|hour_microsecond|hour_minute|hour_second|if|' - r'ignore|in|index|infile|inner|inout|insensitive|insert|int|' - r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|' - r'join|key|keys|kill|leading|leave|left|like|limit|lines|load|' - r'localtime|localtimestamp|lock|long|loop|low_priority|match|' - r'minute_microsecond|minute_second|mod|modifies|natural|' - r'no_write_to_binlog|not|numeric|on|optimize|option|optionally|' - r'or|order|out|outer|outfile|precision|primary|procedure|purge|' - r'raid0|read|reads|real|references|regexp|release|rename|repeat|' - r'replace|require|restrict|return|revoke|right|rlike|schema|' - r'schemas|second_microsecond|select|sensitive|separator|set|' - r'show|smallint|soname|spatial|specific|sql|sql_big_result|' - r'sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|' - r'sqlwarning|ssl|starting|straight_join|table|terminated|then|' - r'to|trailing|trigger|undo|union|unique|unlock|unsigned|update|' - r'usage|use|using|utc_date|utc_time|utc_timestamp|values|' - r'varying|when|where|while|with|write|x509|xor|year_month|' - r'zerofill)\b', Keyword), - # TODO: this list is not complete - (r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo), - (r'(true|false|null)', Name.Constant), - (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()', - bygroups(Name.Function, Text, Punctuation)), - (r'[a-zA-Z_][a-zA-Z0-9_]*', Name), - (r'@[A-Za-z0-9]*[._]*[A-Za-z0-9]*', Name.Variable), - (r'[;:()\[\],\.]', Punctuation) - ], - 'multiline-comments': [ - (r'/\*', Comment.Multiline, 'multiline-comments'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[^/\*]+', Comment.Multiline), - (r'[/*]', Comment.Multiline) - ] - } +__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer', + 'SmalltalkLexer', 'LogtalkLexer', 'GnuplotLexer', 'PovrayLexer', + 'AppleScriptLexer', 'ModelicaLexer', 'RebolLexer', 'ABAPLexer', + 'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer', 'PostScriptLexer', + 'AutohotkeyLexer', 'GoodDataCLLexer', 'MaqlLexer', 'ProtoBufLexer', + 'HybrisLexer', 'AwkLexer', 
'Cfengine3Lexer', 'SnobolLexer', + 'ECLLexer', 'UrbiscriptLexer', 'OpenEdgeLexer'] class ECLLexer(RegexLexer): @@ -320,47 +132,6 @@ class ECLLexer(RegexLexer): } - -class SqliteConsoleLexer(Lexer): - """ - Lexer for example sessions using sqlite3. - - *New in Pygments 0.11.* - """ - - name = 'sqlite3con' - aliases = ['sqlite3'] - filenames = ['*.sqlite3-console'] - mimetypes = ['text/x-sqlite3-console'] - - def get_tokens_unprocessed(self, data): - sql = SqlLexer(**self.options) - - curcode = '' - insertions = [] - for match in line_re.finditer(data): - line = match.group() - if line.startswith('sqlite> ') or line.startswith(' ...> '): - insertions.append((len(curcode), - [(0, Generic.Prompt, line[:8])])) - curcode += line[8:] - else: - if curcode: - for item in do_insertions(insertions, - sql.get_tokens_unprocessed(curcode)): - yield item - curcode = '' - insertions = [] - if line.startswith('SQL error: '): - yield (match.start(), Generic.Traceback, line) - else: - yield (match.start(), Generic.Output, line) - if curcode: - for item in do_insertions(insertions, - sql.get_tokens_unprocessed(curcode)): - yield item - - class BrainfuckLexer(RegexLexer): """ Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_ @@ -423,191 +194,6 @@ class BefungeLexer(RegexLexer): } - -class BashLexer(RegexLexer): - """ - Lexer for (ba|k|)sh shell scripts. - - *New in Pygments 0.6.* - """ - - name = 'Bash' - aliases = ['bash', 'sh', 'ksh'] - filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass'] - mimetypes = ['application/x-sh', 'application/x-shellscript'] - - tokens = { - 'root': [ - include('basic'), - (r'\$\(\(', Keyword, 'math'), - (r'\$\(', Keyword, 'paren'), - (r'\${#?', Keyword, 'curly'), - (r'`', String.Backtick, 'backticks'), - include('data'), - ], - 'basic': [ - (r'\b(if|fi|else|while|do|done|for|then|return|function|case|' - r'select|continue|until|esac|elif)\s*\b', - Keyword), - (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|' - r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|' - r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|' - r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|' - r'shopt|source|suspend|test|time|times|trap|true|type|typeset|' - r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)', - Name.Builtin), - (r'#.*\n', Comment), - (r'\\[\w\W]', String.Escape), - (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)), - (r'[\[\]{}()=]', Operator), - (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String), - (r'&&|\|\|', Operator), - ], - 'data': [ - (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double), - (r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single), - (r';', Text), - (r'\s+', Text), - (r'[^=\s\n\[\]{}()$"\'`\\<]+', Text), - (r'\d+(?= |\Z)', Number), - (r'\$#?(\w+|.)', Name.Variable), - (r'<', Text), - ], - 'curly': [ - (r'}', Keyword, '#pop'), - (r':-', Keyword), - (r'[a-zA-Z0-9_]+', Name.Variable), - (r'[^}:"\'`$]+', Punctuation), - (r':', Punctuation), - include('root'), - ], - 'paren': [ - (r'\)', Keyword, '#pop'), - include('root'), - ], - 'math': [ - (r'\)\)', Keyword, '#pop'), - (r'[-+*/%^|&]|\*\*|\|\|', Operator), - (r'\d+', Number), - include('root'), - ], - 'backticks': [ - (r'`', String.Backtick, '#pop'), - include('root'), - ], - } - - def analyse_text(text): - return shebang_matches(text, r'(ba|z|)sh') - - -class BashSessionLexer(Lexer): - """ - Lexer for simplistic shell sessions. 
- - *New in Pygments 1.1.* - """ - - name = 'Bash Session' - aliases = ['console'] - filenames = ['*.sh-session'] - mimetypes = ['application/x-shell-session'] - - def get_tokens_unprocessed(self, text): - bashlexer = BashLexer(**self.options) - - pos = 0 - curcode = '' - insertions = [] - - for match in line_re.finditer(text): - line = match.group() - m = re.match(r'^((?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)?|\[\S+[@:]' - r'[^\n]+\].+)[$#%])(.*\n?)', line) - if m: - # To support output lexers (say diff output), the output - # needs to be broken by prompts whenever the output lexer - # changes. - if not insertions: - pos = match.start() - - insertions.append((len(curcode), - [(0, Generic.Prompt, m.group(1))])) - curcode += m.group(2) - elif line.startswith('>'): - insertions.append((len(curcode), - [(0, Generic.Prompt, line[:1])])) - curcode += line[1:] - else: - if insertions: - toks = bashlexer.get_tokens_unprocessed(curcode) - for i, t, v in do_insertions(insertions, toks): - yield pos+i, t, v - yield match.start(), Generic.Output, line - insertions = [] - curcode = '' - if insertions: - for i, t, v in do_insertions(insertions, - bashlexer.get_tokens_unprocessed(curcode)): - yield pos+i, t, v - - -class BatchLexer(RegexLexer): - """ - Lexer for the DOS/Windows Batch file format. - - *New in Pygments 0.7.* - """ - name = 'Batchfile' - aliases = ['bat'] - filenames = ['*.bat', '*.cmd'] - mimetypes = ['application/x-dos-batch'] - - flags = re.MULTILINE | re.IGNORECASE - - tokens = { - 'root': [ - # Lines can start with @ to prevent echo - (r'^\s*@', Punctuation), - (r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)), - (r'".*?"', String.Double), - (r"'.*?'", String.Single), - # If made more specific, make sure you still allow expansions - # like %~$VAR:zlt - (r'%%?[~$:\w]+%?', Name.Variable), - (r'::.*', Comment), # Technically :: only works at BOL - (r'(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)), - (r'(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)), - (r'(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)), - (r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|' - r'setlocal|shift|errorlevel|exist|defined|cmdextversion|' - r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword), - (r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator), - include('basic'), - (r'.', Text), - ], - 'echo': [ - # Escapes only valid within echo args? - (r'\^\^|\^<|\^>|\^\|', String.Escape), - (r'\n', Text, '#pop'), - include('basic'), - (r'[^\'"^]+', Text), - ], - 'basic': [ - (r'".*?"', String.Double), - (r"'.*?'", String.Single), - (r'`.*?`', String.Backtick), - (r'-?\d+', Number), - (r',', Punctuation), - (r'=', Operator), - (r'/\S+', Name), - (r':\w+', Name.Label), - (r'\w:\w+', Text), - (r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)), - ], - } - - class RedcodeLexer(RegexLexer): """ A simple Redcode lexer based on ICWS'94. @@ -809,73 +395,6 @@ class SmalltalkLexer(RegexLexer): } -class TcshLexer(RegexLexer): - """ - Lexer for tcsh scripts. 
- - *New in Pygments 0.10.* - """ - - name = 'Tcsh' - aliases = ['tcsh', 'csh'] - filenames = ['*.tcsh', '*.csh'] - mimetypes = ['application/x-csh'] - - tokens = { - 'root': [ - include('basic'), - (r'\$\(', Keyword, 'paren'), - (r'\${#?', Keyword, 'curly'), - (r'`', String.Backtick, 'backticks'), - include('data'), - ], - 'basic': [ - (r'\b(if|endif|else|while|then|foreach|case|default|' - r'continue|goto|breaksw|end|switch|endsw)\s*\b', - Keyword), - (r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|' - r'complete|dirs|echo|echotc|eval|exec|exit|' - r'fg|filetest|getxvers|glob|getspath|hashstat|history|hup|inlib|jobs|kill|' - r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|' - r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|set|shift|' - r'sched|setenv|setpath|settc|setty|setxvers|shift|source|stop|suspend|' - r'source|suspend|telltc|time|' - r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|' - r'ver|wait|warp|watchlog|where|which)\s*\b', - Name.Builtin), - (r'#.*\n', Comment), - (r'\\[\w\W]', String.Escape), - (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)), - (r'[\[\]{}()=]+', Operator), - (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String), - ], - 'data': [ - (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double), - (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single), - (r'\s+', Text), - (r'[^=\s\n\[\]{}()$"\'`\\]+', Text), - (r'\d+(?= |\Z)', Number), - (r'\$#?(\w+|.)', Name.Variable), - ], - 'curly': [ - (r'}', Keyword, '#pop'), - (r':-', Keyword), - (r'[a-zA-Z0-9_]+', Name.Variable), - (r'[^}:"\'`$]+', Punctuation), - (r':', Punctuation), - include('root'), - ], - 'paren': [ - (r'\)', Keyword, '#pop'), - include('root'), - ], - 'backticks': [ - (r'`', String.Backtick, '#pop'), - include('root'), - ], - } - - class LogtalkLexer(RegexLexer): """ For `Logtalk <http://logtalk.org/>`_ source code. diff --git a/pygments/lexers/postgres.py b/pygments/lexers/postgres.py deleted file mode 100644 index 08bc92f9..00000000 --- a/pygments/lexers/postgres.py +++ /dev/null @@ -1,326 +0,0 @@ -# -*- coding: utf-8 -*- -""" - pygments.lexers.postgres - ~~~~~~~~~~~~~~~~~~~~~~~~ - - Lexers for PostgreSQL-specific SQL and psql interactive session. - - `PostgresLexer` - A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL - lexer are: - - - keywords and data types list parsed from the PG docs (run the - `_postgres_builtins` module to update them); - - Content of $-strings parsed using a specific lexer, e.g. the content - of a PL/Python function is parsed using the Python lexer; - - parse PG specific constructs: E-strings, $-strings, U&-strings, - different operators and punctuation. - - `PlPgsqlLexer` - A lexer for the PL/pgSQL language. Adds a few specific construct on - top of the PG SQL lexer (such as <<label>>). - - `PostgresConsoleLexer` - A lexer to highlight an interactive psql session: - - - identifies the prompt and does its best to detect the end of command - in multiline statement where not all the lines are prefixed by a - prompt, telling them apart from the output; - - highlights errors in the output and notification levels; - - handles psql backslash commands. - - The ``tests/examplefiles`` contains a few test files with data to be - parsed by these lexers. - - :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -import re -from copy import deepcopy - -from pygments.lexer import Lexer, RegexLexer, do_insertions -from pygments.token import Punctuation, \ - Text, Comment, Operator, Keyword, Name, String, Number, Generic -from pygments.lexers import get_lexer_by_name, ClassNotFound - -from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \ - PSEUDO_TYPES, PLPGSQL_KEYWORDS - - -__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer'] - -line_re = re.compile('.*?\n') - -language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE) - -def language_callback(lexer, match): - """Parse the content of a $-string using a lexer - - The lexer is chosen looking for a nearby LANGUAGE. - - Note: this function should have been a `PostgresBase` method, but the - rules deepcopy fails in this case. - """ - l = None - m = language_re.match(lexer.text[match.end():match.end()+100]) - if m is not None: - l = lexer._get_lexer(m.group(1)) - else: - m = list(language_re.finditer( - lexer.text[max(0, match.start()-100):match.start()])) - if m: - l = lexer._get_lexer(m[-1].group(1)) - - if l: - yield (match.start(1), String, match.group(1)) - for x in l.get_tokens_unprocessed(match.group(2)): - yield x - yield (match.start(3), String, match.group(3)) - - else: - yield (match.start(), String, match.group()) - - -class PostgresBase(object): - """Base class for Postgres-related lexers. - - This is implemented as a mixin to avoid the Lexer metaclass kicking in. - this way the different lexer don't have a common Lexer ancestor. If they - had, _tokens could be created on this ancestor and not updated for the - other classes, resulting e.g. in PL/pgSQL parsed as SQL. This shortcoming - seem to suggest that regexp lexers are not really subclassable. - - `language_callback` should really be our method, but this breaks deepcopy. - """ - def get_tokens_unprocessed(self, text, *args): - # Have a copy of the entire text to be used by `language_callback`. - self.text = text - for x in super(PostgresBase, self).get_tokens_unprocessed( - text, *args): - yield x - - def _get_lexer(self, lang): - if lang.lower() == 'sql': - return get_lexer_by_name('postgresql', **self.options) - - tries = [ lang ] - if lang.startswith('pl'): - tries.append(lang[2:]) - if lang.endswith('u'): - tries.append(lang[:-1]) - if lang.startswith('pl') and lang.endswith('u'): - tries.append(lang[2:-1]) - - for l in tries: - try: - return get_lexer_by_name(l, **self.options) - except ClassNotFound: - pass - else: - # TODO: better logging - # print >>sys.stderr, "language not found:", lang - return None - - -class PostgresLexer(PostgresBase, RegexLexer): - """ - Lexer for the PostgreSQL dialect of SQL. 
- - *New in Pygments 1.5.* - """ - - name = 'PostgreSQL SQL dialect' - aliases = ['postgresql', 'postgres'] - mimetypes = ['text/x-postgresql'] - - flags = re.IGNORECASE - tokens = { - 'root': [ - (r'\s+', Text), - (r'--.*?\n', Comment.Single), - (r'/\*', Comment.Multiline, 'multiline-comments'), - (r'(' + '|'.join([s.replace(" ", "\s+") - for s in DATATYPES + PSEUDO_TYPES]) - + r')\b', Name.Builtin), - (r'(' + '|'.join(KEYWORDS) + r')\b', Keyword), - (r'[+*/<>=~!@#%^&|`?^-]+', Operator), - (r'::', Operator), # cast - (r'\$\d+', Name.Variable), - (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float), - (r'[0-9]+', Number.Integer), - (r"(E|U&)?'(''|[^'])*'", String.Single), - (r'(U&)?"(""|[^"])*"', String.Name), # quoted identifier - (r'(?ms)(\$[^\$]*\$)(.*?)(\1)', language_callback), - (r'[a-zA-Z_][a-zA-Z0-9_]*', Name), - - # psql variable in SQL - (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable), - - (r'[;:()\[\]\{\},\.]', Punctuation), - ], - 'multiline-comments': [ - (r'/\*', Comment.Multiline, 'multiline-comments'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[^/\*]+', Comment.Multiline), - (r'[/*]', Comment.Multiline) - ], - } - - -class PlPgsqlLexer(PostgresBase, RegexLexer): - """ - Handle the extra syntax in Pl/pgSQL language. - - *New in Pygments 1.5.* - """ - name = 'PL/pgSQL' - aliases = ['plpgsql'] - mimetypes = ['text/x-plpgsql'] - - flags = re.IGNORECASE - tokens = deepcopy(PostgresLexer.tokens) - - # extend the keywords list - for i, pattern in enumerate(tokens['root']): - if pattern[1] == Keyword: - tokens['root'][i] = ( - r'(' + '|'.join(KEYWORDS + PLPGSQL_KEYWORDS) + r')\b', - Keyword) - del i - break - else: - assert 0, "SQL keywords not found" - - # Add specific PL/pgSQL rules (before the SQL ones) - tokens['root'][:0] = [ - (r'\%[a-z][a-z0-9_]*\b', Name.Builtin), # actually, a datatype - (r':=', Operator), - (r'\<\<[a-z][a-z0-9_]*\>\>', Name.Label), - (r'\#[a-z][a-z0-9_]*\b', Keyword.Pseudo), # #variable_conflict - ] - - -class PsqlRegexLexer(PostgresBase, RegexLexer): - """ - Extend the PostgresLexer adding support specific for psql commands. - - This is not a complete psql lexer yet as it lacks prompt support - and output rendering. - """ - - name = 'PostgreSQL console - regexp based lexer' - aliases = [] # not public - - flags = re.IGNORECASE - tokens = deepcopy(PostgresLexer.tokens) - - tokens['root'].append( - (r'\\[^\s]+', Keyword.Pseudo, 'psql-command')) - tokens['psql-command'] = [ - (r'\n', Text, 'root'), - (r'\s+', Text), - (r'\\[^\s]+', Keyword.Pseudo), - (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable), - (r"'(''|[^'])*'", String.Single), - (r"`([^`])*`", String.Backtick), - (r"[^\s]+", String.Symbol), - ] - -re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]') -re_psql_command = re.compile(r'\s*\\') -re_end_command = re.compile(r';\s*(--.*?)?$') -re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$') -re_error = re.compile(r'(ERROR|FATAL):') -re_message = re.compile( - r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|' - r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)') - -def lookahead(x): - """Wrap an iterator and allow pushing back an item.""" - for i in x: - while 1: - i = yield i - if i is None: - break - yield i - - -class PostgresConsoleLexer(Lexer): - """ - Lexer for psql sessions. 
-
-    *New in Pygments 1.5.*
-    """
-
-    name = 'PostgreSQL console (psql)'
-    aliases = ['psql', 'postgresql-console', 'postgres-console']
-    mimetypes = ['text/x-postgresql-psql']
-
-    def get_tokens_unprocessed(self, data):
-        sql = PsqlRegexLexer(**self.options)
-
-        lines = lookahead(line_re.findall(data))
-
-        # prompt-output cycle
-        while 1:
-
-            # consume the lines of the command: start with an optional prompt
-            # and continue until the end of command is detected
-            curcode = ''
-            insertions = []
-            while 1:
-                try:
-                    line = lines.next()
-                except StopIteration:
-                    # allow the emission of partially collected items
-                    # the repl loop will be broken below
-                    break
-
-                # Identify a shell prompt in case of psql commandline example
-                if line.startswith('$') and not curcode:
-                    lexer = get_lexer_by_name('console', **self.options)
-                    for x in lexer.get_tokens_unprocessed(line):
-                        yield x
-                    break
-
-                # Identify a psql prompt
-                mprompt = re_prompt.match(line)
-                if mprompt is not None:
-                    insertions.append((len(curcode),
-                                       [(0, Generic.Prompt, mprompt.group())]))
-                    curcode += line[len(mprompt.group()):]
-                else:
-                    curcode += line
-
-                # Check if this is the end of the command
-                # TODO: better handle multiline comments at the end with
-                # a lexer with an external state?
-                if re_psql_command.match(curcode) \
-                or re_end_command.search(curcode):
-                    break
-
-            # Emit the combined stream of command and prompt(s)
-            for item in do_insertions(insertions,
-                                      sql.get_tokens_unprocessed(curcode)):
-                yield item
-
-            # Emit the output lines
-            out_token = Generic.Output
-            while 1:
-                line = lines.next()
-                mprompt = re_prompt.match(line)
-                if mprompt is not None:
-                    # push the line back to have it processed by the prompt
-                    lines.send(line)
-                    break
-
-                mmsg = re_message.match(line)
-                if mmsg is not None:
-                    if mmsg.group(1).startswith("ERROR") \
-                    or mmsg.group(1).startswith("FATAL"):
-                        out_token = Generic.Error
-                    yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
-                    yield (mmsg.start(2), out_token, mmsg.group(2))
-                else:
-                    yield (0, out_token, line)
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
new file mode 100644
index 00000000..2e0d4223
--- /dev/null
+++ b/pygments/lexers/shell.py
@@ -0,0 +1,273 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.shell
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for various shells.
+
+    :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, include
+from pygments.token import Punctuation, \
+    Text, Comment, Operator, Keyword, Name, String, Number, Generic
+from pygments.util import shebang_matches
+
+
+__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer']
+
+line_re = re.compile('.*?\n')
+
+
+class BashLexer(RegexLexer):
+    """
+    Lexer for (ba|k|)sh shell scripts.
+
+    *New in Pygments 0.6.*
+    """
+
+    name = 'Bash'
+    aliases = ['bash', 'sh', 'ksh']
+    filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass']
+    mimetypes = ['application/x-sh', 'application/x-shellscript']
+
+    tokens = {
+        'root': [
+            include('basic'),
+            (r'\$\(\(', Keyword, 'math'),
+            (r'\$\(', Keyword, 'paren'),
+            (r'\${#?', Keyword, 'curly'),
+            (r'`', String.Backtick, 'backticks'),
+            include('data'),
+        ],
+        'basic': [
+            (r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
+             r'select|continue|until|esac|elif)\s*\b',
+             Keyword),
+            (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
+             r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
+             r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
+             r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
+             r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
+             r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)',
+             Name.Builtin),
+            (r'#.*\n', Comment),
+            (r'\\[\w\W]', String.Escape),
+            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+            (r'[\[\]{}()=]', Operator),
+            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+            (r'&&|\|\|', Operator),
+        ],
+        'data': [
+            (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+            (r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+            (r';', Text),
+            (r'\s+', Text),
+            (r'[^=\s\n\[\]{}()$"\'`\\<]+', Text),
+            (r'\d+(?= |\Z)', Number),
+            (r'\$#?(\w+|.)', Name.Variable),
+            (r'<', Text),
+        ],
+        'curly': [
+            (r'}', Keyword, '#pop'),
+            (r':-', Keyword),
+            (r'[a-zA-Z0-9_]+', Name.Variable),
+            (r'[^}:"\'`$]+', Punctuation),
+            (r':', Punctuation),
+            include('root'),
+        ],
+        'paren': [
+            (r'\)', Keyword, '#pop'),
+            include('root'),
+        ],
+        'math': [
+            (r'\)\)', Keyword, '#pop'),
+            (r'[-+*/%^|&]|\*\*|\|\|', Operator),
+            (r'\d+', Number),
+            include('root'),
+        ],
+        'backticks': [
+            (r'`', String.Backtick, '#pop'),
+            include('root'),
+        ],
+    }
+
+    def analyse_text(text):
+        return shebang_matches(text, r'(ba|z|)sh')
+
+
+class BashSessionLexer(Lexer):
+    """
+    Lexer for simplistic shell sessions.
+
+    *New in Pygments 1.1.*
+    """
+
+    name = 'Bash Session'
+    aliases = ['console']
+    filenames = ['*.sh-session']
+    mimetypes = ['application/x-shell-session']
+
+    def get_tokens_unprocessed(self, text):
+        bashlexer = BashLexer(**self.options)
+
+        pos = 0
+        curcode = ''
+        insertions = []
+
+        for match in line_re.finditer(text):
+            line = match.group()
+            m = re.match(r'^((?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)?|\[\S+[@:]'
+                         r'[^\n]+\].+)[$#%])(.*\n?)', line)
+            if m:
+                # To support output lexers (say diff output), the output
+                # needs to be broken by prompts whenever the output lexer
+                # changes.
+                if not insertions:
+                    pos = match.start()
+
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, m.group(1))]))
+                curcode += m.group(2)
+            elif line.startswith('>'):
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, line[:1])]))
+                curcode += line[1:]
+            else:
+                if insertions:
+                    toks = bashlexer.get_tokens_unprocessed(curcode)
+                    for i, t, v in do_insertions(insertions, toks):
+                        yield pos+i, t, v
+                yield match.start(), Generic.Output, line
+                insertions = []
+                curcode = ''
+        if insertions:
+            for i, t, v in do_insertions(insertions,
+                                         bashlexer.get_tokens_unprocessed(curcode)):
+                yield pos+i, t, v
+
+
+class BatchLexer(RegexLexer):
+    """
+    Lexer for the DOS/Windows Batch file format.
+
+    *New in Pygments 0.7.*
+    """
+    name = 'Batchfile'
+    aliases = ['bat']
+    filenames = ['*.bat', '*.cmd']
+    mimetypes = ['application/x-dos-batch']
+
+    flags = re.MULTILINE | re.IGNORECASE
+
+    tokens = {
+        'root': [
+            # Lines can start with @ to prevent echo
+            (r'^\s*@', Punctuation),
+            (r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)),
+            (r'".*?"', String.Double),
+            (r"'.*?'", String.Single),
+            # If made more specific, make sure you still allow expansions
+            # like %~$VAR:zlt
+            (r'%%?[~$:\w]+%?', Name.Variable),
+            (r'::.*', Comment), # Technically :: only works at BOL
+            (r'(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
+            (r'(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
+            (r'(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
+            (r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|'
+             r'setlocal|shift|errorlevel|exist|defined|cmdextversion|'
+             r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword),
+            (r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator),
+            include('basic'),
+            (r'.', Text),
+        ],
+        'echo': [
+            # Escapes only valid within echo args?
+            (r'\^\^|\^<|\^>|\^\|', String.Escape),
+            (r'\n', Text, '#pop'),
+            include('basic'),
+            (r'[^\'"^]+', Text),
+        ],
+        'basic': [
+            (r'".*?"', String.Double),
+            (r"'.*?'", String.Single),
+            (r'`.*?`', String.Backtick),
+            (r'-?\d+', Number),
+            (r',', Punctuation),
+            (r'=', Operator),
+            (r'/\S+', Name),
+            (r':\w+', Name.Label),
+            (r'\w:\w+', Text),
+            (r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)),
+        ],
+    }
+
+
+class TcshLexer(RegexLexer):
+    """
+    Lexer for tcsh scripts.
+
+    *New in Pygments 0.10.*
+    """
+
+    name = 'Tcsh'
+    aliases = ['tcsh', 'csh']
+    filenames = ['*.tcsh', '*.csh']
+    mimetypes = ['application/x-csh']
+
+    tokens = {
+        'root': [
+            include('basic'),
+            (r'\$\(', Keyword, 'paren'),
+            (r'\${#?', Keyword, 'curly'),
+            (r'`', String.Backtick, 'backticks'),
+            include('data'),
+        ],
+        'basic': [
+            (r'\b(if|endif|else|while|then|foreach|case|default|'
+             r'continue|goto|breaksw|end|switch|endsw)\s*\b',
+             Keyword),
+            (r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
+             r'complete|dirs|echo|echotc|eval|exec|exit|'
+             r'fg|filetest|getxvers|glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
+             r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
+             r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|set|shift|'
+             r'sched|setenv|setpath|settc|setty|setxvers|shift|source|stop|suspend|'
+             r'source|suspend|telltc|time|'
+             r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
+             r'ver|wait|warp|watchlog|where|which)\s*\b',
+             Name.Builtin),
+            (r'#.*\n', Comment),
+            (r'\\[\w\W]', String.Escape),
+            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+            (r'[\[\]{}()=]+', Operator),
+            (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+        ],
+        'data': [
+            (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+            (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+            (r'\s+', Text),
+            (r'[^=\s\n\[\]{}()$"\'`\\]+', Text),
+            (r'\d+(?= |\Z)', Number),
+            (r'\$#?(\w+|.)', Name.Variable),
+        ],
+        'curly': [
+            (r'}', Keyword, '#pop'),
+            (r':-', Keyword),
+            (r'[a-zA-Z0-9_]+', Name.Variable),
+            (r'[^}:"\'`$]+', Punctuation),
+            (r':', Punctuation),
+            include('root'),
+        ],
+        'paren': [
+            (r'\)', Keyword, '#pop'),
+            include('root'),
+        ],
+        'backticks': [
+            (r'`', String.Backtick, '#pop'),
+            include('root'),
+        ],
+    }
diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py
new file mode 100644
index 00000000..1148a689
--- /dev/null
+++ b/pygments/lexers/sql.py
@@ -0,0 +1,556 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.sql
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexers for various SQL dialects and related interactive sessions.
+
+    Postgres specific lexers:
+
+    `PostgresLexer`
+        A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
+        lexer are:
+
+        - keywords and data types list parsed from the PG docs (run the
+          `_postgres_builtins` module to update them);
+        - Content of $-strings parsed using a specific lexer, e.g. the content
+          of a PL/Python function is parsed using the Python lexer;
+        - parse PG specific constructs: E-strings, $-strings, U&-strings,
+          different operators and punctuation.
+
+    `PlPgsqlLexer`
+        A lexer for the PL/pgSQL language. Adds a few specific construct on
+        top of the PG SQL lexer (such as <<label>>).
+
+    `PostgresConsoleLexer`
+        A lexer to highlight an interactive psql session:
+
+        - identifies the prompt and does its best to detect the end of command
+          in multiline statement where not all the lines are prefixed by a
+          prompt, telling them apart from the output;
+        - highlights errors in the output and notification levels;
+        - handles psql backslash commands.
+
+    The ``tests/examplefiles`` contains a few test files with data to be
+    parsed by these lexers.
+
+    :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+from copy import deepcopy
+
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups
+from pygments.token import Punctuation, \
+    Text, Comment, Operator, Keyword, Name, String, Number, Generic
+from pygments.lexers import get_lexer_by_name, ClassNotFound
+
+from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
+    PSEUDO_TYPES, PLPGSQL_KEYWORDS
+
+
+__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
+           'SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer']
+
+line_re = re.compile('.*?\n')
+
+language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
+
+def language_callback(lexer, match):
+    """Parse the content of a $-string using a lexer
+
+    The lexer is chosen looking for a nearby LANGUAGE.
+
+    Note: this function should have been a `PostgresBase` method, but the
+    rules deepcopy fails in this case.
+    """
+    l = None
+    m = language_re.match(lexer.text[match.end():match.end()+100])
+    if m is not None:
+        l = lexer._get_lexer(m.group(1))
+    else:
+        m = list(language_re.finditer(
+            lexer.text[max(0, match.start()-100):match.start()]))
+        if m:
+            l = lexer._get_lexer(m[-1].group(1))
+
+    if l:
+        yield (match.start(1), String, match.group(1))
+        for x in l.get_tokens_unprocessed(match.group(2)):
+            yield x
+        yield (match.start(3), String, match.group(3))
+
+    else:
+        yield (match.start(), String, match.group())
+
+
+class PostgresBase(object):
+    """Base class for Postgres-related lexers.
+
+    This is implemented as a mixin to avoid the Lexer metaclass kicking in.
+    this way the different lexer don't have a common Lexer ancestor. If they
+    had, _tokens could be created on this ancestor and not updated for the
+    other classes, resulting e.g. in PL/pgSQL parsed as SQL. This shortcoming
+    seem to suggest that regexp lexers are not really subclassable.
+
+    `language_callback` should really be our method, but this breaks deepcopy.
+    """
+    def get_tokens_unprocessed(self, text, *args):
+        # Have a copy of the entire text to be used by `language_callback`.
+        self.text = text
+        for x in super(PostgresBase, self).get_tokens_unprocessed(
+                text, *args):
+            yield x
+
+    def _get_lexer(self, lang):
+        if lang.lower() == 'sql':
+            return get_lexer_by_name('postgresql', **self.options)
+
+        tries = [ lang ]
+        if lang.startswith('pl'):
+            tries.append(lang[2:])
+        if lang.endswith('u'):
+            tries.append(lang[:-1])
+        if lang.startswith('pl') and lang.endswith('u'):
+            tries.append(lang[2:-1])
+
+        for l in tries:
+            try:
+                return get_lexer_by_name(l, **self.options)
+            except ClassNotFound:
+                pass
+        else:
+            # TODO: better logging
+            # print >>sys.stderr, "language not found:", lang
+            return None
+
+
+class PostgresLexer(PostgresBase, RegexLexer):
+    """
+    Lexer for the PostgreSQL dialect of SQL.
+
+    *New in Pygments 1.5.*
+    """
+
+    name = 'PostgreSQL SQL dialect'
+    aliases = ['postgresql', 'postgres']
+    mimetypes = ['text/x-postgresql']
+
+    flags = re.IGNORECASE
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'--.*?\n', Comment.Single),
+            (r'/\*', Comment.Multiline, 'multiline-comments'),
+            (r'(' + '|'.join([s.replace(" ", "\s+")
+                              for s in DATATYPES + PSEUDO_TYPES])
+                  + r')\b', Name.Builtin),
+            (r'(' + '|'.join(KEYWORDS) + r')\b', Keyword),
+            (r'[+*/<>=~!@#%^&|`?^-]+', Operator),
+            (r'::', Operator),  # cast
+            (r'\$\d+', Name.Variable),
+            (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
+            (r'[0-9]+', Number.Integer),
+            (r"(E|U&)?'(''|[^'])*'", String.Single),
+            (r'(U&)?"(""|[^"])*"', String.Name),  # quoted identifier
+            (r'(?ms)(\$[^\$]*\$)(.*?)(\1)', language_callback),
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+
+            # psql variable in SQL
+            (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
+
+            (r'[;:()\[\]\{\},\.]', Punctuation),
+        ],
+        'multiline-comments': [
+            (r'/\*', Comment.Multiline, 'multiline-comments'),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'[^/\*]+', Comment.Multiline),
+            (r'[/*]', Comment.Multiline)
+        ],
+    }
+
+
+class PlPgsqlLexer(PostgresBase, RegexLexer):
+    """
+    Handle the extra syntax in Pl/pgSQL language.
+
+    *New in Pygments 1.5.*
+    """
+    name = 'PL/pgSQL'
+    aliases = ['plpgsql']
+    mimetypes = ['text/x-plpgsql']
+
+    flags = re.IGNORECASE
+    tokens = deepcopy(PostgresLexer.tokens)
+
+    # extend the keywords list
+    for i, pattern in enumerate(tokens['root']):
+        if pattern[1] == Keyword:
+            tokens['root'][i] = (
+                r'(' + '|'.join(KEYWORDS + PLPGSQL_KEYWORDS) + r')\b',
+                Keyword)
+            del i
+            break
+    else:
+        assert 0, "SQL keywords not found"
+
+    # Add specific PL/pgSQL rules (before the SQL ones)
+    tokens['root'][:0] = [
+        (r'\%[a-z][a-z0-9_]*\b', Name.Builtin),    # actually, a datatype
+        (r':=', Operator),
+        (r'\<\<[a-z][a-z0-9_]*\>\>', Name.Label),
+        (r'\#[a-z][a-z0-9_]*\b', Keyword.Pseudo),  # #variable_conflict
+    ]
+
+
+class PsqlRegexLexer(PostgresBase, RegexLexer):
+    """
+    Extend the PostgresLexer adding support specific for psql commands.
+
+    This is not a complete psql lexer yet as it lacks prompt support
+    and output rendering.
+ """ + + name = 'PostgreSQL console - regexp based lexer' + aliases = [] # not public + + flags = re.IGNORECASE + tokens = deepcopy(PostgresLexer.tokens) + + tokens['root'].append( + (r'\\[^\s]+', Keyword.Pseudo, 'psql-command')) + tokens['psql-command'] = [ + (r'\n', Text, 'root'), + (r'\s+', Text), + (r'\\[^\s]+', Keyword.Pseudo), + (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable), + (r"'(''|[^'])*'", String.Single), + (r"`([^`])*`", String.Backtick), + (r"[^\s]+", String.Symbol), + ] + +re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]') +re_psql_command = re.compile(r'\s*\\') +re_end_command = re.compile(r';\s*(--.*?)?$') +re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$') +re_error = re.compile(r'(ERROR|FATAL):') +re_message = re.compile( + r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|' + r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)') + +def lookahead(x): + """Wrap an iterator and allow pushing back an item.""" + for i in x: + while 1: + i = yield i + if i is None: + break + yield i + + +class PostgresConsoleLexer(Lexer): + """ + Lexer for psql sessions. + + *New in Pygments 1.5.* + """ + + name = 'PostgreSQL console (psql)' + aliases = ['psql', 'postgresql-console', 'postgres-console'] + mimetypes = ['text/x-postgresql-psql'] + + def get_tokens_unprocessed(self, data): + sql = PsqlRegexLexer(**self.options) + + lines = lookahead(line_re.findall(data)) + + # prompt-output cycle + while 1: + + # consume the lines of the command: start with an optional prompt + # and continue until the end of command is detected + curcode = '' + insertions = [] + while 1: + try: + line = lines.next() + except StopIteration: + # allow the emission of partially collected items + # the repl loop will be broken below + break + + # Identify a shell prompt in case of psql commandline example + if line.startswith('$') and not curcode: + lexer = get_lexer_by_name('console', **self.options) + for x in lexer.get_tokens_unprocessed(line): + yield x + break + + # Identify a psql prompt + mprompt = re_prompt.match(line) + if mprompt is not None: + insertions.append((len(curcode), + [(0, Generic.Prompt, mprompt.group())])) + curcode += line[len(mprompt.group()):] + else: + curcode += line + + # Check if this is the end of the command + # TODO: better handle multiline comments at the end with + # a lexer with an external state? + if re_psql_command.match(curcode) \ + or re_end_command.search(curcode): + break + + # Emit the combined stream of command and prompt(s) + for item in do_insertions(insertions, + sql.get_tokens_unprocessed(curcode)): + yield item + + # Emit the output lines + out_token = Generic.Output + while 1: + line = lines.next() + mprompt = re_prompt.match(line) + if mprompt is not None: + # push the line back to have it processed by the prompt + lines.send(line) + break + + mmsg = re_message.match(line) + if mmsg is not None: + if mmsg.group(1).startswith("ERROR") \ + or mmsg.group(1).startswith("FATAL"): + out_token = Generic.Error + yield (mmsg.start(1), Generic.Strong, mmsg.group(1)) + yield (mmsg.start(2), out_token, mmsg.group(2)) + else: + yield (0, out_token, line) + + +class SqlLexer(RegexLexer): + """ + Lexer for Structured Query Language. Currently, this lexer does + not recognize any special syntax except ANSI SQL. 
+ """ + + name = 'SQL' + aliases = ['sql'] + filenames = ['*.sql'] + mimetypes = ['text/x-sql'] + + flags = re.IGNORECASE + tokens = { + 'root': [ + (r'\s+', Text), + (r'--.*?\n', Comment.Single), + (r'/\*', Comment.Multiline, 'multiline-comments'), + (r'(ABORT|ABS|ABSOLUTE|ACCESS|ADA|ADD|ADMIN|AFTER|AGGREGATE|' + r'ALIAS|ALL|ALLOCATE|ALTER|ANALYSE|ANALYZE|AND|ANY|ARE|AS|' + r'ASC|ASENSITIVE|ASSERTION|ASSIGNMENT|ASYMMETRIC|AT|ATOMIC|' + r'AUTHORIZATION|AVG|BACKWARD|BEFORE|BEGIN|BETWEEN|BITVAR|' + r'BIT_LENGTH|BOTH|BREADTH|BY|C|CACHE|CALL|CALLED|CARDINALITY|' + r'CASCADE|CASCADED|CASE|CAST|CATALOG|CATALOG_NAME|CHAIN|' + r'CHARACTERISTICS|CHARACTER_LENGTH|CHARACTER_SET_CATALOG|' + r'CHARACTER_SET_NAME|CHARACTER_SET_SCHEMA|CHAR_LENGTH|CHECK|' + r'CHECKED|CHECKPOINT|CLASS|CLASS_ORIGIN|CLOB|CLOSE|CLUSTER|' + r'COALSECE|COBOL|COLLATE|COLLATION|COLLATION_CATALOG|' + r'COLLATION_NAME|COLLATION_SCHEMA|COLUMN|COLUMN_NAME|' + r'COMMAND_FUNCTION|COMMAND_FUNCTION_CODE|COMMENT|COMMIT|' + r'COMMITTED|COMPLETION|CONDITION_NUMBER|CONNECT|CONNECTION|' + r'CONNECTION_NAME|CONSTRAINT|CONSTRAINTS|CONSTRAINT_CATALOG|' + r'CONSTRAINT_NAME|CONSTRAINT_SCHEMA|CONSTRUCTOR|CONTAINS|' + r'CONTINUE|CONVERSION|CONVERT|COPY|CORRESPONTING|COUNT|' + r'CREATE|CREATEDB|CREATEUSER|CROSS|CUBE|CURRENT|CURRENT_DATE|' + r'CURRENT_PATH|CURRENT_ROLE|CURRENT_TIME|CURRENT_TIMESTAMP|' + r'CURRENT_USER|CURSOR|CURSOR_NAME|CYCLE|DATA|DATABASE|' + r'DATETIME_INTERVAL_CODE|DATETIME_INTERVAL_PRECISION|DAY|' + r'DEALLOCATE|DECLARE|DEFAULT|DEFAULTS|DEFERRABLE|DEFERRED|' + r'DEFINED|DEFINER|DELETE|DELIMITER|DELIMITERS|DEREF|DESC|' + r'DESCRIBE|DESCRIPTOR|DESTROY|DESTRUCTOR|DETERMINISTIC|' + r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|' + r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|' + r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|' + r'EXCEPT|ESCEPTION|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|' + r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|' + r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|' + r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|' + r'GROUP|GROUPING|HANDLER|HAVING|HIERARCHY|HOLD|HOST|IDENTITY|' + r'IGNORE|ILIKE|IMMEDIATE|IMMUTABLE|IMPLEMENTATION|IMPLICIT|IN|' + r'INCLUDING|INCREMENT|INDEX|INDITCATOR|INFIX|INHERITS|INITIALIZE|' + r'INITIALLY|INNER|INOUT|INPUT|INSENSITIVE|INSERT|INSTANTIABLE|' + r'INSTEAD|INTERSECT|INTO|INVOKER|IS|ISNULL|ISOLATION|ITERATE|JOIN|' + r'KEY|KEY_MEMBER|KEY_TYPE|LANCOMPILER|LANGUAGE|LARGE|LAST|' + r'LATERAL|LEADING|LEFT|LENGTH|LESS|LEVEL|LIKE|LIMIT|LISTEN|LOAD|' + r'LOCAL|LOCALTIME|LOCALTIMESTAMP|LOCATION|LOCATOR|LOCK|LOWER|' + r'MAP|MATCH|MAX|MAXVALUE|MESSAGE_LENGTH|MESSAGE_OCTET_LENGTH|' + r'MESSAGE_TEXT|METHOD|MIN|MINUTE|MINVALUE|MOD|MODE|MODIFIES|' + r'MODIFY|MONTH|MORE|MOVE|MUMPS|NAMES|NATIONAL|NATURAL|NCHAR|' + r'NCLOB|NEW|NEXT|NO|NOCREATEDB|NOCREATEUSER|NONE|NOT|NOTHING|' + r'NOTIFY|NOTNULL|NULL|NULLABLE|NULLIF|OBJECT|OCTET_LENGTH|OF|OFF|' + r'OFFSET|OIDS|OLD|ON|ONLY|OPEN|OPERATION|OPERATOR|OPTION|OPTIONS|' + r'OR|ORDER|ORDINALITY|OUT|OUTER|OUTPUT|OVERLAPS|OVERLAY|OVERRIDING|' + r'OWNER|PAD|PARAMETER|PARAMETERS|PARAMETER_MODE|PARAMATER_NAME|' + r'PARAMATER_ORDINAL_POSITION|PARAMETER_SPECIFIC_CATALOG|' + r'PARAMETER_SPECIFIC_NAME|PARAMATER_SPECIFIC_SCHEMA|PARTIAL|' + r'PASCAL|PENDANT|PLACING|PLI|POSITION|POSTFIX|PRECISION|PREFIX|' + r'PREORDER|PREPARE|PRESERVE|PRIMARY|PRIOR|PRIVILEGES|PROCEDURAL|' + r'PROCEDURE|PUBLIC|READ|READS|RECHECK|RECURSIVE|REF|REFERENCES|' + 
+
+
+class MySqlLexer(RegexLexer):
+    """
+    Special lexer for MySQL.
+    """
+
+    name = 'MySQL'
+    aliases = ['mysql']
+    mimetypes = ['text/x-mysql']
+
+    flags = re.IGNORECASE
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'(#|--\s+).*?\n', Comment.Single),
+            (r'/\*', Comment.Multiline, 'multiline-comments'),
+            (r'[0-9]+', Number.Integer),
+            (r'[0-9]*\.[0-9]+(e[+-][0-9]+)?', Number.Float),
+            # TODO: add backslash escapes
+            (r"'(''|[^'])*'", String.Single),
+            (r'"(""|[^"])*"', String.Double),
+            (r"`(``|[^`])*`", String.Symbol),
+            (r'[+*/<>=~!@#%^&|`?^-]', Operator),
+            (r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|'
+             r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|'
+             r'tinyblob|mediumblob|longblob|blob|float|double|double\s+'
+             r'precision|real|numeric|dec|decimal|timestamp|year|char|'
+             r'varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?',
+             bygroups(Keyword.Type, Text, Punctuation)),
+            (r'\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|'
+             r'bigint|binary|blob|both|by|call|cascade|case|change|char|'
+             r'character|check|collate|column|condition|constraint|continue|'
+             r'convert|create|cross|current_date|current_time|'
+             r'current_timestamp|current_user|cursor|database|databases|'
+             r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|'
+             r'declare|default|delayed|delete|desc|describe|deterministic|'
+             r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|'
+             r'enclosed|escaped|exists|exit|explain|fetch|float|float4|'
+             r'float8|for|force|foreign|from|fulltext|grant|group|having|'
+             r'high_priority|hour_microsecond|hour_minute|hour_second|if|'
+             r'ignore|in|index|infile|inner|inout|insensitive|insert|int|'
+             r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|'
+             r'join|key|keys|kill|leading|leave|left|like|limit|lines|load|'
+             r'localtime|localtimestamp|lock|long|loop|low_priority|match|'
+             r'minute_microsecond|minute_second|mod|modifies|natural|'
+             r'no_write_to_binlog|not|numeric|on|optimize|option|optionally|'
+             r'or|order|out|outer|outfile|precision|primary|procedure|purge|'
+             r'raid0|read|reads|real|references|regexp|release|rename|repeat|'
+             r'replace|require|restrict|return|revoke|right|rlike|schema|'
+             r'schemas|second_microsecond|select|sensitive|separator|set|'
+             r'show|smallint|soname|spatial|specific|sql|sql_big_result|'
+             r'sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|'
+             r'sqlwarning|ssl|starting|straight_join|table|terminated|then|'
+             r'to|trailing|trigger|undo|union|unique|unlock|unsigned|update|'
+             r'usage|use|using|utc_date|utc_time|utc_timestamp|values|'
+             r'varying|when|where|while|with|write|x509|xor|year_month|'
+             r'zerofill)\b', Keyword),
+            # TODO: this list is not complete
+            (r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo),
+            (r'(true|false|null)', Name.Constant),
+            (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()',
+             bygroups(Name.Function, Text, Punctuation)),
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+            (r'@[A-Za-z0-9]*[._]*[A-Za-z0-9]*', Name.Variable),
+            (r'[;:()\[\],\.]', Punctuation)
+        ],
+        'multiline-comments': [
+            (r'/\*', Comment.Multiline, 'multiline-comments'),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'[^/\*]+', Comment.Multiline),
+            (r'[/*]', Comment.Multiline)
+        ]
+    }
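A brief sketch of what the bygroups() type rule above yields; the DDL line is
invented for illustration:

    from pygments.lexers.sql import MySqlLexer

    ddl = 'CREATE TABLE t (name varchar(80), hits int);\n'
    for token_type, value in MySqlLexer().get_tokens(ddl):
        if value.strip():
            print((token_type, value))
    # 'varchar' and 'int' come out as Keyword.Type; the '(' that follows
    # 'varchar' is yielded separately as Punctuation by the bygroups() rule.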
+ """ + + name = 'MySQL' + aliases = ['mysql'] + mimetypes = ['text/x-mysql'] + + flags = re.IGNORECASE + tokens = { + 'root': [ + (r'\s+', Text), + (r'(#|--\s+).*?\n', Comment.Single), + (r'/\*', Comment.Multiline, 'multiline-comments'), + (r'[0-9]+', Number.Integer), + (r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float), + # TODO: add backslash escapes + (r"'(''|[^'])*'", String.Single), + (r'"(""|[^"])*"', String.Double), + (r"`(``|[^`])*`", String.Symbol), + (r'[+*/<>=~!@#%^&|`?^-]', Operator), + (r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|' + r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|' + r'tinyblob|mediumblob|longblob|blob|float|double|double\s+' + r'precision|real|numeric|dec|decimal|timestamp|year|char|' + r'varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?', + bygroups(Keyword.Type, Text, Punctuation)), + (r'\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|' + r'bigint|binary|blob|both|by|call|cascade|case|change|char|' + r'character|check|collate|column|condition|constraint|continue|' + r'convert|create|cross|current_date|current_time|' + r'current_timestamp|current_user|cursor|database|databases|' + r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|' + r'declare|default|delayed|delete|desc|describe|deterministic|' + r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|' + r'enclosed|escaped|exists|exit|explain|fetch|float|float4|float8' + r'|for|force|foreign|from|fulltext|grant|group|having|' + r'high_priority|hour_microsecond|hour_minute|hour_second|if|' + r'ignore|in|index|infile|inner|inout|insensitive|insert|int|' + r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|' + r'join|key|keys|kill|leading|leave|left|like|limit|lines|load|' + r'localtime|localtimestamp|lock|long|loop|low_priority|match|' + r'minute_microsecond|minute_second|mod|modifies|natural|' + r'no_write_to_binlog|not|numeric|on|optimize|option|optionally|' + r'or|order|out|outer|outfile|precision|primary|procedure|purge|' + r'raid0|read|reads|real|references|regexp|release|rename|repeat|' + r'replace|require|restrict|return|revoke|right|rlike|schema|' + r'schemas|second_microsecond|select|sensitive|separator|set|' + r'show|smallint|soname|spatial|specific|sql|sql_big_result|' + r'sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|' + r'sqlwarning|ssl|starting|straight_join|table|terminated|then|' + r'to|trailing|trigger|undo|union|unique|unlock|unsigned|update|' + r'usage|use|using|utc_date|utc_time|utc_timestamp|values|' + r'varying|when|where|while|with|write|x509|xor|year_month|' + r'zerofill)\b', Keyword), + # TODO: this list is not complete + (r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo), + (r'(true|false|null)', Name.Constant), + (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()', + bygroups(Name.Function, Text, Punctuation)), + (r'[a-zA-Z_][a-zA-Z0-9_]*', Name), + (r'@[A-Za-z0-9]*[._]*[A-Za-z0-9]*', Name.Variable), + (r'[;:()\[\],\.]', Punctuation) + ], + 'multiline-comments': [ + (r'/\*', Comment.Multiline, 'multiline-comments'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[^/\*]+', Comment.Multiline), + (r'[/*]', Comment.Multiline) + ] + } + + +class SqliteConsoleLexer(Lexer): + """ + Lexer for example sessions using sqlite3. 
+
+    *New in Pygments 0.11.*
+    """
+
+    name = 'sqlite3con'
+    aliases = ['sqlite3']
+    filenames = ['*.sqlite3-console']
+    mimetypes = ['text/x-sqlite3-console']
+
+    def get_tokens_unprocessed(self, data):
+        sql = SqlLexer(**self.options)
+
+        curcode = ''
+        insertions = []
+        for match in line_re.finditer(data):
+            line = match.group()
+            # both recognized prompts are exactly eight characters wide
+            if line.startswith('sqlite> ') or line.startswith('   ...> '):
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, line[:8])]))
+                curcode += line[8:]
+            else:
+                if curcode:
+                    for item in do_insertions(insertions,
+                                              sql.get_tokens_unprocessed(curcode)):
+                        yield item
+                    curcode = ''
+                    insertions = []
+                if line.startswith('SQL error: '):
+                    yield (match.start(), Generic.Traceback, line)
+                else:
+                    yield (match.start(), Generic.Output, line)
+        if curcode:
+            for item in do_insertions(insertions,
+                                      sql.get_tokens_unprocessed(curcode)):
+                yield item
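Finally, a sketch of the transcript format this last lexer consumes; both
recognized prompts are exactly eight characters wide, and the transcript
below is invented:

    from pygments.lexers.sql import SqliteConsoleLexer

    transcript = (
        'sqlite> SELECT count(*)\n'
        '   ...> FROM sqlite_master;\n'
        '0\n'
    )
    # Prompt prefixes become Generic.Prompt, the statement between them is
    # re-lexed with SqlLexer, and the bare '0' line is Generic.Output.
    for index, token_type, value in \
            SqliteConsoleLexer().get_tokens_unprocessed(transcript):
        print((index, token_type, value))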