Diffstat (limited to 'pygments')
-rw-r--r--  pygments/lexers/_mapping.py  |   7
-rw-r--r--  pygments/lexers/agile.py     |  68
-rw-r--r--  pygments/lexers/dotnet.py    |   5
-rw-r--r--  pygments/lexers/graph.py     |  81
-rw-r--r--  pygments/lexers/other.py     | 312
-rw-r--r--  pygments/lexers/qbasic.py    | 157
-rw-r--r--  pygments/lexers/shell.py     |   5
-rw-r--r--  pygments/lexers/text.py      |  29
8 files changed, 658 insertions(+), 6 deletions(-)
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index d9311558..c77ff1dc 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -60,6 +60,7 @@ LEXERS = {
'CbmBasicV2Lexer': ('pygments.lexers.other', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
'Cfengine3Lexer': ('pygments.lexers.other', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
+ 'ChaiscriptLexer': ('pygments.lexers.agile', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
'ChapelLexer': ('pygments.lexers.compiled', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
@@ -87,6 +88,7 @@ LEXERS = {
'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
'CudaLexer': ('pygments.lexers.compiled', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
+ 'CypherLexer': ('pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()),
'CythonLexer': ('pygments.lexers.compiled', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
'DLexer': ('pygments.lexers.compiled', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
@@ -97,6 +99,7 @@ LEXERS = {
'DgLexer': ('pygments.lexers.agile', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
+ 'DockerLexer': ('pygments.lexers.text', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)),
'DtdLexer': ('pygments.lexers.web', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
'DuelLexer': ('pygments.lexers.web', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
'DylanConsoleLexer': ('pygments.lexers.compiled', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
@@ -242,6 +245,7 @@ LEXERS = {
'OocLexer': ('pygments.lexers.compiled', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
'OpenEdgeLexer': ('pygments.lexers.other', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
+ 'PanLexer': ('pygments.lexers.other', 'Pan', ('pan',), ('*.pan',), ()),
'PawnLexer': ('pygments.lexers.other', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
'Perl6Lexer': ('pygments.lexers.agile', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')),
'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')),
@@ -264,6 +268,7 @@ LEXERS = {
'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')),
'PythonTracebackLexer': ('pygments.lexers.agile', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)),
+ 'QBasicLexer': ('pygments.lexers.qbasic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
'QmlLexer': ('pygments.lexers.web', 'QML', ('qml',), ('*.qml',), ('application/x-qml',)),
'RConsoleLexer': ('pygments.lexers.math', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
'RPMSpecLexer': ('pygments.lexers.other', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
@@ -280,11 +285,13 @@ LEXERS = {
'RdLexer': ('pygments.lexers.math', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
'RebolLexer': ('pygments.lexers.other', 'REBOL', ('rebol',), ('*.r', '*.r3'), ('text/x-rebol',)),
'RedcodeLexer': ('pygments.lexers.other', 'Redcode', ('redcode',), ('*.cw',), ()),
+    'RedLexer': ('pygments.lexers.other', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
'RegeditLexer': ('pygments.lexers.text', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
'RexxLexer': ('pygments.lexers.other', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
'RobotFrameworkLexer': ('pygments.lexers.other', 'RobotFramework', ('robotframework',), ('*.txt', '*.robot'), ('text/x-robotframework',)),
'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
+ 'RslLexer': ('pygments.lexers.other', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')),
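
Note on the registration table: the entries added above are what pygments.lexers.get_lexer_by_name() and get_lexer_for_filename() resolve against. A minimal sanity-check sketch, assuming a Pygments checkout with this patch applied (the aliases and filename patterns are taken directly from the rows above; 'query.cypher' is just an illustrative filename):

# Resolve each newly registered alias and a couple of filename patterns.
from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

for alias in ('chai', 'cypher', 'docker', 'pan', 'qbasic', 'red', 'rsl'):
    print(alias, '->', get_lexer_by_name(alias).name)

print(get_lexer_for_filename('Dockerfile').name)    # 'Docker'
print(get_lexer_for_filename('query.cypher').name)  # 'Cypher'
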
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
index b575f29c..a3e60f59 100644
--- a/pygments/lexers/agile.py
+++ b/pygments/lexers/agile.py
@@ -23,7 +23,8 @@ __all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
'Python3Lexer', 'Python3TracebackLexer', 'RubyLexer',
'RubyConsoleLexer', 'PerlLexer', 'LuaLexer', 'MoonScriptLexer',
'CrocLexer', 'MiniDLexer', 'IoLexer', 'TclLexer', 'FactorLexer',
- 'FancyLexer', 'DgLexer', 'Perl6Lexer', 'HyLexer']
+ 'FancyLexer', 'DgLexer', 'Perl6Lexer', 'HyLexer',
+ 'ChaiscriptLexer']
# b/w compatibility
from pygments.lexers.functional import SchemeLexer
@@ -2461,3 +2462,68 @@ class HyLexer(RegexLexer):
def analyse_text(text):
if '(import ' in text or '(defn ' in text:
return 0.9
+
+
+class ChaiscriptLexer(RegexLexer):
+ """
+ For `ChaiScript <http://chaiscript.com/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'ChaiScript'
+ aliases = ['chai', 'chaiscript']
+ filenames = ['*.chai']
+ mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
+
+    flags = re.DOTALL | re.MULTILINE
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'^\#.*?\n', Comment.Single)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ (r'', Text, '#pop')
+ ],
+ 'badregex': [
+ ('\n', Text, '#pop')
+ ],
+ 'root': [
+ include('commentsandwhitespace'),
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.|'
+             r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'[=+\-*/]', Operator),
+ (r'(for|in|while|do|break|return|continue|if|else|'
+ r'throw|try|catch'
+ r')\b', Keyword, 'slashstartsregex'),
+ (r'(var)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(attr|def|fun)\b', Keyword.Reserved),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'(eval|throw)\b', Name.Builtin),
+ (r'`\S+`', Name.Builtin),
+ (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"', String.Double, 'dqstring'),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ],
+ 'dqstring': [
+            (r'\${[^"}]+?}', String.Interpol),
+ (r'\$', String.Double),
+ (r'\\\\', String.Double),
+ (r'\\"', String.Double),
+ (r'[^\\\\\\"$]+', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ }
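
A quick token-stream check for the new ChaiScript lexer; the snippet below is illustrative ChaiScript, and the sketch assumes a tree containing this patch:

from pygments.lexers.agile import ChaiscriptLexer

code = '''
# hash comment
var greeting = "hello ${name}"   // ${...} goes through the dqstring state
def add(x, y) { return x + y }
'''

# Print the non-whitespace tokens so mis-lexed pieces stand out.
for tokentype, value in ChaiscriptLexer().get_tokens(code):
    if value.strip():
        print(tokentype, repr(value))
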
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index 0754ba02..a281e6ab 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -459,6 +459,11 @@ class VbNetLexer(RegexLexer):
]
}
+ def analyse_text(text):
+        if re.search(r'^\s*(#If|Module|Namespace)', text, re.I | re.M):
+ return 0.5
+
+
class GenericAspxLexer(RegexLexer):
"""
diff --git a/pygments/lexers/graph.py b/pygments/lexers/graph.py
new file mode 100644
index 00000000..fccba5a4
--- /dev/null
+++ b/pygments/lexers/graph.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.graph
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for graph query languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this
+from pygments.token import Keyword, Punctuation, Text, Comment, Operator, Name,\
+ String, Number, Generic, Whitespace
+
+
+__all__ = ['CypherLexer']
+
+
+class CypherLexer(RegexLexer):
+ """
+    For the `Cypher Query Language
+    <http://docs.neo4j.org/chunked/milestone/cypher-query-lang.html>`_.
+
+    Targets the Cypher dialect shipped with Neo4j 2.0.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Cypher'
+ aliases = ['cypher']
+    filenames = ['*.cyp', '*.cypher']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ include('comment'),
+ include('keywords'),
+ include('clauses'),
+ include('relations'),
+ include('strings'),
+ include('whitespace'),
+ include('barewords'),
+ ],
+ 'comment': [
+ (r'^.*//.*\n', Comment.Single),
+ ],
+ 'keywords': [
+ (r'(create|order|match|limit|set|skip|start|return|with|where|'
+ r'delete|foreach|not|by)\b', Keyword),
+ ],
+ 'clauses': [
+ # TODO: many missing ones, see http://docs.neo4j.org/refcard/2.0/
+ (r'(all|any|as|asc|create|create\s+unique|delete|'
+ r'desc|distinct|foreach|in|is\s+null|limit|match|none|'
+ r'order\s+by|return|set|skip|single|start|union|where|with)\b',
+ Keyword),
+ ],
+ 'relations': [
+ (r'(-\[)(.*?)(\]->)', bygroups(Operator, using(this), Operator)),
+ (r'(<-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
+ (r'-->|<--|\[|\]', Operator),
+ (r'<|>|<>|=|<=|=>|\(|\)|\||:|,|;', Punctuation),
+ (r'[.*{}]', Punctuation),
+ ],
+ 'strings': [
+ (r'"(?:\\[tbnrf\'\"\\]|[^\\"])*"', String),
+ (r'`(?:``|[^`])+`', Name.Variable),
+ ],
+ 'whitespace': [
+ (r'\s+', Whitespace),
+ ],
+ 'barewords': [
+ (r'[a-z][a-zA-Z0-9_]*', Name),
+ (r'\d+', Number),
+ ],
+ }
+
+
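
A quick visual check of the new Cypher lexer against a small Neo4j-2.0-style query; a sketch, assuming this patch is applied, and the query text itself is only illustrative:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.graph import CypherLexer

query = '''// collaborators of Alice
MATCH (a:Person {name: "Alice"})-[:KNOWS]->(b)
RETURN b.name
ORDER BY b.name
LIMIT 10;
'''
# Render with ANSI colours so keyword/relation/string classes are easy to eyeball.
print(highlight(query, CypherLexer(), TerminalFormatter()))
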
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
index 261fa304..454f0e42 100644
--- a/pygments/lexers/other.py
+++ b/pygments/lexers/other.py
@@ -37,7 +37,8 @@ __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer',
'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer',
'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer',
'CbmBasicV2Lexer', 'AutoItLexer', 'RexxLexer', 'APLLexer',
- 'LSLLexer', 'AmbientTalkLexer', 'PawnLexer', 'VCTreeStatusLexer']
+ 'LSLLexer', 'AmbientTalkLexer', 'PawnLexer', 'VCTreeStatusLexer',
+ 'RslLexer', 'PanLexer', 'RedLexer']
class LSLLexer(RegexLexer):
@@ -1865,7 +1866,7 @@ class GherkinLexer(RegexLexer):
tokens = {
'comments': [
- (r'#.*$', Comment),
+ (r'^\s*#.*$', Comment),
],
'feature_elements' : [
(step_keywords, Keyword, "step_content_stack"),
@@ -1894,6 +1895,7 @@ class GherkinLexer(RegexLexer):
],
'narrative': [
include('scenario_sections_on_stack'),
+ include('comments'),
(r"(\s|.)", Name.Function),
],
'table_vars': [
@@ -4118,3 +4120,309 @@ class VCTreeStatusLexer(RegexLexer):
(r'.*\n', Text)
]
}
+
+
+class RslLexer(RegexLexer):
+ """
+    `RSL <http://en.wikipedia.org/wiki/RAISE>`_ is the formal specification
+    language used in the RAISE (Rigorous Approach to Industrial Software
+    Engineering) method.
+
+ .. versionadded:: 2.0
+ """
+ name = 'RSL'
+ aliases = ['rsl']
+ filenames = ['*.rsl']
+ mimetypes = ['text/rsl']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root':[
+ (r'\b(Bool|Char|Int|Nat|Real|Text|Unit|abs|all|always|any|as|'
+ r'axiom|card|case|channel|chaos|class|devt_relation|dom|elems|'
+ r'else|elif|end|exists|extend|false|for|hd|hide|if|in|is|inds|'
+ r'initialise|int|inter|isin|len|let|local|ltl_assertion|object|'
+ r'of|out|post|pre|read|real|rng|scheme|skip|stop|swap|then|'
+             r'theory|test_case|tl|transition_system|true|type|union|until|'
+ r'use|value|variable|while|with|write|~isin|-inflist|-infset|'
+ r'-list|-set)\b', Keyword),
+ (r'(variable|value)\b', Keyword.Declaration),
+ (r'--.*?\n', Comment),
+ (r'<:.*?:>', Comment),
+ (r'\{!.*?!\}', Comment),
+ (r'/\*.*?\*/', Comment),
+ (r'^[ \t]*([\w]+)[ \t]*:[^:]', Name.Function),
+ (r'(^[ \t]*)([\w]+)([ \t]*\([\w\s,]*\)[ \t]*)(is|as)',
+ bygroups(Text, Name.Function, Text, Keyword)),
+            (r'\b[A-Z]\w*\b', Keyword.Type),
+            (r'(true|false)\b', Keyword.Constant),
+            (r'".*?"', String),
+            (r'\'.\'', String.Char),
+ (r'(><|->|-m->|/\\|<=|<<=|<\.|\|\||\|\^\||-~->|-~m->|\\/|>=|>>|'
+ r'\.>|\+\+|-\\|<->|=>|:-|~=|\*\*|<<|>>=|\+>|!!|\|=\||#)',
+ Operator),
+ (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'.', Text),
+ ],
+ }
+
+ def analyse_text(text):
+ """
+        Check for the most common text at the beginning of an RSL file.
+ """
+ if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None:
+ return 1.0
+ else:
+ return 0.01
+
+
+class PanLexer(RegexLexer):
+ """
+ Lexer for `pan <http://github.com/quattor/pan/>`_ source files.
+
+    Based on the tcsh lexer.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Pan'
+ aliases = ['pan']
+ filenames = ['*.pan']
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ (r'\(', Keyword, 'paren'),
+ (r'{', Keyword, 'curly'),
+ include('data'),
+ ],
+ 'basic': [
+ (r'\b(if|for|with|else|type|bind|while|valid|final|prefix|unique|'
+ r'object|foreach|include|template|function|variable|structure|'
+ r'extensible|declaration)\s*\b',
+ Keyword),
+ (r'\b(file_contents|format|index|length|match|matches|replace|'
+ r'splice|split|substr|to_lowercase|to_uppercase|debug|error|'
+ r'traceback|deprecated|base64_decode|base64_encode|digest|escape|'
+ r'unescape|append|create|first|nlist|key|length|list|merge|next|'
+ r'prepend|splice|is_boolean|is_defined|is_double|is_list|is_long|'
+ r'is_nlist|is_null|is_number|is_property|is_resource|is_string|'
+ r'to_boolean|to_double|to_long|to_string|clone|delete|exists|'
+ r'path_exists|if_exists|return|value)\s*\b',
+ Name.Builtin),
+ (r'#.*', Comment),
+ (r'\\[\w\W]', String.Escape),
+ (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+ (r'[\[\]{}()=]+', Operator),
+ (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r';', Punctuation),
+ ],
+ 'data': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r'\s+', Text),
+ (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
+ (r'\d+(?= |\Z)', Number),
+ ],
+ 'curly': [
+ (r'}', Keyword, '#pop'),
+ (r':-', Keyword),
+ (r'[a-zA-Z0-9_]+', Name.Variable),
+ (r'[^}:"\'`$]+', Punctuation),
+ (r':', Punctuation),
+ include('root'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ }
+
+class RedLexer(RegexLexer):
+ """
+ A `Red-language <http://www.red-lang.org/>`_ lexer.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Red'
+ aliases = ['red', 'red/system']
+ filenames = ['*.red', '*.reds']
+ mimetypes = ['text/x-red', 'text/x-red-system']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ escape_re = r'(?:\^\([0-9a-fA-F]{1,4}\)*)'
+
+ def word_callback(lexer, match):
+ word = match.group()
+
+ if re.match(".*:$", word):
+ yield match.start(), Generic.Subheading, word
+ elif re.match(
+ r'(if|unless|either|any|all|while|until|loop|repeat|'
+ r'foreach|forall|func|function|does|has|switch|'
+ r'case|reduce|compose|get|set|print|prin|equal\?|'
+ r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|'
+ r'greater-or-equal\?|same\?|not|type\?|stats|'
+ r'bind|union|replace|charset|routine)$', word):
+ yield match.start(), Name.Builtin, word
+ elif re.match(
+ r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|'
+ r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|'
+ r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|'
+ r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|'
+ r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|update|write)$', word):
+ yield match.start(), Name.Function, word
+ elif re.match(
+ r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|none|crlf|dot|null-byte)$', word):
+ yield match.start(), Name.Builtin.Pseudo, word
+ elif re.match(
+ r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|#switch|#default|#get-definition)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match(
+ r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|raise-error|'
+ r'return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|quote|forever)$', word):
+ yield match.start(), Name.Exception, word
+ elif re.match(
+ r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|any-struct\?|'
+ r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|none\?|'
+ r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|word\?|any-series\?)$', word):
+ yield match.start(), Keyword, word
+ elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match("to-.*", word):
+ yield match.start(), Keyword, word
+ elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|===|<>|<|>|<=|>=|<<|>>|<<<|>>>|%|-\*\*)$', word):
+ yield match.start(), Operator, word
+ elif re.match(".*\!$", word):
+ yield match.start(), Keyword.Type, word
+ elif re.match("'.*", word):
+ yield match.start(), Name.Variable.Instance, word # lit-word
+ elif re.match("#.*", word):
+ yield match.start(), Name.Label, word # issue
+ elif re.match("%.*", word):
+ yield match.start(), Name.Decorator, word # file
+ elif re.match(":.*", word):
+ yield match.start(), Generic.Subheading, word # get-word
+ else:
+ yield match.start(), Name.Variable, word
+
+ tokens = {
+ 'root': [
+ (r'[^R]+', Comment),
+ (r'Red/System\s+\[', Generic.Strong, 'script'),
+ (r'Red\s+\[', Generic.Strong, 'script'),
+ (r'R', Comment)
+ ],
+ 'script': [
+ (r'\s+', Text),
+ (r'#"', String.Char, 'char'),
+ (r'#{[0-9a-fA-F\s]*}', Number.Hex),
+ (r'2#{', Number.Hex, 'bin2'),
+ (r'64#{[0-9a-zA-Z+/=\s]*}', Number.Hex),
+ (r'([0-9a-fA-F]+)(h)((\s)|(?=[\[\]{}""\(\)]))', bygroups(Number.Hex, Name.Variable, Whitespace)),
+ (r'"', String, 'string'),
+ (r'{', String, 'string2'),
+ (r';#+.*\n', Comment.Special),
+ (r';\*+.*\n', Comment.Preproc),
+ (r';.*\n', Comment),
+ (r'%"', Name.Decorator, 'stringFile'),
+ (r'%[^(\^{^")\s\[\]]+', Name.Decorator),
+ (r'[+-]?([a-zA-Z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
+ (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
+ (r'\d+[\-\/][0-9a-zA-Z]+[\-\/]\d+(\/\d+\:\d+((\:\d+)?'
+ r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
+ (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
+ (r'\d+[xX]\d+', Keyword.Constant), # pair
+ (r'[+-]?\d+(\'\d+)?([\.,]\d*)?[eE][+-]?\d+', Number.Float),
+ (r'[+-]?\d+(\'\d+)?[\.,]\d*', Number.Float),
+ (r'[+-]?\d+(\'\d+)?', Number),
+ (r'[\[\]\(\)]', Generic.Strong),
+ (r'[a-zA-Z]+[^(\^{"\s:)]*://[^(\^{"\s)]*', Name.Decorator), # url
+ (r'mailto:[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # url
+ (r'[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # email
+ (r'comment\s', Comment, 'comment'),
+ (r'/[^(\^{^")\s/[\]]*', Name.Attribute),
+ (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+ (r'<[a-zA-Z0-9:._-]*>', Name.Tag),
+ (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+ (r'([^(\^{^")\s]+)', Text),
+ ],
+ 'string': [
+ (r'[^(\^")]+', String),
+ (escape_re, String.Escape),
+ (r'[\(|\)]+', String),
+ (r'\^.', String.Escape),
+ (r'"', String, '#pop'),
+ ],
+ 'string2': [
+ (r'[^(\^{^})]+', String),
+ (escape_re, String.Escape),
+ (r'[\(|\)]+', String),
+ (r'\^.', String.Escape),
+ (r'{', String, '#push'),
+ (r'}', String, '#pop'),
+ ],
+ 'stringFile': [
+ (r'[^(\^")]+', Name.Decorator),
+ (escape_re, Name.Decorator),
+ (r'\^.', Name.Decorator),
+ (r'"', Name.Decorator, '#pop'),
+ ],
+ 'char': [
+ (escape_re + '"', String.Char, '#pop'),
+ (r'\^."', String.Char, '#pop'),
+ (r'."', String.Char, '#pop'),
+ ],
+ 'tag': [
+ (escape_re, Name.Tag),
+ (r'"', Name.Tag, 'tagString'),
+ (r'[^(<>\r\n")]+', Name.Tag),
+ (r'>', Name.Tag, '#pop'),
+ ],
+ 'tagString': [
+ (r'[^(\^")]+', Name.Tag),
+ (escape_re, Name.Tag),
+ (r'[\(|\)]+', Name.Tag),
+ (r'\^.', Name.Tag),
+ (r'"', Name.Tag, '#pop'),
+ ],
+ 'tuple': [
+ (r'(\d+\.)+', Keyword.Constant),
+ (r'\d+', Keyword.Constant, '#pop'),
+ ],
+ 'bin2': [
+ (r'\s+', Number.Hex),
+ (r'([0-1]\s*){8}', Number.Hex),
+ (r'}', Number.Hex, '#pop'),
+ ],
+ 'comment': [
+ (r'"', Comment, 'commentString1'),
+ (r'{', Comment, 'commentString2'),
+ (r'\[', Comment, 'commentBlock'),
+ (r'[^(\s{\"\[]+', Comment, '#pop'),
+ ],
+ 'commentString1': [
+ (r'[^(\^")]+', Comment),
+ (escape_re, Comment),
+ (r'[\(|\)]+', Comment),
+ (r'\^.', Comment),
+ (r'"', Comment, '#pop'),
+ ],
+ 'commentString2': [
+ (r'[^(\^{^})]+', Comment),
+ (escape_re, Comment),
+ (r'[\(|\)]+', Comment),
+ (r'\^.', Comment),
+ (r'{', Comment, '#push'),
+ (r'}', Comment, '#pop'),
+ ],
+ 'commentBlock': [
+ (r'\[', Comment, '#push'),
+ (r'\]', Comment, '#pop'),
+ (r'[^(\[\])]+', Comment),
+ ],
+ }
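
One design point in RedLexer worth keeping in mind: the 'root' state emits everything before a "Red [" or "Red/System [" header as Comment and only then enters 'script', so headerless snippets come out almost entirely as Comment. A minimal sketch, assuming this patch is applied; the Red program is illustrative:

from pygments.lexers.other import RedLexer

src = '''Red [Title: "demo"]
print "hello"
either 1 < 2 [print "yes"] [print "no"]
'''
# Everything after the "Red [" header should be real tokens, not Comment.
for tokentype, value in RedLexer().get_tokens(src):
    if value.strip():
        print(tokentype, repr(value))
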
diff --git a/pygments/lexers/qbasic.py b/pygments/lexers/qbasic.py
new file mode 100644
index 00000000..80b80f9f
--- /dev/null
+++ b/pygments/lexers/qbasic.py
@@ -0,0 +1,157 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.qbasic
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Simple lexer for Microsoft QBasic source code.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Text, Name, Comment, String, Keyword, Punctuation, \
+ Number, Operator
+
+__all__ = ['QBasicLexer']
+
+
+class QBasicLexer(RegexLexer):
+ """
+    For `QBasic <http://en.wikipedia.org/wiki/QBasic>`_ source code.
+
+    .. versionadded:: 2.0
+ """
+
+ name = 'QBasic'
+ aliases = ['qbasic', 'basic']
+ filenames = ['*.BAS', '*.bas']
+ mimetypes = ['text/basic']
+
+ declarations = ['DATA', 'LET']
+
+ functions = [
+ 'ABS', 'ASC', 'ATN', 'CDBL', 'CHR$', 'CINT', 'CLNG',
+ 'COMMAND$', 'COS', 'CSNG', 'CSRLIN', 'CVD', 'CVDMBF', 'CVI',
+ 'CVL', 'CVS', 'CVSMBF', 'DATE$', 'ENVIRON$', 'EOF', 'ERDEV',
+ 'ERDEV$', 'ERL', 'ERR', 'EXP', 'FILEATTR', 'FIX', 'FRE',
+ 'FREEFILE', 'HEX$', 'INKEY$', 'INP', 'INPUT$', 'INSTR', 'INT',
+ 'IOCTL$', 'LBOUND', 'LCASE$', 'LEFT$', 'LEN', 'LOC', 'LOF',
+ 'LOG', 'LPOS', 'LTRIM$', 'MID$', 'MKD$', 'MKDMBF$', 'MKI$',
+ 'MKL$', 'MKS$', 'MKSMBF$', 'OCT$', 'PEEK', 'PEN', 'PLAY',
+ 'PMAP', 'POINT', 'POS', 'RIGHT$', 'RND', 'RTRIM$', 'SADD',
+ 'SCREEN', 'SEEK', 'SETMEM', 'SGN', 'SIN', 'SPACE$', 'SPC',
+ 'SQR', 'STICK', 'STR$', 'STRIG', 'STRING$', 'TAB', 'TAN',
+ 'TIME$', 'TIMER', 'UBOUND', 'UCASE$', 'VAL', 'VARPTR',
+ 'VARPTR$', 'VARSEG'
+ ]
+
+ metacommands = ['$DYNAMIC', '$INCLUDE', '$STATIC']
+
+ operators = ['AND', 'EQV', 'IMP', 'NOT', 'OR', 'XOR']
+
+ statements = [
+ 'BEEP', 'BLOAD', 'BSAVE', 'CALL', 'CALL ABSOLUTE',
+ 'CALL INTERRUPT', 'CALLS', 'CHAIN', 'CHDIR', 'CIRCLE', 'CLEAR',
+ 'CLOSE', 'CLS', 'COLOR', 'COM', 'COMMON', 'CONST', 'DATA',
+ 'DATE$', 'DECLARE', 'DEF FN', 'DEF SEG', 'DEFDBL', 'DEFINT',
+ 'DEFLNG', 'DEFSNG', 'DEFSTR', 'DEF', 'DIM', 'DO', 'LOOP',
+ 'DRAW', 'END', 'ENVIRON', 'ERASE', 'ERROR', 'EXIT', 'FIELD',
+ 'FILES', 'FOR', 'NEXT', 'FUNCTION', 'GET', 'GOSUB', 'GOTO',
+ 'IF', 'THEN', 'INPUT', 'INPUT #', 'IOCTL', 'KEY', 'KEY',
+ 'KILL', 'LET', 'LINE', 'LINE INPUT', 'LINE INPUT #', 'LOCATE',
+ 'LOCK', 'UNLOCK', 'LPRINT', 'LSET', 'MID$', 'MKDIR', 'NAME',
+ 'ON COM', 'ON ERROR', 'ON KEY', 'ON PEN', 'ON PLAY',
+ 'ON STRIG', 'ON TIMER', 'ON UEVENT', 'ON', 'OPEN', 'OPEN COM',
+ 'OPTION BASE', 'OUT', 'PAINT', 'PALETTE', 'PCOPY', 'PEN',
+ 'PLAY', 'POKE', 'PRESET', 'PRINT', 'PRINT #', 'PRINT USING',
+ 'PSET', 'PUT', 'PUT', 'RANDOMIZE', 'READ', 'REDIM', 'REM',
+ 'RESET', 'RESTORE', 'RESUME', 'RETURN', 'RMDIR', 'RSET', 'RUN',
+ 'SCREEN', 'SEEK', 'SELECT CASE', 'SHARED', 'SHELL', 'SLEEP',
+ 'SOUND', 'STATIC', 'STOP', 'STRIG', 'SUB', 'SWAP', 'SYSTEM',
+ 'TIME$', 'TIMER', 'TROFF', 'TRON', 'TYPE', 'UEVENT', 'UNLOCK',
+ 'VIEW', 'WAIT', 'WHILE', 'WEND', 'WIDTH', 'WINDOW', 'WRITE'
+ ]
+
+ keywords = [
+ 'ACCESS', 'ALIAS', 'ANY', 'APPEND', 'AS', 'BASE', 'BINARY',
+ 'BYVAL', 'CASE', 'CDECL', 'DOUBLE', 'ELSE', 'ELSEIF', 'ENDIF',
+ 'INTEGER', 'IS', 'LIST', 'LOCAL', 'LONG', 'LOOP', 'MOD',
+ 'NEXT', 'OFF', 'ON', 'OUTPUT', 'RANDOM', 'SIGNAL', 'SINGLE',
+ 'STEP', 'STRING', 'THEN', 'TO', 'UNTIL', 'USING', 'WEND'
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\n+', Text),
+ (r'\s+', Text.Whitespace),
+ (r'^(\s*)(\d*)(\s*)(REM .*)$',
+ bygroups(Text.Whitespace, Name.Label, Text.Whitespace,
+ Comment.Single)),
+ (r'^(\s*)(\d+)(\s*)',
+ bygroups(Text.Whitespace, Name.Label, Text.Whitespace)),
+ (r'(?=[\s]*)(\w+)(?=[\s]*=)', Name.Variable.Global),
+ (r'(?=[^"]*)\'.*$', Comment.Single),
+ (r'"[^\n\"]*"', String.Double),
+ (r'(END)(\s+)(FUNCTION|IF|SELECT|SUB)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Keyword.Reserved)),
+ (r'(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
+ Text.Whitespace, Name)),
+ (r'(DIM)(\s+)(SHARED)(\s+)([^\s\(]+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
+ Text.Whitespace, Name.Variable.Global)),
+ (r'(DIM)(\s+)([^\s\(]+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable.Global)),
+ (r'^(\s*)([a-zA-Z_]+)(\s*)(\=)',
+ bygroups(Text.Whitespace, Name.Variable.Global, Text.Whitespace,
+ Operator)),
+ (r'(GOTO|GOSUB)(\s+)(\w+\:?)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
+ (r'(SUB)(\s+)(\w+\:?)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
+ include('declarations'),
+ include('functions'),
+ include('metacommands'),
+ include('operators'),
+ include('statements'),
+ include('keywords'),
+ (r'[a-zA-Z_]\w*[\$@#&!]', Name.Variable.Global),
+ (r'[a-zA-Z_]\w*\:', Name.Label),
+ (r'\-?\d*\.\d+[@|#]?', Number.Float),
+ (r'\-?\d+[@|#]', Number.Float),
+ (r'\-?\d+#?', Number.Integer.Long),
+ (r'\-?\d+#?', Number.Integer),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]', Operator),
+ (r'[\[\]{}(),;]', Punctuation),
+ (r'[\w]+', Name.Variable.Global),
+ ],
+ # can't use regular \b because of X$()
+ 'declarations': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, declarations)),
+ Keyword.Declaration),
+ ],
+ 'functions': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)),
+ Keyword.Reserved),
+ ],
+ 'metacommands': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, metacommands)),
+ Keyword.Constant),
+ ],
+ 'operators': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, operators)), Operator.Word),
+ ],
+ 'statements': [
+ (r'\b(%s)\b' % '|'.join(map(re.escape, statements)),
+ Keyword.Reserved),
+ ],
+ 'keywords': [
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ ],
+ }
+
+ def analyse_text(text):
+ return 0.2
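
On the "can't use regular \b because of X$()" comment above: '$' is not a word character, so a trailing \b fails after names like CHR$ or LEFT$ whenever the next character is '(' or whitespace (non-word on both sides). The (?=\(|\b) lookahead accepts either an opening paren or an ordinary word boundary instead. A small standalone illustration of the same pattern-building, outside the lexer:

import re

# Same construction as the 'functions' rule above, on a tiny keyword list.
functions = ['CHR$', 'LEFT$', 'LEN']
pattern = re.compile(r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)))

print(pattern.findall('PRINT CHR$(65); LEFT$(A$, 2); LEN(A$)'))
# ['CHR$', 'LEFT$', 'LEN']
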
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
index c07ff6ee..f809dae9 100644
--- a/pygments/lexers/shell.py
+++ b/pygments/lexers/shell.py
@@ -296,17 +296,18 @@ class TcshLexer(RegexLexer):
r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
r'ver|wait|warp|watchlog|where|which)\s*\b',
Name.Builtin),
- (r'#.*\n', Comment),
+ (r'#.*', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]+', Operator),
(r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r';', Punctuation),
],
'data': [
(r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
(r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r'\s+', Text),
- (r'[^=\s\[\]{}()$"\'`\\]+', Text),
+ (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
(r'\d+(?= |\Z)', Number),
(r'\$#?(\w+|.)', Name.Variable),
],
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index 6c2a4119..fc1cc6a6 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -26,7 +26,7 @@ __all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer
'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer',
'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer', 'HttpLexer',
'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer', 'EbnfLexer',
- 'TodotxtLexer']
+ 'TodotxtLexer', 'DockerLexer']
class IniLexer(RegexLexer):
@@ -2017,3 +2017,30 @@ class TodotxtLexer(RegexLexer):
('\s+', IncompleteTaskText),
],
}
+
+
+class DockerLexer(RegexLexer):
+ """
+ Lexer for `Docker <http://docker.io>`_ configuration files.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Docker'
+ aliases = ['docker', 'dockerfile']
+ filenames = ['Dockerfile', '*.docker']
+ mimetypes = ['text/x-dockerfile-config']
+
+ _keywords = (r'(?:FROM|MAINTAINER|RUN|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|'
+ r'VOLUME|WORKDIR)')
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,),
+             bygroups(Keyword, Whitespace, Keyword)),
+ (_keywords + r'\b', Keyword),
+ (r'#.*', Comment),
+ (r'.+', using(BashLexer)),
+ ],
+ }
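
End-to-end check of the new Dockerfile support, combining the filename pattern registered in _mapping.py with HTML output; a sketch, assuming this patch is applied, with purely illustrative Dockerfile content:

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_for_filename

dockerfile = '''FROM ubuntu:12.04
MAINTAINER nobody@example.com
RUN apt-get update && apt-get install -y python
ONBUILD ADD . /app
EXPOSE 8000
CMD ["python", "-m", "SimpleHTTPServer"]
'''
# 'Dockerfile' resolves via the filenames entry; non-keyword lines fall through to BashLexer.
lexer = get_lexer_for_filename('Dockerfile')
print(highlight(dockerfile, lexer, HtmlFormatter()))
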