-rw-r--r--  AUTHORS                              |    1
-rw-r--r--  pygments/lexers/_mapping.py          |    7
-rw-r--r--  pygments/lexers/agile.py             |   68
-rw-r--r--  pygments/lexers/dotnet.py            |    5
-rw-r--r--  pygments/lexers/graph.py             |   81
-rw-r--r--  pygments/lexers/other.py             |  312
-rw-r--r--  pygments/lexers/qbasic.py            |  157
-rw-r--r--  pygments/lexers/shell.py             |    5
-rw-r--r--  pygments/lexers/text.py              |   29
-rw-r--r--  tests/examplefiles/example.chai      |    6
-rw-r--r--  tests/examplefiles/example.feature   |   16
-rw-r--r--  tests/examplefiles/example.red       |  257
-rw-r--r--  tests/examplefiles/example.reds      |  124
-rw-r--r--  tests/examplefiles/qbasic_example    |    2
-rw-r--r--  tests/examplefiles/test.cyp          |  123
-rw-r--r--  tests/examplefiles/test.pan          |   54
-rw-r--r--  tests/examplefiles/test.rsl          |  111
-rw-r--r--  tests/test_qbasiclexer.py            |   43
18 files changed, 1395 insertions, 6 deletions
diff --git a/AUTHORS b/AUTHORS
index e0e19fcc..9ad64a43 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -157,6 +157,7 @@ Other contributors, listed alphabetically, are:
* Pepijn de Vos -- HTML formatter CTags support
* Whitney Young -- ObjectiveC lexer
* Matthias Vallentin -- Bro lexer
+* Linh Vu Hong -- RSL lexer
* Nathan Weizenbaum -- Haml and Sass lexers
* Dietmar Winkler -- Modelica lexer
* Nils Winter -- Smalltalk lexer
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index d9311558..c77ff1dc 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -60,6 +60,7 @@ LEXERS = {
'CbmBasicV2Lexer': ('pygments.lexers.other', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
'Cfengine3Lexer': ('pygments.lexers.other', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
+ 'ChaiscriptLexer': ('pygments.lexers.agile', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
'ChapelLexer': ('pygments.lexers.compiled', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
@@ -87,6 +88,7 @@ LEXERS = {
'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
'CudaLexer': ('pygments.lexers.compiled', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
+ 'CypherLexer': ('pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()),
'CythonLexer': ('pygments.lexers.compiled', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
'DLexer': ('pygments.lexers.compiled', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
@@ -97,6 +99,7 @@ LEXERS = {
'DgLexer': ('pygments.lexers.agile', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
+ 'DockerLexer': ('pygments.lexers.text', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)),
'DtdLexer': ('pygments.lexers.web', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
'DuelLexer': ('pygments.lexers.web', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
'DylanConsoleLexer': ('pygments.lexers.compiled', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
@@ -242,6 +245,7 @@ LEXERS = {
'OocLexer': ('pygments.lexers.compiled', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
'OpenEdgeLexer': ('pygments.lexers.other', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
+ 'PanLexer': ('pygments.lexers.other', 'Pan', ('pan',), ('*.pan',), ()),
'PawnLexer': ('pygments.lexers.other', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
'Perl6Lexer': ('pygments.lexers.agile', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')),
'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')),
@@ -264,6 +268,7 @@ LEXERS = {
'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')),
'PythonTracebackLexer': ('pygments.lexers.agile', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)),
+ 'QBasicLexer': ('pygments.lexers.qbasic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
'QmlLexer': ('pygments.lexers.web', 'QML', ('qml',), ('*.qml',), ('application/x-qml',)),
'RConsoleLexer': ('pygments.lexers.math', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
'RPMSpecLexer': ('pygments.lexers.other', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
@@ -280,11 +285,13 @@ LEXERS = {
'RdLexer': ('pygments.lexers.math', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
'RebolLexer': ('pygments.lexers.other', 'REBOL', ('rebol',), ('*.r', '*.r3'), ('text/x-rebol',)),
'RedcodeLexer': ('pygments.lexers.other', 'Redcode', ('redcode',), ('*.cw',), ()),
+ 'RedLexer': ('pygments.lexers.other', 'Red', ('red','red/system'), ('*.red', '*.reds'), ('text/x-red','text/x-red-system',)),
'RegeditLexer': ('pygments.lexers.text', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
'RexxLexer': ('pygments.lexers.other', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
'RobotFrameworkLexer': ('pygments.lexers.other', 'RobotFramework', ('robotframework',), ('*.txt', '*.robot'), ('text/x-robotframework',)),
'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
+ 'RslLexer': ('pygments.lexers.other', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')),
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
index b575f29c..a3e60f59 100644
--- a/pygments/lexers/agile.py
+++ b/pygments/lexers/agile.py
@@ -23,7 +23,8 @@ __all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
'Python3Lexer', 'Python3TracebackLexer', 'RubyLexer',
'RubyConsoleLexer', 'PerlLexer', 'LuaLexer', 'MoonScriptLexer',
'CrocLexer', 'MiniDLexer', 'IoLexer', 'TclLexer', 'FactorLexer',
- 'FancyLexer', 'DgLexer', 'Perl6Lexer', 'HyLexer']
+ 'FancyLexer', 'DgLexer', 'Perl6Lexer', 'HyLexer',
+ 'ChaiscriptLexer']
# b/w compatibility
from pygments.lexers.functional import SchemeLexer
@@ -2461,3 +2462,68 @@ class HyLexer(RegexLexer):
def analyse_text(text):
if '(import ' in text or '(defn ' in text:
return 0.9
+
+
+class ChaiscriptLexer(RegexLexer):
+ """
+ For `ChaiScript <http://chaiscript.com/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'ChaiScript'
+ aliases = ['chai', 'chaiscript']
+ filenames = ['*.chai']
+ mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'^\#.*?\n', Comment.Single)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ (r'', Text, '#pop')
+ ],
+ 'badregex': [
+ ('\n', Text, '#pop')
+ ],
+ 'root': [
+ include('commentsandwhitespace'),
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.|'
+             r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'[=+\-*/]', Operator),
+ (r'(for|in|while|do|break|return|continue|if|else|'
+ r'throw|try|catch'
+ r')\b', Keyword, 'slashstartsregex'),
+ (r'(var)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(attr|def|fun)\b', Keyword.Reserved),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'(eval|throw)\b', Name.Builtin),
+ (r'`\S+`', Name.Builtin),
+ (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"', String.Double, 'dqstring'),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ],
+ 'dqstring': [
+            (r'\${[^"}]+?}', String.Interpol),
+ (r'\$', String.Double),
+ (r'\\\\', String.Double),
+ (r'\\"', String.Double),
+ (r'[^\\\\\\"$]+', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ }
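
A quick way to exercise the new ChaiScript lexer by hand (illustrative sketch only; the snippet is taken from the example.chai file added below, and only stock Pygments API calls are used):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

# 'chai' resolves to ChaiscriptLexer through the _mapping.py entry added above
code = 'var f = fun(x) { x + 2; }\n// comment\nputs(someFunc(2 + 2 - 1 * 5 / 4));\n'
print(highlight(code, get_lexer_by_name('chai'), TerminalFormatter()))
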
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index 0754ba02..a281e6ab 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -459,6 +459,11 @@ class VbNetLexer(RegexLexer):
]
}
+ def analyse_text(text):
+        if re.search(r'^\s*#If', text, re.I) or re.search(r'^\s*(Module|Namespace)', text, re.I):
+ return 0.5
+
+
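
For context (not part of the patch): analyse_text scores feed pygments.lexers.guess_lexer, so with the corrected call above a Module/Namespace header can contribute its 0.5. A hedged sketch, using an invented VB.NET fragment:

from pygments.lexers import guess_lexer

vb_src = 'Module Hello\n    Sub Main()\n        Console.WriteLine("hi")\n    End Sub\nEnd Module\n'
# With the corrected analyse_text above, the VB.NET lexer should be a strong candidate here
print(guess_lexer(vb_src).name)
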
class GenericAspxLexer(RegexLexer):
"""
diff --git a/pygments/lexers/graph.py b/pygments/lexers/graph.py
new file mode 100644
index 00000000..fccba5a4
--- /dev/null
+++ b/pygments/lexers/graph.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.graph
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for graph query languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this
+from pygments.token import Keyword, Punctuation, Text, Comment, Operator, Name,\
+ String, Number, Generic, Whitespace
+
+
+__all__ = ['CypherLexer']
+
+
+class CypherLexer(RegexLexer):
+ """
+    For the `Cypher Query Language
+    <http://docs.neo4j.org/chunked/milestone/cypher-query-lang.html>`_.
+
+    Targets the Cypher version shipped with Neo4j 2.0.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Cypher'
+ aliases = ['cypher']
+    filenames = ['*.cyp', '*.cypher']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ include('comment'),
+ include('keywords'),
+ include('clauses'),
+ include('relations'),
+ include('strings'),
+ include('whitespace'),
+ include('barewords'),
+ ],
+ 'comment': [
+ (r'^.*//.*\n', Comment.Single),
+ ],
+ 'keywords': [
+ (r'(create|order|match|limit|set|skip|start|return|with|where|'
+ r'delete|foreach|not|by)\b', Keyword),
+ ],
+ 'clauses': [
+ # TODO: many missing ones, see http://docs.neo4j.org/refcard/2.0/
+ (r'(all|any|as|asc|create|create\s+unique|delete|'
+ r'desc|distinct|foreach|in|is\s+null|limit|match|none|'
+ r'order\s+by|return|set|skip|single|start|union|where|with)\b',
+ Keyword),
+ ],
+ 'relations': [
+ (r'(-\[)(.*?)(\]->)', bygroups(Operator, using(this), Operator)),
+ (r'(<-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
+ (r'-->|<--|\[|\]', Operator),
+ (r'<|>|<>|=|<=|=>|\(|\)|\||:|,|;', Punctuation),
+ (r'[.*{}]', Punctuation),
+ ],
+ 'strings': [
+ (r'"(?:\\[tbnrf\'\"\\]|[^\\"])*"', String),
+ (r'`(?:``|[^`])+`', Name.Variable),
+ ],
+ 'whitespace': [
+ (r'\s+', Whitespace),
+ ],
+ 'barewords': [
+ (r'[a-z][a-zA-Z0-9_]*', Name),
+ (r'\d+', Number),
+ ],
+ }
+
+
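
A minimal smoke test for the new Cypher lexer (illustrative only; the query comes from the test.cyp example file added later in this commit):

from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('cypher')
query = 'MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d)\nRETURN a.name, m.title, d.name;\n'
# Dump the token stream to eyeball keyword/relation/identifier classification
for ttype, value in lexer.get_tokens(query):
    print(ttype, repr(value))
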
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
index 261fa304..454f0e42 100644
--- a/pygments/lexers/other.py
+++ b/pygments/lexers/other.py
@@ -37,7 +37,8 @@ __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer',
'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer',
'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer',
'CbmBasicV2Lexer', 'AutoItLexer', 'RexxLexer', 'APLLexer',
- 'LSLLexer', 'AmbientTalkLexer', 'PawnLexer', 'VCTreeStatusLexer']
+ 'LSLLexer', 'AmbientTalkLexer', 'PawnLexer', 'VCTreeStatusLexer',
+ 'RslLexer', 'PanLexer', 'RedLexer']
class LSLLexer(RegexLexer):
@@ -1865,7 +1866,7 @@ class GherkinLexer(RegexLexer):
tokens = {
'comments': [
- (r'#.*$', Comment),
+ (r'^\s*#.*$', Comment),
],
'feature_elements' : [
(step_keywords, Keyword, "step_content_stack"),
@@ -1894,6 +1895,7 @@ class GherkinLexer(RegexLexer):
],
'narrative': [
include('scenario_sections_on_stack'),
+ include('comments'),
(r"(\s|.)", Name.Function),
],
'table_vars': [
@@ -4118,3 +4120,309 @@ class VCTreeStatusLexer(RegexLexer):
(r'.*\n', Text)
]
}
+
+
+class RslLexer(RegexLexer):
+ """
+    `RSL <http://en.wikipedia.org/wiki/RAISE>`_ is the formal specification
+    language used in the RAISE (Rigorous Approach to Industrial Software
+    Engineering) method.
+
+ .. versionadded:: 2.0
+ """
+ name = 'RSL'
+ aliases = ['rsl']
+ filenames = ['*.rsl']
+ mimetypes = ['text/rsl']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+        'root': [
+        (r'\b(Bool|Char|Int|Nat|Real|Text|Unit|abs|all|always|any|as|'
+         r'axiom|card|case|channel|chaos|class|devt_relation|dom|elems|'
+         r'else|elif|end|exists|extend|false|for|hd|hide|if|in|is|inds|'
+         r'initialise|int|inter|isin|len|let|local|ltl_assertion|object|'
+         r'of|out|post|pre|read|real|rng|scheme|skip|stop|swap|then|'
+         r'theory|test_case|tl|transition_system|true|type|union|until|'
+         r'use|value|variable|while|with|write|~isin|-inflist|-infset|'
+         r'-list|-set)\b', Keyword),
+ (r'(variable|value)\b', Keyword.Declaration),
+ (r'--.*?\n', Comment),
+ (r'<:.*?:>', Comment),
+ (r'\{!.*?!\}', Comment),
+ (r'/\*.*?\*/', Comment),
+ (r'^[ \t]*([\w]+)[ \t]*:[^:]', Name.Function),
+ (r'(^[ \t]*)([\w]+)([ \t]*\([\w\s,]*\)[ \t]*)(is|as)',
+ bygroups(Text, Name.Function, Text, Keyword)),
+        (r'\b[A-Z]\w*\b', Keyword.Type),
+        (r'(true|false)\b', Keyword.Constant),
+        (r'".*"', String),
+        (r'\'.\'', String.Char),
+ (r'(><|->|-m->|/\\|<=|<<=|<\.|\|\||\|\^\||-~->|-~m->|\\/|>=|>>|'
+ r'\.>|\+\+|-\\|<->|=>|:-|~=|\*\*|<<|>>=|\+>|!!|\|=\||#)',
+ Operator),
+ (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'.', Text),
+ ],
+ }
+
+ def analyse_text(text):
+ """
+ Check for the most common text in the beginning of a RSL file.
+ """
+ if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None:
+ return 1.0
+ else:
+ return 0.01
+
+
+class PanLexer(RegexLexer):
+ """
+    Lexer for `pan <http://github.com/quattor/pan/>`_ source files.
+
+    Based on the tcsh lexer.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Pan'
+ aliases = ['pan']
+ filenames = ['*.pan']
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ (r'\(', Keyword, 'paren'),
+ (r'{', Keyword, 'curly'),
+ include('data'),
+ ],
+ 'basic': [
+ (r'\b(if|for|with|else|type|bind|while|valid|final|prefix|unique|'
+ r'object|foreach|include|template|function|variable|structure|'
+ r'extensible|declaration)\s*\b',
+ Keyword),
+ (r'\b(file_contents|format|index|length|match|matches|replace|'
+ r'splice|split|substr|to_lowercase|to_uppercase|debug|error|'
+ r'traceback|deprecated|base64_decode|base64_encode|digest|escape|'
+ r'unescape|append|create|first|nlist|key|length|list|merge|next|'
+ r'prepend|splice|is_boolean|is_defined|is_double|is_list|is_long|'
+ r'is_nlist|is_null|is_number|is_property|is_resource|is_string|'
+ r'to_boolean|to_double|to_long|to_string|clone|delete|exists|'
+ r'path_exists|if_exists|return|value)\s*\b',
+ Name.Builtin),
+ (r'#.*', Comment),
+ (r'\\[\w\W]', String.Escape),
+ (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+ (r'[\[\]{}()=]+', Operator),
+ (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r';', Punctuation),
+ ],
+ 'data': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r'\s+', Text),
+ (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
+ (r'\d+(?= |\Z)', Number),
+ ],
+ 'curly': [
+ (r'}', Keyword, '#pop'),
+ (r':-', Keyword),
+ (r'[a-zA-Z0-9_]+', Name.Variable),
+ (r'[^}:"\'`$]+', Punctuation),
+ (r':', Punctuation),
+ include('root'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ }
+
+class RedLexer(RegexLexer):
+ """
+ A `Red-language <http://www.red-lang.org/>`_ lexer.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Red'
+ aliases = ['red', 'red/system']
+ filenames = ['*.red', '*.reds']
+ mimetypes = ['text/x-red', 'text/x-red-system']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ escape_re = r'(?:\^\([0-9a-fA-F]{1,4}\)*)'
+
+ def word_callback(lexer, match):
+ word = match.group()
+
+ if re.match(".*:$", word):
+ yield match.start(), Generic.Subheading, word
+ elif re.match(
+ r'(if|unless|either|any|all|while|until|loop|repeat|'
+ r'foreach|forall|func|function|does|has|switch|'
+ r'case|reduce|compose|get|set|print|prin|equal\?|'
+ r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|'
+ r'greater-or-equal\?|same\?|not|type\?|stats|'
+ r'bind|union|replace|charset|routine)$', word):
+ yield match.start(), Name.Builtin, word
+ elif re.match(
+ r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|'
+ r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|'
+ r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|'
+ r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|'
+ r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|update|write)$', word):
+ yield match.start(), Name.Function, word
+ elif re.match(
+ r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|none|crlf|dot|null-byte)$', word):
+ yield match.start(), Name.Builtin.Pseudo, word
+ elif re.match(
+ r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|#switch|#default|#get-definition)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match(
+ r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|raise-error|'
+ r'return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|quote|forever)$', word):
+ yield match.start(), Name.Exception, word
+ elif re.match(
+ r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|any-struct\?|'
+ r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|none\?|'
+ r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|word\?|any-series\?)$', word):
+ yield match.start(), Keyword, word
+ elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match("to-.*", word):
+ yield match.start(), Keyword, word
+        elif re.match(r'(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|===|<>|<|>|<=|>=|<<|>>|<<<|>>>|%|-\*\*)$', word):
+            yield match.start(), Operator, word
+        elif re.match(r'.*!$', word):
+            yield match.start(), Keyword.Type, word
+ elif re.match("'.*", word):
+ yield match.start(), Name.Variable.Instance, word # lit-word
+ elif re.match("#.*", word):
+ yield match.start(), Name.Label, word # issue
+ elif re.match("%.*", word):
+ yield match.start(), Name.Decorator, word # file
+ elif re.match(":.*", word):
+ yield match.start(), Generic.Subheading, word # get-word
+ else:
+ yield match.start(), Name.Variable, word
+
+ tokens = {
+ 'root': [
+ (r'[^R]+', Comment),
+ (r'Red/System\s+\[', Generic.Strong, 'script'),
+ (r'Red\s+\[', Generic.Strong, 'script'),
+ (r'R', Comment)
+ ],
+ 'script': [
+ (r'\s+', Text),
+ (r'#"', String.Char, 'char'),
+ (r'#{[0-9a-fA-F\s]*}', Number.Hex),
+ (r'2#{', Number.Hex, 'bin2'),
+ (r'64#{[0-9a-zA-Z+/=\s]*}', Number.Hex),
+ (r'([0-9a-fA-F]+)(h)((\s)|(?=[\[\]{}""\(\)]))', bygroups(Number.Hex, Name.Variable, Whitespace)),
+ (r'"', String, 'string'),
+ (r'{', String, 'string2'),
+ (r';#+.*\n', Comment.Special),
+ (r';\*+.*\n', Comment.Preproc),
+ (r';.*\n', Comment),
+ (r'%"', Name.Decorator, 'stringFile'),
+ (r'%[^(\^{^")\s\[\]]+', Name.Decorator),
+ (r'[+-]?([a-zA-Z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
+ (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
+ (r'\d+[\-\/][0-9a-zA-Z]+[\-\/]\d+(\/\d+\:\d+((\:\d+)?'
+ r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
+ (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
+ (r'\d+[xX]\d+', Keyword.Constant), # pair
+ (r'[+-]?\d+(\'\d+)?([\.,]\d*)?[eE][+-]?\d+', Number.Float),
+ (r'[+-]?\d+(\'\d+)?[\.,]\d*', Number.Float),
+ (r'[+-]?\d+(\'\d+)?', Number),
+ (r'[\[\]\(\)]', Generic.Strong),
+ (r'[a-zA-Z]+[^(\^{"\s:)]*://[^(\^{"\s)]*', Name.Decorator), # url
+ (r'mailto:[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # url
+ (r'[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # email
+ (r'comment\s', Comment, 'comment'),
+ (r'/[^(\^{^")\s/[\]]*', Name.Attribute),
+ (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+ (r'<[a-zA-Z0-9:._-]*>', Name.Tag),
+ (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+ (r'([^(\^{^")\s]+)', Text),
+ ],
+ 'string': [
+ (r'[^(\^")]+', String),
+ (escape_re, String.Escape),
+ (r'[\(|\)]+', String),
+ (r'\^.', String.Escape),
+ (r'"', String, '#pop'),
+ ],
+ 'string2': [
+ (r'[^(\^{^})]+', String),
+ (escape_re, String.Escape),
+ (r'[\(|\)]+', String),
+ (r'\^.', String.Escape),
+ (r'{', String, '#push'),
+ (r'}', String, '#pop'),
+ ],
+ 'stringFile': [
+ (r'[^(\^")]+', Name.Decorator),
+ (escape_re, Name.Decorator),
+ (r'\^.', Name.Decorator),
+ (r'"', Name.Decorator, '#pop'),
+ ],
+ 'char': [
+ (escape_re + '"', String.Char, '#pop'),
+ (r'\^."', String.Char, '#pop'),
+ (r'."', String.Char, '#pop'),
+ ],
+ 'tag': [
+ (escape_re, Name.Tag),
+ (r'"', Name.Tag, 'tagString'),
+ (r'[^(<>\r\n")]+', Name.Tag),
+ (r'>', Name.Tag, '#pop'),
+ ],
+ 'tagString': [
+ (r'[^(\^")]+', Name.Tag),
+ (escape_re, Name.Tag),
+ (r'[\(|\)]+', Name.Tag),
+ (r'\^.', Name.Tag),
+ (r'"', Name.Tag, '#pop'),
+ ],
+ 'tuple': [
+ (r'(\d+\.)+', Keyword.Constant),
+ (r'\d+', Keyword.Constant, '#pop'),
+ ],
+ 'bin2': [
+ (r'\s+', Number.Hex),
+ (r'([0-1]\s*){8}', Number.Hex),
+ (r'}', Number.Hex, '#pop'),
+ ],
+ 'comment': [
+ (r'"', Comment, 'commentString1'),
+ (r'{', Comment, 'commentString2'),
+ (r'\[', Comment, 'commentBlock'),
+ (r'[^(\s{\"\[]+', Comment, '#pop'),
+ ],
+ 'commentString1': [
+ (r'[^(\^")]+', Comment),
+ (escape_re, Comment),
+ (r'[\(|\)]+', Comment),
+ (r'\^.', Comment),
+ (r'"', Comment, '#pop'),
+ ],
+ 'commentString2': [
+ (r'[^(\^{^})]+', Comment),
+ (escape_re, Comment),
+ (r'[\(|\)]+', Comment),
+ (r'\^.', Comment),
+ (r'{', Comment, '#push'),
+ (r'}', Comment, '#pop'),
+ ],
+ 'commentBlock': [
+ (r'\[', Comment, '#push'),
+ (r'\]', Comment, '#pop'),
+ (r'[^(\[\])]+', Comment),
+ ],
+ }
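
Of the lexers added to other.py above, only RslLexer defines a strong analyse_text. A hedged sketch of the intended effect on pygments.lexers.guess_lexer, using the header of the test.rsl example file added later in this commit:

from pygments.lexers import guess_lexer

rsl_src = 'scheme COMPILER =\nclass\n    type\n        Prog == mk_Prog(stmt : Stmt)\nend\n'
# The 1.0 score from RslLexer.analyse_text should make this print 'RSL'
print(guess_lexer(rsl_src).name)
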
diff --git a/pygments/lexers/qbasic.py b/pygments/lexers/qbasic.py
new file mode 100644
index 00000000..80b80f9f
--- /dev/null
+++ b/pygments/lexers/qbasic.py
@@ -0,0 +1,157 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.qbasic
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Simple lexer for Microsoft QBasic source code.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Text, Name, Comment, String, Keyword, Punctuation, \
+ Number, Operator
+
+__all__ = ['QBasicLexer']
+
+
+class QBasicLexer(RegexLexer):
+ """
+ For
+ `QBasic <http://en.wikipedia.org/wiki/QBasic>`_
+ source code.
+ """
+
+ name = 'QBasic'
+ aliases = ['qbasic', 'basic']
+ filenames = ['*.BAS', '*.bas']
+ mimetypes = ['text/basic']
+
+ declarations = ['DATA', 'LET']
+
+ functions = [
+ 'ABS', 'ASC', 'ATN', 'CDBL', 'CHR$', 'CINT', 'CLNG',
+ 'COMMAND$', 'COS', 'CSNG', 'CSRLIN', 'CVD', 'CVDMBF', 'CVI',
+ 'CVL', 'CVS', 'CVSMBF', 'DATE$', 'ENVIRON$', 'EOF', 'ERDEV',
+ 'ERDEV$', 'ERL', 'ERR', 'EXP', 'FILEATTR', 'FIX', 'FRE',
+ 'FREEFILE', 'HEX$', 'INKEY$', 'INP', 'INPUT$', 'INSTR', 'INT',
+ 'IOCTL$', 'LBOUND', 'LCASE$', 'LEFT$', 'LEN', 'LOC', 'LOF',
+ 'LOG', 'LPOS', 'LTRIM$', 'MID$', 'MKD$', 'MKDMBF$', 'MKI$',
+ 'MKL$', 'MKS$', 'MKSMBF$', 'OCT$', 'PEEK', 'PEN', 'PLAY',
+ 'PMAP', 'POINT', 'POS', 'RIGHT$', 'RND', 'RTRIM$', 'SADD',
+ 'SCREEN', 'SEEK', 'SETMEM', 'SGN', 'SIN', 'SPACE$', 'SPC',
+ 'SQR', 'STICK', 'STR$', 'STRIG', 'STRING$', 'TAB', 'TAN',
+ 'TIME$', 'TIMER', 'UBOUND', 'UCASE$', 'VAL', 'VARPTR',
+ 'VARPTR$', 'VARSEG'
+ ]
+
+ metacommands = ['$DYNAMIC', '$INCLUDE', '$STATIC']
+
+ operators = ['AND', 'EQV', 'IMP', 'NOT', 'OR', 'XOR']
+
+ statements = [
+ 'BEEP', 'BLOAD', 'BSAVE', 'CALL', 'CALL ABSOLUTE',
+ 'CALL INTERRUPT', 'CALLS', 'CHAIN', 'CHDIR', 'CIRCLE', 'CLEAR',
+ 'CLOSE', 'CLS', 'COLOR', 'COM', 'COMMON', 'CONST', 'DATA',
+ 'DATE$', 'DECLARE', 'DEF FN', 'DEF SEG', 'DEFDBL', 'DEFINT',
+ 'DEFLNG', 'DEFSNG', 'DEFSTR', 'DEF', 'DIM', 'DO', 'LOOP',
+ 'DRAW', 'END', 'ENVIRON', 'ERASE', 'ERROR', 'EXIT', 'FIELD',
+ 'FILES', 'FOR', 'NEXT', 'FUNCTION', 'GET', 'GOSUB', 'GOTO',
+ 'IF', 'THEN', 'INPUT', 'INPUT #', 'IOCTL', 'KEY', 'KEY',
+ 'KILL', 'LET', 'LINE', 'LINE INPUT', 'LINE INPUT #', 'LOCATE',
+ 'LOCK', 'UNLOCK', 'LPRINT', 'LSET', 'MID$', 'MKDIR', 'NAME',
+ 'ON COM', 'ON ERROR', 'ON KEY', 'ON PEN', 'ON PLAY',
+ 'ON STRIG', 'ON TIMER', 'ON UEVENT', 'ON', 'OPEN', 'OPEN COM',
+ 'OPTION BASE', 'OUT', 'PAINT', 'PALETTE', 'PCOPY', 'PEN',
+ 'PLAY', 'POKE', 'PRESET', 'PRINT', 'PRINT #', 'PRINT USING',
+ 'PSET', 'PUT', 'PUT', 'RANDOMIZE', 'READ', 'REDIM', 'REM',
+ 'RESET', 'RESTORE', 'RESUME', 'RETURN', 'RMDIR', 'RSET', 'RUN',
+ 'SCREEN', 'SEEK', 'SELECT CASE', 'SHARED', 'SHELL', 'SLEEP',
+ 'SOUND', 'STATIC', 'STOP', 'STRIG', 'SUB', 'SWAP', 'SYSTEM',
+ 'TIME$', 'TIMER', 'TROFF', 'TRON', 'TYPE', 'UEVENT', 'UNLOCK',
+ 'VIEW', 'WAIT', 'WHILE', 'WEND', 'WIDTH', 'WINDOW', 'WRITE'
+ ]
+
+ keywords = [
+ 'ACCESS', 'ALIAS', 'ANY', 'APPEND', 'AS', 'BASE', 'BINARY',
+ 'BYVAL', 'CASE', 'CDECL', 'DOUBLE', 'ELSE', 'ELSEIF', 'ENDIF',
+ 'INTEGER', 'IS', 'LIST', 'LOCAL', 'LONG', 'LOOP', 'MOD',
+ 'NEXT', 'OFF', 'ON', 'OUTPUT', 'RANDOM', 'SIGNAL', 'SINGLE',
+ 'STEP', 'STRING', 'THEN', 'TO', 'UNTIL', 'USING', 'WEND'
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\n+', Text),
+ (r'\s+', Text.Whitespace),
+ (r'^(\s*)(\d*)(\s*)(REM .*)$',
+ bygroups(Text.Whitespace, Name.Label, Text.Whitespace,
+ Comment.Single)),
+ (r'^(\s*)(\d+)(\s*)',
+ bygroups(Text.Whitespace, Name.Label, Text.Whitespace)),
+ (r'(?=[\s]*)(\w+)(?=[\s]*=)', Name.Variable.Global),
+ (r'(?=[^"]*)\'.*$', Comment.Single),
+ (r'"[^\n\"]*"', String.Double),
+ (r'(END)(\s+)(FUNCTION|IF|SELECT|SUB)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Keyword.Reserved)),
+ (r'(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
+ Text.Whitespace, Name)),
+ (r'(DIM)(\s+)(SHARED)(\s+)([^\s\(]+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
+ Text.Whitespace, Name.Variable.Global)),
+ (r'(DIM)(\s+)([^\s\(]+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable.Global)),
+ (r'^(\s*)([a-zA-Z_]+)(\s*)(\=)',
+ bygroups(Text.Whitespace, Name.Variable.Global, Text.Whitespace,
+ Operator)),
+ (r'(GOTO|GOSUB)(\s+)(\w+\:?)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
+ (r'(SUB)(\s+)(\w+\:?)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
+ include('declarations'),
+ include('functions'),
+ include('metacommands'),
+ include('operators'),
+ include('statements'),
+ include('keywords'),
+ (r'[a-zA-Z_]\w*[\$@#&!]', Name.Variable.Global),
+ (r'[a-zA-Z_]\w*\:', Name.Label),
+ (r'\-?\d*\.\d+[@|#]?', Number.Float),
+ (r'\-?\d+[@|#]', Number.Float),
+ (r'\-?\d+#?', Number.Integer.Long),
+ (r'\-?\d+#?', Number.Integer),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]', Operator),
+ (r'[\[\]{}(),;]', Punctuation),
+ (r'[\w]+', Name.Variable.Global),
+ ],
+ # can't use regular \b because of X$()
+ 'declarations': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, declarations)),
+ Keyword.Declaration),
+ ],
+ 'functions': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)),
+ Keyword.Reserved),
+ ],
+ 'metacommands': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, metacommands)),
+ Keyword.Constant),
+ ],
+ 'operators': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, operators)), Operator.Word),
+ ],
+ 'statements': [
+ (r'\b(%s)\b' % '|'.join(map(re.escape, statements)),
+ Keyword.Reserved),
+ ],
+ 'keywords': [
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ ],
+ }
+
+ def analyse_text(text):
+ return 0.2
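
Illustrative only: the new QBasic lexer can be exercised against the two-line qbasic_example file added further down in this commit:

from pygments.lexers import get_lexer_by_name

src = '10 print RIGHT$("hi there", 5)\n20 goto 10\n'
# Line numbers should come out as Name.Label and RIGHT$ as Keyword.Reserved
# (compare tests/test_qbasiclexer.py below)
for ttype, value in get_lexer_by_name('qbasic').get_tokens(src):
    print(ttype, repr(value))
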
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
index c07ff6ee..f809dae9 100644
--- a/pygments/lexers/shell.py
+++ b/pygments/lexers/shell.py
@@ -296,17 +296,18 @@ class TcshLexer(RegexLexer):
r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
r'ver|wait|warp|watchlog|where|which)\s*\b',
Name.Builtin),
- (r'#.*\n', Comment),
+ (r'#.*', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]+', Operator),
(r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r';', Punctuation),
],
'data': [
(r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
(r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r'\s+', Text),
- (r'[^=\s\[\]{}()$"\'`\\]+', Text),
+ (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
(r'\d+(?= |\Z)', Number),
(r'\$#?(\w+|.)', Name.Variable),
],
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index 6c2a4119..fc1cc6a6 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -26,7 +26,7 @@ __all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer
'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer',
'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer', 'HttpLexer',
'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer', 'EbnfLexer',
- 'TodotxtLexer']
+ 'TodotxtLexer', 'DockerLexer']
class IniLexer(RegexLexer):
@@ -2017,3 +2017,30 @@ class TodotxtLexer(RegexLexer):
('\s+', IncompleteTaskText),
],
}
+
+
+class DockerLexer(RegexLexer):
+ """
+ Lexer for `Docker <http://docker.io>`_ configuration files.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Docker'
+ aliases = ['docker', 'dockerfile']
+ filenames = ['Dockerfile', '*.docker']
+ mimetypes = ['text/x-dockerfile-config']
+
+ _keywords = (r'(?:FROM|MAINTAINER|RUN|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|'
+ r'VOLUME|WORKDIR)')
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,),
+ bygroups(Name.Keyword, Whitespace, Keyword)),
+ (_keywords + r'\b', Keyword),
+ (r'#.*', Comment),
+ (r'.+', using(BashLexer)),
+ ],
+ }
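
Because the mapping entry above registers the literal filename 'Dockerfile', filename-based lookup can pick the new lexer up without an extension. A sketch using only standard Pygments calls (the sample Dockerfile content is invented):

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_for_filename

dockerfile = 'FROM ubuntu:12.04\nMAINTAINER someone@example.com\nRUN apt-get update\nCMD ["bash"]\n'
lexer = get_lexer_for_filename('Dockerfile', dockerfile)  # resolves via the 'Dockerfile' pattern
print(highlight(dockerfile, lexer, HtmlFormatter()))
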
diff --git a/tests/examplefiles/example.chai b/tests/examplefiles/example.chai
new file mode 100644
index 00000000..85f53c38
--- /dev/null
+++ b/tests/examplefiles/example.chai
@@ -0,0 +1,6 @@
+var f = fun(x) { x + 2; }
+// comment
+puts(someFunc(2 + 2 - 1 * 5 / 4));
+var x = "str";
+def dosomething(lhs, rhs) { print("lhs: ${lhs}, rhs: ${rhs}"); }
+callfunc(`+`, 1, 4);
diff --git a/tests/examplefiles/example.feature b/tests/examplefiles/example.feature
new file mode 100644
index 00000000..a26268da
--- /dev/null
+++ b/tests/examplefiles/example.feature
@@ -0,0 +1,16 @@
+# First comment
+Feature: My amazing feature
+ Feature description line 1
+ Feature description line 2
+
+#comment
+Scenario Outline: My detailed scenario #string
+ Given That <x> is set
+ When When I <subtract>
+ Then I should get the <remain#der>
+
+ # indented comment
+ Examples:
+ | x | subtract | remain#der |
+ | 12 | 5\|3 | #73 |
+ | #the | 10 | 15 |
diff --git a/tests/examplefiles/example.red b/tests/examplefiles/example.red
new file mode 100644
index 00000000..37c17ef8
--- /dev/null
+++ b/tests/examplefiles/example.red
@@ -0,0 +1,257 @@
+Red [
+ Title: "Red console"
+ Author: ["Nenad Rakocevic" "Kaj de Vos"]
+ File: %console.red
+ Tabs: 4
+ Rights: "Copyright (C) 2012-2013 Nenad Rakocevic. All rights reserved."
+ License: {
+ Distributed under the Boost Software License, Version 1.0.
+ See https://github.com/dockimbel/Red/blob/master/BSL-License.txt
+ }
+ Purpose: "Just some code for testing Pygments colorizer"
+ Language: http://www.red-lang.org/
+]
+
+#system-global [
+ #either OS = 'Windows [
+ #import [
+ "kernel32.dll" stdcall [
+ AttachConsole: "AttachConsole" [
+ processID [integer!]
+ return: [integer!]
+ ]
+ SetConsoleTitle: "SetConsoleTitleA" [
+ title [c-string!]
+ return: [integer!]
+ ]
+ ReadConsole: "ReadConsoleA" [
+ consoleInput [integer!]
+ buffer [byte-ptr!]
+ charsToRead [integer!]
+ numberOfChars [int-ptr!]
+ inputControl [int-ptr!]
+ return: [integer!]
+ ]
+ ]
+ ]
+ line-buffer-size: 16 * 1024
+ line-buffer: allocate line-buffer-size
+ ][
+ #switch OS [
+ MacOSX [
+ #define ReadLine-library "libreadline.dylib"
+ ]
+ #default [
+ #define ReadLine-library "libreadline.so.6"
+ #define History-library "libhistory.so.6"
+ ]
+ ]
+ #import [
+ ReadLine-library cdecl [
+ read-line: "readline" [ ; Read a line from the console.
+ prompt [c-string!]
+ return: [c-string!]
+ ]
+ rl-bind-key: "rl_bind_key" [
+ key [integer!]
+ command [integer!]
+ return: [integer!]
+ ]
+ rl-insert: "rl_insert" [
+ count [integer!]
+ key [integer!]
+ return: [integer!]
+ ]
+ ]
+ #if OS <> 'MacOSX [
+ History-library cdecl [
+ add-history: "add_history" [ ; Add line to the history.
+ line [c-string!]
+ ]
+ ]
+ ]
+ ]
+
+ rl-insert-wrapper: func [
+ [cdecl]
+ count [integer!]
+ key [integer!]
+ return: [integer!]
+ ][
+ rl-insert count key
+ ]
+
+ ]
+]
+
+Windows?: system/platform = 'Windows
+
+read-argument: routine [
+ /local
+ args [str-array!]
+ str [red-string!]
+][
+ if system/args-count <> 2 [
+ SET_RETURN(none-value)
+ exit
+ ]
+ args: system/args-list + 1 ;-- skip binary filename
+ str: simple-io/read-txt args/item
+ SET_RETURN(str)
+]
+
+init-console: routine [
+ str [string!]
+ /local
+ ret
+][
+ #either OS = 'Windows [
+ ;ret: AttachConsole -1
+ ;if zero? ret [print-line "ReadConsole failed!" halt]
+
+ ret: SetConsoleTitle as c-string! string/rs-head str
+ if zero? ret [print-line "SetConsoleTitle failed!" halt]
+ ][
+ rl-bind-key as-integer tab as-integer :rl-insert-wrapper
+ ]
+]
+
+input: routine [
+ prompt [string!]
+ /local
+ len ret str buffer line
+][
+ #either OS = 'Windows [
+ len: 0
+ print as c-string! string/rs-head prompt
+ ret: ReadConsole stdin line-buffer line-buffer-size :len null
+ if zero? ret [print-line "ReadConsole failed!" halt]
+ len: len + 1
+ line-buffer/len: null-byte
+ str: string/load as c-string! line-buffer len
+ ][
+ line: read-line as c-string! string/rs-head prompt
+ if line = null [halt] ; EOF
+
+ #if OS <> 'MacOSX [add-history line]
+
+ str: string/load line 1 + length? line
+; free as byte-ptr! line
+ ]
+ SET_RETURN(str)
+]
+
+count-delimiters: function [
+ buffer [string!]
+ return: [block!]
+][
+ list: copy [0 0]
+ c: none
+
+ foreach c buffer [
+ case [
+ escaped? [
+ escaped?: no
+ ]
+ in-comment? [
+ switch c [
+ #"^/" [in-comment?: no]
+ ]
+ ]
+ 'else [
+ switch c [
+ #"^^" [escaped?: yes]
+ #";" [if zero? list/2 [in-comment?: yes]]
+ #"[" [list/1: list/1 + 1]
+ #"]" [list/1: list/1 - 1]
+ #"{" [list/2: list/2 + 1]
+ #"}" [list/2: list/2 - 1]
+ ]
+ ]
+ ]
+ ]
+ list
+]
+
+do-console: function [][
+ buffer: make string! 10000
+ prompt: red-prompt: "red>> "
+ mode: 'mono
+
+ switch-mode: [
+ mode: case [
+ cnt/1 > 0 ['block]
+ cnt/2 > 0 ['string]
+ 'else [
+ prompt: red-prompt
+ do eval
+ 'mono
+ ]
+ ]
+ prompt: switch mode [
+ block ["[^-"]
+ string ["{^-"]
+ mono [red-prompt]
+ ]
+ ]
+
+ eval: [
+ code: load/all buffer
+
+ unless tail? code [
+ set/any 'result do code
+
+ unless unset? :result [
+ if 67 = length? result: mold/part :result 67 [ ;-- optimized for width = 72
+ clear back tail result
+ append result "..."
+ ]
+ print ["==" result]
+ ]
+ ]
+ clear buffer
+ ]
+
+ while [true][
+ unless tail? line: input prompt [
+ append buffer line
+ cnt: count-delimiters buffer
+
+ either Windows? [
+ remove skip tail buffer -2 ;-- clear extra CR (Windows)
+ ][
+ append buffer lf ;-- Unix
+ ]
+
+ switch mode [
+ block [if cnt/1 <= 0 [do switch-mode]]
+ string [if cnt/2 <= 0 [do switch-mode]]
+ mono [do either any [cnt/1 > 0 cnt/2 > 0][switch-mode][eval]]
+ ]
+ ]
+ ]
+]
+
+q: :quit
+
+if script: read-argument [
+ script: load script
+ either any [
+ script/1 <> 'Red
+ not block? script/2
+ ][
+ print "*** Error: not a Red program!"
+ ][
+ do skip script 2
+ ]
+ quit
+]
+
+init-console "Red Console"
+
+print {
+-=== Red Console alpha version ===-
+(only ASCII input supported)
+}
+
+do-console \ No newline at end of file
diff --git a/tests/examplefiles/example.reds b/tests/examplefiles/example.reds
new file mode 100644
index 00000000..dd4ad0f9
--- /dev/null
+++ b/tests/examplefiles/example.reds
@@ -0,0 +1,124 @@
+Red/System [
+ Title: "Red/System example file"
+ Purpose: "Just some code for testing Pygments colorizer"
+ Language: http://www.red-lang.org/
+]
+
+#include %../common/FPU-configuration.reds
+
+; C types
+
+#define time! long!
+#define clock! long!
+
+date!: alias struct! [
+ second [integer!] ; 0-61 (60?)
+ minute [integer!] ; 0-59
+ hour [integer!] ; 0-23
+
+ day [integer!] ; 1-31
+ month [integer!] ; 0-11
+ year [integer!] ; Since 1900
+
+ weekday [integer!] ; 0-6 since Sunday
+ yearday [integer!] ; 0-365
+ daylight-saving-time? [integer!] ; Negative: unknown
+]
+
+#either OS = 'Windows [
+ #define clocks-per-second 1000
+][
+ ; CLOCKS_PER_SEC value for Syllable, Linux (XSI-conformant systems)
+ ; TODO: check for other systems
+ #define clocks-per-second 1000'000
+]
+
+#import [LIBC-file cdecl [
+
+ ; Error handling
+
+ form-error: "strerror" [ ; Return error description.
+ code [integer!]
+ return: [c-string!]
+ ]
+ print-error: "perror" [ ; Print error to standard error output.
+ string [c-string!]
+ ]
+
+
+ ; Memory management
+
+ make: "calloc" [ ; Allocate zero-filled memory.
+ chunks [size!]
+ size [size!]
+ return: [binary!]
+ ]
+ resize: "realloc" [ ; Resize memory allocation.
+ memory [binary!]
+ size [size!]
+ return: [binary!]
+ ]
+ ]
+
+ JVM!: alias struct! [
+ reserved0 [int-ptr!]
+ reserved1 [int-ptr!]
+ reserved2 [int-ptr!]
+
+ DestroyJavaVM [function! [[JNICALL] vm [JVM-ptr!] return: [jint!]]]
+ AttachCurrentThread [function! [[JNICALL] vm [JVM-ptr!] penv [struct! [p [int-ptr!]]] args [byte-ptr!] return: [jint!]]]
+ DetachCurrentThread [function! [[JNICALL] vm [JVM-ptr!] return: [jint!]]]
+ GetEnv [function! [[JNICALL] vm [JVM-ptr!] penv [struct! [p [int-ptr!]]] version [integer!] return: [jint!]]]
+ AttachCurrentThreadAsDaemon [function! [[JNICALL] vm [JVM-ptr!] penv [struct! [p [int-ptr!]]] args [byte-ptr!] return: [jint!]]]
+]
+
+ ;just some datatypes for testing:
+
+ #some-hash
+ 10-1-2013
+ quit
+
+ ;binary:
+ #{00FF0000}
+ #{00FF0000 FF000000}
+ #{00FF0000 FF000000} ;with tab instead of space
+ 2#{00001111}
+ 64#{/wAAAA==}
+ 64#{/wAAA A==} ;with space inside
+ 64#{/wAAA A==} ;with tab inside
+
+
+ ;string with char
+ {bla ^(ff) foo}
+ {bla ^(( foo}
+ ;some numbers:
+ 12
+ 1'000
+ 1.2
+ FF00FF00h
+
+ ;some tests of hexa number notation with not common ending
+ [ff00h ff00h] ff00h{} FFh"foo" 00h(1 + 2) (AEh)
+
+;normal words:
+foo char
+
+;get-word
+:foo
+
+;lit-word:
+'foo 'foo
+
+to-integer foo
+foo/(a + 1)/b
+
+call/output reform ['which interpreter] path: copy ""
+
+ version-1.1: 00010001h
+
+ #if type = 'exe [
+ push system/stack/frame ;-- save previous frame pointer
+ system/stack/frame: system/stack/top ;-- @@ reposition frame pointer just after the catch flag
+]
+push CATCH_ALL ;-- exceptions root barrier
+push 0 ;-- keep stack aligned on 64-bit \ No newline at end of file
diff --git a/tests/examplefiles/qbasic_example b/tests/examplefiles/qbasic_example
new file mode 100644
index 00000000..27041af6
--- /dev/null
+++ b/tests/examplefiles/qbasic_example
@@ -0,0 +1,2 @@
+10 print RIGHT$("hi there", 5)
+20 goto 10
diff --git a/tests/examplefiles/test.cyp b/tests/examplefiles/test.cyp
new file mode 100644
index 00000000..37465a4d
--- /dev/null
+++ b/tests/examplefiles/test.cyp
@@ -0,0 +1,123 @@
+//test comment
+START a = node(*)
+MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d)
+RETURN a.name, m.title, d.name;
+
+START a = node(*)
+MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d)
+WITH d,m,count(a) as Actors
+WHERE Actors > 4
+RETURN d.name as Director,m.title as Movie, Actors ORDER BY Actors;
+
+START a=node(*)
+MATCH p=(a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d)
+return p;
+
+START a = node(*)
+MATCH p1=(a)-[:ACTED_IN]->(m), p2=d-[:DIRECTED]->(m)
+WHERE m.title="The Matrix"
+RETURN p1, p2;
+
+START a = node(*)
+MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d)
+WHERE a=d
+RETURN a.name;
+
+START a = node(*)
+MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d)
+WHERE a=d
+RETURN a.name;
+
+START a=node(*)
+MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d)
+RETURN a.name, d.name, count(*) as Movies,collect(m.title) as Titles
+ORDER BY (Movies) DESC
+LIMIT 5;
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+RETURN keanu;
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+MATCH (keanu)-[:ACTED_IN]->(movie)
+RETURN movie.title;
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+MATCH (keanu)-[r:ACTED_IN]->(movie)
+WHERE "Neo" in r.roles
+RETURN DISTINCT movie.title;
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+MATCH (keanu)-[:ACTED_IN]->()<-[:DIRECTED]-(director)
+RETURN director.name;
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+MATCH (keanu)-[:ACTED_IN]->(movie)<-[:ACTED_IN]-(n)
+WHERE n.born < keanu.born
+RETURN DISTINCT n.name, keanu.born ,n.born;
+
+START keanu=node:node_auto_index(name="Keanu Reeves"),
+ hugo=node:node_auto_index(name="Hugo Weaving")
+MATCH (keanu)-[:ACTED_IN]->(movie)
+WHERE NOT((hugo)-[:ACTED_IN]->(movie))
+RETURN DISTINCT movie.title;
+
+START a = node(*)
+MATCH (a)-[:ACTED_IN]->(m)
+WITH a,count(m) as Movies
+RETURN a.name as Actor, Movies ORDER BY Movies;
+
+START keanu=node:node_auto_index(name="Keanu Reeves"),actor
+MATCH past=(keanu)-[:ACTED_IN]->()<-[:ACTED_IN]-(),
+ actors=(actor)-[:ACTED_IN]->()
+WHERE hasnt=actors NOT IN past
+RETURN hasnt;
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+MATCH (keanu)-[:ACTED_IN]->()<-[:ACTED_IN]-(c),
+ (c)-[:ACTED_IN]->()<-[:ACTED_IN]-(coc)
+WHERE NOT((keanu)-[:ACTED_IN]->()<-[:ACTED_IN]-(coc))
+AND coc > keanu
+RETURN coc.name, count(coc)
+ORDER BY count(coc) DESC
+LIMIT 3;
+
+START kevin=node:node_auto_index(name="Kevin Bacon"),
+ movie=node:node_auto_index(name="Mystic River")
+MATCH (kevin)-[:ACTED_IN]->(movie)
+RETURN DISTINCT movie.title;
+
+CREATE (n
+ {
+ title:"Mystic River",
+ released:1993,
+ tagline:"We bury our sins here, Dave. We wash them clean."
+ }
+ ) RETURN n;
+
+
+START movie=node:node_auto_index(title="Mystic River")
+SET movie.released = 2003
+RETURN movie;
+
+start emil=node:node_auto_index(name="Emil Eifrem") MATCH emil-[r]->(n) DELETE r, emil;
+
+START a=node(*)
+MATCH (a)-[:ACTED_IN]->()<-[:ACTED_IN]-(b)
+CREATE UNIQUE (a)-[:KNOWS]->(b);
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+MATCH (keanu)-[:KNOWS*2]->(fof)
+WHERE keanu <> fof
+RETURN distinct fof.name;
+
+START charlize=node:node_auto_index(name="Charlize Theron"),
+ bacon=node:node_auto_index(name="Kevin Bacon")
+MATCH p=shortestPath((charlize)-[:KNOWS*]->(bacon))
+RETURN extract(n in nodes(p) | n.name)[1];
+
+START actors=node:
+
+MATCH (alice)-[:`REALLY LIKES`]->(bob)
+MATCH (alice)-[:`REALLY ``LIKES```]->(bob)
+myFancyIdentifier.`(weird property name)`
+"string\t\n\b\f\\\''\""
diff --git a/tests/examplefiles/test.pan b/tests/examplefiles/test.pan
new file mode 100644
index 00000000..56c8bd62
--- /dev/null
+++ b/tests/examplefiles/test.pan
@@ -0,0 +1,54 @@
+object template pantest;
+
+# Very simple pan test file
+"/long/decimal" = 123;
+"/long/octal" = 0755;
+"/long/hexadecimal" = 0xFF;
+
+"/double/simple" = 0.01;
+"/double/pi" = 3.14159;
+"/double/exponent" = 1e-8;
+"/double/scientific" = 1.3E10;
+
+"/string/single" = 'Faster, but escapes like \t, \n and \x3d don''t work, but '' should work.';
+"/string/double" = "Slower, but escapes like \t, \n and \x3d do work";
+
+variable TEST = 2;
+
+"/x2" = to_string(TEST);
+"/x2" ?= 'Default value';
+
+"/x3" = 1 + 2 + value("/long/decimal");
+
+"/x4" = undef;
+
+"/x5" = null;
+
+variable e ?= error("Test error message");
+
+# include gmond config for services-monitoring
+include { 'site/ganglia/gmond/services-monitoring' };
+
+"/software/packages"=pkg_repl("httpd","2.2.3-43.sl5.3",PKG_ARCH_DEFAULT);
+"/software/packages"=pkg_repl("php");
+
+# Example function
+function show_things_view_for_stuff = {
+ thing = ARGV[0];
+ foreach( i; mything; STUFF ) {
+ if ( thing == mything ) {
+ return( true );
+ } else {
+ return SELF;
+ };
+ };
+ false;
+};
+
+variable HERE = <<EOF;
+; This example demonstrates an in-line heredoc style config file
+[main]
+awesome = true
+EOF
+
+variable small = false;#This should be highlighted normally again.
diff --git a/tests/examplefiles/test.rsl b/tests/examplefiles/test.rsl
new file mode 100644
index 00000000..d6c9fc9a
--- /dev/null
+++ b/tests/examplefiles/test.rsl
@@ -0,0 +1,111 @@
+scheme COMPILER =
+class
+ type
+ Prog == mk_Prog(stmt : Stmt),
+
+ Stmt ==
+ mk_Asgn(ide : Identifier, expr : Expr) |
+ mk_If(cond : Expr, s1 : Stmt, s2 : Stmt) |
+ mk_Seq(head : Stmt, last : Stmt),
+
+ Expr ==
+ mk_Const(const : Int) |
+ mk_Plus(fst : Expr, snd : Expr) |
+ mk_Id(ide : Identifier),
+ Identifier = Text
+
+type /* storage for program variables */
+ `Sigma = Identifier -m-> Int
+
+value
+ m : Prog -> `Sigma -> `Sigma
+ m(p)(`sigma) is m(stmt(p))(`sigma),
+
+ m : Stmt -> `Sigma -> `Sigma
+ m(s)(`sigma) is
+ case s of
+ mk_Asgn(i, e) -> `sigma !! [i +> m(e)(`sigma)],
+ mk_Seq(s1, s2) -> m(s2)(m(s1)(`sigma)),
+ mk_If(c, s1, s2) ->
+ if m(c)(`sigma) ~= 0 then m(s1)(`sigma) else m(s2)(`sigma) end
+ end,
+
+ m : Expr -> `Sigma -> Int
+ m(e)(`sigma) is
+ case e of
+ mk_Const(n) -> n,
+ mk_Plus(e1, e2) -> m(e1)(`sigma) + m(e2)(`sigma),
+ mk_Id(id) -> if id isin dom `sigma then `sigma(id) else 0 end
+ end
+
+type
+ MProg = Inst-list,
+ Inst ==
+ mk_Push(ide1 : Identifier) |
+ mk_Pop(Unit) |
+ mk_Add(Unit) |
+ mk_Cnst(val : Int) |
+ mk_Store(ide2 : Identifier) |
+ mk_Jumpfalse(off1 : Int) |
+ mk_Jump(off2 : Int)
+
+
+/* An interpreter for SMALL instructions */
+
+type Stack = Int-list
+value
+ I : MProg >< Int >< Stack -> (`Sigma ->`Sigma)
+ I(mp, pc, s)(`sigma) is
+ if pc <= 0 \/ pc > len mp then `sigma else
+ case mp(pc) of
+ mk_Push(x) -> if x isin dom `sigma
+ then I(mp, pc + 1, <.`sigma(x).> ^ s)(`sigma)
+ else I(mp, pc + 1, <.0.> ^ s)(`sigma) end,
+ mk_Pop(()) -> if len s = 0 then `sigma
+ else I(mp, pc + 1, tl s)(`sigma) end,
+ mk_Cnst(n) -> I(mp, pc + 1, <.n.> ^ s)(`sigma),
+ mk_Add(()) -> if len s < 2 then `sigma
+ else I(mp, pc + 1,<.s(1) + s(2).> ^ tl tl s)(`sigma) end,
+ mk_Store(x) -> if len s = 0 then `sigma
+ else I(mp, pc + 1, s)(`sigma !! [x +> s(1)]) end,
+ mk_Jumpfalse(n) -> if len s = 0 then `sigma
+ elsif hd s ~= 0 then I(mp, pc + 1, s)(`sigma)
+ else I(mp, pc + n, s)(`sigma) end,
+ mk_Jump(n) -> I(mp, pc + n, s)(`sigma)
+ end
+ end
+
+value
+ comp_Prog : Prog -> MProg
+ comp_Prog(p) is comp_Stmt(stmt(p)),
+
+ comp_Stmt : Stmt -> MProg
+ comp_Stmt(s) is
+ case s of
+ mk_Asgn(id, e) -> comp_Expr(e) ^ <. mk_Store(id), mk_Pop() .>,
+ mk_Seq(s1, s2) -> comp_Stmt(s1) ^ comp_Stmt(s2),
+ mk_If(e, s1, s2) ->
+ let
+ ce = comp_Expr(e),
+ cs1 = comp_Stmt(s1), cs2 = comp_Stmt(s2)
+ in
+ ce ^
+ <. mk_Jumpfalse(len cs1 + 3) .> ^
+ <. mk_Pop() .> ^
+ cs1 ^
+ <. mk_Jump(len cs2 + 2) .> ^
+ <. mk_Pop() .> ^
+ cs2
+ end
+ end,
+
+ comp_Expr : Expr -> MProg
+ comp_Expr(e) is
+ case e of
+ mk_Const(n) -> <. mk_Cnst(n) .>,
+ mk_Plus(e1, e2) ->
+ comp_Expr(e1) ^ comp_Expr(e2) ^ <. mk_Add() .>,
+ mk_Id(id) -> <. mk_Push(id) .>
+ end
+
+end
diff --git a/tests/test_qbasiclexer.py b/tests/test_qbasiclexer.py
new file mode 100644
index 00000000..1b81b643
--- /dev/null
+++ b/tests/test_qbasiclexer.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+"""
+ Tests for QBasic
+ ~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import glob
+import os
+import unittest
+
+from pygments.token import Token
+from pygments.lexers.qbasic import QBasicLexer
+
+class QBasicTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = QBasicLexer()
+ self.maxDiff = None
+
+ def testKeywordsWithDollar(self):
+ fragment = u'DIM x\nx = RIGHT$("abc", 1)\n'
+ expected = [
+ (Token.Keyword.Declaration, u'DIM'),
+ (Token.Text.Whitespace, u' '),
+ (Token.Name.Variable.Global, u'x'),
+ (Token.Text, u'\n'),
+ (Token.Name.Variable.Global, u'x'),
+ (Token.Text.Whitespace, u' '),
+ (Token.Operator, u'='),
+ (Token.Text.Whitespace, u' '),
+ (Token.Keyword.Reserved, u'RIGHT$'),
+ (Token.Punctuation, u'('),
+ (Token.Literal.String.Double, u'"abc"'),
+ (Token.Punctuation, u','),
+ (Token.Text.Whitespace, u' '),
+ (Token.Literal.Number.Integer.Long, u'1'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
+