-rw-r--r--   AUTHORS                                         1
-rwxr-xr-x   external/autopygmentize                         4
-rw-r--r--   pygments/lexers/_mapping.py                     7
-rw-r--r--   pygments/lexers/bibtex.py                     156
-rw-r--r--   pygments/lexers/data.py                         6
-rw-r--r--   pygments/lexers/esoteric.py                    60
-rw-r--r--   pygments/lexers/javascript.py                  62
-rw-r--r--   pygments/lexers/jvm.py                         30
-rw-r--r--   pygments/lexers/lisp.py                       712
-rw-r--r--   pygments/lexers/make.py                         4
-rw-r--r--   pygments/lexers/nimrod.py                      22
-rw-r--r--   pygments/lexers/rust.py                        41
-rw-r--r--   pygments/lexers/sql.py                          6
-rw-r--r--   tests/examplefiles/capdl_example.cdl           64
-rw-r--r--   tests/examplefiles/durexmania.aheui             4
-rw-r--r--   tests/examplefiles/example.juttle             110
-rw-r--r--   tests/examplefiles/example.yaml                 9
-rw-r--r--   tests/examplefiles/fibonacci.tokigun.aheui      4
-rw-r--r--   tests/examplefiles/hello-world.puzzlet.aheui   10
-rw-r--r--   tests/examplefiles/plain.bst                 1097
-rw-r--r--   tests/examplefiles/test.bib                    77
-rw-r--r--   tests/test_bibtex.py                          236
22 files changed, 2341 insertions, 381 deletions
diff --git a/AUTHORS b/AUTHORS
index 2fc9a97e..d94869cd 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -65,6 +65,7 @@ Other contributors, listed alphabetically, are:
* Alex Gilding -- BlitzBasic lexer
* Bertrand Goetzmann -- Groovy lexer
* Krzysiek Goj -- Scala lexer
+* Andrey Golovizin -- BibTeX lexers
* Matt Good -- Genshi, Cheetah lexers
* Michał Górny -- vim modeline support
* Alex Gosse -- TrafficScript lexer
diff --git a/external/autopygmentize b/external/autopygmentize
index d2f969a1..7873c22a 100755
--- a/external/autopygmentize
+++ b/external/autopygmentize
@@ -66,8 +66,8 @@ if [[ "$lexer" == text ]]; then
fi
encoding=$(file --mime-encoding --uncompress $file_common_opts "$file")
-if [[ $encoding == "us-asciibinarybinary" ]]; then
- encoding="us-ascii"
+if [[ $encoding == "binary" ]]; then
+ encoding="latin1"
fi
if [[ -n "$lexer" ]]; then
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index e34adac2..c543310a 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -24,6 +24,7 @@ LEXERS = {
'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
'AdlLexer': ('pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()),
'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
+ 'AheuiLexer': ('pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()),
'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)),
'AmplLexer': ('pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()),
@@ -46,11 +47,13 @@ LEXERS = {
'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
+ 'BSTLexer': ('pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()),
'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')),
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
+ 'BibTeXLexer': ('pygments.lexers.bibtex', 'BibTeX', ('bib', 'bibtex'), ('*.bib',), ('text/x-bibtex',)),
'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
'BnfLexer': ('pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)),
@@ -68,6 +71,7 @@ LEXERS = {
'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
'Ca65Lexer': ('pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()),
'CadlLexer': ('pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()),
+ 'CapDLLexer': ('pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()),
'CbmBasicV2Lexer': ('pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
@@ -210,6 +214,7 @@ LEXERS = {
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()),
'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
+ 'JuttleLexer': ('pygments.lexers.javascript', 'Juttle', ('juttle', 'juttle'), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')),
'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
@@ -278,7 +283,7 @@ LEXERS = {
'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')),
'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)),
- 'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)),
+ 'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nim', 'nimrod'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
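
Note (illustrative, not part of the patch): each value in LEXERS is a (module, display name, aliases, filename globs, mimetypes) tuple, and the lookup helpers in pygments.lexers search those fields. A minimal sketch, with 'spec.cdl' as an invented file name:

    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    # alias lookup searches the third element of each LEXERS tuple
    bib_lexer = get_lexer_by_name('bibtex')          # resolves to BibTeXLexer
    # filename lookup matches the fourth element (glob patterns such as '*.cdl')
    cdl_lexer = get_lexer_for_filename('spec.cdl')   # resolves to CapDLLexer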
diff --git a/pygments/lexers/bibtex.py b/pygments/lexers/bibtex.py
new file mode 100644
index 00000000..cbaedca2
--- /dev/null
+++ b/pygments/lexers/bibtex.py
@@ -0,0 +1,156 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.bibtex
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for BibTeX bibliography data and styles.
+
+    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, default, words
+from pygments.token import Name, Comment, String, Error, Number, Text, Keyword, Punctuation
+
+__all__ = ['BibTeXLexer', 'BSTLexer']
+
+
+class BibTeXLexer(ExtendedRegexLexer):
+    """
+    A lexer for BibTeX bibliography data format.
+
+    .. versionadded:: 2.2
+    """
+
+    name = 'BibTeX'
+    aliases = ['bib', 'bibtex']
+    filenames = ['*.bib']
+    mimetypes = ["text/x-bibtex"]
+    flags = re.IGNORECASE
+
+    ALLOWED_CHARS = r'@!$&*+\-./:;<>?\[\\\]^`|~'
+    IDENTIFIER = '[{0}][{1}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS)
+
+    def open_brace_callback(self, match, ctx):
+        opening_brace = match.group()
+        ctx.opening_brace = opening_brace
+        yield match.start(), Punctuation, opening_brace
+        ctx.pos = match.end()
+
+    def close_brace_callback(self, match, ctx):
+        closing_brace = match.group()
+        if (
+            ctx.opening_brace == '{' and closing_brace != '}' or
+            ctx.opening_brace == '(' and closing_brace != ')'
+        ):
+            yield match.start(), Error, closing_brace
+        else:
+            yield match.start(), Punctuation, closing_brace
+        del ctx.opening_brace
+        ctx.pos = match.end()
+
+    tokens = {
+        'root': [
+            include('whitespace'),
+            ('@comment', Comment),
+            ('@preamble', Name.Class, ('closing-brace', 'value', 'opening-brace')),
+            ('@string', Name.Class, ('closing-brace', 'field', 'opening-brace')),
+            ('@' + IDENTIFIER, Name.Class, ('closing-brace', 'command-body', 'opening-brace')),
+            ('.+', Comment),
+        ],
+        'opening-brace': [
+            include('whitespace'),
+            (r'[{(]', open_brace_callback, '#pop'),
+        ],
+        'closing-brace': [
+            include('whitespace'),
+            (r'[})]', close_brace_callback, '#pop'),
+        ],
+        'command-body': [
+            include('whitespace'),
+            (r'[^\s\,\}]+', Name.Label, ('#pop', 'fields')),
+        ],
+        'fields': [
+            include('whitespace'),
+            (',', Punctuation, 'field'),
+            default('#pop'),
+        ],
+        'field': [
+            include('whitespace'),
+            (IDENTIFIER, Name.Attribute, ('value', '=')),
+            default('#pop'),
+        ],
+        '=': [
+            include('whitespace'),
+            ('=', Punctuation, '#pop'),
+        ],
+        'value': [
+            include('whitespace'),
+            (IDENTIFIER, Name.Variable),
+            ('"', String, 'quoted-string'),
+            (r'\{', String, 'braced-string'),
+            (r'[\d]+', Number),
+            ('#', Punctuation),
+            default('#pop'),
+        ],
+        'quoted-string': [
+            (r'\{', String, 'braced-string'),
+            ('"', String, '#pop'),
+            ('[^\{\"]+', String),
+        ],
+        'braced-string': [
+            (r'\{', String, '#push'),
+            (r'\}', String, '#pop'),
+            ('[^\{\}]+', String),
+        ],
+        'whitespace': [
+            (r'\s+', Text),
+        ],
+    }
+
+
+class BSTLexer(RegexLexer):
+    """
+    A lexer for BibTeX bibliography styles.
+
+    .. versionadded:: 2.2
+    """
+
+    name = 'BST'
+    aliases = ['bst', 'bst-pybtex']
+    filenames = ['*.bst']
+    flags = re.IGNORECASE | re.MULTILINE
+
+    tokens = {
+        'root': [
+            include('whitespace'),
+            (words(['read', 'sort']), Keyword),
+            (words(['execute', 'integers', 'iterate', 'reverse', 'strings']), Keyword, ('group')),
+            (words(['function', 'macro']), Keyword, ('group', 'group')),
+            (words(['entry']), Keyword, ('group', 'group', 'group')),
+        ],
+        'group': [
+            include('whitespace'),
+            (r'\{', Punctuation, ('#pop', 'group-end', 'body')),
+        ],
+        'group-end': [
+            include('whitespace'),
+            (r'\}', Punctuation, '#pop'),
+        ],
+        'body': [
+            include('whitespace'),
+            (r"\'[^#\"\{\}\s]+", Name.Function),
+            (r'[^#\"\{\}\s]+\$', Name.Builtin),
+            (r'[^#\"\{\}\s]+', Name.Variable),
+            (r'"[^\"]*"', String),
+            (r'#-?\d+', Number),
+            (r'\{', Punctuation, ('group-end', 'body')),
+            default('#pop'),
+        ],
+        'whitespace': [
+            ('\s+', Text),
+            ('%.*?$', Comment.SingleLine),
+        ],
+    }
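
A minimal usage sketch for the two new lexers (the BibTeX entry and BST fragment below are invented for illustration, not taken from the tests/examplefiles/test.bib or plain.bst files listed in the diffstat):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.bibtex import BibTeXLexer, BSTLexer

    entry = '@article{knuth1984, title = {Literate Programming}, year = 1984}'
    print(highlight(entry, BibTeXLexer(), TerminalFormatter()))

    # the lexers can also be driven directly through the standard get_tokens() API
    for token, value in BSTLexer().get_tokens('ENTRY { address author title } {} { label }'):
        print(token, repr(value))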
diff --git a/pygments/lexers/data.py b/pygments/lexers/data.py
index c88375d5..84d02f49 100644
--- a/pygments/lexers/data.py
+++ b/pygments/lexers/data.py
@@ -247,10 +247,10 @@ class YamlLexer(ExtendedRegexLexer):
# tags, anchors, aliases
'descriptors': [
# a full-form tag
- (r'!<[\w;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type),
+ (r'!<[\w#;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type),
# a tag in the form '!', '!suffix' or '!handle!suffix'
- (r'!(?:[\w-]+)?'
- r'(?:![\w;/?:@&=+$,.!~*\'()\[\]%-]+)?', Keyword.Type),
+ (r'!(?:[\w-]+!)?'
+ r'[\w#;/?:@&=+$,.!~*\'()\[\]%-]+', Keyword.Type),
# an anchor
(r'&[\w-]+', Name.Label),
# an alias
diff --git a/pygments/lexers/esoteric.py b/pygments/lexers/esoteric.py
index c9db26b5..c99f7392 100644
--- a/pygments/lexers/esoteric.py
+++ b/pygments/lexers/esoteric.py
@@ -13,7 +13,8 @@ from pygments.lexer import RegexLexer, include, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error
-__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'CAmkESLexer']
+__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'CAmkESLexer',
+ 'CapDLLexer']
class BrainfuckLexer(RegexLexer):
@@ -143,6 +144,63 @@ class CAmkESLexer(RegexLexer):
}
+class CapDLLexer(RegexLexer):
+    """
+    Basic lexer for
+    `CapDL <https://ssrg.nicta.com.au/publications/nictaabstracts/Kuz_KLW_10.abstract.pml>`_.
+
+    The source of the primary tool that reads such specifications is available
+    at https://github.com/seL4/capdl/tree/master/capDL-tool. Note that this
+    lexer only supports a subset of the grammar. For example, identifiers can
+    shadow type names, but these instances are currently incorrectly
+    highlighted as types. Supporting this would need a stateful lexer that is
+    considered unnecessarily complex for now.
+    """
+    name = 'CapDL'
+    aliases = ['capdl']
+    filenames = ['*.cdl']
+
+    tokens = {
+        'root': [
+
+            # C pre-processor directive
+            (r'^\s*#.*\n', Comment.Preproc),
+
+            # Whitespace, comments
+            (r'\s+', Text),
+            (r'/\*(.|\n)*?\*/', Comment),
+            (r'(//|--).*\n', Comment),
+
+            (r'[<>\[\(\)\{\},:;=\]]', Punctuation),
+            (r'\.\.', Punctuation),
+
+            (words(('arch', 'arm11', 'caps', 'child_of', 'ia32', 'irq', 'maps',
+                    'objects'), suffix=r'\b'), Keyword),
+
+            (words(('aep', 'asid_pool', 'cnode', 'ep', 'frame', 'io_device',
+                    'io_ports', 'io_pt', 'notification', 'pd', 'pt', 'tcb',
+                    'ut', 'vcpu'), suffix=r'\b'), Keyword.Type),
+
+            # Properties
+            (words(('asid', 'addr', 'badge', 'cached', 'dom', 'domainID', 'elf',
+                    'fault_ep', 'G', 'guard', 'guard_size', 'init', 'ip',
+                    'prio', 'sp', 'R', 'RG', 'RX', 'RW', 'RWG', 'RWX', 'W',
+                    'WG', 'WX', 'level', 'masked', 'master_reply', 'paddr',
+                    'ports', 'reply', 'uncached'), suffix=r'\b'),
+             Keyword.Reserved),
+
+            # Literals
+            (r'0[xX][\da-fA-F]+', Number.Hex),
+            (r'\d+(\.\d+)?(k|M)?', Number),
+            (words(('bits',), suffix=r'\b'), Number),
+            (words(('cspace', 'vspace', 'reply_slot', 'caller_slot',
+                    'ipc_buffer_slot'), suffix=r'\b'), Number),
+
+            # Identifiers
+            (r'[a-zA-Z_][-_@\.\w]*', Name),
+        ],
+    }
+
class RedcodeLexer(RegexLexer):
"""
A simple Redcode lexer based on ICWS'94.
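
A short sketch of how the new CapDL lexer gets selected via the '*.cdl' pattern registered in _mapping.py (file name and spec fragment are invented; the real sample is tests/examplefiles/capdl_example.cdl):

    from pygments.lexers import get_lexer_for_filename

    lexer = get_lexer_for_filename('spec.cdl')
    print(lexer.name)  # 'CapDL'
    for token, value in lexer.get_tokens('arch ia32\n\nobjects {\n    my_frame = frame (4k)\n}\n'):
        print(token, repr(value))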
diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py
index 5dca6832..d8ce87d4 100644
--- a/pygments/lexers/javascript.py
+++ b/pygments/lexers/javascript.py
@@ -20,7 +20,7 @@ import pygments.unistring as uni
__all__ = ['JavascriptLexer', 'KalLexer', 'LiveScriptLexer', 'DartLexer',
'TypeScriptLexer', 'LassoLexer', 'ObjectiveJLexer',
- 'CoffeeScriptLexer', 'MaskLexer', 'EarlGreyLexer']
+ 'CoffeeScriptLexer', 'MaskLexer', 'EarlGreyLexer', 'JuttleLexer']
JS_IDENT_START = ('(?:[$_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') +
']|\\\\u[a-fA-F0-9]{4})')
@@ -1438,3 +1438,63 @@ class EarlGreyLexer(RegexLexer):
(r'\d+', Number.Integer)
],
}
+
+class JuttleLexer(RegexLexer):
+    """
+    For `Juttle`_ source code.
+
+    .. _Juttle: https://github.com/juttle/juttle
+
+    """
+
+    name = 'Juttle'
+    aliases = ['juttle', 'juttle']
+    filenames = ['*.juttle']
+    mimetypes = ['application/juttle', 'application/x-juttle',
+                 'text/x-juttle', 'text/juttle']
+
+    flags = re.DOTALL | re.UNICODE | re.MULTILINE
+
+    tokens = {
+        'commentsandwhitespace': [
+            (r'\s+', Text),
+            (r'//.*?\n', Comment.Single),
+            (r'/\*.*?\*/', Comment.Multiline)
+        ],
+        'slashstartsregex': [
+            include('commentsandwhitespace'),
+            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+             r'([gim]+\b|\B)', String.Regex, '#pop'),
+            (r'(?=/)', Text, ('#pop', 'badregex')),
+            default('#pop')
+        ],
+        'badregex': [
+            (r'\n', Text, '#pop')
+        ],
+        'root': [
+            (r'^(?=\s|/)', Text, 'slashstartsregex'),
+            include('commentsandwhitespace'),
+            (r':\d{2}:\d{2}:\d{2}(\.\d*)?:', String.Moment),
+            (r':(now|beginning|end|forever|yesterday|today|tomorrow|(\d+(\.\d*)?|\.\d+)(ms|[smhdwMy])?):', String.Moment),
+            (r':\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(\.\d*)?)?(Z|[+-]\d{2}:\d{2}|[+-]\d{4})?:', String.Moment),
+            (r':((\d+(\.\d*)?|\.\d+)[ ]+)?(millisecond|second|minute|hour|day|week|month|year)[s]?'
+             r'(([ ]+and[ ]+(\d+[ ]+)?(millisecond|second|minute|hour|day|week|month|year)[s]?)'
+             r'|[ ]+(ago|from[ ]+now))*:', String.Moment),
+            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+             r'(==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+            (r'[})\].]', Punctuation),
+            (r'(import|return|continue|if|else)\b', Keyword, 'slashstartsregex'),
+            (r'(var|const|function|reducer|sub|input)\b', Keyword.Declaration, 'slashstartsregex'),
+            (r'(batch|emit|filter|head|join|keep|pace|pass|put|read|reduce|remove|'
+             r'sequence|skip|sort|split|tail|unbatch|uniq|view|write)\b', Keyword.Reserved),
+            (r'(true|false|null|Infinity)\b', Keyword.Constant),
+            (r'(Array|Date|Juttle|Math|Number|Object|RegExp|String)\b', Name.Builtin),
+            (JS_IDENT, Name.Other),
+            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+            (r'[0-9]+', Number.Integer),
+            (r'"(\\\\|\\"|[^"])*"', String.Double),
+            (r"'(\\\\|\\'|[^'])*'", String.Single)
+        ]
+
+    }
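
A hedged lookup/usage sketch for the new Juttle lexer (the one-line program is invented, not taken from tests/examplefiles/example.juttle):

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import get_lexer_by_name

    juttle = get_lexer_by_name('juttle')   # lookup by the alias registered in _mapping.py
    program = 'read -from :1 hour ago: | reduce count() | view table'
    print(highlight(program, juttle, HtmlFormatter()))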
diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py
index af7f8105..5d747561 100644
--- a/pygments/lexers/jvm.py
+++ b/pygments/lexers/jvm.py
@@ -59,25 +59,27 @@ class JavaLexer(RegexLexer):
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
+ 'class'),
+ (r'(import(?:\s+static)?)(\s+)', bygroups(Keyword.Namespace, Text),
+ 'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Operator, Name.Attribute)),
(r'^\s*([^\W\d]|\$)[\w$]*:', Name.Label),
(r'([^\W\d]|\$)[\w$]*', Name),
- (r'([0-9](_*[0-9]+)*\.([0-9](_*[0-9]+)*)?|'
- r'([0-9](_*[0-9]+)*)?\.[0-9](_*[0-9]+)*)'
- r'([eE][+\-]?[0-9](_*[0-9]+)*)?[fFdD]?|'
- r'[0-9][eE][+\-]?[0-9](_*[0-9]+)*[fFdD]?|'
- r'[0-9]([eE][+\-]?[0-9](_*[0-9]+)*)?[fFdD]|'
- r'0[xX]([0-9a-fA-F](_*[0-9a-fA-F]+)*\.?|'
- r'([0-9a-fA-F](_*[0-9a-fA-F]+)*)?\.[0-9a-fA-F](_*[0-9a-fA-F]+)*)'
- r'[pP][+\-]?[0-9](_*[0-9]+)*[fFdD]?', Number.Float),
- (r'0[xX][0-9a-fA-F](_*[0-9a-fA-F]+)*[lL]?', Number.Hex),
- (r'0[bB][01](_*[01]+)*[lL]?', Number.Bin),
- (r'0(_*[0-7]+)+[lL]?', Number.Oct),
- (r'0|[1-9](_*[0-9]+)*[lL]?', Number.Integer),
+ (r'([0-9][0-9_]*\.([0-9][0-9_]*)?|'
+ r'\.[0-9][0-9_]*)'
+ r'([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|'
+ r'[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|'
+ r'[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|'
+ r'0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|'
+ r'([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)'
+ r'[pP][+\-]?[0-9][0-9_]*[fFdD]?', Number.Float),
+ (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?', Number.Hex),
+ (r'0[bB][01][01_]*[lL]?', Number.Bin),
+ (r'0[0-7_]+[lL]?', Number.Oct),
+ (r'0|[1-9][0-9_]*[lL]?', Number.Integer),
(r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
(r'\n', Text)
],
diff --git a/pygments/lexers/lisp.py b/pygments/lexers/lisp.py
index b15fd0c0..05f02a3c 100644
--- a/pygments/lexers/lisp.py
+++ b/pygments/lexers/lisp.py
@@ -483,65 +483,68 @@ class RacketLexer(RegexLexer):
'case-lambda', 'class', 'class*', 'class-field-accessor',
'class-field-mutator', 'class/c', 'class/derived', 'combine-in',
'combine-out', 'command-line', 'compound-unit', 'compound-unit/infer',
- 'cond', 'contract', 'contract-out', 'contract-struct', 'contracted',
- 'define', 'define-compound-unit', 'define-compound-unit/infer',
- 'define-contract-struct', 'define-custom-hash-types',
- 'define-custom-set-types', 'define-for-syntax',
- 'define-local-member-name', 'define-logger', 'define-match-expander',
- 'define-member-name', 'define-module-boundary-contract',
- 'define-namespace-anchor', 'define-opt/c', 'define-sequence-syntax',
- 'define-serializable-class', 'define-serializable-class*',
- 'define-signature', 'define-signature-form', 'define-struct',
- 'define-struct/contract', 'define-struct/derived', 'define-syntax',
- 'define-syntax-rule', 'define-syntaxes', 'define-unit',
- 'define-unit-binding', 'define-unit-from-context',
- 'define-unit/contract', 'define-unit/new-import-export',
- 'define-unit/s', 'define-values', 'define-values-for-export',
- 'define-values-for-syntax', 'define-values/invoke-unit',
- 'define-values/invoke-unit/infer', 'define/augment',
- 'define/augment-final', 'define/augride', 'define/contract',
- 'define/final-prop', 'define/match', 'define/overment',
- 'define/override', 'define/override-final', 'define/private',
- 'define/public', 'define/public-final', 'define/pubment',
- 'define/subexpression-pos-prop', 'delay', 'delay/idle', 'delay/name',
- 'delay/strict', 'delay/sync', 'delay/thread', 'do', 'else', 'except',
- 'except-in', 'except-out', 'export', 'extends', 'failure-cont',
- 'false', 'false/c', 'field', 'field-bound?', 'file',
+ 'cond', 'cons/dc', 'contract', 'contract-out', 'contract-struct',
+ 'contracted', 'define', 'define-compound-unit',
+ 'define-compound-unit/infer', 'define-contract-struct',
+ 'define-custom-hash-types', 'define-custom-set-types',
+ 'define-for-syntax', 'define-local-member-name', 'define-logger',
+ 'define-match-expander', 'define-member-name',
+ 'define-module-boundary-contract', 'define-namespace-anchor',
+ 'define-opt/c', 'define-sequence-syntax', 'define-serializable-class',
+ 'define-serializable-class*', 'define-signature',
+ 'define-signature-form', 'define-struct', 'define-struct/contract',
+ 'define-struct/derived', 'define-syntax', 'define-syntax-rule',
+ 'define-syntaxes', 'define-unit', 'define-unit-binding',
+ 'define-unit-from-context', 'define-unit/contract',
+ 'define-unit/new-import-export', 'define-unit/s', 'define-values',
+ 'define-values-for-export', 'define-values-for-syntax',
+ 'define-values/invoke-unit', 'define-values/invoke-unit/infer',
+ 'define/augment', 'define/augment-final', 'define/augride',
+ 'define/contract', 'define/final-prop', 'define/match',
+ 'define/overment', 'define/override', 'define/override-final',
+ 'define/private', 'define/public', 'define/public-final',
+ 'define/pubment', 'define/subexpression-pos-prop',
+ 'define/subexpression-pos-prop/name', 'delay', 'delay/idle',
+ 'delay/name', 'delay/strict', 'delay/sync', 'delay/thread', 'do',
+ 'else', 'except', 'except-in', 'except-out', 'export', 'extends',
+ 'failure-cont', 'false', 'false/c', 'field', 'field-bound?', 'file',
'flat-murec-contract', 'flat-rec-contract', 'for', 'for*', 'for*/and',
- 'for*/first', 'for*/fold', 'for*/fold/derived', 'for*/hash',
- 'for*/hasheq', 'for*/hasheqv', 'for*/last', 'for*/list', 'for*/lists',
- 'for*/mutable-set', 'for*/mutable-seteq', 'for*/mutable-seteqv',
- 'for*/or', 'for*/product', 'for*/set', 'for*/seteq', 'for*/seteqv',
- 'for*/sum', 'for*/vector', 'for*/weak-set', 'for*/weak-seteq',
- 'for*/weak-seteqv', 'for-label', 'for-meta', 'for-syntax',
- 'for-template', 'for/and', 'for/first', 'for/fold', 'for/fold/derived',
- 'for/hash', 'for/hasheq', 'for/hasheqv', 'for/last', 'for/list',
- 'for/lists', 'for/mutable-set', 'for/mutable-seteq',
- 'for/mutable-seteqv', 'for/or', 'for/product', 'for/set', 'for/seteq',
- 'for/seteqv', 'for/sum', 'for/vector', 'for/weak-set',
- 'for/weak-seteq', 'for/weak-seteqv', 'gen:custom-write', 'gen:dict',
- 'gen:equal+hash', 'gen:set', 'gen:stream', 'generic', 'get-field',
- 'if', 'implies', 'import', 'include', 'include-at/relative-to',
+ 'for*/async', 'for*/first', 'for*/fold', 'for*/fold/derived',
+ 'for*/hash', 'for*/hasheq', 'for*/hasheqv', 'for*/last', 'for*/list',
+ 'for*/lists', 'for*/mutable-set', 'for*/mutable-seteq',
+ 'for*/mutable-seteqv', 'for*/or', 'for*/product', 'for*/set',
+ 'for*/seteq', 'for*/seteqv', 'for*/stream', 'for*/sum', 'for*/vector',
+ 'for*/weak-set', 'for*/weak-seteq', 'for*/weak-seteqv', 'for-label',
+ 'for-meta', 'for-syntax', 'for-template', 'for/and', 'for/async',
+ 'for/first', 'for/fold', 'for/fold/derived', 'for/hash', 'for/hasheq',
+ 'for/hasheqv', 'for/last', 'for/list', 'for/lists', 'for/mutable-set',
+ 'for/mutable-seteq', 'for/mutable-seteqv', 'for/or', 'for/product',
+ 'for/set', 'for/seteq', 'for/seteqv', 'for/stream', 'for/sum',
+ 'for/vector', 'for/weak-set', 'for/weak-seteq', 'for/weak-seteqv',
+ 'gen:custom-write', 'gen:dict', 'gen:equal+hash', 'gen:set',
+ 'gen:stream', 'generic', 'get-field', 'hash/dc', 'if', 'implies',
+ 'import', 'include', 'include-at/relative-to',
'include-at/relative-to/reader', 'include/reader', 'inherit',
'inherit-field', 'inherit/inner', 'inherit/super', 'init',
'init-depend', 'init-field', 'init-rest', 'inner', 'inspect',
- 'instantiate', 'interface', 'interface*', 'invoke-unit',
- 'invoke-unit/infer', 'lambda', 'lazy', 'let', 'let*', 'let*-values',
- 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc', 'let/ec',
- 'letrec', 'letrec-syntax', 'letrec-syntaxes', 'letrec-syntaxes+values',
- 'letrec-values', 'lib', 'link', 'local', 'local-require', 'log-debug',
- 'log-error', 'log-fatal', 'log-info', 'log-warning', 'match', 'match*',
- 'match*/derived', 'match-define', 'match-define-values',
- 'match-lambda', 'match-lambda*', 'match-lambda**', 'match-let',
- 'match-let*', 'match-let*-values', 'match-let-values', 'match-letrec',
- 'match/derived', 'match/values', 'member-name-key', 'method-contract?',
- 'mixin', 'module', 'module*', 'module+', 'nand', 'new', 'nor',
- 'object-contract', 'object/c', 'only', 'only-in', 'only-meta-in',
- 'open', 'opt/c', 'or', 'overment', 'overment*', 'override',
- 'override*', 'override-final', 'override-final*', 'parameterize',
- 'parameterize*', 'parameterize-break', 'parametric->/c', 'place',
- 'place*', 'planet', 'prefix', 'prefix-in', 'prefix-out', 'private',
- 'private*', 'prompt-tag/c', 'protect-out', 'provide',
+ 'instantiate', 'interface', 'interface*', 'invariant-assertion',
+ 'invoke-unit', 'invoke-unit/infer', 'lambda', 'lazy', 'let', 'let*',
+ 'let*-values', 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc',
+ 'let/ec', 'letrec', 'letrec-syntax', 'letrec-syntaxes',
+ 'letrec-syntaxes+values', 'letrec-values', 'lib', 'link', 'local',
+ 'local-require', 'log-debug', 'log-error', 'log-fatal', 'log-info',
+ 'log-warning', 'match', 'match*', 'match*/derived', 'match-define',
+ 'match-define-values', 'match-lambda', 'match-lambda*',
+ 'match-lambda**', 'match-let', 'match-let*', 'match-let*-values',
+ 'match-let-values', 'match-letrec', 'match-letrec-values',
+ 'match/derived', 'match/values', 'member-name-key', 'mixin', 'module',
+ 'module*', 'module+', 'nand', 'new', 'nor', 'object-contract',
+ 'object/c', 'only', 'only-in', 'only-meta-in', 'open', 'opt/c', 'or',
+ 'overment', 'overment*', 'override', 'override*', 'override-final',
+ 'override-final*', 'parameterize', 'parameterize*',
+ 'parameterize-break', 'parametric->/c', 'place', 'place*',
+ 'place/context', 'planet', 'prefix', 'prefix-in', 'prefix-out',
+ 'private', 'private*', 'prompt-tag/c', 'protect-out', 'provide',
'provide-signature-elements', 'provide/contract', 'public', 'public*',
'public-final', 'public-final*', 'pubment', 'pubment*', 'quasiquote',
'quasisyntax', 'quasisyntax/loc', 'quote', 'quote-syntax',
@@ -549,41 +552,46 @@ class RacketLexer(RegexLexer):
'relative-in', 'rename', 'rename-in', 'rename-inner', 'rename-out',
'rename-super', 'require', 'send', 'send*', 'send+', 'send-generic',
'send/apply', 'send/keyword-apply', 'set!', 'set!-values',
- 'set-field!', 'shared', 'stream', 'stream-cons', 'struct', 'struct*',
- 'struct-copy', 'struct-field-index', 'struct-out', 'struct/c',
- 'struct/ctc', 'struct/dc', 'submod', 'super', 'super-instantiate',
- 'super-make-object', 'super-new', 'syntax', 'syntax-case',
- 'syntax-case*', 'syntax-id-rules', 'syntax-rules', 'syntax/loc', 'tag',
- 'this', 'this%', 'thunk', 'thunk*', 'time', 'unconstrained-domain->',
- 'unit', 'unit-from-context', 'unit/c', 'unit/new-import-export',
- 'unit/s', 'unless', 'unquote', 'unquote-splicing', 'unsyntax',
- 'unsyntax-splicing', 'values/drop', 'when', 'with-continuation-mark',
- 'with-contract', 'with-handlers', 'with-handlers*', 'with-method',
- 'with-syntax', u'λ'
+ 'set-field!', 'shared', 'stream', 'stream*', 'stream-cons', 'struct',
+ 'struct*', 'struct-copy', 'struct-field-index', 'struct-out',
+ 'struct/c', 'struct/ctc', 'struct/dc', 'submod', 'super',
+ 'super-instantiate', 'super-make-object', 'super-new', 'syntax',
+ 'syntax-case', 'syntax-case*', 'syntax-id-rules', 'syntax-rules',
+ 'syntax/loc', 'tag', 'this', 'this%', 'thunk', 'thunk*', 'time',
+ 'unconstrained-domain->', 'unit', 'unit-from-context', 'unit/c',
+ 'unit/new-import-export', 'unit/s', 'unless', 'unquote',
+ 'unquote-splicing', 'unsyntax', 'unsyntax-splicing', 'values/drop',
+ 'when', 'with-continuation-mark', 'with-contract',
+ 'with-contract-continuation-mark', 'with-handlers', 'with-handlers*',
+ 'with-method', 'with-syntax', u'λ'
)
# Generated by example.rkt
_builtins = (
- '*', '+', '-', '/', '<', '</c', '<=', '<=/c', '=', '=/c', '>', '>/c',
- '>=', '>=/c', 'abort-current-continuation', 'abs', 'absolute-path?',
- 'acos', 'add-between', 'add1', 'alarm-evt', 'always-evt', 'and/c',
- 'andmap', 'angle', 'any/c', 'append', 'append*', 'append-map', 'apply',
- 'argmax', 'argmin', 'arithmetic-shift', 'arity-at-least',
- 'arity-at-least-value', 'arity-at-least?', 'arity-checking-wrapper',
- 'arity-includes?', 'arity=?', 'asin', 'assf', 'assoc', 'assq', 'assv',
- 'atan', 'bad-number-of-results', 'banner', 'base->-doms/c',
- 'base->-rngs/c', 'base->?', 'between/c', 'bitwise-and',
- 'bitwise-bit-field', 'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not',
- 'bitwise-xor', 'blame-add-car-context', 'blame-add-cdr-context',
- 'blame-add-context', 'blame-add-missing-party',
- 'blame-add-nth-arg-context', 'blame-add-or-context',
+ '*', '*list/c', '+', '-', '/', '<', '</c', '<=', '<=/c', '=', '=/c',
+ '>', '>/c', '>=', '>=/c', 'abort-current-continuation', 'abs',
+ 'absolute-path?', 'acos', 'add-between', 'add1', 'alarm-evt',
+ 'always-evt', 'and/c', 'andmap', 'angle', 'any/c', 'append', 'append*',
+ 'append-map', 'apply', 'argmax', 'argmin', 'arithmetic-shift',
+ 'arity-at-least', 'arity-at-least-value', 'arity-at-least?',
+ 'arity-checking-wrapper', 'arity-includes?', 'arity=?',
+ 'arrow-contract-info', 'arrow-contract-info-accepts-arglist',
+ 'arrow-contract-info-chaperone-procedure',
+ 'arrow-contract-info-check-first-order', 'arrow-contract-info?',
+ 'asin', 'assf', 'assoc', 'assq', 'assv', 'atan',
+ 'bad-number-of-results', 'banner', 'base->-doms/c', 'base->-rngs/c',
+ 'base->?', 'between/c', 'bitwise-and', 'bitwise-bit-field',
+ 'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not', 'bitwise-xor',
+ 'blame-add-car-context', 'blame-add-cdr-context', 'blame-add-context',
+ 'blame-add-missing-party', 'blame-add-nth-arg-context',
'blame-add-range-context', 'blame-add-unknown-context',
'blame-context', 'blame-contract', 'blame-fmt->-string',
- 'blame-negative', 'blame-original?', 'blame-positive',
- 'blame-replace-negative', 'blame-source', 'blame-swap',
- 'blame-swapped?', 'blame-update', 'blame-value', 'blame?', 'boolean=?',
- 'boolean?', 'bound-identifier=?', 'box', 'box-cas!', 'box-immutable',
- 'box-immutable/c', 'box/c', 'box?', 'break-enabled', 'break-thread',
+ 'blame-missing-party?', 'blame-negative', 'blame-original?',
+ 'blame-positive', 'blame-replace-negative', 'blame-source',
+ 'blame-swap', 'blame-swapped?', 'blame-update', 'blame-value',
+ 'blame?', 'boolean=?', 'boolean?', 'bound-identifier=?', 'box',
+ 'box-cas!', 'box-immutable', 'box-immutable/c', 'box/c', 'box?',
+ 'break-enabled', 'break-parameterization?', 'break-thread',
'build-chaperone-contract-property', 'build-compound-type-name',
'build-contract-property', 'build-flat-contract-property',
'build-list', 'build-path', 'build-path/convention-type',
@@ -613,67 +621,76 @@ class RacketLexer(RegexLexer):
'call-with-output-file*', 'call-with-output-string',
'call-with-parameterization', 'call-with-semaphore',
'call-with-semaphore/enable-break', 'call-with-values', 'call/cc',
- 'call/ec', 'car', 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr',
- 'cdadr', 'cdar', 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr',
- 'cdddr', 'cddr', 'cdr', 'ceiling', 'channel-get', 'channel-put',
- 'channel-put-evt', 'channel-put-evt?', 'channel-try-get', 'channel/c',
- 'channel?', 'chaperone-box', 'chaperone-channel',
- 'chaperone-continuation-mark-key', 'chaperone-contract-property?',
- 'chaperone-contract?', 'chaperone-evt', 'chaperone-hash',
- 'chaperone-of?', 'chaperone-procedure', 'chaperone-prompt-tag',
+ 'call/ec', 'car', 'cartesian-product', 'cdaaar', 'cdaadr', 'cdaar',
+ 'cdadar', 'cdaddr', 'cdadr', 'cdar', 'cddaar', 'cddadr', 'cddar',
+ 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr', 'ceiling', 'channel-get',
+ 'channel-put', 'channel-put-evt', 'channel-put-evt?',
+ 'channel-try-get', 'channel/c', 'channel?', 'chaperone-box',
+ 'chaperone-channel', 'chaperone-continuation-mark-key',
+ 'chaperone-contract-property?', 'chaperone-contract?', 'chaperone-evt',
+ 'chaperone-hash', 'chaperone-hash-set', 'chaperone-of?',
+ 'chaperone-procedure', 'chaperone-procedure*', 'chaperone-prompt-tag',
'chaperone-struct', 'chaperone-struct-type', 'chaperone-vector',
'chaperone?', 'char->integer', 'char-alphabetic?', 'char-blank?',
'char-ci<=?', 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?',
'char-downcase', 'char-foldcase', 'char-general-category',
- 'char-graphic?', 'char-iso-control?', 'char-lower-case?',
- 'char-numeric?', 'char-punctuation?', 'char-ready?', 'char-symbolic?',
- 'char-title-case?', 'char-titlecase', 'char-upcase',
- 'char-upper-case?', 'char-utf-8-length', 'char-whitespace?', 'char<=?',
- 'char<?', 'char=?', 'char>=?', 'char>?', 'char?',
- 'check-duplicate-identifier', 'checked-procedure-check-and-extract',
- 'choice-evt', 'class->interface', 'class-info', 'class?',
- 'cleanse-path', 'close-input-port', 'close-output-port',
+ 'char-graphic?', 'char-in', 'char-in/c', 'char-iso-control?',
+ 'char-lower-case?', 'char-numeric?', 'char-punctuation?',
+ 'char-ready?', 'char-symbolic?', 'char-title-case?', 'char-titlecase',
+ 'char-upcase', 'char-upper-case?', 'char-utf-8-length',
+ 'char-whitespace?', 'char<=?', 'char<?', 'char=?', 'char>=?', 'char>?',
+ 'char?', 'check-duplicate-identifier', 'check-duplicates',
+ 'checked-procedure-check-and-extract', 'choice-evt',
+ 'class->interface', 'class-info', 'class-seal', 'class-unseal',
+ 'class?', 'cleanse-path', 'close-input-port', 'close-output-port',
'coerce-chaperone-contract', 'coerce-chaperone-contracts',
'coerce-contract', 'coerce-contract/f', 'coerce-contracts',
'coerce-flat-contract', 'coerce-flat-contracts', 'collect-garbage',
- 'collection-file-path', 'collection-path', 'compile',
+ 'collection-file-path', 'collection-path', 'combinations', 'compile',
'compile-allow-set!-undefined', 'compile-context-preservation-enabled',
'compile-enforce-module-constants', 'compile-syntax',
- 'compiled-expression?', 'compiled-module-expression?',
- 'complete-path?', 'complex?', 'compose', 'compose1', 'conjugate',
- 'cons', 'cons/c', 'cons?', 'const', 'continuation-mark-key/c',
- 'continuation-mark-key?', 'continuation-mark-set->context',
- 'continuation-mark-set->list', 'continuation-mark-set->list*',
- 'continuation-mark-set-first', 'continuation-mark-set?',
- 'continuation-marks', 'continuation-prompt-available?',
- 'continuation-prompt-tag?', 'continuation?',
- 'contract-continuation-mark-key', 'contract-first-order',
- 'contract-first-order-passes?', 'contract-name', 'contract-proc',
+ 'compiled-expression-recompile', 'compiled-expression?',
+ 'compiled-module-expression?', 'complete-path?', 'complex?', 'compose',
+ 'compose1', 'conjoin', 'conjugate', 'cons', 'cons/c', 'cons?', 'const',
+ 'continuation-mark-key/c', 'continuation-mark-key?',
+ 'continuation-mark-set->context', 'continuation-mark-set->list',
+ 'continuation-mark-set->list*', 'continuation-mark-set-first',
+ 'continuation-mark-set?', 'continuation-marks',
+ 'continuation-prompt-available?', 'continuation-prompt-tag?',
+ 'continuation?', 'contract-continuation-mark-key',
+ 'contract-custom-write-property-proc', 'contract-exercise',
+ 'contract-first-order', 'contract-first-order-passes?',
+ 'contract-late-neg-projection', 'contract-name', 'contract-proc',
'contract-projection', 'contract-property?',
- 'contract-random-generate', 'contract-stronger?',
- 'contract-struct-exercise', 'contract-struct-generate',
- 'contract-val-first-projection', 'contract?', 'convert-stream',
- 'copy-directory/files', 'copy-file', 'copy-port', 'cos', 'cosh',
- 'count', 'current-blame-format', 'current-break-parameterization',
- 'current-code-inspector', 'current-command-line-arguments',
- 'current-compile', 'current-compiled-file-roots',
- 'current-continuation-marks', 'current-contract-region',
- 'current-custodian', 'current-directory', 'current-directory-for-user',
- 'current-drive', 'current-environment-variables', 'current-error-port',
- 'current-eval', 'current-evt-pseudo-random-generator',
- 'current-future', 'current-gc-milliseconds',
- 'current-get-interaction-input-port', 'current-inexact-milliseconds',
- 'current-input-port', 'current-inspector',
- 'current-library-collection-links', 'current-library-collection-paths',
- 'current-load', 'current-load-extension',
- 'current-load-relative-directory', 'current-load/use-compiled',
- 'current-locale', 'current-logger', 'current-memory-use',
- 'current-milliseconds', 'current-module-declare-name',
- 'current-module-declare-source', 'current-module-name-resolver',
- 'current-module-path-for-load', 'current-namespace',
- 'current-output-port', 'current-parameterization',
- 'current-preserved-thread-cell-values', 'current-print',
- 'current-process-milliseconds', 'current-prompt-read',
+ 'contract-random-generate', 'contract-random-generate-fail',
+ 'contract-random-generate-fail?',
+ 'contract-random-generate-get-current-environment',
+ 'contract-random-generate-stash', 'contract-random-generate/choose',
+ 'contract-stronger?', 'contract-struct-exercise',
+ 'contract-struct-generate', 'contract-struct-late-neg-projection',
+ 'contract-struct-list-contract?', 'contract-val-first-projection',
+ 'contract?', 'convert-stream', 'copy-directory/files', 'copy-file',
+ 'copy-port', 'cos', 'cosh', 'count', 'current-blame-format',
+ 'current-break-parameterization', 'current-code-inspector',
+ 'current-command-line-arguments', 'current-compile',
+ 'current-compiled-file-roots', 'current-continuation-marks',
+ 'current-contract-region', 'current-custodian', 'current-directory',
+ 'current-directory-for-user', 'current-drive',
+ 'current-environment-variables', 'current-error-port', 'current-eval',
+ 'current-evt-pseudo-random-generator',
+ 'current-force-delete-permissions', 'current-future',
+ 'current-gc-milliseconds', 'current-get-interaction-input-port',
+ 'current-inexact-milliseconds', 'current-input-port',
+ 'current-inspector', 'current-library-collection-links',
+ 'current-library-collection-paths', 'current-load',
+ 'current-load-extension', 'current-load-relative-directory',
+ 'current-load/use-compiled', 'current-locale', 'current-logger',
+ 'current-memory-use', 'current-milliseconds',
+ 'current-module-declare-name', 'current-module-declare-source',
+ 'current-module-name-resolver', 'current-module-path-for-load',
+ 'current-namespace', 'current-output-port', 'current-parameterization',
+ 'current-plumber', 'current-preserved-thread-cell-values',
+ 'current-print', 'current-process-milliseconds', 'current-prompt-read',
'current-pseudo-random-generator', 'current-read-interaction',
'current-reader-guard', 'current-readtable', 'current-seconds',
'current-security-guard', 'current-subprocess-custodian-mode',
@@ -700,14 +717,15 @@ class RacketLexer(RegexLexer):
'dict-mutable?', 'dict-ref', 'dict-ref!', 'dict-remove',
'dict-remove!', 'dict-set', 'dict-set!', 'dict-set*', 'dict-set*!',
'dict-update', 'dict-update!', 'dict-value-contract', 'dict-values',
- 'dict?', 'directory-exists?', 'directory-list', 'display',
+ 'dict?', 'directory-exists?', 'directory-list', 'disjoin', 'display',
'display-lines', 'display-lines-to-file', 'display-to-file',
- 'displayln', 'double-flonum?', 'drop', 'drop-right', 'dropf',
- 'dropf-right', 'dump-memory-stats', 'dup-input-port',
- 'dup-output-port', 'dynamic-get-field', 'dynamic-place',
- 'dynamic-place*', 'dynamic-require', 'dynamic-require-for-syntax',
- 'dynamic-send', 'dynamic-set-field!', 'dynamic-wind', 'eighth',
- 'empty', 'empty-sequence', 'empty-stream', 'empty?',
+ 'displayln', 'double-flonum?', 'drop', 'drop-common-prefix',
+ 'drop-right', 'dropf', 'dropf-right', 'dump-memory-stats',
+ 'dup-input-port', 'dup-output-port', 'dynamic->*', 'dynamic-get-field',
+ 'dynamic-object/c', 'dynamic-place', 'dynamic-place*',
+ 'dynamic-require', 'dynamic-require-for-syntax', 'dynamic-send',
+ 'dynamic-set-field!', 'dynamic-wind', 'eighth', 'empty',
+ 'empty-sequence', 'empty-stream', 'empty?',
'environment-variables-copy', 'environment-variables-names',
'environment-variables-ref', 'environment-variables-set!',
'environment-variables?', 'eof', 'eof-evt', 'eof-object?',
@@ -757,10 +775,10 @@ class RacketLexer(RegexLexer):
'exn:missing-module?', 'exn:srclocs-accessor', 'exn:srclocs?', 'exn?',
'exp', 'expand', 'expand-once', 'expand-syntax', 'expand-syntax-once',
'expand-syntax-to-top-form', 'expand-to-top-form', 'expand-user-path',
- 'explode-path', 'expt', 'externalizable<%>', 'false?', 'field-names',
- 'fifth', 'file->bytes', 'file->bytes-lines', 'file->lines',
- 'file->list', 'file->string', 'file->value', 'file-exists?',
- 'file-name-from-path', 'file-or-directory-identity',
+ 'explode-path', 'expt', 'externalizable<%>', 'failure-result/c',
+ 'false?', 'field-names', 'fifth', 'file->bytes', 'file->bytes-lines',
+ 'file->lines', 'file->list', 'file->string', 'file->value',
+ 'file-exists?', 'file-name-from-path', 'file-or-directory-identity',
'file-or-directory-modify-seconds', 'file-or-directory-permissions',
'file-position', 'file-position*', 'file-size',
'file-stream-buffer-mode', 'file-stream-port?', 'file-truncate',
@@ -769,26 +787,27 @@ class RacketLexer(RegexLexer):
'filesystem-root-list', 'filter', 'filter-map', 'filter-not',
'filter-read-input-port', 'find-executable-path', 'find-files',
'find-library-collection-links', 'find-library-collection-paths',
- 'find-relative-path', 'find-system-path', 'findf', 'first', 'fixnum?',
- 'flat-contract', 'flat-contract-predicate', 'flat-contract-property?',
- 'flat-contract?', 'flat-named-contract', 'flatten',
- 'floating-point-bytes->real', 'flonum?', 'floor', 'flush-output',
- 'fold-files', 'foldl', 'foldr', 'for-each', 'force', 'format',
- 'fourth', 'fprintf', 'free-identifier=?', 'free-label-identifier=?',
- 'free-template-identifier=?', 'free-transformer-identifier=?',
- 'fsemaphore-count', 'fsemaphore-post', 'fsemaphore-try-wait?',
- 'fsemaphore-wait', 'fsemaphore?', 'future', 'future?',
- 'futures-enabled?', 'gcd', 'generate-member-key',
+ 'find-relative-path', 'find-system-path', 'findf', 'first',
+ 'first-or/c', 'fixnum?', 'flat-contract', 'flat-contract-predicate',
+ 'flat-contract-property?', 'flat-contract?', 'flat-named-contract',
+ 'flatten', 'floating-point-bytes->real', 'flonum?', 'floor',
+ 'flush-output', 'fold-files', 'foldl', 'foldr', 'for-each', 'force',
+ 'format', 'fourth', 'fprintf', 'free-identifier=?',
+ 'free-label-identifier=?', 'free-template-identifier=?',
+ 'free-transformer-identifier=?', 'fsemaphore-count', 'fsemaphore-post',
+ 'fsemaphore-try-wait?', 'fsemaphore-wait', 'fsemaphore?', 'future',
+ 'future?', 'futures-enabled?', 'gcd', 'generate-member-key',
'generate-temporaries', 'generic-set?', 'generic?', 'gensym',
'get-output-bytes', 'get-output-string', 'get-preference',
- 'get/build-val-first-projection', 'getenv',
- 'global-port-print-handler', 'group-execute-bit', 'group-read-bit',
- 'group-write-bit', 'guard-evt', 'handle-evt', 'handle-evt?',
- 'has-contract?', 'hash', 'hash->list', 'hash-clear', 'hash-clear!',
- 'hash-copy', 'hash-copy-clear', 'hash-count', 'hash-empty?',
- 'hash-eq?', 'hash-equal?', 'hash-eqv?', 'hash-for-each',
- 'hash-has-key?', 'hash-iterate-first', 'hash-iterate-key',
- 'hash-iterate-next', 'hash-iterate-value', 'hash-keys', 'hash-map',
+ 'get/build-late-neg-projection', 'get/build-val-first-projection',
+ 'getenv', 'global-port-print-handler', 'group-by', 'group-execute-bit',
+ 'group-read-bit', 'group-write-bit', 'guard-evt', 'handle-evt',
+ 'handle-evt?', 'has-blame?', 'has-contract?', 'hash', 'hash->list',
+ 'hash-clear', 'hash-clear!', 'hash-copy', 'hash-copy-clear',
+ 'hash-count', 'hash-empty?', 'hash-eq?', 'hash-equal?', 'hash-eqv?',
+ 'hash-for-each', 'hash-has-key?', 'hash-iterate-first',
+ 'hash-iterate-key', 'hash-iterate-key+value', 'hash-iterate-next',
+ 'hash-iterate-pair', 'hash-iterate-value', 'hash-keys', 'hash-map',
'hash-placeholder?', 'hash-ref', 'hash-ref!', 'hash-remove',
'hash-remove!', 'hash-set', 'hash-set!', 'hash-set*', 'hash-set*!',
'hash-update', 'hash-update!', 'hash-values', 'hash-weak?', 'hash/c',
@@ -798,48 +817,61 @@ class RacketLexer(RegexLexer):
'identifier-prune-to-source-module',
'identifier-remove-from-definition-context',
'identifier-template-binding', 'identifier-transformer-binding',
- 'identifier?', 'identity', 'imag-part', 'immutable?',
+ 'identifier?', 'identity', 'if/c', 'imag-part', 'immutable?',
'impersonate-box', 'impersonate-channel',
'impersonate-continuation-mark-key', 'impersonate-hash',
- 'impersonate-procedure', 'impersonate-prompt-tag',
+ 'impersonate-hash-set', 'impersonate-procedure',
+ 'impersonate-procedure*', 'impersonate-prompt-tag',
'impersonate-struct', 'impersonate-vector', 'impersonator-contract?',
'impersonator-ephemeron', 'impersonator-of?',
- 'impersonator-prop:application-mark', 'impersonator-prop:contracted',
+ 'impersonator-prop:application-mark', 'impersonator-prop:blame',
+ 'impersonator-prop:contracted',
'impersonator-property-accessor-procedure?', 'impersonator-property?',
'impersonator?', 'implementation?', 'implementation?/c', 'in-bytes',
- 'in-bytes-lines', 'in-cycle', 'in-dict', 'in-dict-keys',
- 'in-dict-pairs', 'in-dict-values', 'in-directory', 'in-hash',
- 'in-hash-keys', 'in-hash-pairs', 'in-hash-values', 'in-indexed',
- 'in-input-port-bytes', 'in-input-port-chars', 'in-lines', 'in-list',
- 'in-mlist', 'in-naturals', 'in-parallel', 'in-permutations', 'in-port',
- 'in-producer', 'in-range', 'in-sequences', 'in-set', 'in-stream',
- 'in-string', 'in-value', 'in-values*-sequence', 'in-values-sequence',
- 'in-vector', 'inexact->exact', 'inexact-real?', 'inexact?',
- 'infinite?', 'input-port-append', 'input-port?', 'inspector?',
- 'instanceof/c', 'integer->char', 'integer->integer-bytes',
- 'integer-bytes->integer', 'integer-in', 'integer-length',
- 'integer-sqrt', 'integer-sqrt/remainder', 'integer?',
+ 'in-bytes-lines', 'in-combinations', 'in-cycle', 'in-dict',
+ 'in-dict-keys', 'in-dict-pairs', 'in-dict-values', 'in-directory',
+ 'in-hash', 'in-hash-keys', 'in-hash-pairs', 'in-hash-values',
+ 'in-immutable-hash', 'in-immutable-hash-keys',
+ 'in-immutable-hash-pairs', 'in-immutable-hash-values',
+ 'in-immutable-set', 'in-indexed', 'in-input-port-bytes',
+ 'in-input-port-chars', 'in-lines', 'in-list', 'in-mlist',
+ 'in-mutable-hash', 'in-mutable-hash-keys', 'in-mutable-hash-pairs',
+ 'in-mutable-hash-values', 'in-mutable-set', 'in-naturals',
+ 'in-parallel', 'in-permutations', 'in-port', 'in-producer', 'in-range',
+ 'in-sequences', 'in-set', 'in-slice', 'in-stream', 'in-string',
+ 'in-syntax', 'in-value', 'in-values*-sequence', 'in-values-sequence',
+ 'in-vector', 'in-weak-hash', 'in-weak-hash-keys', 'in-weak-hash-pairs',
+ 'in-weak-hash-values', 'in-weak-set', 'inexact->exact',
+ 'inexact-real?', 'inexact?', 'infinite?', 'input-port-append',
+ 'input-port?', 'inspector?', 'instanceof/c', 'integer->char',
+ 'integer->integer-bytes', 'integer-bytes->integer', 'integer-in',
+ 'integer-length', 'integer-sqrt', 'integer-sqrt/remainder', 'integer?',
'interface->method-names', 'interface-extension?', 'interface?',
+ 'internal-definition-context-binding-identifiers',
+ 'internal-definition-context-introduce',
'internal-definition-context-seal', 'internal-definition-context?',
'is-a?', 'is-a?/c', 'keyword->string', 'keyword-apply', 'keyword<?',
'keyword?', 'keywords-match', 'kill-thread', 'last', 'last-pair',
'lcm', 'length', 'liberal-define-context?', 'link-exists?', 'list',
- 'list*', 'list->bytes', 'list->mutable-set', 'list->mutable-seteq',
- 'list->mutable-seteqv', 'list->set', 'list->seteq', 'list->seteqv',
- 'list->string', 'list->vector', 'list->weak-set', 'list->weak-seteq',
- 'list->weak-seteqv', 'list-ref', 'list-tail', 'list/c', 'list?',
- 'listof', 'load', 'load-extension', 'load-on-demand-enabled',
- 'load-relative', 'load-relative-extension', 'load/cd',
- 'load/use-compiled', 'local-expand', 'local-expand/capture-lifts',
+ 'list*', 'list*of', 'list->bytes', 'list->mutable-set',
+ 'list->mutable-seteq', 'list->mutable-seteqv', 'list->set',
+ 'list->seteq', 'list->seteqv', 'list->string', 'list->vector',
+ 'list->weak-set', 'list->weak-seteq', 'list->weak-seteqv',
+ 'list-contract?', 'list-prefix?', 'list-ref', 'list-set', 'list-tail',
+ 'list-update', 'list/c', 'list?', 'listen-port-number?', 'listof',
+ 'load', 'load-extension', 'load-on-demand-enabled', 'load-relative',
+ 'load-relative-extension', 'load/cd', 'load/use-compiled',
+ 'local-expand', 'local-expand/capture-lifts',
'local-transformer-expand', 'local-transformer-expand/capture-lifts',
- 'locale-string-encoding', 'log', 'log-level?', 'log-max-level',
- 'log-message', 'log-receiver?', 'logger-name', 'logger?', 'magnitude',
- 'make-arity-at-least', 'make-base-empty-namespace',
- 'make-base-namespace', 'make-bytes', 'make-channel',
- 'make-chaperone-contract', 'make-continuation-mark-key',
- 'make-continuation-prompt-tag', 'make-contract', 'make-custodian',
- 'make-custodian-box', 'make-custom-hash', 'make-custom-hash-types',
- 'make-custom-set', 'make-custom-set-types', 'make-date', 'make-date*',
+ 'locale-string-encoding', 'log', 'log-all-levels', 'log-level-evt',
+ 'log-level?', 'log-max-level', 'log-message', 'log-receiver?',
+ 'logger-name', 'logger?', 'magnitude', 'make-arity-at-least',
+ 'make-base-empty-namespace', 'make-base-namespace', 'make-bytes',
+ 'make-channel', 'make-chaperone-contract',
+ 'make-continuation-mark-key', 'make-continuation-prompt-tag',
+ 'make-contract', 'make-custodian', 'make-custodian-box',
+ 'make-custom-hash', 'make-custom-hash-types', 'make-custom-set',
+ 'make-custom-set-types', 'make-date', 'make-date*',
'make-derived-parameter', 'make-directory', 'make-directory*',
'make-do-sequence', 'make-empty-namespace',
'make-environment-variables', 'make-ephemeron', 'make-exn',
@@ -871,27 +903,28 @@ class RacketLexer(RegexLexer):
'make-limited-input-port', 'make-list', 'make-lock-file-name',
'make-log-receiver', 'make-logger', 'make-mixin-contract',
'make-mutable-custom-set', 'make-none/c', 'make-object',
- 'make-output-port', 'make-parameter', 'make-phantom-bytes',
- 'make-pipe', 'make-pipe-with-specials', 'make-placeholder',
- 'make-polar', 'make-prefab-struct', 'make-primitive-class',
- 'make-proj-contract', 'make-pseudo-random-generator',
- 'make-reader-graph', 'make-readtable', 'make-rectangular',
- 'make-rename-transformer', 'make-resolved-module-path',
- 'make-security-guard', 'make-semaphore', 'make-set!-transformer',
- 'make-shared-bytes', 'make-sibling-inspector', 'make-special-comment',
- 'make-srcloc', 'make-string', 'make-struct-field-accessor',
- 'make-struct-field-mutator', 'make-struct-type',
- 'make-struct-type-property', 'make-syntax-delta-introducer',
- 'make-syntax-introducer', 'make-temporary-file',
- 'make-tentative-pretty-print-output-port', 'make-thread-cell',
- 'make-thread-group', 'make-vector', 'make-weak-box',
- 'make-weak-custom-hash', 'make-weak-custom-set', 'make-weak-hash',
- 'make-weak-hasheq', 'make-weak-hasheqv', 'make-will-executor', 'map',
- 'match-equality-test', 'matches-arity-exactly?', 'max', 'mcar', 'mcdr',
- 'mcons', 'member', 'member-name-key-hash-code', 'member-name-key=?',
- 'member-name-key?', 'memf', 'memq', 'memv', 'merge-input',
- 'method-in-interface?', 'min', 'mixin-contract', 'module->exports',
- 'module->imports', 'module->language-info', 'module->namespace',
+ 'make-output-port', 'make-parameter', 'make-parent-directory*',
+ 'make-phantom-bytes', 'make-pipe', 'make-pipe-with-specials',
+ 'make-placeholder', 'make-plumber', 'make-polar', 'make-prefab-struct',
+ 'make-primitive-class', 'make-proj-contract',
+ 'make-pseudo-random-generator', 'make-reader-graph', 'make-readtable',
+ 'make-rectangular', 'make-rename-transformer',
+ 'make-resolved-module-path', 'make-security-guard', 'make-semaphore',
+ 'make-set!-transformer', 'make-shared-bytes', 'make-sibling-inspector',
+ 'make-special-comment', 'make-srcloc', 'make-string',
+ 'make-struct-field-accessor', 'make-struct-field-mutator',
+ 'make-struct-type', 'make-struct-type-property',
+ 'make-syntax-delta-introducer', 'make-syntax-introducer',
+ 'make-temporary-file', 'make-tentative-pretty-print-output-port',
+ 'make-thread-cell', 'make-thread-group', 'make-vector',
+ 'make-weak-box', 'make-weak-custom-hash', 'make-weak-custom-set',
+ 'make-weak-hash', 'make-weak-hasheq', 'make-weak-hasheqv',
+ 'make-will-executor', 'map', 'match-equality-test',
+ 'matches-arity-exactly?', 'max', 'mcar', 'mcdr', 'mcons', 'member',
+ 'member-name-key-hash-code', 'member-name-key=?', 'member-name-key?',
+ 'memf', 'memq', 'memv', 'merge-input', 'method-in-interface?', 'min',
+ 'mixin-contract', 'module->exports', 'module->imports',
+ 'module->language-info', 'module->namespace',
'module-compiled-cross-phase-persistent?', 'module-compiled-exports',
'module-compiled-imports', 'module-compiled-language-info',
'module-compiled-name', 'module-compiled-submodules',
@@ -912,25 +945,25 @@ class RacketLexer(RegexLexer):
'namespace-unprotect-module', 'namespace-variable-value', 'namespace?',
'nan?', 'natural-number/c', 'negate', 'negative?', 'never-evt',
u'new-∀/c', u'new-∃/c', 'newline', 'ninth', 'non-empty-listof',
- 'none/c', 'normal-case-path', 'normalize-arity', 'normalize-path',
- 'normalized-arity?', 'not', 'not/c', 'null', 'null?', 'number->string',
- 'number?', 'numerator', 'object%', 'object->vector', 'object-info',
- 'object-interface', 'object-method-arity-includes?', 'object-name',
- 'object=?', 'object?', 'odd?', 'one-of/c', 'open-input-bytes',
- 'open-input-file', 'open-input-output-file', 'open-input-string',
- 'open-output-bytes', 'open-output-file', 'open-output-nowhere',
- 'open-output-string', 'or/c', 'order-of-magnitude', 'ormap',
- 'other-execute-bit', 'other-read-bit', 'other-write-bit',
- 'output-port?', 'pair?', 'parameter-procedure=?', 'parameter/c',
- 'parameter?', 'parameterization?', 'parse-command-line', 'partition',
- 'path->bytes', 'path->complete-path', 'path->directory-path',
- 'path->string', 'path-add-suffix', 'path-convention-type',
- 'path-element->bytes', 'path-element->string', 'path-element?',
- 'path-for-some-system?', 'path-list-string->path-list', 'path-only',
- 'path-replace-suffix', 'path-string?', 'path<?', 'path?',
- 'pathlist-closure', 'peek-byte', 'peek-byte-or-special', 'peek-bytes',
- 'peek-bytes!', 'peek-bytes!-evt', 'peek-bytes-avail!',
- 'peek-bytes-avail!*', 'peek-bytes-avail!-evt',
+ 'non-empty-string?', 'none/c', 'normal-case-path', 'normalize-arity',
+ 'normalize-path', 'normalized-arity?', 'not', 'not/c', 'null', 'null?',
+ 'number->string', 'number?', 'numerator', 'object%', 'object->vector',
+ 'object-info', 'object-interface', 'object-method-arity-includes?',
+ 'object-name', 'object-or-false=?', 'object=?', 'object?', 'odd?',
+ 'one-of/c', 'open-input-bytes', 'open-input-file',
+ 'open-input-output-file', 'open-input-string', 'open-output-bytes',
+ 'open-output-file', 'open-output-nowhere', 'open-output-string',
+ 'or/c', 'order-of-magnitude', 'ormap', 'other-execute-bit',
+ 'other-read-bit', 'other-write-bit', 'output-port?', 'pair?',
+ 'parameter-procedure=?', 'parameter/c', 'parameter?',
+ 'parameterization?', 'parse-command-line', 'partition', 'path->bytes',
+ 'path->complete-path', 'path->directory-path', 'path->string',
+ 'path-add-suffix', 'path-convention-type', 'path-element->bytes',
+ 'path-element->string', 'path-element?', 'path-for-some-system?',
+ 'path-list-string->path-list', 'path-only', 'path-replace-suffix',
+ 'path-string?', 'path<?', 'path?', 'pathlist-closure', 'peek-byte',
+ 'peek-byte-or-special', 'peek-bytes', 'peek-bytes!', 'peek-bytes!-evt',
+ 'peek-bytes-avail!', 'peek-bytes-avail!*', 'peek-bytes-avail!-evt',
'peek-bytes-avail!/enable-break', 'peek-bytes-evt', 'peek-char',
'peek-char-or-special', 'peek-string', 'peek-string!',
'peek-string!-evt', 'peek-string-evt', 'peeking-input-port',
@@ -940,18 +973,20 @@ class RacketLexer(RegexLexer):
'place-dead-evt', 'place-enabled?', 'place-kill', 'place-location?',
'place-message-allowed?', 'place-sleep', 'place-wait', 'place?',
'placeholder-get', 'placeholder-set!', 'placeholder?',
+ 'plumber-add-flush!', 'plumber-flush-all',
+ 'plumber-flush-handle-remove!', 'plumber-flush-handle?', 'plumber?',
'poll-guard-evt', 'port->bytes', 'port->bytes-lines', 'port->lines',
'port->list', 'port->string', 'port-closed-evt', 'port-closed?',
'port-commit-peeked', 'port-count-lines!', 'port-count-lines-enabled',
'port-counts-lines?', 'port-display-handler', 'port-file-identity',
- 'port-file-unlock', 'port-next-location', 'port-print-handler',
- 'port-progress-evt', 'port-provides-progress-evts?',
- 'port-read-handler', 'port-try-file-lock?', 'port-write-handler',
- 'port-writes-atomic?', 'port-writes-special?', 'port?', 'positive?',
- 'predicate/c', 'prefab-key->struct-type', 'prefab-key?',
- 'prefab-struct-key', 'preferences-lock-file-mode', 'pregexp',
- 'pregexp?', 'pretty-display', 'pretty-format', 'pretty-print',
- 'pretty-print-.-symbol-without-bars',
+ 'port-file-unlock', 'port-next-location', 'port-number?',
+ 'port-print-handler', 'port-progress-evt',
+ 'port-provides-progress-evts?', 'port-read-handler',
+ 'port-try-file-lock?', 'port-write-handler', 'port-writes-atomic?',
+ 'port-writes-special?', 'port?', 'positive?', 'predicate/c',
+ 'prefab-key->struct-type', 'prefab-key?', 'prefab-struct-key',
+ 'preferences-lock-file-mode', 'pregexp', 'pregexp?', 'pretty-display',
+ 'pretty-format', 'pretty-print', 'pretty-print-.-symbol-without-bars',
'pretty-print-abbreviate-read-macros', 'pretty-print-columns',
'pretty-print-current-style-table', 'pretty-print-depth',
'pretty-print-exact-as-decimal', 'pretty-print-extend-style-table',
@@ -967,30 +1002,36 @@ class RacketLexer(RegexLexer):
'print-pair-curly-braces', 'print-reader-abbreviations',
'print-struct', 'print-syntax-width', 'print-unreadable',
'print-vector-length', 'printable/c', 'printable<%>', 'printf',
- 'procedure->method', 'procedure-arity', 'procedure-arity-includes/c',
- 'procedure-arity-includes?', 'procedure-arity?',
- 'procedure-closure-contents-eq?', 'procedure-extract-target',
- 'procedure-keywords', 'procedure-reduce-arity',
- 'procedure-reduce-keyword-arity', 'procedure-rename',
+ 'println', 'procedure->method', 'procedure-arity',
+ 'procedure-arity-includes/c', 'procedure-arity-includes?',
+ 'procedure-arity?', 'procedure-closure-contents-eq?',
+ 'procedure-extract-target', 'procedure-keywords',
+ 'procedure-reduce-arity', 'procedure-reduce-keyword-arity',
+ 'procedure-rename', 'procedure-result-arity', 'procedure-specialize',
'procedure-struct-type?', 'procedure?', 'process', 'process*',
'process*/ports', 'process/ports', 'processor-count', 'progress-evt?',
- 'promise-forced?', 'promise-running?', 'promise/c', 'promise?',
- 'prop:arity-string', 'prop:chaperone-contract',
- 'prop:checked-procedure', 'prop:contract', 'prop:contracted',
- 'prop:custom-print-quotable', 'prop:custom-write', 'prop:dict',
- 'prop:dict/contract', 'prop:equal+hash', 'prop:evt',
- 'prop:exn:missing-module', 'prop:exn:srclocs', 'prop:flat-contract',
+ 'promise-forced?', 'promise-running?', 'promise/c', 'promise/name?',
+ 'promise?', 'prop:arity-string', 'prop:arrow-contract',
+ 'prop:arrow-contract-get-info', 'prop:arrow-contract?', 'prop:blame',
+ 'prop:chaperone-contract', 'prop:checked-procedure', 'prop:contract',
+ 'prop:contracted', 'prop:custom-print-quotable', 'prop:custom-write',
+ 'prop:dict', 'prop:dict/contract', 'prop:equal+hash', 'prop:evt',
+ 'prop:exn:missing-module', 'prop:exn:srclocs',
+ 'prop:expansion-contexts', 'prop:flat-contract',
'prop:impersonator-of', 'prop:input-port',
- 'prop:liberal-define-context', 'prop:opt-chaperone-contract',
- 'prop:opt-chaperone-contract-get-test', 'prop:opt-chaperone-contract?',
+ 'prop:liberal-define-context', 'prop:object-name',
+ 'prop:opt-chaperone-contract', 'prop:opt-chaperone-contract-get-test',
+ 'prop:opt-chaperone-contract?', 'prop:orc-contract',
+ 'prop:orc-contract-get-subcontracts', 'prop:orc-contract?',
'prop:output-port', 'prop:place-location', 'prop:procedure',
- 'prop:rename-transformer', 'prop:sequence', 'prop:set!-transformer',
- 'prop:stream', 'proper-subset?', 'pseudo-random-generator->vector',
- 'pseudo-random-generator-vector?', 'pseudo-random-generator?',
- 'put-preferences', 'putenv', 'quotient', 'quotient/remainder',
- 'radians->degrees', 'raise', 'raise-argument-error',
- 'raise-arguments-error', 'raise-arity-error', 'raise-blame-error',
- 'raise-contract-error', 'raise-mismatch-error',
+ 'prop:recursive-contract', 'prop:recursive-contract-unroll',
+ 'prop:recursive-contract?', 'prop:rename-transformer', 'prop:sequence',
+ 'prop:set!-transformer', 'prop:stream', 'proper-subset?',
+ 'pseudo-random-generator->vector', 'pseudo-random-generator-vector?',
+ 'pseudo-random-generator?', 'put-preferences', 'putenv', 'quotient',
+ 'quotient/remainder', 'radians->degrees', 'raise',
+ 'raise-argument-error', 'raise-arguments-error', 'raise-arity-error',
+ 'raise-blame-error', 'raise-contract-error', 'raise-mismatch-error',
'raise-not-cons-blame-error', 'raise-range-error',
'raise-result-error', 'raise-syntax-error', 'raise-type-error',
'raise-user-error', 'random', 'random-seed', 'range', 'rational?',
@@ -1001,11 +1042,12 @@ class RacketLexer(RegexLexer):
'read-bytes', 'read-bytes!', 'read-bytes!-evt', 'read-bytes-avail!',
'read-bytes-avail!*', 'read-bytes-avail!-evt',
'read-bytes-avail!/enable-break', 'read-bytes-evt', 'read-bytes-line',
- 'read-bytes-line-evt', 'read-case-sensitive', 'read-char',
+ 'read-bytes-line-evt', 'read-case-sensitive', 'read-cdot', 'read-char',
'read-char-or-special', 'read-curly-brace-as-paren',
- 'read-decimal-as-inexact', 'read-eval-print-loop', 'read-language',
- 'read-line', 'read-line-evt', 'read-on-demand-source',
- 'read-square-bracket-as-paren', 'read-string', 'read-string!',
+ 'read-curly-brace-with-tag', 'read-decimal-as-inexact',
+ 'read-eval-print-loop', 'read-language', 'read-line', 'read-line-evt',
+ 'read-on-demand-source', 'read-square-bracket-as-paren',
+ 'read-square-bracket-with-tag', 'read-string', 'read-string!',
'read-string!-evt', 'read-string-evt', 'read-syntax',
'read-syntax/recursive', 'read/recursive', 'readtable-mapping',
'readtable?', 'real->decimal-string', 'real->double-flonum',
@@ -1023,20 +1065,21 @@ class RacketLexer(RegexLexer):
'regexp-quote', 'regexp-replace', 'regexp-replace*',
'regexp-replace-quote', 'regexp-replaces', 'regexp-split',
'regexp-try-match', 'regexp?', 'relative-path?', 'relocate-input-port',
- 'relocate-output-port', 'remainder', 'remove', 'remove*',
- 'remove-duplicates', 'remq', 'remq*', 'remv', 'remv*',
- 'rename-file-or-directory', 'rename-transformer-target',
- 'rename-transformer?', 'reroot-path', 'resolve-path',
- 'resolved-module-path-name', 'resolved-module-path?', 'rest',
- 'reverse', 'round', 'second', 'seconds->date', 'security-guard?',
- 'semaphore-peek-evt', 'semaphore-peek-evt?', 'semaphore-post',
- 'semaphore-try-wait?', 'semaphore-wait', 'semaphore-wait/enable-break',
- 'semaphore?', 'sequence->list', 'sequence->stream',
- 'sequence-add-between', 'sequence-andmap', 'sequence-append',
- 'sequence-count', 'sequence-filter', 'sequence-fold',
- 'sequence-for-each', 'sequence-generate', 'sequence-generate*',
- 'sequence-length', 'sequence-map', 'sequence-ormap', 'sequence-ref',
- 'sequence-tail', 'sequence?', 'set', 'set!-transformer-procedure',
+ 'relocate-output-port', 'remainder', 'remf', 'remf*', 'remove',
+ 'remove*', 'remove-duplicates', 'remq', 'remq*', 'remv', 'remv*',
+ 'rename-contract', 'rename-file-or-directory',
+ 'rename-transformer-target', 'rename-transformer?', 'replace-evt',
+ 'reroot-path', 'resolve-path', 'resolved-module-path-name',
+ 'resolved-module-path?', 'rest', 'reverse', 'round', 'second',
+ 'seconds->date', 'security-guard?', 'semaphore-peek-evt',
+ 'semaphore-peek-evt?', 'semaphore-post', 'semaphore-try-wait?',
+ 'semaphore-wait', 'semaphore-wait/enable-break', 'semaphore?',
+ 'sequence->list', 'sequence->stream', 'sequence-add-between',
+ 'sequence-andmap', 'sequence-append', 'sequence-count',
+ 'sequence-filter', 'sequence-fold', 'sequence-for-each',
+ 'sequence-generate', 'sequence-generate*', 'sequence-length',
+ 'sequence-map', 'sequence-ormap', 'sequence-ref', 'sequence-tail',
+ 'sequence/c', 'sequence?', 'set', 'set!-transformer-procedure',
'set!-transformer?', 'set->list', 'set->stream', 'set-add', 'set-add!',
'set-box!', 'set-clear', 'set-clear!', 'set-copy', 'set-copy-clear',
'set-count', 'set-empty?', 'set-eq?', 'set-equal?', 'set-eqv?',
@@ -1044,38 +1087,39 @@ class RacketLexer(RegexLexer):
'set-intersect', 'set-intersect!', 'set-map', 'set-mcar!', 'set-mcdr!',
'set-member?', 'set-mutable?', 'set-phantom-bytes!',
'set-port-next-location!', 'set-remove', 'set-remove!', 'set-rest',
- 'set-subtract', 'set-subtract!', 'set-symmetric-difference',
- 'set-symmetric-difference!', 'set-union', 'set-union!', 'set-weak?',
- 'set/c', 'set=?', 'set?', 'seteq', 'seteqv', 'seventh', 'sgn',
- 'shared-bytes', 'shell-execute', 'shrink-path-wrt', 'shuffle',
- 'simple-form-path', 'simplify-path', 'sin', 'single-flonum?', 'sinh',
- 'sixth', 'skip-projection-wrapper?', 'sleep',
+ 'set-some-basic-contracts!', 'set-subtract', 'set-subtract!',
+ 'set-symmetric-difference', 'set-symmetric-difference!', 'set-union',
+ 'set-union!', 'set-weak?', 'set/c', 'set=?', 'set?', 'seteq', 'seteqv',
+ 'seventh', 'sgn', 'shared-bytes', 'shell-execute', 'shrink-path-wrt',
+ 'shuffle', 'simple-form-path', 'simplify-path', 'sin',
+ 'single-flonum?', 'sinh', 'sixth', 'skip-projection-wrapper?', 'sleep',
'some-system-path->string', 'sort', 'special-comment-value',
'special-comment?', 'special-filter-input-port', 'split-at',
- 'split-at-right', 'split-path', 'splitf-at', 'splitf-at-right', 'sqr',
- 'sqrt', 'srcloc', 'srcloc->string', 'srcloc-column', 'srcloc-line',
- 'srcloc-position', 'srcloc-source', 'srcloc-span', 'srcloc?',
- 'stop-after', 'stop-before', 'stream->list', 'stream-add-between',
- 'stream-andmap', 'stream-append', 'stream-count', 'stream-empty?',
- 'stream-filter', 'stream-first', 'stream-fold', 'stream-for-each',
- 'stream-length', 'stream-map', 'stream-ormap', 'stream-ref',
- 'stream-rest', 'stream-tail', 'stream?', 'string',
- 'string->bytes/latin-1', 'string->bytes/locale', 'string->bytes/utf-8',
- 'string->immutable-string', 'string->keyword', 'string->list',
- 'string->number', 'string->path', 'string->path-element',
- 'string->some-system-path', 'string->symbol',
+ 'split-at-right', 'split-common-prefix', 'split-path', 'splitf-at',
+ 'splitf-at-right', 'sqr', 'sqrt', 'srcloc', 'srcloc->string',
+ 'srcloc-column', 'srcloc-line', 'srcloc-position', 'srcloc-source',
+ 'srcloc-span', 'srcloc?', 'stop-after', 'stop-before', 'stream->list',
+ 'stream-add-between', 'stream-andmap', 'stream-append', 'stream-count',
+ 'stream-empty?', 'stream-filter', 'stream-first', 'stream-fold',
+ 'stream-for-each', 'stream-length', 'stream-map', 'stream-ormap',
+ 'stream-ref', 'stream-rest', 'stream-tail', 'stream/c', 'stream?',
+ 'string', 'string->bytes/latin-1', 'string->bytes/locale',
+ 'string->bytes/utf-8', 'string->immutable-string', 'string->keyword',
+ 'string->list', 'string->number', 'string->path',
+ 'string->path-element', 'string->some-system-path', 'string->symbol',
'string->uninterned-symbol', 'string->unreadable-symbol',
'string-append', 'string-append*', 'string-ci<=?', 'string-ci<?',
- 'string-ci=?', 'string-ci>=?', 'string-ci>?', 'string-copy',
- 'string-copy!', 'string-downcase', 'string-environment-variable-name?',
- 'string-fill!', 'string-foldcase', 'string-join', 'string-len/c',
- 'string-length', 'string-locale-ci<?', 'string-locale-ci=?',
- 'string-locale-ci>?', 'string-locale-downcase', 'string-locale-upcase',
- 'string-locale<?', 'string-locale=?', 'string-locale>?',
- 'string-no-nuls?', 'string-normalize-nfc', 'string-normalize-nfd',
- 'string-normalize-nfkc', 'string-normalize-nfkd',
- 'string-normalize-spaces', 'string-ref', 'string-replace',
- 'string-set!', 'string-split', 'string-titlecase', 'string-trim',
+ 'string-ci=?', 'string-ci>=?', 'string-ci>?', 'string-contains?',
+ 'string-copy', 'string-copy!', 'string-downcase',
+ 'string-environment-variable-name?', 'string-fill!', 'string-foldcase',
+ 'string-join', 'string-len/c', 'string-length', 'string-locale-ci<?',
+ 'string-locale-ci=?', 'string-locale-ci>?', 'string-locale-downcase',
+ 'string-locale-upcase', 'string-locale<?', 'string-locale=?',
+ 'string-locale>?', 'string-no-nuls?', 'string-normalize-nfc',
+ 'string-normalize-nfd', 'string-normalize-nfkc',
+ 'string-normalize-nfkd', 'string-normalize-spaces', 'string-port?',
+ 'string-prefix?', 'string-ref', 'string-replace', 'string-set!',
+ 'string-split', 'string-suffix?', 'string-titlecase', 'string-trim',
'string-upcase', 'string-utf-8-length', 'string<=?', 'string<?',
'string=?', 'string>=?', 'string>?', 'string?', 'struct->vector',
'struct-accessor-procedure?', 'struct-constructor-procedure?',
@@ -1084,10 +1128,11 @@ class RacketLexer(RegexLexer):
'struct-type-make-constructor', 'struct-type-make-predicate',
'struct-type-property-accessor-procedure?', 'struct-type-property/c',
'struct-type-property?', 'struct-type?', 'struct:arity-at-least',
- 'struct:date', 'struct:date*', 'struct:exn', 'struct:exn:break',
- 'struct:exn:break:hang-up', 'struct:exn:break:terminate',
- 'struct:exn:fail', 'struct:exn:fail:contract',
- 'struct:exn:fail:contract:arity', 'struct:exn:fail:contract:blame',
+ 'struct:arrow-contract-info', 'struct:date', 'struct:date*',
+ 'struct:exn', 'struct:exn:break', 'struct:exn:break:hang-up',
+ 'struct:exn:break:terminate', 'struct:exn:fail',
+ 'struct:exn:fail:contract', 'struct:exn:fail:contract:arity',
+ 'struct:exn:fail:contract:blame',
'struct:exn:fail:contract:continuation',
'struct:exn:fail:contract:divide-by-zero',
'struct:exn:fail:contract:non-fixnum-result',
@@ -1105,16 +1150,17 @@ class RacketLexer(RegexLexer):
'struct:wrapped-extra-arg-arrow', 'struct?', 'sub1', 'subbytes',
'subclass?', 'subclass?/c', 'subprocess', 'subprocess-group-enabled',
'subprocess-kill', 'subprocess-pid', 'subprocess-status',
- 'subprocess-wait', 'subprocess?', 'subset?', 'substring',
+ 'subprocess-wait', 'subprocess?', 'subset?', 'substring', 'suggest/c',
'symbol->string', 'symbol-interned?', 'symbol-unreadable?', 'symbol<?',
'symbol=?', 'symbol?', 'symbols', 'sync', 'sync/enable-break',
'sync/timeout', 'sync/timeout/enable-break', 'syntax->datum',
- 'syntax->list', 'syntax-arm', 'syntax-column', 'syntax-disarm',
- 'syntax-e', 'syntax-line', 'syntax-local-bind-syntaxes',
- 'syntax-local-certifier', 'syntax-local-context',
- 'syntax-local-expand-expression', 'syntax-local-get-shadower',
+ 'syntax->list', 'syntax-arm', 'syntax-column', 'syntax-debug-info',
+ 'syntax-disarm', 'syntax-e', 'syntax-line',
+ 'syntax-local-bind-syntaxes', 'syntax-local-certifier',
+ 'syntax-local-context', 'syntax-local-expand-expression',
+ 'syntax-local-get-shadower', 'syntax-local-identifier-as-binding',
'syntax-local-introduce', 'syntax-local-lift-context',
- 'syntax-local-lift-expression',
+ 'syntax-local-lift-expression', 'syntax-local-lift-module',
'syntax-local-lift-module-end-declaration',
'syntax-local-lift-provide', 'syntax-local-lift-require',
'syntax-local-lift-values-expression',
@@ -1126,20 +1172,22 @@ class RacketLexer(RegexLexer):
'syntax-local-phase-level', 'syntax-local-submodules',
'syntax-local-transforming-module-provides?', 'syntax-local-value',
'syntax-local-value/immediate', 'syntax-original?', 'syntax-position',
- 'syntax-property', 'syntax-property-symbol-keys', 'syntax-protect',
- 'syntax-rearm', 'syntax-recertify', 'syntax-shift-phase-level',
- 'syntax-source', 'syntax-source-module', 'syntax-span', 'syntax-taint',
+ 'syntax-property', 'syntax-property-preserved?',
+ 'syntax-property-symbol-keys', 'syntax-protect', 'syntax-rearm',
+ 'syntax-recertify', 'syntax-shift-phase-level', 'syntax-source',
+ 'syntax-source-module', 'syntax-span', 'syntax-taint',
'syntax-tainted?', 'syntax-track-origin',
- 'syntax-transforming-module-expression?', 'syntax-transforming?',
- 'syntax/c', 'syntax?', 'system', 'system*', 'system*/exit-code',
+ 'syntax-transforming-module-expression?',
+ 'syntax-transforming-with-lifts?', 'syntax-transforming?', 'syntax/c',
+ 'syntax?', 'system', 'system*', 'system*/exit-code',
'system-big-endian?', 'system-idle-evt', 'system-language+country',
'system-library-subpath', 'system-path-convention-type', 'system-type',
- 'system/exit-code', 'tail-marks-match?', 'take', 'take-right', 'takef',
- 'takef-right', 'tan', 'tanh', 'tcp-abandon-port', 'tcp-accept',
- 'tcp-accept-evt', 'tcp-accept-ready?', 'tcp-accept/enable-break',
- 'tcp-addresses', 'tcp-close', 'tcp-connect',
- 'tcp-connect/enable-break', 'tcp-listen', 'tcp-listener?', 'tcp-port?',
- 'tentative-pretty-print-port-cancel',
+ 'system/exit-code', 'tail-marks-match?', 'take', 'take-common-prefix',
+ 'take-right', 'takef', 'takef-right', 'tan', 'tanh',
+ 'tcp-abandon-port', 'tcp-accept', 'tcp-accept-evt',
+ 'tcp-accept-ready?', 'tcp-accept/enable-break', 'tcp-addresses',
+ 'tcp-close', 'tcp-connect', 'tcp-connect/enable-break', 'tcp-listen',
+ 'tcp-listener?', 'tcp-port?', 'tentative-pretty-print-port-cancel',
'tentative-pretty-print-port-transfer', 'tenth', 'terminal-port?',
'the-unsupplied-arg', 'third', 'thread', 'thread-cell-ref',
'thread-cell-set!', 'thread-cell-values?', 'thread-cell?',
@@ -1162,7 +1210,7 @@ class RacketLexer(RegexLexer):
'uncaught-exception-handler', 'unit?', 'unspecified-dom',
'unsupplied-arg?', 'use-collection-link-paths',
'use-compiled-file-paths', 'use-user-specific-search-paths',
- 'user-execute-bit', 'user-read-bit', 'user-write-bit',
+ 'user-execute-bit', 'user-read-bit', 'user-write-bit', 'value-blame',
'value-contract', 'values', 'variable-reference->empty-namespace',
'variable-reference->module-base-phase',
'variable-reference->module-declaration-inspector',
@@ -1194,8 +1242,8 @@ class RacketLexer(RegexLexer):
'write-bytes-avail', 'write-bytes-avail*', 'write-bytes-avail-evt',
'write-bytes-avail/enable-break', 'write-char', 'write-special',
'write-special-avail*', 'write-special-evt', 'write-string',
- 'write-to-file', 'xor', 'zero?', '~.a', '~.s', '~.v', '~a', '~e', '~r',
- '~s', '~v'
+ 'write-to-file', 'writeln', 'xor', 'zero?', '~.a', '~.s', '~.v', '~a',
+ '~e', '~r', '~s', '~v'
)
_opening_parenthesis = r'[([{]'
diff --git a/pygments/lexers/make.py b/pygments/lexers/make.py
index f5eac127..9b6273d7 100644
--- a/pygments/lexers/make.py
+++ b/pygments/lexers/make.py
@@ -90,7 +90,7 @@ class BaseMakefileLexer(RegexLexer):
bygroups(Keyword, Text), 'export'),
(r'export\s+', Keyword),
# assignment
- (r'([\w${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
+ (r'([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
# strings
(r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double),
@@ -102,7 +102,7 @@ class BaseMakefileLexer(RegexLexer):
(r'\$\(', Keyword, 'expansion'),
],
'expansion': [
- (r'[^$a-zA-Z_)]+', Text),
+ (r'[^$a-zA-Z_()]+', Text),
(r'[a-zA-Z_]+', Name.Variable),
(r'\$', Keyword),
(r'\(', Keyword, '#push'),
diff --git a/pygments/lexers/nimrod.py b/pygments/lexers/nimrod.py
index 00b849a6..e1bbcc03 100644
--- a/pygments/lexers/nimrod.py
+++ b/pygments/lexers/nimrod.py
@@ -3,7 +3,7 @@
pygments.lexers.nimrod
~~~~~~~~~~~~~~~~~~~~~~
- Lexer for the Nimrod language.
+ Lexer for the Nim language (formerly known as Nimrod).
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
@@ -20,15 +20,15 @@ __all__ = ['NimrodLexer']
class NimrodLexer(RegexLexer):
"""
- For `Nimrod <http://nimrod-code.org/>`_ source code.
+ For `Nim <http://nim-lang.org/>`_ source code.
.. versionadded:: 1.5
"""
name = 'Nimrod'
- aliases = ['nimrod', 'nim']
+ aliases = ['nim', 'nimrod']
filenames = ['*.nim', '*.nimrod']
- mimetypes = ['text/x-nimrod']
+ mimetypes = ['text/x-nim']
flags = re.MULTILINE | re.IGNORECASE | re.UNICODE
@@ -43,13 +43,13 @@ class NimrodLexer(RegexLexer):
return "|".join(newWords)
keywords = [
- 'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break',
- 'case', 'cast', 'const', 'continue', 'converter', 'discard',
- 'distinct', 'div', 'elif', 'else', 'end', 'enum', 'except', 'finally',
- 'for', 'generic', 'if', 'implies', 'in', 'yield',
- 'is', 'isnot', 'iterator', 'lambda', 'let', 'macro', 'method',
- 'mod', 'not', 'notin', 'object', 'of', 'or', 'out', 'proc',
- 'ptr', 'raise', 'ref', 'return', 'shl', 'shr', 'template', 'try',
+ 'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break', 'case',
+ 'cast', 'concept', 'const', 'continue', 'converter', 'defer', 'discard',
+ 'distinct', 'div', 'do', 'elif', 'else', 'end', 'enum', 'except',
+ 'export', 'finally', 'for', 'func', 'if', 'in', 'yield', 'interface',
+ 'is', 'isnot', 'iterator', 'let', 'macro', 'method', 'mixin', 'mod',
+ 'not', 'notin', 'object', 'of', 'or', 'out', 'proc', 'ptr', 'raise',
+ 'ref', 'return', 'shared', 'shl', 'shr', 'static', 'template', 'try',
'tuple', 'type', 'when', 'while', 'with', 'without', 'xor'
]
diff --git a/pygments/lexers/rust.py b/pygments/lexers/rust.py
index 5d1162b8..d3d98ee8 100644
--- a/pygments/lexers/rust.py
+++ b/pygments/lexers/rust.py
@@ -18,7 +18,7 @@ __all__ = ['RustLexer']
class RustLexer(RegexLexer):
"""
- Lexer for the Rust programming language (version 1.0).
+ Lexer for the Rust programming language (version 1.10).
.. versionadded:: 1.6
"""
@@ -49,17 +49,21 @@ class RustLexer(RegexLexer):
(r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
# Keywords
(words((
- 'as', 'box', 'crate', 'do', 'else', 'enum', 'extern', # break and continue are in labels
- 'fn', 'for', 'if', 'impl', 'in', 'loop', 'match', 'mut', 'priv',
- 'proc', 'pub', 'ref', 'return', 'static', 'struct',
- 'trait', 'true', 'type', 'unsafe', 'while'), suffix=r'\b'),
+ 'as', 'box', 'const', 'crate', 'else', 'extern',
+ 'for', 'if', 'impl', 'in', 'loop', 'match', 'move',
+ 'mut', 'pub', 'ref', 'return', 'static', 'super',
+ 'trait', 'unsafe', 'use', 'where', 'while'), suffix=r'\b'),
Keyword),
- (words(('alignof', 'be', 'const', 'offsetof', 'pure', 'sizeof',
- 'typeof', 'once', 'unsized', 'yield'), suffix=r'\b'),
+ (words(('abstract', 'alignof', 'become', 'do', 'final', 'macro',
+ 'offsetof', 'override', 'priv', 'proc', 'pure', 'sizeof',
+ 'typeof', 'unsized', 'virtual', 'yield'), suffix=r'\b'),
Keyword.Reserved),
- (r'(mod|use)\b', Keyword.Namespace),
(r'(true|false)\b', Keyword.Constant),
+ (r'mod\b', Keyword, 'modname'),
(r'let\b', Keyword.Declaration),
+ (r'fn\b', Keyword, 'funcname'),
+ (r'(struct|enum|type|union)\b', Keyword, 'typename'),
+ (r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Text, Keyword)),
(words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64', 'usize',
'isize', 'f32', 'f64', 'str', 'bool'), suffix=r'\b'),
Keyword.Type),
@@ -88,11 +92,11 @@ class RustLexer(RegexLexer):
'Ok', 'Err',
'SliceConcatExt',
'String', 'ToString',
- 'Vec',
- ), suffix=r'\b'),
+ 'Vec'), suffix=r'\b'),
Name.Builtin),
# Labels
- (r'(break|continue)(\s*)(\'[A-Za-z_]\w*)?', bygroups(Keyword, Text.Whitespace, Name.Label)),
+ (r'(break|continue)(\s*)(\'[A-Za-z_]\w*)?',
+ bygroups(Keyword, Text.Whitespace, Name.Label)),
# Character Literal
(r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
@@ -148,6 +152,21 @@ class RustLexer(RegexLexer):
(r'\*/', String.Doc, '#pop'),
(r'[*/]', String.Doc),
],
+ 'modname': [
+ (r'\s+', Text),
+ (r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
+ default('#pop'),
+ ],
+ 'funcname': [
+ (r'\s+', Text),
+ (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
+ default('#pop'),
+ ],
+ 'typename': [
+ (r'\s+', Text),
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ default('#pop'),
+ ],
'number_lit': [
(r'[ui](8|16|32|64|size)', Keyword, '#pop'),
(r'f(32|64)', Keyword, '#pop'),
diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py
index 7487a757..e225a66e 100644
--- a/pygments/lexers/sql.py
+++ b/pygments/lexers/sql.py
@@ -153,7 +153,7 @@ class PostgresLexer(PostgresBase, RegexLexer):
tokens = {
'root': [
(r'\s+', Text),
- (r'--.*?\n', Comment.Single),
+ (r'--.*\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'(' + '|'.join(s.replace(" ", "\s+")
for s in DATATYPES + PSEUDO_TYPES)
@@ -380,7 +380,7 @@ class SqlLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Text),
- (r'--.*?\n', Comment.Single),
+ (r'--.*\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(words((
'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER', 'AGGREGATE',
@@ -550,7 +550,7 @@ class MySqlLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Text),
- (r'(#|--\s+).*?\n', Comment.Single),
+ (r'(#|--\s+).*\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'[0-9]+', Number.Integer),
(r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float),
diff --git a/tests/examplefiles/capdl_example.cdl b/tests/examplefiles/capdl_example.cdl
new file mode 100644
index 00000000..050e56a6
--- /dev/null
+++ b/tests/examplefiles/capdl_example.cdl
@@ -0,0 +1,64 @@
+#ifdef ARCH_ARM
+arch arm11
+#else
+arch ia32
+#endif
+
+objects {
+ my_ep = ep /* A synchronous endpoint */
+
+ /* Two thread control blocks */
+ tcb1 = tcb
+ tcb2 = tcb
+
+ /* Four frames of physical memory */
+ frame1 = frame (4k)
+ frame2 = frame (4k)
+ frame3 = frame (4k)
+ frame4 = frame (4k)
+
+ /* Two page tables */
+ pt1 = pt
+ pt2 = pt
+
+ /* Two page directories */
+ pd1 = pd
+ pd2 = pd
+
+ /* Two capability nodes */
+ cnode1 = cnode (2 bits)
+ cnode2 = cnode (3 bits)
+}
+caps {
+ cnode1 {
+ 0x1: frame1 (RW) /* read/write */
+ 0x2: my_ep (R) /* read-only */
+ }
+ cnode2 {
+ 0x1: my_ep (W) /* write-only */
+ }
+ tcb1 {
+ vspace: pd1
+ ipc_buffer_slot: frame1
+ cspace: cnode1
+ }
+ pd1 {
+ 0x10: pt1
+ }
+ pt1 {
+ 0x8: frame1 (RW)
+ 0x9: frame2 (R)
+ }
+ tcb2 {
+ vspace: pd2
+ ipc_buffer_slot: frame3
+ cspace: cnode2
+ }
+ pd2 {
+ 0x10: pt2
+ }
+ pt2 {
+ 0x10: frame3 (RW)
+ 0x12: frame4 (R)
+ }
+}
diff --git a/tests/examplefiles/durexmania.aheui b/tests/examplefiles/durexmania.aheui
new file mode 100644
index 00000000..89654c00
--- /dev/null
+++ b/tests/examplefiles/durexmania.aheui
@@ -0,0 +1,4 @@
+우주메이저☆듀렉스전도사♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♡먊
+삶은밥과야근밥샤주세양♡밥사밥사밥사밥사밥사땅땅땅빵☆따밦내발따밦다빵맣밥밥밥내놔밥줘밥밥밥밗땅땅땅박밝땅땅딻타밟타맣밦밣따박타맣밦밣따박타맣밦밣따박타맣박빵빵빵빵따따따따맣삶몲
+Original Source by @harunene // Run it on AheuiChem(http://yoo2001818.github.io/AheuiChem/)
+https://gist.github.com/item4/ca870a63b390da6cc6f1
diff --git a/tests/examplefiles/example.juttle b/tests/examplefiles/example.juttle
new file mode 100644
index 00000000..ae861996
--- /dev/null
+++ b/tests/examplefiles/example.juttle
@@ -0,0 +1,110 @@
+/* Block comment */
+/*
+ Multiline block
+ comment
+*/
+
+// inline comment
+function juttleFunction(arg) {
+ if (arg == null) {
+ return null;
+ }
+ else if (arg == 0) {
+ return 'zero';
+ }
+ else if (arg == 1) {
+ return "one";
+ }
+ else {
+ return 1.1;
+ }
+}
+
+reducer juttleReducer(field) {
+ var x = 0;
+ function update() {
+ x = *field;
+ }
+
+ function result() {
+ return x;
+ }
+}
+
+sub myemit(limit) {
+ emit -limit limit
+}
+
+input test: text -default 'input';
+const object = {
+ xyz: 123,
+ name: 'something'
+};
+
+const array = [
+ :2016-01-01:,
+ :2016-01-01T01:00:00:,
+ :2016-01-01T01:00:00.000:,
+ :2016-01-01T01:00:00.000Z:,
+ :2016-01-01T01:00:00.000-0800:,
+ :2016-01-01T01:00:00.000-08:00:,
+ :00:00:01:,
+ :00:00:00.001:,
+ :now:,
+ :beginning:,
+ :end:,
+ :forever:,
+ :yesterday:,
+ :today:,
+ :tomorrow:,
+ :1:,
+ :1.1:,
+ :1s:,
+ :1 second:,
+ :1 seconds:,
+ :100ms:,
+ :100 millisecond:,
+ :100 milliseconds:,
+ :1d:,
+ :1 day:,
+ :1 days:,
+ :.2h:,
+ :1.2h:,
+ :.2 hour:,
+ :1.2 hours:,
+ :.5d:,
+ :1.5d:,
+ :.5 day:,
+ :1.5 days:,
+ :5m:,
+ :5 minutes:,
+ :10w:,
+ :10 weeks:,
+ :10M:,
+ :10 months:,
+ :100y:,
+ :100 years:,
+ :1 year and 2 months and 2 days:
+];
+
+emit
+ | batch :10 minutes:
+ | filter x=true
+ | head 1
+ | join
+ | keep x
+ | pace -every :1 minute:
+ | pass
+ | put y=false
+ | remove z
+ | sequence
+ | skip 1
+ | sort field -desc
+ | split field
+ | tail 10
+ | unbatch
+ | uniq field
+;
+
+read adapter -last :day: 'search' AND field~/pattern/ OR field == 'string'
+ | write adapter
diff --git a/tests/examplefiles/example.yaml b/tests/examplefiles/example.yaml
index 9c0ed9d0..17544c02 100644
--- a/tests/examplefiles/example.yaml
+++ b/tests/examplefiles/example.yaml
@@ -1,3 +1,12 @@
+#
+# Regression tests
+#
+
+%TAG ! tag:example.com:foo/
+---
+test: !foo/bar {a: 'asdf'}
+test2: fred
+...
#
# Examples from the Preview section of the YAML specification
diff --git a/tests/examplefiles/fibonacci.tokigun.aheui b/tests/examplefiles/fibonacci.tokigun.aheui
new file mode 100644
index 00000000..afa2ca05
--- /dev/null
+++ b/tests/examplefiles/fibonacci.tokigun.aheui
@@ -0,0 +1,4 @@
+바싹반박나싼순
+뿌멓떠벌번멍뻐
+쌀삭쌀살다순옭
+어어선썬설썩옭
diff --git a/tests/examplefiles/hello-world.puzzlet.aheui b/tests/examplefiles/hello-world.puzzlet.aheui
new file mode 100644
index 00000000..e7ef3a62
--- /dev/null
+++ b/tests/examplefiles/hello-world.puzzlet.aheui
@@ -0,0 +1,10 @@
+밤밣따빠밣밟따뿌
+빠맣파빨받밤뚜뭏
+돋밬탕빠맣붏두붇
+볻뫃박발뚷투뭏붖
+뫃도뫃희멓뭏뭏붘
+뫃봌토범더벌뿌뚜
+뽑뽀멓멓더벓뻐뚠
+뽀덩벐멓뻐덕더벅
+
+https://github.com/aheui/snippets/blob/master/hello-world/hello-world.puzzlet.aheui
diff --git a/tests/examplefiles/plain.bst b/tests/examplefiles/plain.bst
new file mode 100644
index 00000000..7adf4bb0
--- /dev/null
+++ b/tests/examplefiles/plain.bst
@@ -0,0 +1,1097 @@
+% BibTeX standard bibliography style `plain'
+ % Version 0.99b (8-Dec-10 release) for BibTeX versions 0.99a or later.
+ % Copyright (C) 1984, 1985, 1988, 2010 Howard Trickey and Oren Patashnik.
+ % Unlimited copying and redistribution of this file are permitted as long as
+ % it is unmodified. Modifications (and redistribution of modified versions)
+ % are also permitted, but only if the resulting file is renamed to something
+ % besides btxbst.doc, plain.bst, unsrt.bst, alpha.bst, and abbrv.bst.
+ % This restriction helps ensure that all standard styles are identical.
+ % The file btxbst.doc has the documentation for this style.
+
+ENTRY
+ { address
+ author
+ booktitle
+ chapter
+ edition
+ editor
+ howpublished
+ institution
+ journal
+ key
+ month
+ note
+ number
+ organization
+ pages
+ publisher
+ school
+ series
+ title
+ type
+ volume
+ year
+ }
+ {}
+ { label }
+
+INTEGERS { output.state before.all mid.sentence after.sentence after.block }
+
+FUNCTION {init.state.consts}
+{ #0 'before.all :=
+ #1 'mid.sentence :=
+ #2 'after.sentence :=
+ #3 'after.block :=
+}
+
+STRINGS { s t }
+
+FUNCTION {output.nonnull}
+{ 's :=
+ output.state mid.sentence =
+ { ", " * write$ }
+ { output.state after.block =
+ { add.period$ write$
+ newline$
+ "\newblock " write$
+ }
+ { output.state before.all =
+ 'write$
+ { add.period$ " " * write$ }
+ if$
+ }
+ if$
+ mid.sentence 'output.state :=
+ }
+ if$
+ s
+}
+
+FUNCTION {output}
+{ duplicate$ empty$
+ 'pop$
+ 'output.nonnull
+ if$
+}
+
+FUNCTION {output.check}
+{ 't :=
+ duplicate$ empty$
+ { pop$ "empty " t * " in " * cite$ * warning$ }
+ 'output.nonnull
+ if$
+}
+
+FUNCTION {output.bibitem}
+{ newline$
+ "\bibitem{" write$
+ cite$ write$
+ "}" write$
+ newline$
+ ""
+ before.all 'output.state :=
+}
+
+FUNCTION {fin.entry}
+{ add.period$
+ write$
+ newline$
+}
+
+FUNCTION {new.block}
+{ output.state before.all =
+ 'skip$
+ { after.block 'output.state := }
+ if$
+}
+
+FUNCTION {new.sentence}
+{ output.state after.block =
+ 'skip$
+ { output.state before.all =
+ 'skip$
+ { after.sentence 'output.state := }
+ if$
+ }
+ if$
+}
+
+FUNCTION {not}
+{ { #0 }
+ { #1 }
+ if$
+}
+
+FUNCTION {and}
+{ 'skip$
+ { pop$ #0 }
+ if$
+}
+
+FUNCTION {or}
+{ { pop$ #1 }
+ 'skip$
+ if$
+}
+
+FUNCTION {new.block.checka}
+{ empty$
+ 'skip$
+ 'new.block
+ if$
+}
+
+FUNCTION {new.block.checkb}
+{ empty$
+ swap$ empty$
+ and
+ 'skip$
+ 'new.block
+ if$
+}
+
+FUNCTION {new.sentence.checka}
+{ empty$
+ 'skip$
+ 'new.sentence
+ if$
+}
+
+FUNCTION {new.sentence.checkb}
+{ empty$
+ swap$ empty$
+ and
+ 'skip$
+ 'new.sentence
+ if$
+}
+
+FUNCTION {field.or.null}
+{ duplicate$ empty$
+ { pop$ "" }
+ 'skip$
+ if$
+}
+
+FUNCTION {emphasize}
+{ duplicate$ empty$
+ { pop$ "" }
+ { "{\em " swap$ * "}" * }
+ if$
+}
+
+INTEGERS { nameptr namesleft numnames }
+
+FUNCTION {format.names}
+{ 's :=
+ #1 'nameptr :=
+ s num.names$ 'numnames :=
+ numnames 'namesleft :=
+ { namesleft #0 > }
+ { s nameptr "{ff~}{vv~}{ll}{, jj}" format.name$ 't :=
+ nameptr #1 >
+ { namesleft #1 >
+ { ", " * t * }
+ { numnames #2 >
+ { "," * }
+ 'skip$
+ if$
+ t "others" =
+ { " et~al." * }
+ { " and " * t * }
+ if$
+ }
+ if$
+ }
+ 't
+ if$
+ nameptr #1 + 'nameptr :=
+ namesleft #1 - 'namesleft :=
+ }
+ while$
+}
+
+FUNCTION {format.authors}
+{ author empty$
+ { "" }
+ { author format.names }
+ if$
+}
+
+FUNCTION {format.editors}
+{ editor empty$
+ { "" }
+ { editor format.names
+ editor num.names$ #1 >
+ { ", editors" * }
+ { ", editor" * }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.title}
+{ title empty$
+ { "" }
+ { title "t" change.case$ }
+ if$
+}
+
+FUNCTION {n.dashify}
+{ 't :=
+ ""
+ { t empty$ not }
+ { t #1 #1 substring$ "-" =
+ { t #1 #2 substring$ "--" = not
+ { "--" *
+ t #2 global.max$ substring$ 't :=
+ }
+ { { t #1 #1 substring$ "-" = }
+ { "-" *
+ t #2 global.max$ substring$ 't :=
+ }
+ while$
+ }
+ if$
+ }
+ { t #1 #1 substring$ *
+ t #2 global.max$ substring$ 't :=
+ }
+ if$
+ }
+ while$
+}
+
+FUNCTION {format.date}
+{ year empty$
+ { month empty$
+ { "" }
+ { "there's a month but no year in " cite$ * warning$
+ month
+ }
+ if$
+ }
+ { month empty$
+ 'year
+ { month " " * year * }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.btitle}
+{ title emphasize
+}
+
+FUNCTION {tie.or.space.connect}
+{ duplicate$ text.length$ #3 <
+ { "~" }
+ { " " }
+ if$
+ swap$ * *
+}
+
+FUNCTION {either.or.check}
+{ empty$
+ 'pop$
+ { "can't use both " swap$ * " fields in " * cite$ * warning$ }
+ if$
+}
+
+FUNCTION {format.bvolume}
+{ volume empty$
+ { "" }
+ { "volume" volume tie.or.space.connect
+ series empty$
+ 'skip$
+ { " of " * series emphasize * }
+ if$
+ "volume and number" number either.or.check
+ }
+ if$
+}
+
+FUNCTION {format.number.series}
+{ volume empty$
+ { number empty$
+ { series field.or.null }
+ { output.state mid.sentence =
+ { "number" }
+ { "Number" }
+ if$
+ number tie.or.space.connect
+ series empty$
+ { "there's a number but no series in " cite$ * warning$ }
+ { " in " * series * }
+ if$
+ }
+ if$
+ }
+ { "" }
+ if$
+}
+
+FUNCTION {format.edition}
+{ edition empty$
+ { "" }
+ { output.state mid.sentence =
+ { edition "l" change.case$ " edition" * }
+ { edition "t" change.case$ " edition" * }
+ if$
+ }
+ if$
+}
+
+INTEGERS { multiresult }
+
+FUNCTION {multi.page.check}
+{ 't :=
+ #0 'multiresult :=
+ { multiresult not
+ t empty$ not
+ and
+ }
+ { t #1 #1 substring$
+ duplicate$ "-" =
+ swap$ duplicate$ "," =
+ swap$ "+" =
+ or or
+ { #1 'multiresult := }
+ { t #2 global.max$ substring$ 't := }
+ if$
+ }
+ while$
+ multiresult
+}
+
+FUNCTION {format.pages}
+{ pages empty$
+ { "" }
+ { pages multi.page.check
+ { "pages" pages n.dashify tie.or.space.connect }
+ { "page" pages tie.or.space.connect }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.vol.num.pages}
+{ volume field.or.null
+ number empty$
+ 'skip$
+ { "(" number * ")" * *
+ volume empty$
+ { "there's a number but no volume in " cite$ * warning$ }
+ 'skip$
+ if$
+ }
+ if$
+ pages empty$
+ 'skip$
+ { duplicate$ empty$
+ { pop$ format.pages }
+ { ":" * pages n.dashify * }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.chapter.pages}
+{ chapter empty$
+ 'format.pages
+ { type empty$
+ { "chapter" }
+ { type "l" change.case$ }
+ if$
+ chapter tie.or.space.connect
+ pages empty$
+ 'skip$
+ { ", " * format.pages * }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.in.ed.booktitle}
+{ booktitle empty$
+ { "" }
+ { editor empty$
+ { "In " booktitle emphasize * }
+ { "In " format.editors * ", " * booktitle emphasize * }
+ if$
+ }
+ if$
+}
+
+FUNCTION {empty.misc.check}
+{ author empty$ title empty$ howpublished empty$
+ month empty$ year empty$ note empty$
+ and and and and and
+ key empty$ not and
+ { "all relevant fields are empty in " cite$ * warning$ }
+ 'skip$
+ if$
+}
+
+FUNCTION {format.thesis.type}
+{ type empty$
+ 'skip$
+ { pop$
+ type "t" change.case$
+ }
+ if$
+}
+
+FUNCTION {format.tr.number}
+{ type empty$
+ { "Technical Report" }
+ 'type
+ if$
+ number empty$
+ { "t" change.case$ }
+ { number tie.or.space.connect }
+ if$
+}
+
+FUNCTION {format.article.crossref}
+{ key empty$
+ { journal empty$
+ { "need key or journal for " cite$ * " to crossref " * crossref *
+ warning$
+ ""
+ }
+ { "In {\em " journal * "\/}" * }
+ if$
+ }
+ { "In " key * }
+ if$
+ " \cite{" * crossref * "}" *
+}
+
+FUNCTION {format.crossref.editor}
+{ editor #1 "{vv~}{ll}" format.name$
+ editor num.names$ duplicate$
+ #2 >
+ { pop$ " et~al." * }
+ { #2 <
+ 'skip$
+ { editor #2 "{ff }{vv }{ll}{ jj}" format.name$ "others" =
+ { " et~al." * }
+ { " and " * editor #2 "{vv~}{ll}" format.name$ * }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.book.crossref}
+{ volume empty$
+ { "empty volume in " cite$ * "'s crossref of " * crossref * warning$
+ "In "
+ }
+ { "Volume" volume tie.or.space.connect
+ " of " *
+ }
+ if$
+ editor empty$
+ editor field.or.null author field.or.null =
+ or
+ { key empty$
+ { series empty$
+ { "need editor, key, or series for " cite$ * " to crossref " *
+ crossref * warning$
+ "" *
+ }
+ { "{\em " * series * "\/}" * }
+ if$
+ }
+ { key * }
+ if$
+ }
+ { format.crossref.editor * }
+ if$
+ " \cite{" * crossref * "}" *
+}
+
+FUNCTION {format.incoll.inproc.crossref}
+{ editor empty$
+ editor field.or.null author field.or.null =
+ or
+ { key empty$
+ { booktitle empty$
+ { "need editor, key, or booktitle for " cite$ * " to crossref " *
+ crossref * warning$
+ ""
+ }
+ { "In {\em " booktitle * "\/}" * }
+ if$
+ }
+ { "In " key * }
+ if$
+ }
+ { "In " format.crossref.editor * }
+ if$
+ " \cite{" * crossref * "}" *
+}
+
+FUNCTION {article}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.title "title" output.check
+ new.block
+ crossref missing$
+ { journal emphasize "journal" output.check
+ format.vol.num.pages output
+ format.date "year" output.check
+ }
+ { format.article.crossref output.nonnull
+ format.pages output
+ }
+ if$
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {book}
+{ output.bibitem
+ author empty$
+ { format.editors "author and editor" output.check }
+ { format.authors output.nonnull
+ crossref missing$
+ { "author and editor" editor either.or.check }
+ 'skip$
+ if$
+ }
+ if$
+ new.block
+ format.btitle "title" output.check
+ crossref missing$
+ { format.bvolume output
+ new.block
+ format.number.series output
+ new.sentence
+ publisher "publisher" output.check
+ address output
+ }
+ { new.block
+ format.book.crossref output.nonnull
+ }
+ if$
+ format.edition output
+ format.date "year" output.check
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {booklet}
+{ output.bibitem
+ format.authors output
+ new.block
+ format.title "title" output.check
+ howpublished address new.block.checkb
+ howpublished output
+ address output
+ format.date output
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {inbook}
+{ output.bibitem
+ author empty$
+ { format.editors "author and editor" output.check }
+ { format.authors output.nonnull
+ crossref missing$
+ { "author and editor" editor either.or.check }
+ 'skip$
+ if$
+ }
+ if$
+ new.block
+ format.btitle "title" output.check
+ crossref missing$
+ { format.bvolume output
+ format.chapter.pages "chapter and pages" output.check
+ new.block
+ format.number.series output
+ new.sentence
+ publisher "publisher" output.check
+ address output
+ }
+ { format.chapter.pages "chapter and pages" output.check
+ new.block
+ format.book.crossref output.nonnull
+ }
+ if$
+ format.edition output
+ format.date "year" output.check
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {incollection}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.title "title" output.check
+ new.block
+ crossref missing$
+ { format.in.ed.booktitle "booktitle" output.check
+ format.bvolume output
+ format.number.series output
+ format.chapter.pages output
+ new.sentence
+ publisher "publisher" output.check
+ address output
+ format.edition output
+ format.date "year" output.check
+ }
+ { format.incoll.inproc.crossref output.nonnull
+ format.chapter.pages output
+ }
+ if$
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {inproceedings}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.title "title" output.check
+ new.block
+ crossref missing$
+ { format.in.ed.booktitle "booktitle" output.check
+ format.bvolume output
+ format.number.series output
+ format.pages output
+ address empty$
+ { organization publisher new.sentence.checkb
+ organization output
+ publisher output
+ format.date "year" output.check
+ }
+ { address output.nonnull
+ format.date "year" output.check
+ new.sentence
+ organization output
+ publisher output
+ }
+ if$
+ }
+ { format.incoll.inproc.crossref output.nonnull
+ format.pages output
+ }
+ if$
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {conference} { inproceedings }
+
+FUNCTION {manual}
+{ output.bibitem
+ author empty$
+ { organization empty$
+ 'skip$
+ { organization output.nonnull
+ address output
+ }
+ if$
+ }
+ { format.authors output.nonnull }
+ if$
+ new.block
+ format.btitle "title" output.check
+ author empty$
+ { organization empty$
+ { address new.block.checka
+ address output
+ }
+ 'skip$
+ if$
+ }
+ { organization address new.block.checkb
+ organization output
+ address output
+ }
+ if$
+ format.edition output
+ format.date output
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {mastersthesis}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.title "title" output.check
+ new.block
+ "Master's thesis" format.thesis.type output.nonnull
+ school "school" output.check
+ address output
+ format.date "year" output.check
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {misc}
+{ output.bibitem
+ format.authors output
+ title howpublished new.block.checkb
+ format.title output
+ howpublished new.block.checka
+ howpublished output
+ format.date output
+ new.block
+ note output
+ fin.entry
+ empty.misc.check
+}
+
+FUNCTION {phdthesis}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.btitle "title" output.check
+ new.block
+ "PhD thesis" format.thesis.type output.nonnull
+ school "school" output.check
+ address output
+ format.date "year" output.check
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {proceedings}
+{ output.bibitem
+ editor empty$
+ { organization output }
+ { format.editors output.nonnull }
+ if$
+ new.block
+ format.btitle "title" output.check
+ format.bvolume output
+ format.number.series output
+ address empty$
+ { editor empty$
+ { publisher new.sentence.checka }
+ { organization publisher new.sentence.checkb
+ organization output
+ }
+ if$
+ publisher output
+ format.date "year" output.check
+ }
+ { address output.nonnull
+ format.date "year" output.check
+ new.sentence
+ editor empty$
+ 'skip$
+ { organization output }
+ if$
+ publisher output
+ }
+ if$
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {techreport}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.title "title" output.check
+ new.block
+ format.tr.number output.nonnull
+ institution "institution" output.check
+ address output
+ format.date "year" output.check
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {unpublished}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.title "title" output.check
+ new.block
+ note "note" output.check
+ format.date output
+ fin.entry
+}
+
+FUNCTION {default.type} { misc }
+
+MACRO {jan} {"January"}
+
+MACRO {feb} {"February"}
+
+MACRO {mar} {"March"}
+
+MACRO {apr} {"April"}
+
+MACRO {may} {"May"}
+
+MACRO {jun} {"June"}
+
+MACRO {jul} {"July"}
+
+MACRO {aug} {"August"}
+
+MACRO {sep} {"September"}
+
+MACRO {oct} {"October"}
+
+MACRO {nov} {"November"}
+
+MACRO {dec} {"December"}
+
+MACRO {acmcs} {"ACM Computing Surveys"}
+
+MACRO {acta} {"Acta Informatica"}
+
+MACRO {cacm} {"Communications of the ACM"}
+
+MACRO {ibmjrd} {"IBM Journal of Research and Development"}
+
+MACRO {ibmsj} {"IBM Systems Journal"}
+
+MACRO {ieeese} {"IEEE Transactions on Software Engineering"}
+
+MACRO {ieeetc} {"IEEE Transactions on Computers"}
+
+MACRO {ieeetcad}
+ {"IEEE Transactions on Computer-Aided Design of Integrated Circuits"}
+
+MACRO {ipl} {"Information Processing Letters"}
+
+MACRO {jacm} {"Journal of the ACM"}
+
+MACRO {jcss} {"Journal of Computer and System Sciences"}
+
+MACRO {scp} {"Science of Computer Programming"}
+
+MACRO {sicomp} {"SIAM Journal on Computing"}
+
+MACRO {tocs} {"ACM Transactions on Computer Systems"}
+
+MACRO {tods} {"ACM Transactions on Database Systems"}
+
+MACRO {tog} {"ACM Transactions on Graphics"}
+
+MACRO {toms} {"ACM Transactions on Mathematical Software"}
+
+MACRO {toois} {"ACM Transactions on Office Information Systems"}
+
+MACRO {toplas} {"ACM Transactions on Programming Languages and Systems"}
+
+MACRO {tcs} {"Theoretical Computer Science"}
+
+READ
+
+FUNCTION {sortify}
+{ purify$
+ "l" change.case$
+}
+
+INTEGERS { len }
+
+FUNCTION {chop.word}
+{ 's :=
+ 'len :=
+ s #1 len substring$ =
+ { s len #1 + global.max$ substring$ }
+ 's
+ if$
+}
+
+FUNCTION {sort.format.names}
+{ 's :=
+ #1 'nameptr :=
+ ""
+ s num.names$ 'numnames :=
+ numnames 'namesleft :=
+ { namesleft #0 > }
+ { nameptr #1 >
+ { " " * }
+ 'skip$
+ if$
+ s nameptr "{vv{ } }{ll{ }}{ ff{ }}{ jj{ }}" format.name$ 't :=
+ nameptr numnames = t "others" = and
+ { "et al" * }
+ { t sortify * }
+ if$
+ nameptr #1 + 'nameptr :=
+ namesleft #1 - 'namesleft :=
+ }
+ while$
+}
+
+FUNCTION {sort.format.title}
+{ 't :=
+ "A " #2
+ "An " #3
+ "The " #4 t chop.word
+ chop.word
+ chop.word
+ sortify
+ #1 global.max$ substring$
+}
+
+FUNCTION {author.sort}
+{ author empty$
+ { key empty$
+ { "to sort, need author or key in " cite$ * warning$
+ ""
+ }
+ { key sortify }
+ if$
+ }
+ { author sort.format.names }
+ if$
+}
+
+FUNCTION {author.editor.sort}
+{ author empty$
+ { editor empty$
+ { key empty$
+ { "to sort, need author, editor, or key in " cite$ * warning$
+ ""
+ }
+ { key sortify }
+ if$
+ }
+ { editor sort.format.names }
+ if$
+ }
+ { author sort.format.names }
+ if$
+}
+
+FUNCTION {author.organization.sort}
+{ author empty$
+ { organization empty$
+ { key empty$
+ { "to sort, need author, organization, or key in " cite$ * warning$
+ ""
+ }
+ { key sortify }
+ if$
+ }
+ { "The " #4 organization chop.word sortify }
+ if$
+ }
+ { author sort.format.names }
+ if$
+}
+
+FUNCTION {editor.organization.sort}
+{ editor empty$
+ { organization empty$
+ { key empty$
+ { "to sort, need editor, organization, or key in " cite$ * warning$
+ ""
+ }
+ { key sortify }
+ if$
+ }
+ { "The " #4 organization chop.word sortify }
+ if$
+ }
+ { editor sort.format.names }
+ if$
+}
+
+FUNCTION {presort}
+{ type$ "book" =
+ type$ "inbook" =
+ or
+ 'author.editor.sort
+ { type$ "proceedings" =
+ 'editor.organization.sort
+ { type$ "manual" =
+ 'author.organization.sort
+ 'author.sort
+ if$
+ }
+ if$
+ }
+ if$
+ " "
+ *
+ year field.or.null sortify
+ *
+ " "
+ *
+ title field.or.null
+ sort.format.title
+ *
+ #1 entry.max$ substring$
+ 'sort.key$ :=
+}
+
+ITERATE {presort}
+
+SORT
+
+STRINGS { longest.label }
+
+INTEGERS { number.label longest.label.width }
+
+FUNCTION {initialize.longest.label}
+{ "" 'longest.label :=
+ #1 'number.label :=
+ #0 'longest.label.width :=
+}
+
+FUNCTION {longest.label.pass}
+{ number.label int.to.str$ 'label :=
+ number.label #1 + 'number.label :=
+ label width$ longest.label.width >
+ { label 'longest.label :=
+ label width$ 'longest.label.width :=
+ }
+ 'skip$
+ if$
+}
+
+EXECUTE {initialize.longest.label}
+
+ITERATE {longest.label.pass}
+
+FUNCTION {begin.bib}
+{ preamble$ empty$
+ 'skip$
+ { preamble$ write$ newline$ }
+ if$
+ "\begin{thebibliography}{" longest.label * "}" * write$ newline$
+}
+
+EXECUTE {begin.bib}
+
+EXECUTE {init.state.consts}
+
+ITERATE {call.type$}
+
+FUNCTION {end.bib}
+{ newline$
+ "\end{thebibliography}" write$ newline$
+}
+
+EXECUTE {end.bib}
diff --git a/tests/examplefiles/test.bib b/tests/examplefiles/test.bib
new file mode 100644
index 00000000..87e558d8
--- /dev/null
+++ b/tests/examplefiles/test.bib
@@ -0,0 +1,77 @@
+This is an example BibTeX file.
+This text is a comment.
+
+@preamble{"%%% example BibTeX file"}
+
+@Preamble{"\newcommand{\noopsort}[1]{} "
+ "\newcommand{\noopsort}[1]{} "}
+
+@String{SCI = "Science"}
+
+@STRING{JFernandez = "Fernandez, Julio M."}
+@StRiNg{HGaub = "Gaub, Hermann E."}
+@string{MGautel = "Gautel, Mathias"}
+@String{FOesterhelt = "Oesterhelt, Filipp"}
+@String{MRief = "Rief, Matthias"}
+
+@Article{rief97b,
+ author = MRief #" and "# MGautel #" and "# FOesterhelt
+ #" and "# JFernandez #" and "# HGaub,
+ title = "Reversible Unfolding of Individual Titin
+ Immunoglobulin Domains by {AFM}",
+ journal = SCI,
+ volume = 276,
+ number = 5315,
+ pages = "1109--1112",
+ year = 1997,
+ doi = "10.1126/science.276.5315.1109",
+ URL = "http://www.sciencemag.org/cgi/content/abstract/276/5315/1109",
+ eprint = "http://www.sciencemag.org/cgi/reprint/276/5315/1109.pdf",
+}
+
+
+Parens can be used instead of braces:
+
+@ARTICLE(ruckenstein-diffusion,
+ author = "Liu, Hongquin and Ruckenstein, Eli",
+ language = "english",
+ title = "Predicting the Diffusion Coefficient in Supercritical Fluids",
+ journal = "Ind. Eng. Chem. Res.",
+ volume = "36",
+ year = "1997",
+ pages = "888-895"
+)
+
+@book{
+ viktorov-methods,
+ author = "Викторов, Михаил Маркович",
+ publisher = "Л.: <<Химия>>",
+ title = "Методы вычисления физико-химических величин и прикладные расчёты",
+ language = "russian",
+ year = "1977",
+ isbn = "000-0000000000",
+}
+
+@comment{jackson-commented-out,
+ author = "Jackson, P\'eter",
+ publisher = "Some Publisher",
+ language = "english",
+ title = "Some Title",
+ series = "Some series",
+ booktitle = "Commented Out",
+ number = "3",
+ edition = "Second",
+ year = "1933",
+ pages = "44--59"
+}
+
+@booklet{test-booklet,
+ author = "de Last, Jr., First Middle",
+ language = "english",
+ title = "Just a booklet",
+ year = 2006,
+ month = jan,
+ address = "Moscow",
+ howpublished = "Published by Foo"
+}
+
diff --git a/tests/test_bibtex.py b/tests/test_bibtex.py
new file mode 100644
index 00000000..3b07d899
--- /dev/null
+++ b/tests/test_bibtex.py
@@ -0,0 +1,236 @@
+# -*- coding: utf-8 -*-
+"""
+ BibTeX Test
+ ~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import textwrap
+import unittest
+
+from pygments.lexers import BibTeXLexer, BSTLexer
+from pygments.token import Token
+
+
+class BibTeXTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = BibTeXLexer()
+
+ def testPreamble(self):
+ data = u'@PREAMBLE{"% some LaTeX code here"}'
+ tokens = [
+ (Token.Name.Class, u'@PREAMBLE'),
+ (Token.Punctuation, u'{'),
+ (Token.String, u'"'),
+ (Token.String, u'% some LaTeX code here'),
+ (Token.String, u'"'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
+
+ def testString(self):
+ data = u'@STRING(SCI = "Science")'
+ tokens = [
+ (Token.Name.Class, u'@STRING'),
+ (Token.Punctuation, u'('),
+ (Token.Name.Attribute, u'SCI'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'='),
+ (Token.Text, u' '),
+ (Token.String, u'"'),
+ (Token.String, u'Science'),
+ (Token.String, u'"'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
+
+ def testEntry(self):
+ data = u"""
+ This is a comment.
+
+ @ARTICLE{ruckenstein-diffusion,
+ author = "Liu, Hongquin" # and # "Ruckenstein, Eli",
+ year = 1997,
+ month = JAN,
+ pages = "888-895"
+ }
+ """
+
+ tokens = [
+ (Token.Comment, u'This is a comment.'),
+ (Token.Text, u'\n\n'),
+ (Token.Name.Class, u'@ARTICLE'),
+ (Token.Punctuation, u'{'),
+ (Token.Name.Label, u'ruckenstein-diffusion'),
+ (Token.Punctuation, u','),
+ (Token.Text, u'\n '),
+ (Token.Name.Attribute, u'author'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'='),
+ (Token.Text, u' '),
+ (Token.String, u'"'),
+ (Token.String, u'Liu, Hongquin'),
+ (Token.String, u'"'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'#'),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u'and'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'#'),
+ (Token.Text, u' '),
+ (Token.String, u'"'),
+ (Token.String, u'Ruckenstein, Eli'),
+ (Token.String, u'"'),
+ (Token.Punctuation, u','),
+ (Token.Text, u'\n '),
+ (Token.Name.Attribute, u'year'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'='),
+ (Token.Text, u' '),
+ (Token.Number, u'1997'),
+ (Token.Punctuation, u','),
+ (Token.Text, u'\n '),
+ (Token.Name.Attribute, u'month'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'='),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u'JAN'),
+ (Token.Punctuation, u','),
+ (Token.Text, u'\n '),
+ (Token.Name.Attribute, u'pages'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'='),
+ (Token.Text, u' '),
+ (Token.String, u'"'),
+ (Token.String, u'888-895'),
+ (Token.String, u'"'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(textwrap.dedent(data))), tokens)
+
+ def testComment(self):
+ data = '@COMMENT{test}'
+ tokens = [
+ (Token.Comment, u'@COMMENT'),
+ (Token.Comment, u'{test}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
+
+ def testMissingBody(self):
+ data = '@ARTICLE xxx'
+ tokens = [
+ (Token.Name.Class, u'@ARTICLE'),
+ (Token.Text, u' '),
+ (Token.Error, u'x'),
+ (Token.Error, u'x'),
+ (Token.Error, u'x'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
+
+ def testMismatchedBrace(self):
+ data = '@PREAMBLE(""}'
+ tokens = [
+ (Token.Name.Class, u'@PREAMBLE'),
+ (Token.Punctuation, u'('),
+ (Token.String, u'"'),
+ (Token.String, u'"'),
+ (Token.Error, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
+
+
+class BSTTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = BSTLexer()
+
+ def testBasicBST(self):
+ data = """
+ % BibTeX standard bibliography style `plain'
+
+ INTEGERS { output.state before.all }
+
+ FUNCTION {sort.format.title}
+ { 't :=
+ "A " #2
+ "An " #3
+ "The " #4 t chop.word
+ chop.word
+ chop.word
+ sortify
+ #1 global.max$ substring$
+ }
+
+ ITERATE {call.type$}
+ """
+ tokens = [
+ (Token.Comment.SingleLine, "% BibTeX standard bibliography style `plain'"),
+ (Token.Text, u'\n\n'),
+ (Token.Keyword, u'INTEGERS'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'{'),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u'output.state'),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u'before.all'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n\n'),
+ (Token.Keyword, u'FUNCTION'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'{'),
+ (Token.Name.Variable, u'sort.format.title'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'{'),
+ (Token.Text, u' '),
+ (Token.Name.Function, u"'t"),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u':='),
+ (Token.Text, u'\n'),
+ (Token.Literal.String, u'"A "'),
+ (Token.Text, u' '),
+ (Token.Literal.Number, u'#2'),
+ (Token.Text, u'\n '),
+ (Token.Literal.String, u'"An "'),
+ (Token.Text, u' '),
+ (Token.Literal.Number, u'#3'),
+ (Token.Text, u'\n '),
+ (Token.Literal.String, u'"The "'),
+ (Token.Text, u' '),
+ (Token.Literal.Number, u'#4'),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u't'),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u'chop.word'),
+ (Token.Text, u'\n '),
+ (Token.Name.Variable, u'chop.word'),
+ (Token.Text, u'\n'),
+ (Token.Name.Variable, u'chop.word'),
+ (Token.Text, u'\n'),
+ (Token.Name.Variable, u'sortify'),
+ (Token.Text, u'\n'),
+ (Token.Literal.Number, u'#1'),
+ (Token.Text, u' '),
+ (Token.Name.Builtin, u'global.max$'),
+ (Token.Text, u' '),
+ (Token.Name.Builtin, u'substring$'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n\n'),
+ (Token.Keyword, u'ITERATE'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'{'),
+ (Token.Name.Builtin, u'call.type$'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(textwrap.dedent(data))), tokens)