summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--AUTHORS10
-rw-r--r--CHANGES32
-rwxr-xr-xpygmentize2
-rw-r--r--pygments/filters/__init__.py4
-rw-r--r--pygments/formatter.py3
-rw-r--r--pygments/lexers/__init__.py11
-rw-r--r--pygments/lexers/_mapping.py41
-rw-r--r--pygments/lexers/agile.py2
-rw-r--r--pygments/lexers/asm.py2
-rw-r--r--pygments/lexers/compiled.py20
-rw-r--r--pygments/lexers/functional.py120
-rw-r--r--pygments/lexers/math.py264
-rw-r--r--pygments/lexers/other.py139
-rw-r--r--pygments/lexers/shell.py2
-rw-r--r--pygments/lexers/sql.py2
-rw-r--r--pygments/lexers/templates.py4
-rw-r--r--pygments/lexers/text.py62
-rw-r--r--pygments/lexers/web.py972
-rw-r--r--pygments/modeline.py40
-rw-r--r--tests/examplefiles/example.hx142
-rw-r--r--tests/examplefiles/example.rexx50
-rw-r--r--tests/examplefiles/garcia-wachs.kk70
-rw-r--r--tests/examplefiles/objc_example.m7
-rw-r--r--tests/examplefiles/test.ebnf31
-rw-r--r--tests/test_lexers_other.py68
25 files changed, 1787 insertions, 313 deletions
diff --git a/AUTHORS b/AUTHORS
index 525d24f4..1c4a9992 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -6,8 +6,9 @@ Major developers are Tim Hatch <tim@timhatch.com> and Armin Ronacher
Other contributors, listed alphabetically, are:
* Sam Aaron -- Ioke lexer
-* Kumar Appaiah -- Debian control lexer
* Ali Afshar -- image formatter
+* Thomas Aglassinger -- Rexx lexer
+* Kumar Appaiah -- Debian control lexer
* Andreas Amann -- AppleScript lexer
* Timothy Armstrong -- Dart lexer fixes
* Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers
@@ -30,7 +31,8 @@ Other contributors, listed alphabetically, are:
* Christian Jann -- ShellSession lexer
* Christopher Creutzig -- MuPAD lexer
* Pete Curry -- bugfixes
-* Owen Durni -- haXe lexer
+* Bryan Davis -- EBNF lexer
+* Owen Durni -- Haxe lexer
* Nick Efford -- Python 3 lexer
* Sven Efftinge -- Xtend lexer
* Artem Egorkine -- terminal256 formatter
@@ -41,10 +43,13 @@ Other contributors, listed alphabetically, are:
* Naveen Garg -- Autohotkey lexer
* Laurent Gautier -- R/S lexer
* Alex Gaynor -- PyPy log lexer
+* Richard Gerkin -- Igor Pro lexer
* Alain Gilbert -- TypeScript lexer
+* Alex Gilding -- BlitzBasic lexer
* Bertrand Goetzmann -- Groovy lexer
* Krzysiek Goj -- Scala lexer
* Matt Good -- Genshi, Cheetah lexers
+* Michał Górny -- vim modeline support
* Patrick Gotthardt -- PHP namespaces support
* Olivier Guibe -- Asymptote lexer
* Jordi Gutiérrez Hermoso -- Octave lexer
@@ -101,6 +106,7 @@ Other contributors, listed alphabetically, are:
* Mike Nolta -- Julia lexer
* Jonas Obrist -- BBCode lexer
* David Oliva -- Rebol lexer
+* Pat Pannuto -- nesC lexer
* Jon Parise -- Protocol buffers lexer
* Ronny Pfannschmidt -- BBCode lexer
* Benjamin Peterson -- Test suite refactoring
diff --git a/CHANGES b/CHANGES
index bcca573b..9b2cafc5 100644
--- a/CHANGES
+++ b/CHANGES
@@ -14,6 +14,18 @@ Version 1.7
* Clay (PR#184)
* Perl 6 (PR#181)
+ * Swig (PR#168)
+ * nesC (PR#166)
+ * BlitzBasic (PR#197)
+ * EBNF (PR#193)
+ * Igor Pro (PR#172)
+ * Rexx (PR#199)
+ * Agda and Literate Agda (PR#203)
+
+- Pygments will now recognize "vim" modelines when guessing the lexer for
+ a file based on content (PR#118).
+
+- The NameHighlightFilter now works with any Name.* token type (#790).
- Python 3 lexer: add new exceptions from PEP 3151.
@@ -25,6 +37,22 @@ Version 1.7
- Lasso lexer: fix method highlighting, update builtins. Fix
guessing so that plain XML isn't always taken as Lasso (PR#163).
+- Objective C/C++ lexers: allow "@" prefixing any expression (#871).
+
+- Ruby lexer: fix lexing of Name::Space tokens (#860).
+
+- Stan lexer: update for version 1.3.0 of the language (PR#162).
+
+- JavaScript lexer: add the "yield" keyword (PR#196).
+
+- HTTP lexer: support for PATCH method (PR#190).
+
+- Koka lexer: update to newest language spec (PR#201).
+
+- Haxe lexer: rewrite and support for Haxe 3 (PR#174).
+
+- Prolog lexer: add different kinds of numeric literals (#864).
+
Version 1.6
-----------
@@ -279,7 +307,7 @@ Version 1.3
* Ada
* Coldfusion
* Modula-2
- * haXe
+ * Haxe
* R console
* Objective-J
* Haml and Sass
@@ -338,7 +366,7 @@ Version 1.2
* CMake
* Ooc
* Coldfusion
- * haXe
+ * Haxe
* R console
- Added options for rendering LaTeX in source code comments in the
diff --git a/pygmentize b/pygmentize
index e2379199..8b3b2067 100755
--- a/pygmentize
+++ b/pygmentize
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python2
import sys, pygments.cmdline
try:
diff --git a/pygments/filters/__init__.py b/pygments/filters/__init__.py
index f12d025c..84c0193d 100644
--- a/pygments/filters/__init__.py
+++ b/pygments/filters/__init__.py
@@ -129,7 +129,7 @@ class KeywordCaseFilter(Filter):
class NameHighlightFilter(Filter):
"""
- Highlight a normal Name token with a different token type.
+ Highlight a normal Name (and Name.*) token with a different token type.
Example::
@@ -163,7 +163,7 @@ class NameHighlightFilter(Filter):
def filter(self, lexer, stream):
for ttype, value in stream:
- if ttype is Name and value in self.names:
+ if ttype in Name and value in self.names:
yield self.tokentype, value
else:
yield ttype, value
diff --git a/pygments/formatter.py b/pygments/formatter.py
index 4b69f2a4..c1d6f2e1 100644
--- a/pygments/formatter.py
+++ b/pygments/formatter.py
@@ -68,6 +68,9 @@ class Formatter(object):
self.full = get_bool_opt(options, 'full', False)
self.title = options.get('title', '')
self.encoding = options.get('encoding', None) or None
+ if self.encoding == 'guess':
+ # can happen for pygmentize -O encoding=guess
+ self.encoding = 'utf-8'
self.encoding = options.get('outencoding', None) or self.encoding
self.options = options
diff --git a/pygments/lexers/__init__.py b/pygments/lexers/__init__.py
index 9af6ce68..dbfe4351 100644
--- a/pygments/lexers/__init__.py
+++ b/pygments/lexers/__init__.py
@@ -15,6 +15,7 @@ import fnmatch
from os.path import basename
from pygments.lexers._mapping import LEXERS
+from pygments.modeline import get_filetype_from_buffer
from pygments.plugin import find_plugin_lexers
from pygments.util import ClassNotFound, bytes
@@ -197,6 +198,16 @@ def guess_lexer(_text, **options):
"""
Guess a lexer by strong distinctions in the text (eg, shebang).
"""
+
+ # try to get a vim modeline first
+ ft = get_filetype_from_buffer(_text)
+
+ if ft is not None:
+ try:
+ return get_lexer_by_name(ft, **options)
+ except ClassNotFound:
+ pass
+
best_lexer = [0.0, None]
for lexer in _iter_lexerclasses():
rv = lexer.analyse_text(_text)
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index 9fd8f713..969bdba5 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -33,13 +33,13 @@ LEXERS = {
'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
'AsymptoteLexer': ('pygments.lexers.other', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
'AutoItLexer': ('pygments.lexers.other', 'AutoIt', ('autoit', 'Autoit'), ('*.au3',), ('text/x-autoit',)),
- 'AutohotkeyLexer': ('pygments.lexers.other', 'autohotkey', ('ahk',), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
+ 'AutohotkeyLexer': ('pygments.lexers.other', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
'AwkLexer': ('pygments.lexers.other', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
'BaseMakefileLexer': ('pygments.lexers.text', 'Base Makefile', ('basemake',), (), ()),
'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*'), ('application/x-sh', 'application/x-shellscript')),
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)),
- 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat',), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
+ 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
'BefungeLexer': ('pygments.lexers.other', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
'BlitzBasicLexer': ('pygments.lexers.compiled', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
'BlitzMaxLexer': ('pygments.lexers.compiled', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
@@ -56,7 +56,7 @@ LEXERS = {
'CbmBasicV2Lexer': ('pygments.lexers.other', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
'Cfengine3Lexer': ('pygments.lexers.other', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
- 'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire'), (), ('text/html+cheetah', 'text/html+spitfire')),
+ 'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
@@ -64,10 +64,10 @@ LEXERS = {
'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
'CobolFreeformatLexer': ('pygments.lexers.compiled', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
'CobolLexer': ('pygments.lexers.compiled', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
- 'CoffeeScriptLexer': ('pygments.lexers.web', 'CoffeeScript', ('coffee-script', 'coffeescript'), ('*.coffee',), ('text/coffeescript',)),
+ 'CoffeeScriptLexer': ('pygments.lexers.web', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml', '*.cfc'), ('application/x-coldfusion',)),
'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
- 'CommonLispLexer': ('pygments.lexers.functional', 'Common Lisp', ('common-lisp', 'cl'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)),
+ 'CommonLispLexer': ('pygments.lexers.functional', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)),
'CoqLexer': ('pygments.lexers.functional', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
'CppLexer': ('pygments.lexers.compiled', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')),
'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
@@ -79,12 +79,12 @@ LEXERS = {
'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
'CudaLexer': ('pygments.lexers.compiled', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
- 'CythonLexer': ('pygments.lexers.compiled', 'Cython', ('cython', 'pyx'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
+ 'CythonLexer': ('pygments.lexers.compiled', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
'DLexer': ('pygments.lexers.compiled', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
'DarcsPatchLexer': ('pygments.lexers.text', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
'DartLexer': ('pygments.lexers.web', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
- 'DebianControlLexer': ('pygments.lexers.text', 'Debian Control file', ('control',), ('control',), ()),
+ 'DebianControlLexer': ('pygments.lexers.text', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()),
'DelphiLexer': ('pygments.lexers.compiled', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
'DgLexer': ('pygments.lexers.agile', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
@@ -96,6 +96,7 @@ LEXERS = {
'DylanLidLexer': ('pygments.lexers.compiled', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
'ECLLexer': ('pygments.lexers.other', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
'ECLexer': ('pygments.lexers.compiled', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
+ 'EbnfLexer': ('pygments.lexers.text', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
'ElixirConsoleLexer': ('pygments.lexers.functional', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
'ElixirLexer': ('pygments.lexers.functional', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)),
'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
@@ -112,7 +113,7 @@ LEXERS = {
'FortranLexer': ('pygments.lexers.compiled', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)),
'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('Clipper', 'XBase'), ('*.PRG', '*.prg'), ()),
'GLShaderLexer': ('pygments.lexers.compiled', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
- 'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas',), ('*.s', '*.S'), ('text/x-gas',)),
+ 'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)),
'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
'GettextLexer': ('pygments.lexers.text', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
@@ -126,8 +127,8 @@ LEXERS = {
'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy',), ('text/x-groovy',)),
'HamlLexer': ('pygments.lexers.web', 'Haml', ('haml', 'HAML'), ('*.haml',), ('text/x-haml',)),
'HaskellLexer': ('pygments.lexers.functional', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
- 'HaxeLexer': ('pygments.lexers.web', 'haXe', ('hx', 'haXe'), ('*.hx',), ('text/haxe',)),
- 'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja'), (), ('text/html+django', 'text/html+jinja')),
+ 'HaxeLexer': ('pygments.lexers.web', 'Haxe', ('hx', 'Haxe', 'haxe', 'haXe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
+ 'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')),
'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
'HtmlLexer': ('pygments.lexers.web', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
@@ -136,7 +137,8 @@ LEXERS = {
'HxmlLexer': ('pygments.lexers.text', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
'HybrisLexer': ('pygments.lexers.other', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
'IDLLexer': ('pygments.lexers.math', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
- 'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg'), ('*.ini', '*.cfg'), ('text/x-ini',)),
+ 'IgorLexer': ('pygments.lexers.math', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
+ 'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg'), ('text/x-ini',)),
'IoLexer': ('pygments.lexers.agile', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
'IrcLogsLexer': ('pygments.lexers.text', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
@@ -163,13 +165,13 @@ LEXERS = {
'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
'LighttpdConfLexer': ('pygments.lexers.text', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)),
'LiterateAgdaLexer': ('pygments.lexers.functional', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)),
- 'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell'), ('*.lhs',), ('text/x-literate-haskell',)),
+ 'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)),
'LiveScriptLexer': ('pygments.lexers.web', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)),
'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
'LogosLexer': ('pygments.lexers.compiled', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)),
'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
- 'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode',), ('*.moo',), ('text/x-moocode',)),
+ 'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
@@ -217,14 +219,14 @@ LEXERS = {
'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm'), ('text/x-perl', 'application/x-perl')),
'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
- 'PostScriptLexer': ('pygments.lexers.other', 'PostScript', ('postscript',), ('*.ps', '*.eps'), ('application/postscript',)),
+ 'PostScriptLexer': ('pygments.lexers.other', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
'PrologLexer': ('pygments.lexers.compiled', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
- 'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties',), ('*.properties',), ('text/x-java-properties',)),
- 'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf',), ('*.proto',), ()),
+ 'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
+ 'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
'PuppetLexer': ('pygments.lexers.other', 'Puppet', ('puppet',), ('*.pp',), ()),
'PyPyLogLexer': ('pygments.lexers.text', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
'Python3Lexer': ('pygments.lexers.agile', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
@@ -249,6 +251,7 @@ LEXERS = {
'RebolLexer': ('pygments.lexers.other', 'REBOL', ('rebol',), ('*.r', '*.r3'), ('text/x-rebol',)),
'RedcodeLexer': ('pygments.lexers.other', 'Redcode', ('redcode',), ('*.cw',), ()),
'RegeditLexer': ('pygments.lexers.text', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
+ 'RexxLexer': ('pygments.lexers.other', 'Rexx', ('rexx', 'ARexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
'RobotFrameworkLexer': ('pygments.lexers.other', 'RobotFramework', ('RobotFramework', 'robotframework'), ('*.txt', '*.robot'), ('text/x-robotframework',)),
'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
@@ -265,11 +268,11 @@ LEXERS = {
'ScssLexer': ('pygments.lexers.web', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
'ShellSessionLexer': ('pygments.lexers.shell', 'Shell Session', ('shell-session',), ('*.shell-session',), ('application/x-sh-session',)),
'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
- 'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak'), ('*.st',), ('text/x-smalltalk',)),
+ 'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
'SnobolLexer': ('pygments.lexers.other', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
'SourcePawnLexer': ('pygments.lexers.other', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
- 'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list'), ('sources.list',), ()),
+ 'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()),
'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
@@ -298,7 +301,7 @@ LEXERS = {
'XQueryLexer': ('pygments.lexers.web', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)),
- 'XmlLexer': ('pygments.lexers.web', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
+ 'XmlLexer': ('pygments.lexers.web', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
'XsltLexer': ('pygments.lexers.web', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
index e9e173a8..896a3deb 100644
--- a/pygments/lexers/agile.py
+++ b/pygments/lexers/agile.py
@@ -1929,6 +1929,8 @@ class DgLexer(RegexLexer):
class Perl6Lexer(ExtendedRegexLexer):
"""
For `Perl 6 <http://www.perl6.org>`_ source code.
+
+ *New in Pygments 1.7.*
"""
name = 'Perl6'
diff --git a/pygments/lexers/asm.py b/pygments/lexers/asm.py
index f080327b..3f67862c 100644
--- a/pygments/lexers/asm.py
+++ b/pygments/lexers/asm.py
@@ -25,7 +25,7 @@ class GasLexer(RegexLexer):
For Gas (AT&T) assembly code.
"""
name = 'GAS'
- aliases = ['gas']
+ aliases = ['gas', 'asm']
filenames = ['*.s', '*.S']
mimetypes = ['text/x-gas']
diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
index c76d113d..75ace35e 100644
--- a/pygments/lexers/compiled.py
+++ b/pygments/lexers/compiled.py
@@ -235,6 +235,8 @@ class CppLexer(CFamilyLexer):
class SwigLexer(CppLexer):
"""
For `SWIG <http://www.swig.org/>`_ source code.
+
+ *New in Pygments 1.7.*
"""
name = 'SWIG'
aliases = ['Swig', 'swig']
@@ -326,6 +328,8 @@ class NesCLexer(CLexer):
"""
For `nesC <https://github.com/tinyos/nesc>`_ source code with preprocessor
directives.
+
+ *New in Pygments 1.7.*
"""
name = 'nesC'
aliases = ['nesc']
@@ -1347,6 +1351,8 @@ def objective(baselexer):
('#pop', 'oc_classname')),
(r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
('#pop', 'oc_forward_classname')),
+ # @ can also prefix other expressions like @{...} or @(...)
+ (r'@', Punctuation),
inherit,
],
'oc_classname' : [
@@ -1602,7 +1608,15 @@ class PrologLexer(RegexLexer):
(r'^#.*', Comment.Single),
(r'/\*', Comment.Multiline, 'nested-comment'),
(r'%.*', Comment.Single),
- (r'[0-9]+', Number),
+ # character literal
+ (r'0\'.', String.Char),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ # literal with prepended base
+ (r'\d\d?\'[a-zA-Z0-9]+', Number.Integer),
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer),
(r'[\[\](){}|.,;!]', Punctuation),
(r':-|-->', Punctuation),
(r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
@@ -1653,7 +1667,7 @@ class CythonLexer(RegexLexer):
"""
name = 'Cython'
- aliases = ['cython', 'pyx']
+ aliases = ['cython', 'pyx', 'pyrex']
filenames = ['*.pyx', '*.pxd', '*.pxi']
mimetypes = ['text/x-cython', 'application/x-cython']
@@ -2715,6 +2729,8 @@ class BlitzMaxLexer(RegexLexer):
class BlitzBasicLexer(RegexLexer):
"""
For `BlitzBasic <http://blitzbasic.com>`_ source code.
+
+ *New in Pygments 1.7.*
"""
name = 'BlitzBasic'
diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py
index b9016835..770e6bd9 100644
--- a/pygments/lexers/functional.py
+++ b/pygments/lexers/functional.py
@@ -723,7 +723,7 @@ class CommonLispLexer(RegexLexer):
*New in Pygments 0.9.*
"""
name = 'Common Lisp'
- aliases = ['common-lisp', 'cl']
+ aliases = ['common-lisp', 'cl', 'lisp']
filenames = ['*.cl', '*.lisp', '*.el'] # use for Elisp too
mimetypes = ['text/x-common-lisp']
@@ -985,6 +985,8 @@ class HaskellLexer(RegexLexer):
(r'\(', Punctuation, ('funclist', 'funclist')),
(r'\)', Punctuation, '#pop:2'),
],
+ # NOTE: the next four states are shared in the AgdaLexer; make sure
+ # any change is compatible with Agda as well or copy over and change
'comment': [
# Multiline Comments
(r'[^-{}]+', Comment.Multiline),
@@ -1019,6 +1021,8 @@ class AgdaLexer(RegexLexer):
"""
For the `Agda <http://wiki.portal.chalmers.se/agda/pmwiki.php>`_
dependently typed functional programming language and proof assistant.
+
+ *New in Pygments 1.7.*
"""
name = 'Agda'
@@ -1062,13 +1066,6 @@ class AgdaLexer(RegexLexer):
(r'[^\s\(\)\{\}]+', Text),
(r'\s+?', Text), # Whitespace
],
- 'comment': [
- # Multiline Comments
- (r'[^-{}]+', Comment.Multiline),
- (r'{-', Comment.Multiline, '#push'),
- (r'-}', Comment.Multiline, '#pop'),
- (r'[-{}]', Comment.Multiline),
- ],
'hole': [
# Holes
(r'[^!{}]+', Comment.Directive),
@@ -1081,6 +1078,7 @@ class AgdaLexer(RegexLexer):
(r'[a-zA-Z][a-zA-Z0-9_.]*', Name, '#pop'),
(r'[^a-zA-Z]*', Text)
],
+ 'comment': HaskellLexer.tokens['comment'],
'character': HaskellLexer.tokens['character'],
'string': HaskellLexer.tokens['string'],
'escape': HaskellLexer.tokens['escape']
@@ -1166,7 +1164,7 @@ class LiterateHaskellLexer(LiterateLexer):
*New in Pygments 0.9.*
"""
name = 'Literate Haskell'
- aliases = ['lhs', 'literate-haskell']
+ aliases = ['lhs', 'literate-haskell', 'lhaskell']
filenames = ['*.lhs']
mimetypes = ['text/x-literate-haskell']
@@ -1178,6 +1176,15 @@ class LiterateHaskellLexer(LiterateLexer):
class LiterateAgdaLexer(LiterateLexer):
"""
For Literate Agda source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ *New in Pygments 1.7.*
"""
name = 'Literate Agda'
aliases = ['lagda', 'literate-agda']
@@ -2510,7 +2517,7 @@ class ElixirConsoleLexer(Lexer):
class KokaLexer(RegexLexer):
"""
- Lexer for the `Koka <http://research.microsoft.com/en-us/projects/koka/>`_
+ Lexer for the `Koka <http://koka.codeplex.com>`_
language.
*New in Pygments 1.6.*
@@ -2522,7 +2529,7 @@ class KokaLexer(RegexLexer):
mimetypes = ['text/x-koka']
keywords = [
- 'infix', 'infixr', 'infixl', 'prefix', 'postfix',
+ 'infix', 'infixr', 'infixl',
'type', 'cotype', 'rectype', 'alias',
'struct', 'con',
'fun', 'function', 'val', 'var',
@@ -2561,7 +2568,12 @@ class KokaLexer(RegexLexer):
sboundary = '(?!'+symbols+')'
# name boundary: a keyword should not be followed by any of these
- boundary = '(?![a-zA-Z0-9_\\-])'
+ boundary = '(?![\w/])'
+
+ # koka token abstractions
+ tokenType = Name.Attribute
+ tokenTypeDef = Name.Class
+ tokenConstructor = Generic.Emph
# main lexer
tokens = {
@@ -2569,41 +2581,51 @@ class KokaLexer(RegexLexer):
include('whitespace'),
# go into type mode
- (r'::?' + sboundary, Keyword.Type, 'type'),
- (r'alias' + boundary, Keyword, 'alias-type'),
- (r'struct' + boundary, Keyword, 'struct-type'),
- (r'(%s)' % '|'.join(typeStartKeywords) + boundary, Keyword, 'type'),
+ (r'::?' + sboundary, tokenType, 'type'),
+ (r'(alias)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
+ 'alias-type'),
+ (r'(struct)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
+ 'struct-type'),
+ ((r'(%s)' % '|'.join(typeStartKeywords)) +
+ r'(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
+ 'type'),
# special sequences of tokens (we use ?: for non-capturing group as
# required by 'bygroups')
- (r'(module)(\s*)((?:interface)?)(\s*)'
- r'((?:[a-z](?:[a-zA-Z0-9_]|\-[a-zA-Z])*\.)*'
- r'[a-z](?:[a-zA-Z0-9_]|\-[a-zA-Z])*)',
- bygroups(Keyword, Text, Keyword, Text, Name.Namespace)),
- (r'(import)(\s+)((?:[a-z](?:[a-zA-Z0-9_]|\-[a-zA-Z])*\.)*[a-z]'
- r'(?:[a-zA-Z0-9_]|\-[a-zA-Z])*)(\s*)((?:as)?)'
- r'((?:[A-Z](?:[a-zA-Z0-9_]|\-[a-zA-Z])*)?)',
- bygroups(Keyword, Text, Name.Namespace, Text, Keyword,
- Name.Namespace)),
+ (r'(module)(\s+)(interface\s+)?((?:[a-z]\w*/)*[a-z]\w*)',
+ bygroups(Keyword, Text, Keyword, Name.Namespace)),
+ (r'(import)(\s+)((?:[a-z]\w*/)*[a-z]\w*)'
+ r'(?:(\s*)(=)(\s*)((?:qualified\s*)?)'
+ r'((?:[a-z]\w*/)*[a-z]\w*))?',
+ bygroups(Keyword, Text, Name.Namespace, Text, Keyword, Text,
+ Keyword, Name.Namespace)),
+
+ (r'(^(?:(?:public|private)\s*)?(?:function|fun|val))'
+ r'(\s+)([a-z]\w*|\((?:' + symbols + r'|/)\))',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(^(?:(?:public|private)\s*)?external)(\s+)(inline\s+)?'
+ r'([a-z]\w*|\((?:' + symbols + r'|/)\))',
+ bygroups(Keyword, Text, Keyword, Name.Function)),
# keywords
(r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
(r'(%s)' % '|'.join(keywords) + boundary, Keyword),
(r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo),
- (r'::|:=|\->|[=\.:]' + sboundary, Keyword),
- (r'\-' + sboundary, Generic.Strong),
+ (r'::?|:=|\->|[=\.]' + sboundary, Keyword),
# names
- (r'[A-Z]([a-zA-Z0-9_]|\-[a-zA-Z])*(?=\.)', Name.Namespace),
- (r'[A-Z]([a-zA-Z0-9_]|\-[a-zA-Z])*(?!\.)', Name.Class),
- (r'[a-z]([a-zA-Z0-9_]|\-[a-zA-Z])*', Name),
- (r'_([a-zA-Z0-9_]|\-[a-zA-Z])*', Name.Variable),
+ (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
+ bygroups(Name.Namespace, tokenConstructor)),
+ (r'((?:[a-z]\w*/)*)([a-z]\w*)', bygroups(Name.Namespace, Name)),
+ (r'((?:[a-z]\w*/)*)(\((?:' + symbols + r'|/)\))',
+ bygroups(Name.Namespace, Name)),
+ (r'_\w*', Name.Variable),
# literal string
(r'@"', String.Double, 'litstring'),
# operators
- (symbols, Operator),
+ (symbols + "|/(?![\*/])", Operator),
(r'`', Operator),
(r'[\{\}\(\)\[\];,]', Punctuation),
@@ -2630,17 +2652,17 @@ class KokaLexer(RegexLexer):
# type started by colon
'type': [
- (r'[\(\[<]', Keyword.Type, 'type-nested'),
+ (r'[\(\[<]', tokenType, 'type-nested'),
include('type-content')
],
# type nested in brackets: can contain parameters, comma etc.
'type-nested': [
- (r'[\)\]>]', Keyword.Type, '#pop'),
- (r'[\(\[<]', Keyword.Type, 'type-nested'),
- (r',', Keyword.Type),
- (r'([a-z](?:[a-zA-Z0-9_]|\-[a-zA-Z])*)(\s*)(:)(?!:)',
- bygroups(Name.Variable,Text,Keyword.Type)), # parameter name
+ (r'[\)\]>]', tokenType, '#pop'),
+ (r'[\(\[<]', tokenType, 'type-nested'),
+ (r',', tokenType),
+ (r'([a-z]\w*)(\s*)(:)(?!:)',
+ bygroups(Name, Text, tokenType)), # parameter name
include('type-content')
],
@@ -2649,23 +2671,23 @@ class KokaLexer(RegexLexer):
include('whitespace'),
# keywords
- (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
+ (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword),
(r'(?=((%s)' % '|'.join(keywords) + boundary + '))',
Keyword, '#pop'), # need to match because names overlap...
# kinds
- (r'[EPH]' + boundary, Keyword.Type),
- (r'[*!]', Keyword.Type),
+ (r'[EPHVX]' + boundary, tokenType),
# type names
- (r'[A-Z]([a-zA-Z0-9_]|\-[a-zA-Z])*(?=\.)', Name.Namespace),
- (r'[A-Z]([a-zA-Z0-9_]|\-[a-zA-Z])*(?!\.)', Name.Class),
- (r'[a-z][0-9]*(?![a-zA-Z_\-])', Keyword.Type), # Generic.Emph
- (r'_([a-zA-Z0-9_]|\-[a-zA-Z])*', Keyword.Type), # Generic.Emph
- (r'[a-z]([a-zA-Z0-9_]|\-[a-zA-Z])*', Keyword.Type),
+ (r'[a-z][0-9]*(?![\w/])', tokenType ),
+ (r'_\w*', tokenType.Variable), # Generic.Emph
+ (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
+ bygroups(Name.Namespace, tokenType)),
+ (r'((?:[a-z]\w*/)*)([a-z]\w+)',
+ bygroups(Name.Namespace, tokenType)),
# type keyword operators
- (r'::|\->|[\.:|]', Keyword.Type),
+ (r'::|\->|[\.:|]', tokenType),
#catchall
(r'', Text, '#pop')
@@ -2673,6 +2695,7 @@ class KokaLexer(RegexLexer):
# comments and literals
'whitespace': [
+ (r'\n\s*#.*$', Comment.Preproc),
(r'\s+', Text),
(r'/\*', Comment.Multiline, 'comment'),
(r'//.*$', Comment.Single)
@@ -2699,11 +2722,10 @@ class KokaLexer(RegexLexer):
(r'[\'\n]', String.Char, '#pop'),
],
'escape-sequence': [
- (r'\\[abfnrtv0\\\"\'\?]', String.Escape),
+ (r'\\[nrt\\\"\']', String.Escape),
(r'\\x[0-9a-fA-F]{2}', String.Escape),
(r'\\u[0-9a-fA-F]{4}', String.Escape),
# Yes, \U literals are 6 hex digits.
(r'\\U[0-9a-fA-F]{6}', String.Escape)
]
}
-
diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py
index 0b757e44..a72a9124 100644
--- a/pygments/lexers/math.py
+++ b/pygments/lexers/math.py
@@ -24,7 +24,7 @@ from pygments.lexers import _stan_builtins
__all__ = ['JuliaLexer', 'JuliaConsoleLexer', 'MuPADLexer', 'MatlabLexer',
'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer', 'NumPyLexer',
'RConsoleLexer', 'SLexer', 'JagsLexer', 'BugsLexer', 'StanLexer',
- 'IDLLexer', 'RdLexer']
+ 'IDLLexer', 'RdLexer', 'IgorLexer']
class JuliaLexer(RegexLexer):
@@ -1113,7 +1113,8 @@ class SLexer(RegexLexer):
}
def analyse_text(text):
- return '<-' in text
+ if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
+ return 0.11
class BugsLexer(RegexLexer):
@@ -1300,7 +1301,7 @@ class JagsLexer(RegexLexer):
return 0
class StanLexer(RegexLexer):
- """Pygments Lexer for Stan models.
+ """Pygments Lexer for Stan models.
The Stan modeling language is specified in the *Stan 1.3.0
Modeling Language Manual* `pdf
@@ -1653,3 +1654,260 @@ class RdLexer(RegexLexer):
(r'.', Text),
]
}
+
+
+class IgorLexer(RegexLexer):
+ """
+ Pygments Lexer for Igor Pro procedure files (.ipf).
+ See http://www.wavemetrics.com/ and http://www.igorexchange.com/.
+
+ *New in Pygments 1.7.*
+ """
+
+ name = 'Igor'
+ aliases = ['igor', 'igorpro']
+ filenames = ['*.ipf']
+ mimetypes = ['text/ipf']
+
+ flags = re.IGNORECASE
+
+ flowControl = [
+ 'if', 'else', 'elseif', 'endif', 'for', 'endfor', 'strswitch', 'switch',
+ 'case', 'endswitch', 'do', 'while', 'try', 'catch', 'endtry', 'break',
+ 'continue', 'return',
+ ]
+ types = [
+ 'variable', 'string', 'constant', 'strconstant', 'NVAR', 'SVAR', 'WAVE',
+ 'STRUCT', 'ThreadSafe', 'function', 'end', 'static', 'macro', 'window',
+ 'graph', 'Structure', 'EndStructure', 'EndMacro', 'FuncFit', 'Proc',
+ 'Picture', 'Menu', 'SubMenu', 'Prompt', 'DoPrompt',
+ ]
+ operations = [
+ 'Abort', 'AddFIFOData', 'AddFIFOVectData', 'AddMovieAudio',
+ 'AddMovieFrame', 'APMath', 'Append', 'AppendImage',
+ 'AppendLayoutObject', 'AppendMatrixContour', 'AppendText',
+ 'AppendToGraph', 'AppendToLayout', 'AppendToTable', 'AppendXYZContour',
+ 'AutoPositionWindow', 'BackgroundInfo', 'Beep', 'BoundingBall',
+ 'BrowseURL', 'BuildMenu', 'Button', 'cd', 'Chart', 'CheckBox',
+ 'CheckDisplayed', 'ChooseColor', 'Close', 'CloseMovie', 'CloseProc',
+ 'ColorScale', 'ColorTab2Wave', 'Concatenate', 'ControlBar',
+ 'ControlInfo', 'ControlUpdate', 'ConvexHull', 'Convolve', 'CopyFile',
+ 'CopyFolder', 'CopyScales', 'Correlate', 'CreateAliasShortcut', 'Cross',
+ 'CtrlBackground', 'CtrlFIFO', 'CtrlNamedBackground', 'Cursor',
+ 'CurveFit', 'CustomControl', 'CWT', 'Debugger', 'DebuggerOptions',
+ 'DefaultFont', 'DefaultGuiControls', 'DefaultGuiFont', 'DefineGuide',
+ 'DelayUpdate', 'DeleteFile', 'DeleteFolder', 'DeletePoints',
+ 'Differentiate', 'dir', 'Display', 'DisplayHelpTopic',
+ 'DisplayProcedure', 'DoAlert', 'DoIgorMenu', 'DoUpdate', 'DoWindow',
+ 'DoXOPIdle', 'DrawAction', 'DrawArc', 'DrawBezier', 'DrawLine',
+ 'DrawOval', 'DrawPICT', 'DrawPoly', 'DrawRect', 'DrawRRect', 'DrawText',
+ 'DSPDetrend', 'DSPPeriodogram', 'Duplicate', 'DuplicateDataFolder',
+ 'DWT', 'EdgeStats', 'Edit', 'ErrorBars', 'Execute', 'ExecuteScriptText',
+ 'ExperimentModified', 'Extract', 'FastGaussTransform', 'FastOp',
+ 'FBinRead', 'FBinWrite', 'FFT', 'FIFO2Wave', 'FIFOStatus', 'FilterFIR',
+ 'FilterIIR', 'FindLevel', 'FindLevels', 'FindPeak', 'FindPointsInPoly',
+ 'FindRoots', 'FindSequence', 'FindValue', 'FPClustering', 'fprintf',
+ 'FReadLine', 'FSetPos', 'FStatus', 'FTPDelete', 'FTPDownload',
+ 'FTPUpload', 'FuncFit', 'FuncFitMD', 'GetAxis', 'GetFileFolderInfo',
+ 'GetLastUserMenuInfo', 'GetMarquee', 'GetSelection', 'GetWindow',
+ 'GraphNormal', 'GraphWaveDraw', 'GraphWaveEdit', 'Grep', 'GroupBox',
+ 'Hanning', 'HideIgorMenus', 'HideInfo', 'HideProcedures', 'HideTools',
+ 'HilbertTransform', 'Histogram', 'IFFT', 'ImageAnalyzeParticles',
+ 'ImageBlend', 'ImageBoundaryToMask', 'ImageEdgeDetection',
+ 'ImageFileInfo', 'ImageFilter', 'ImageFocus', 'ImageGenerateROIMask',
+ 'ImageHistModification', 'ImageHistogram', 'ImageInterpolate',
+ 'ImageLineProfile', 'ImageLoad', 'ImageMorphology', 'ImageRegistration',
+ 'ImageRemoveBackground', 'ImageRestore', 'ImageRotate', 'ImageSave',
+ 'ImageSeedFill', 'ImageSnake', 'ImageStats', 'ImageThreshold',
+ 'ImageTransform', 'ImageUnwrapPhase', 'ImageWindow', 'IndexSort',
+ 'InsertPoints', 'Integrate', 'IntegrateODE', 'Interp3DPath',
+ 'Interpolate3D', 'KillBackground', 'KillControl', 'KillDataFolder',
+ 'KillFIFO', 'KillFreeAxis', 'KillPath', 'KillPICTs', 'KillStrings',
+ 'KillVariables', 'KillWaves', 'KillWindow', 'KMeans', 'Label', 'Layout',
+ 'Legend', 'LinearFeedbackShiftRegister', 'ListBox', 'LoadData',
+ 'LoadPackagePreferences', 'LoadPICT', 'LoadWave', 'Loess',
+ 'LombPeriodogram', 'Make', 'MakeIndex', 'MarkPerfTestTime',
+ 'MatrixConvolve', 'MatrixCorr', 'MatrixEigenV', 'MatrixFilter',
+ 'MatrixGaussJ', 'MatrixInverse', 'MatrixLinearSolve',
+ 'MatrixLinearSolveTD', 'MatrixLLS', 'MatrixLUBkSub', 'MatrixLUD',
+ 'MatrixMultiply', 'MatrixOP', 'MatrixSchur', 'MatrixSolve',
+ 'MatrixSVBkSub', 'MatrixSVD', 'MatrixTranspose', 'MeasureStyledText',
+ 'Modify', 'ModifyContour', 'ModifyControl', 'ModifyControlList',
+ 'ModifyFreeAxis', 'ModifyGraph', 'ModifyImage', 'ModifyLayout',
+ 'ModifyPanel', 'ModifyTable', 'ModifyWaterfall', 'MoveDataFolder',
+ 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow', 'MoveVariable',
+ 'MoveWave', 'MoveWindow', 'NeuralNetworkRun', 'NeuralNetworkTrain',
+ 'NewDataFolder', 'NewFIFO', 'NewFIFOChan', 'NewFreeAxis', 'NewImage',
+ 'NewLayout', 'NewMovie', 'NewNotebook', 'NewPanel', 'NewPath',
+ 'NewWaterfall', 'Note', 'Notebook', 'NotebookAction', 'Open',
+ 'OpenNotebook', 'Optimize', 'ParseOperationTemplate', 'PathInfo',
+ 'PauseForUser', 'PauseUpdate', 'PCA', 'PlayMovie', 'PlayMovieAction',
+ 'PlaySnd', 'PlaySound', 'PopupContextualMenu', 'PopupMenu',
+ 'Preferences', 'PrimeFactors', 'Print', 'printf', 'PrintGraphs',
+ 'PrintLayout', 'PrintNotebook', 'PrintSettings', 'PrintTable',
+ 'Project', 'PulseStats', 'PutScrapText', 'pwd', 'Quit',
+ 'RatioFromNumber', 'Redimension', 'Remove', 'RemoveContour',
+ 'RemoveFromGraph', 'RemoveFromLayout', 'RemoveFromTable', 'RemoveImage',
+ 'RemoveLayoutObjects', 'RemovePath', 'Rename', 'RenameDataFolder',
+ 'RenamePath', 'RenamePICT', 'RenameWindow', 'ReorderImages',
+ 'ReorderTraces', 'ReplaceText', 'ReplaceWave', 'Resample',
+ 'ResumeUpdate', 'Reverse', 'Rotate', 'Save', 'SaveData',
+ 'SaveExperiment', 'SaveGraphCopy', 'SaveNotebook',
+ 'SavePackagePreferences', 'SavePICT', 'SaveTableCopy',
+ 'SetActiveSubwindow', 'SetAxis', 'SetBackground', 'SetDashPattern',
+ 'SetDataFolder', 'SetDimLabel', 'SetDrawEnv', 'SetDrawLayer',
+ 'SetFileFolderInfo', 'SetFormula', 'SetIgorHook', 'SetIgorMenuMode',
+ 'SetIgorOption', 'SetMarquee', 'SetProcessSleep', 'SetRandomSeed',
+ 'SetScale', 'SetVariable', 'SetWaveLock', 'SetWindow', 'ShowIgorMenus',
+ 'ShowInfo', 'ShowTools', 'Silent', 'Sleep', 'Slider', 'Smooth',
+ 'SmoothCustom', 'Sort', 'SoundInRecord', 'SoundInSet',
+ 'SoundInStartChart', 'SoundInStatus', 'SoundInStopChart',
+ 'SphericalInterpolate', 'SphericalTriangulate', 'SplitString',
+ 'sprintf', 'sscanf', 'Stack', 'StackWindows',
+ 'StatsAngularDistanceTest', 'StatsANOVA1Test', 'StatsANOVA2NRTest',
+ 'StatsANOVA2RMTest', 'StatsANOVA2Test', 'StatsChiTest',
+ 'StatsCircularCorrelationTest', 'StatsCircularMeans',
+ 'StatsCircularMoments', 'StatsCircularTwoSampleTest',
+ 'StatsCochranTest', 'StatsContingencyTable', 'StatsDIPTest',
+ 'StatsDunnettTest', 'StatsFriedmanTest', 'StatsFTest',
+ 'StatsHodgesAjneTest', 'StatsJBTest', 'StatsKendallTauTest',
+ 'StatsKSTest', 'StatsKWTest', 'StatsLinearCorrelationTest',
+ 'StatsLinearRegression', 'StatsMultiCorrelationTest',
+ 'StatsNPMCTest', 'StatsNPNominalSRTest', 'StatsQuantiles',
+ 'StatsRankCorrelationTest', 'StatsResample', 'StatsSample',
+ 'StatsScheffeTest', 'StatsSignTest', 'StatsSRTest', 'StatsTTest',
+ 'StatsTukeyTest', 'StatsVariancesTest', 'StatsWatsonUSquaredTest',
+ 'StatsWatsonWilliamsTest', 'StatsWheelerWatsonTest',
+ 'StatsWilcoxonRankTest', 'StatsWRCorrelationTest', 'String',
+ 'StructGet', 'StructPut', 'TabControl', 'Tag', 'TextBox', 'Tile',
+ 'TileWindows', 'TitleBox', 'ToCommandLine', 'ToolsGrid',
+ 'Triangulate3d', 'Unwrap', 'ValDisplay', 'Variable', 'WaveMeanStdv',
+ 'WaveStats', 'WaveTransform', 'wfprintf', 'WignerTransform',
+ 'WindowFunction',
+ ]
+ functions = [
+ 'abs', 'acos', 'acosh', 'AiryA', 'AiryAD', 'AiryB', 'AiryBD', 'alog',
+ 'area', 'areaXY', 'asin', 'asinh', 'atan', 'atan2', 'atanh',
+ 'AxisValFromPixel', 'Besseli', 'Besselj', 'Besselk', 'Bessely', 'bessi',
+ 'bessj', 'bessk', 'bessy', 'beta', 'betai', 'BinarySearch',
+ 'BinarySearchInterp', 'binomial', 'binomialln', 'binomialNoise', 'cabs',
+ 'CaptureHistoryStart', 'ceil', 'cequal', 'char2num', 'chebyshev',
+ 'chebyshevU', 'CheckName', 'cmplx', 'cmpstr', 'conj', 'ContourZ', 'cos',
+ 'cosh', 'cot', 'CountObjects', 'CountObjectsDFR', 'cpowi',
+ 'CreationDate', 'csc', 'DataFolderExists', 'DataFolderRefsEqual',
+ 'DataFolderRefStatus', 'date2secs', 'datetime', 'DateToJulian',
+ 'Dawson', 'DDEExecute', 'DDEInitiate', 'DDEPokeString', 'DDEPokeWave',
+ 'DDERequestWave', 'DDEStatus', 'DDETerminate', 'deltax', 'digamma',
+ 'DimDelta', 'DimOffset', 'DimSize', 'ei', 'enoise', 'equalWaves', 'erf',
+ 'erfc', 'exists', 'exp', 'expInt', 'expNoise', 'factorial', 'fakedata',
+ 'faverage', 'faverageXY', 'FindDimLabel', 'FindListItem', 'floor',
+ 'FontSizeHeight', 'FontSizeStringWidth', 'FresnelCos', 'FresnelSin',
+ 'gamma', 'gammaInc', 'gammaNoise', 'gammln', 'gammp', 'gammq', 'Gauss',
+ 'Gauss1D', 'Gauss2D', 'gcd', 'GetDefaultFontSize',
+ 'GetDefaultFontStyle', 'GetKeyState', 'GetRTError', 'gnoise',
+ 'GrepString', 'hcsr', 'hermite', 'hermiteGauss', 'HyperG0F1',
+ 'HyperG1F1', 'HyperG2F1', 'HyperGNoise', 'HyperGPFQ', 'IgorVersion',
+ 'ilim', 'imag', 'Inf', 'Integrate1D', 'interp', 'Interp2D', 'Interp3D',
+ 'inverseERF', 'inverseERFC', 'ItemsInList', 'jlim', 'Laguerre',
+ 'LaguerreA', 'LaguerreGauss', 'leftx', 'LegendreA', 'limit', 'ln',
+ 'log', 'logNormalNoise', 'lorentzianNoise', 'magsqr', 'MandelbrotPoint',
+ 'MarcumQ', 'MatrixDet', 'MatrixDot', 'MatrixRank', 'MatrixTrace', 'max',
+ 'mean', 'min', 'mod', 'ModDate', 'NaN', 'norm', 'NumberByKey',
+ 'numpnts', 'numtype', 'NumVarOrDefault', 'NVAR_Exists', 'p2rect',
+ 'ParamIsDefault', 'pcsr', 'Pi', 'PixelFromAxisVal', 'pnt2x',
+ 'poissonNoise', 'poly', 'poly2D', 'PolygonArea', 'qcsr', 'r2polar',
+ 'real', 'rightx', 'round', 'sawtooth', 'ScreenResolution', 'sec',
+ 'SelectNumber', 'sign', 'sin', 'sinc', 'sinh', 'SphericalBessJ',
+ 'SphericalBessJD', 'SphericalBessY', 'SphericalBessYD',
+ 'SphericalHarmonics', 'sqrt', 'StartMSTimer', 'StatsBetaCDF',
+ 'StatsBetaPDF', 'StatsBinomialCDF', 'StatsBinomialPDF',
+ 'StatsCauchyCDF', 'StatsCauchyPDF', 'StatsChiCDF', 'StatsChiPDF',
+ 'StatsCMSSDCDF', 'StatsCorrelation', 'StatsDExpCDF', 'StatsDExpPDF',
+ 'StatsErlangCDF', 'StatsErlangPDF', 'StatsErrorPDF', 'StatsEValueCDF',
+ 'StatsEValuePDF', 'StatsExpCDF', 'StatsExpPDF', 'StatsFCDF',
+ 'StatsFPDF', 'StatsFriedmanCDF', 'StatsGammaCDF', 'StatsGammaPDF',
+ 'StatsGeometricCDF', 'StatsGeometricPDF', 'StatsHyperGCDF',
+ 'StatsHyperGPDF', 'StatsInvBetaCDF', 'StatsInvBinomialCDF',
+ 'StatsInvCauchyCDF', 'StatsInvChiCDF', 'StatsInvCMSSDCDF',
+ 'StatsInvDExpCDF', 'StatsInvEValueCDF', 'StatsInvExpCDF',
+ 'StatsInvFCDF', 'StatsInvFriedmanCDF', 'StatsInvGammaCDF',
+ 'StatsInvGeometricCDF', 'StatsInvKuiperCDF', 'StatsInvLogisticCDF',
+ 'StatsInvLogNormalCDF', 'StatsInvMaxwellCDF', 'StatsInvMooreCDF',
+ 'StatsInvNBinomialCDF', 'StatsInvNCChiCDF', 'StatsInvNCFCDF',
+ 'StatsInvNormalCDF', 'StatsInvParetoCDF', 'StatsInvPoissonCDF',
+ 'StatsInvPowerCDF', 'StatsInvQCDF', 'StatsInvQpCDF',
+ 'StatsInvRayleighCDF', 'StatsInvRectangularCDF', 'StatsInvSpearmanCDF',
+ 'StatsInvStudentCDF', 'StatsInvTopDownCDF', 'StatsInvTriangularCDF',
+ 'StatsInvUsquaredCDF', 'StatsInvVonMisesCDF', 'StatsInvWeibullCDF',
+ 'StatsKuiperCDF', 'StatsLogisticCDF', 'StatsLogisticPDF',
+ 'StatsLogNormalCDF', 'StatsLogNormalPDF', 'StatsMaxwellCDF',
+ 'StatsMaxwellPDF', 'StatsMedian', 'StatsMooreCDF', 'StatsNBinomialCDF',
+ 'StatsNBinomialPDF', 'StatsNCChiCDF', 'StatsNCChiPDF', 'StatsNCFCDF',
+ 'StatsNCFPDF', 'StatsNCTCDF', 'StatsNCTPDF', 'StatsNormalCDF',
+ 'StatsNormalPDF', 'StatsParetoCDF', 'StatsParetoPDF', 'StatsPermute',
+ 'StatsPoissonCDF', 'StatsPoissonPDF', 'StatsPowerCDF',
+ 'StatsPowerNoise', 'StatsPowerPDF', 'StatsQCDF', 'StatsQpCDF',
+ 'StatsRayleighCDF', 'StatsRayleighPDF', 'StatsRectangularCDF',
+ 'StatsRectangularPDF', 'StatsRunsCDF', 'StatsSpearmanRhoCDF',
+ 'StatsStudentCDF', 'StatsStudentPDF', 'StatsTopDownCDF',
+ 'StatsTriangularCDF', 'StatsTriangularPDF', 'StatsTrimmedMean',
+ 'StatsUSquaredCDF', 'StatsVonMisesCDF', 'StatsVonMisesNoise',
+ 'StatsVonMisesPDF', 'StatsWaldCDF', 'StatsWaldPDF', 'StatsWeibullCDF',
+ 'StatsWeibullPDF', 'StopMSTimer', 'str2num', 'stringCRC', 'stringmatch',
+ 'strlen', 'strsearch', 'StudentA', 'StudentT', 'sum', 'SVAR_Exists',
+ 'TagVal', 'tan', 'tanh', 'ThreadGroupCreate', 'ThreadGroupRelease',
+ 'ThreadGroupWait', 'ThreadProcessorCount', 'ThreadReturnValue', 'ticks',
+ 'trunc', 'Variance', 'vcsr', 'WaveCRC', 'WaveDims', 'WaveExists',
+ 'WaveMax', 'WaveMin', 'WaveRefsEqual', 'WaveType', 'WhichListItem',
+ 'WinType', 'WNoise', 'x', 'x2pnt', 'xcsr', 'y', 'z', 'zcsr', 'ZernikeR',
+ ]
+ functions += [
+ 'AddListItem', 'AnnotationInfo', 'AnnotationList', 'AxisInfo',
+ 'AxisList', 'CaptureHistory', 'ChildWindowList', 'CleanupName',
+ 'ContourInfo', 'ContourNameList', 'ControlNameList', 'CsrInfo',
+ 'CsrWave', 'CsrXWave', 'CTabList', 'DataFolderDir', 'date',
+ 'DDERequestString', 'FontList', 'FuncRefInfo', 'FunctionInfo',
+ 'FunctionList', 'FunctionPath', 'GetDataFolder', 'GetDefaultFont',
+ 'GetDimLabel', 'GetErrMessage', 'GetFormula',
+ 'GetIndependentModuleName', 'GetIndexedObjName', 'GetIndexedObjNameDFR',
+ 'GetRTErrMessage', 'GetRTStackInfo', 'GetScrapText', 'GetUserData',
+ 'GetWavesDataFolder', 'GrepList', 'GuideInfo', 'GuideNameList', 'Hash',
+ 'IgorInfo', 'ImageInfo', 'ImageNameList', 'IndexedDir', 'IndexedFile',
+ 'JulianToDate', 'LayoutInfo', 'ListMatch', 'LowerStr', 'MacroList',
+ 'NameOfWave', 'note', 'num2char', 'num2istr', 'num2str',
+ 'OperationList', 'PadString', 'ParseFilePath', 'PathList', 'PICTInfo',
+ 'PICTList', 'PossiblyQuoteName', 'ProcedureText', 'RemoveByKey',
+ 'RemoveEnding', 'RemoveFromList', 'RemoveListItem',
+ 'ReplaceNumberByKey', 'ReplaceString', 'ReplaceStringByKey',
+ 'Secs2Date', 'Secs2Time', 'SelectString', 'SortList',
+ 'SpecialCharacterInfo', 'SpecialCharacterList', 'SpecialDirPath',
+ 'StringByKey', 'StringFromList', 'StringList', 'StrVarOrDefault',
+ 'TableInfo', 'TextFile', 'ThreadGroupGetDF', 'time', 'TraceFromPixel',
+ 'TraceInfo', 'TraceNameList', 'UniqueName', 'UnPadString', 'UpperStr',
+ 'VariableList', 'WaveInfo', 'WaveList', 'WaveName', 'WaveUnits',
+ 'WinList', 'WinName', 'WinRecreation', 'XWaveName',
+ 'ContourNameToWaveRef', 'CsrWaveRef', 'CsrXWaveRef',
+ 'ImageNameToWaveRef', 'NewFreeWave', 'TagWaveRef', 'TraceNameToWaveRef',
+ 'WaveRefIndexed', 'XWaveRefFromTrace', 'GetDataFolderDFR',
+ 'GetWavesDataFolderDFR', 'NewFreeDataFolder', 'ThreadGroupGetDFR',
+ ]
+
+ tokens = {
+ 'root': [
+ (r'//.*$', Comment.Single),
+ (r'"([^"\\]|\\.)*"', String),
+ # Flow Control.
+ (r'\b(%s)\b' % '|'.join(flowControl), Keyword),
+ # Types.
+ (r'\b(%s)\b' % '|'.join(types), Keyword.Type),
+ # Built-in operations.
+ (r'\b(%s)\b' % '|'.join(operations), Name.Class),
+ # Built-in functions.
+ (r'\b(%s)\b' % '|'.join(functions), Name.Function),
+ # Compiler directives.
+ (r'^#(include|pragma|define|ifdef|ifndef|endif)',
+ Name.Decorator),
+ (r'[^a-zA-Z"/]+', Text),
+ (r'.', Text),
+ ],
+ }
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
index 803212b0..10598fb4 100644
--- a/pygments/lexers/other.py
+++ b/pygments/lexers/other.py
@@ -14,7 +14,8 @@ import re
from pygments.lexer import RegexLexer, include, bygroups, using, \
this, combined, ExtendedRegexLexer
from pygments.token import Error, Punctuation, Literal, Token, \
- Text, Comment, Operator, Keyword, Name, String, Number, Generic
+ Text, Comment, Operator, Keyword, Name, String, Number, Generic, \
+ Whitespace
from pygments.util import get_bool_opt
from pygments.lexers.web import HtmlLexer
@@ -35,7 +36,7 @@ __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer',
'ECLLexer', 'UrbiscriptLexer', 'OpenEdgeLexer', 'BroLexer',
'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer',
'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer',
- 'CbmBasicV2Lexer', 'AutoItLexer']
+ 'CbmBasicV2Lexer', 'AutoItLexer', 'RexxLexer']
class ECLLexer(RegexLexer):
@@ -245,7 +246,7 @@ class MOOCodeLexer(RegexLexer):
"""
name = 'MOOCode'
filenames = ['*.moo']
- aliases = ['moocode']
+ aliases = ['moocode', 'moo']
mimetypes = ['text/x-moocode']
tokens = {
@@ -289,7 +290,7 @@ class SmalltalkLexer(RegexLexer):
"""
name = 'Smalltalk'
filenames = ['*.st']
- aliases = ['smalltalk', 'squeak']
+ aliases = ['smalltalk', 'squeak', 'st']
mimetypes = ['text/x-smalltalk']
tokens = {
@@ -363,7 +364,7 @@ class SmalltalkLexer(RegexLexer):
include('literals'),
],
'afterobject' : [
- (r'! !$', Keyword , '#pop'), # squeak chunk delimeter
+ (r'! !$', Keyword , '#pop'), # squeak chunk delimiter
include('whitespaces'),
(r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
Name.Builtin, '#pop'),
@@ -1961,11 +1962,11 @@ class AsymptoteLexer(RegexLexer):
from pygments.lexers._asybuiltins import ASYFUNCNAME, ASYVARNAME
for index, token, value in \
RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in ASYFUNCNAME:
- token = Name.Function
- elif token is Name and value in ASYVARNAME:
- token = Name.Variable
- yield index, token, value
+ if token is Name and value in ASYFUNCNAME:
+ token = Name.Function
+ elif token is Name and value in ASYVARNAME:
+ token = Name.Variable
+ yield index, token, value
class PostScriptLexer(RegexLexer):
@@ -1979,7 +1980,7 @@ class PostScriptLexer(RegexLexer):
*New in Pygments 1.4.*
"""
name = 'PostScript'
- aliases = ['postscript']
+ aliases = ['postscript', 'postscr']
filenames = ['*.ps', '*.eps']
mimetypes = ['application/postscript']
@@ -2067,7 +2068,7 @@ class AutohotkeyLexer(RegexLexer):
*New in Pygments 1.4.*
"""
name = 'autohotkey'
- aliases = ['ahk']
+ aliases = ['ahk', 'autohotkey']
filenames = ['*.ahk', '*.ahkl']
mimetypes = ['text/x-autohotkey']
@@ -2352,7 +2353,7 @@ class ProtoBufLexer(RegexLexer):
"""
name = 'Protocol Buffer'
- aliases = ['protobuf']
+ aliases = ['protobuf', 'proto']
filenames = ['*.proto']
tokens = {
@@ -3624,7 +3625,7 @@ class AutoItLexer(RegexLexer):
(r'[a-zA-Z_#@$][a-zA-Z0-9_#@$]*', Name),
(r'\\|\'', Text),
(r'\`([\,\%\`abfnrtv\-\+;])', String.Escape),
- (r'_\n', Text), # Line continuation
+ (r'_\n', Text), # Line continuation
include('garbage'),
],
'commands': [
@@ -3665,3 +3666,113 @@ class AutoItLexer(RegexLexer):
(r'[^\S\n]', Text),
],
}
+
+
+class RexxLexer(RegexLexer):
+ """
+ `Rexx <http://www.rexxinfo.org/>`_ is a scripting language available for
+ a wide range of different platforms with its roots found on mainframe
+ systems. It is popular for I/O- and data based tasks and can act as glue
+ language to bind different applications together.
+
+ *New in Pygments 1.7.*
+ """
+ name = 'Rexx'
+ aliases = ['rexx', 'ARexx', 'arexx']
+ filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
+ mimetypes = ['text/x-rexx']
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\s', Whitespace),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"', String, 'string_double'),
+ (r"'", String, 'string_single'),
+ (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
+ (r'([a-z_][a-z0-9_]*)(\s*)(:)(\s*)(procedure)\b',
+ bygroups(Name.Function, Whitespace, Operator, Whitespace,
+ Keyword.Declaration)),
+ (r'([a-z_][a-z0-9_]*)(\s*)(:)',
+ bygroups(Name.Label, Whitespace, Operator)),
+ include('function'),
+ include('keyword'),
+ include('operator'),
+ (r'[a-z_][a-z0-9_]*', Text),
+ ],
+ 'function': [
+ (r'(abbrev|abs|address|arg|b2x|bitand|bitor|bitxor|c2d|c2x|'
+ r'center|charin|charout|chars|compare|condition|copies|d2c|'
+ r'd2x|datatype|date|delstr|delword|digits|errortext|form|'
+ r'format|fuzz|insert|lastpos|left|length|linein|lineout|lines|'
+ r'max|min|overlay|pos|queued|random|reverse|right|sign|'
+ r'sourceline|space|stream|strip|substr|subword|symbol|time|'
+ r'trace|translate|trunc|value|verify|word|wordindex|'
+ r'wordlength|wordpos|words|x2b|x2c|x2d|xrange)(\s*)(\()',
+ bygroups(Name.Builtin, Whitespace, Operator)),
+ ],
+ 'keyword': [
+ (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
+ r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
+ r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
+ r'while)\b', Keyword.Reserved),
+ ],
+ 'operator': [
+ (ur'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
+ ur'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
+ ur'¬>>|¬>|¬|\.|,)', Operator),
+ ],
+ 'string_double': [
+ (r'[^"\n]+', String),
+ (r'""', String),
+ (r'"', String, '#pop'),
+ (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
+ ],
+ 'string_single': [
+ (r'[^\'\n]', String),
+ (r'\'\'', String),
+ (r'\'', String, '#pop'),
+ (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
+ ],
+ 'comment': [
+ (r'[^*]+', Comment.Multiline),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'\*', Comment.Multiline),
+ ]
+ }
+
+ _c = lambda s: re.compile(s, re.MULTILINE)
+ _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
+ _ADDRESS_PATTERN = _c(r'^\s*address\s+')
+ _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
+ _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
+ _PROCEDURE_PATTERN = _c(r'^\s*([a-z_][a-z0-9_]*)(\s*)(:)(\s*)(procedure)\b')
+ _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
+ _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
+ PATTERNS_AND_WEIGHTS = (
+ (_ADDRESS_COMMAND_PATTERN, 0.2),
+ (_ADDRESS_PATTERN, 0.05),
+ (_DO_WHILE_PATTERN, 0.1),
+ (_ELSE_DO_PATTERN, 0.1),
+ (_IF_THEN_DO_PATTERN, 0.1),
+ (_PROCEDURE_PATTERN, 0.5),
+ (_PARSE_ARG_PATTERN, 0.2),
+ )
+
+ def analyse_text(text):
+ """
+        Check for initial comment and patterns that distinguish Rexx from other
+ C-like languages.
+ """
+ if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
+ # Header matches MVS Rexx requirements, this is certainly a Rexx
+ # script.
+ return 1.0
+ elif text.startswith('/*'):
+ # Header matches general Rexx requirements; the source code might
+ # still be any language using C comments such as C++, C# or Java.
+ lowerText = text.lower()
+ result = sum(weight
+ for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
+ if pattern.search(lowerText)) + 0.01
+ return min(result, 1.0)
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
index 59a72a81..78c5c996 100644
--- a/pygments/lexers/shell.py
+++ b/pygments/lexers/shell.py
@@ -208,7 +208,7 @@ class BatchLexer(RegexLexer):
*New in Pygments 0.7.*
"""
name = 'Batchfile'
- aliases = ['bat']
+ aliases = ['bat', 'dosbatch', 'winbatch']
filenames = ['*.bat', '*.cmd']
mimetypes = ['application/x-dos-batch']
diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py
index dcfd8fa8..546d1f87 100644
--- a/pygments/lexers/sql.py
+++ b/pygments/lexers/sql.py
@@ -375,7 +375,7 @@ class SqlLexer(RegexLexer):
r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|'
r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|'
r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|'
- r'EXCEPT|ESCEPTION|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
+ r'EXCEPTION|EXCEPT|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|'
r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|'
r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|'
diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py
index ff4a0453..63fc5f37 100644
--- a/pygments/lexers/templates.py
+++ b/pygments/lexers/templates.py
@@ -766,7 +766,7 @@ class CheetahHtmlLexer(DelegatingLexer):
"""
name = 'HTML+Cheetah'
- aliases = ['html+cheetah', 'html+spitfire']
+ aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
mimetypes = ['text/html+cheetah', 'text/html+spitfire']
def __init__(self, **options):
@@ -1258,7 +1258,7 @@ class HtmlDjangoLexer(DelegatingLexer):
"""
name = 'HTML+Django/Jinja'
- aliases = ['html+django', 'html+jinja']
+ aliases = ['html+django', 'html+jinja', 'htmldjango']
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+django', 'text/html+jinja']
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index b47c49d2..de9979cf 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -25,7 +25,7 @@ __all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer
'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer',
'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer',
'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer', 'HttpLexer',
- 'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer']
+ 'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer', 'EbnfLexer']
class IniLexer(RegexLexer):
@@ -34,7 +34,7 @@ class IniLexer(RegexLexer):
"""
name = 'INI'
- aliases = ['ini', 'cfg']
+ aliases = ['ini', 'cfg', 'dosini']
filenames = ['*.ini', '*.cfg']
mimetypes = ['text/x-ini']
@@ -106,7 +106,7 @@ class PropertiesLexer(RegexLexer):
"""
name = 'Properties'
- aliases = ['properties']
+ aliases = ['properties', 'jproperties']
filenames = ['*.properties']
mimetypes = ['text/x-java-properties']
@@ -128,7 +128,7 @@ class SourcesListLexer(RegexLexer):
"""
name = 'Debian Sourcelist'
- aliases = ['sourceslist', 'sources.list']
+ aliases = ['sourceslist', 'sources.list', 'debsources']
filenames = ['sources.list']
mimetype = ['application/x-debian-sourceslist']
@@ -1053,7 +1053,7 @@ class DebianControlLexer(RegexLexer):
*New in Pygments 0.9.*
"""
name = 'Debian Control file'
- aliases = ['control']
+ aliases = ['control', 'debcontrol']
filenames = ['control']
tokens = {
@@ -1631,7 +1631,7 @@ class CMakeLexer(RegexLexer):
# r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
# r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
# r'COUNTARGS)\b', Name.Builtin, 'args'),
- (r'\b([A-Za-z_]+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
+ (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
Punctuation), 'args'),
include('keywords'),
include('ws')
@@ -1841,3 +1841,53 @@ class HxmlLexer(RegexLexer):
(r'#.*', Comment.Single)
]
}
+
+
+class EbnfLexer(RegexLexer):
+ """
+ Lexer for `ISO/IEC 14977 EBNF
+ <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
+ grammars.
+
+ *New in Pygments 1.7.*
+ """
+
+ name = 'EBNF'
+ aliases = ['ebnf']
+ filenames = ['*.ebnf']
+ mimetypes = ['text/x-ebnf']
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('comment_start'),
+ include('identifier'),
+ (r'=', Operator, 'production'),
+ ],
+ 'production': [
+ include('whitespace'),
+ include('comment_start'),
+ include('identifier'),
+ (r'"[^"]*"', String.Double),
+ (r"'[^']*'", String.Single),
+ (r'(\?[^?]*\?)', Name.Entity),
+ (r'[\[\]{}(),|]', Punctuation),
+ (r'-', Operator),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+ 'comment_start': [
+ (r'\(\*', Comment.Multiline, 'comment'),
+ ],
+ 'comment': [
+ (r'[^*)]', Comment.Multiline),
+ include('comment_start'),
+ (r'\*\)', Comment.Multiline, '#pop'),
+ (r'[*)]', Comment.Multiline),
+ ],
+ 'identifier': [
+ (r'([a-zA-Z][a-zA-Z0-9 \-]*)', Keyword),
+ ],
+ }
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
index 383bf6ad..142fef57 100644
--- a/pygments/lexers/web.py
+++ b/pygments/lexers/web.py
@@ -722,8 +722,10 @@ class HtmlLexer(RegexLexer):
('<![^>]*>', Comment.Preproc),
(r'<\s*script\s*', Name.Tag, ('script-content', 'tag')),
(r'<\s*style\s*', Name.Tag, ('style-content', 'tag')),
- (r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[a-zA-Z0-9:]+\s*>', Name.Tag),
+ # note: this allows tag names not used in HTML like <x:with-dash>,
+ # this is to support yet-unknown template engines and the like
+ (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
],
'comment': [
('[^-]+', Comment),
@@ -1006,7 +1008,8 @@ class XmlLexer(RegexLexer):
name = 'XML'
aliases = ['xml']
- filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl']
+ filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd',
+ '*.wsdl', '*.wsf']
mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
'application/rss+xml', 'application/atom+xml']
@@ -1125,227 +1128,840 @@ class MxmlLexer(RegexLexer):
}
-class HaxeLexer(RegexLexer):
+class HaxeLexer(ExtendedRegexLexer):
"""
- For haXe source code (http://haxe.org/).
+ For Haxe source code (http://haxe.org/).
*New in Pygments 1.3.*
"""
- name = 'haXe'
- aliases = ['hx', 'haXe']
- filenames = ['*.hx']
- mimetypes = ['text/haxe']
+ name = 'Haxe'
+ aliases = ['hx', 'Haxe', 'haxe', 'haXe', 'hxsl']
+ filenames = ['*.hx', '*.hxsl']
+ mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
+
+ # keywords extracted from lexer.mll in the haxe compiler source
+ keyword = (r'(?:function|class|static|var|if|else|while|do|for|'
+ r'break|return|continue|extends|implements|import|'
+ r'switch|case|default|public|private|try|untyped|'
+ r'catch|new|this|throw|extern|enum|in|interface|'
+ r'cast|override|dynamic|typedef|package|'
+ r'inline|using|null|true|false|abstract)\b')
+
+ # idtype in lexer.mll
+ typeid = r'_*[A-Z][_a-zA-Z0-9]*'
+
+ # combined ident and dollar and idtype
+ ident = r'(?:_*[a-z][_a-zA-Z0-9]*|_+[0-9][_a-zA-Z0-9]*|' + typeid + \
+ '|_+|\$[_a-zA-Z0-9]+)'
- ident = r'(?:[a-zA-Z_][a-zA-Z0-9_]*)'
- typeid = r'(?:(?:[a-z0-9_\.])*[A-Z_][A-Za-z0-9_]*)'
- key_prop = r'(?:default|null|never)'
- key_decl_mod = r'(?:public|private|override|static|inline|extern|dynamic)'
+ binop = (r'(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|'
+ r'!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|'
+ r'/|\-|=>|=)')
+
+ # ident except keywords
+ ident_no_keyword = r'(?!' + keyword + ')' + ident
flags = re.DOTALL | re.MULTILINE
+ preproc_stack = []
+
+ def preproc_callback(self, match, ctx):
+ proc = match.group(2)
+
+ if proc == 'if':
+ # store the current stack
+ self.preproc_stack.append(ctx.stack[:])
+ elif proc in ['else', 'elseif']:
+ # restore the stack back to right before #if
+ if self.preproc_stack: ctx.stack = self.preproc_stack[-1][:]
+ elif proc == 'end':
+ # remove the saved stack of previous #if
+ if self.preproc_stack: self.preproc_stack.pop()
+
+ # #if and #elseif should follow by an expr
+ if proc in ['if', 'elseif']:
+ ctx.stack.append('preproc-expr')
+
+ # #error can be optionally follow by the error msg
+ if proc in ['error']:
+ ctx.stack.append('preproc-error')
+
+ yield match.start(), Comment.Preproc, '#' + proc
+ ctx.pos = match.end()
+
+
tokens = {
'root': [
- include('whitespace'),
- include('comments'),
- (key_decl_mod, Keyword.Declaration),
- include('enumdef'),
- include('typedef'),
- include('classdef'),
- include('imports'),
+ include('spaces'),
+ include('meta'),
+ (r'(?:package)\b', Keyword.Namespace, ('semicolon', 'package')),
+ (r'(?:import)\b', Keyword.Namespace, ('semicolon', 'import')),
+ (r'(?:using)\b', Keyword.Namespace, ('semicolon', 'using')),
+ (r'(?:extern|private)\b', Keyword.Declaration),
+ (r'(?:abstract)\b', Keyword.Declaration, 'abstract'),
+ (r'(?:class|interface)\b', Keyword.Declaration, 'class'),
+ (r'(?:enum)\b', Keyword.Declaration, 'enum'),
+ (r'(?:typedef)\b', Keyword.Declaration, 'typedef'),
+
+ # top-level expression
+ # although it is not supported in haxe, but it is common to write
+ # expression in web pages the positive lookahead here is to prevent
+ # an infinite loop at the EOF
+ (r'(?=.)', Text, 'expr-statement'),
+ ],
+
+ # space/tab/comment/preproc
+ 'spaces': [
+ (r'\s+', Text),
+ (r'//[^\n\r]*', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(#)(if|elseif|else|end|error)\b', preproc_callback),
],
- # General constructs
- 'comments': [
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'#[^\n]*', Comment.Preproc),
+ 'string-single-interpol': [
+ (r'\$\{', String.Interpol, ('string-interpol-close', 'expr')),
+ (r'\$\$', String.Escape),
+ (r'\$(?=' + ident + ')', String.Interpol, 'ident'),
+ include('string-single'),
],
- 'whitespace': [
- include('comments'),
- (r'\s+', Text),
+
+ 'string-single': [
+ (r"'", String.Single, '#pop'),
+ (r'\\.', String.Escape),
+ (r'.', String.Single),
],
- 'codekeywords': [
- (r'\b(if|else|while|do|for|in|break|continue|'
- r'return|switch|case|try|catch|throw|null|trace|'
- r'new|this|super|untyped|cast|callback|here)\b',
- Keyword.Reserved),
+
+ 'string-double': [
+ (r'"', String.Double, '#pop'),
+ (r'\\.', String.Escape),
+ (r'.', String.Double),
],
- 'literals': [
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r'~/([^\n])*?/[gisx]*', String.Regex),
- (r'\b(true|false|null)\b', Keyword.Constant),
- ],
- 'codeblock': [
- include('whitespace'),
- include('new'),
- include('case'),
- include('anonfundef'),
- include('literals'),
- include('vardef'),
- include('codekeywords'),
- (r'[();,\[\]]', Punctuation),
- (r'(?:=|\+=|-=|\*=|/=|%=|&=|\|=|\^=|<<=|>>=|>>>=|\|\||&&|'
- r'\.\.\.|==|!=|>|<|>=|<=|\||&|\^|<<|>>>|>>|\+|\-|\*|/|%|'
- r'!|\+\+|\-\-|~|\.|\?|\:)',
- Operator),
- (ident, Name),
-
- (r'}', Punctuation,'#pop'),
- (r'{', Punctuation,'#push'),
- ],
-
- # Instance/Block level constructs
- 'propertydef': [
- (r'(\()(' + key_prop + ')(,)(' + key_prop + ')(\))',
- bygroups(Punctuation, Keyword.Reserved, Punctuation,
- Keyword.Reserved, Punctuation)),
+
+ 'string-interpol-close': [
+ (r'\$'+ident, String.Interpol),
+ (r'\}', String.Interpol, '#pop'),
],
- 'new': [
- (r'\bnew\b', Keyword, 'typedecl'),
+
+ 'package': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\.', Punctuation, 'import-ident'),
+ (r'', Text, '#pop'),
],
- 'case': [
- (r'\b(case)(\s+)(' + ident + ')(\s*)(\()',
- bygroups(Keyword.Reserved, Text, Name, Text, Punctuation),
- 'funargdecl'),
+
+ 'import': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\*', Keyword), # wildcard import
+ (r'\.', Punctuation, 'import-ident'),
+ (r'in', Keyword.Namespace, 'ident'),
+ (r'', Text, '#pop'),
],
- 'vardef': [
- (r'\b(var)(\s+)(' + ident + ')',
- bygroups(Keyword.Declaration, Text, Name.Variable), 'vardecl'),
+
+ 'import-ident': [
+ include('spaces'),
+ (r'\*', Keyword, '#pop'), # wildcard import
+ (ident, Name.Namespace, '#pop'),
],
- 'vardecl': [
- include('whitespace'),
- include('typelabel'),
- (r'=', Operator,'#pop'),
- (r';', Punctuation,'#pop'),
+
+ 'using': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\.', Punctuation, 'import-ident'),
+ (r'', Text, '#pop'),
],
- 'instancevardef': [
- (key_decl_mod,Keyword.Declaration),
- (r'\b(var)(\s+)(' + ident + ')',
- bygroups(Keyword.Declaration, Text, Name.Variable.Instance),
- 'instancevardecl'),
+
+ 'preproc-error': [
+ (r'\s+', Comment.Preproc),
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ (r'', Text, '#pop'),
],
- 'instancevardecl': [
- include('vardecl'),
- include('propertydef'),
+
+ 'preproc-expr': [
+ (r'\s+', Comment.Preproc),
+ (r'\!', Comment.Preproc),
+ (r'\(', Comment.Preproc, ('#pop', 'preproc-parenthesis')),
+
+ (ident, Comment.Preproc, '#pop'),
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
],
- 'anonfundef': [
- (r'\bfunction\b', Keyword.Declaration, 'fundecl'),
+ 'preproc-parenthesis': [
+ (r'\s+', Comment.Preproc),
+ (r'\)', Comment.Preproc, '#pop'),
+ ('', Text, 'preproc-expr-in-parenthesis'),
],
- 'instancefundef': [
- (key_decl_mod, Keyword.Declaration),
- (r'\b(function)(\s+)(' + ident + ')',
- bygroups(Keyword.Declaration, Text, Name.Function), 'fundecl'),
+
+ 'preproc-expr-chain': [
+ (r'\s+', Comment.Preproc),
+ (binop, Comment.Preproc, ('#pop', 'preproc-expr-in-parenthesis')),
+ (r'', Text, '#pop'),
],
- 'fundecl': [
- include('whitespace'),
- include('typelabel'),
- include('generictypedecl'),
- (r'\(',Punctuation,'funargdecl'),
- (r'(?=[a-zA-Z0-9_])',Text,'#pop'),
- (r'{',Punctuation,('#pop','codeblock')),
- (r';',Punctuation,'#pop'),
- ],
- 'funargdecl': [
- include('whitespace'),
- (ident, Name.Variable),
- include('typelabel'),
- include('literals'),
- (r'=', Operator),
+
+ # same as 'preproc-expr' but able to chain 'preproc-expr-chain'
+ 'preproc-expr-in-parenthesis': [
+ (r'\s+', Comment.Preproc),
+ (r'\!', Comment.Preproc),
+ (r'\(', Comment.Preproc,
+ ('#pop', 'preproc-expr-chain', 'preproc-parenthesis')),
+
+ (ident, Comment.Preproc, ('#pop', 'preproc-expr-chain')),
+ (r"'", String.Single,
+ ('#pop', 'preproc-expr-chain', 'string-single')),
+ (r'"', String.Double,
+ ('#pop', 'preproc-expr-chain', 'string-double')),
+ ],
+
+ 'abstract' : [
+ include('spaces'),
+ (r'', Text, ('#pop', 'abstract-body', 'abstract-relation',
+ 'abstract-opaque', 'type-param-constraint', 'type-name')),
+ ],
+
+ 'abstract-body' : [
+ include('spaces'),
+ (r'\{', Punctuation, ('#pop', 'class-body')),
+ ],
+
+ 'abstract-opaque' : [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close', 'type')),
+ (r'', Text, '#pop'),
+ ],
+
+ 'abstract-relation': [
+ include('spaces'),
+ (r'(?:to|from)', Keyword.Declaration, 'type'),
(r',', Punctuation),
+ (r'', Text, '#pop'),
+ ],
+
+ 'meta': [
+ include('spaces'),
+ (r'@', Name.Decorator, ('meta-body', 'meta-ident', 'meta-colon')),
+ ],
+
+ # optional colon
+ 'meta-colon': [
+ include('spaces'),
+ (r':', Name.Decorator, '#pop'),
+ (r'', Text, '#pop'),
+ ],
+
+ # same as 'ident' but set token as Name.Decorator instead of Name
+ 'meta-ident': [
+ include('spaces'),
+ (ident, Name.Decorator, '#pop'),
+ ],
+
+ 'meta-body': [
+ include('spaces'),
+ (r'\(', Name.Decorator, ('#pop', 'meta-call')),
+ (r'', Text, '#pop'),
+ ],
+
+ 'meta-call': [
+ include('spaces'),
+ (r'\)', Name.Decorator, '#pop'),
+ (r'', Text, ('#pop', 'meta-call-sep', 'expr')),
+ ],
+
+ 'meta-call-sep': [
+ include('spaces'),
+ (r'\)', Name.Decorator, '#pop'),
+ (r',', Punctuation, ('#pop', 'meta-call')),
+ ],
+
+ 'typedef': [
+ include('spaces'),
+ (r'', Text, ('#pop', 'typedef-body', 'type-param-constraint',
+ 'type-name')),
+ ],
+
+ 'typedef-body': [
+ include('spaces'),
+ (r'=', Operator, ('#pop', 'optional-semicolon', 'type')),
+ ],
+
+ 'enum': [
+ include('spaces'),
+ (r'', Text, ('#pop', 'enum-body', 'bracket-open',
+ 'type-param-constraint', 'type-name')),
+ ],
+
+ 'enum-body': [
+ include('spaces'),
+ include('meta'),
+ (r'\}', Punctuation, '#pop'),
+ (ident_no_keyword, Name, ('enum-member', 'type-param-constraint')),
+ ],
+
+ 'enum-member': [
+ include('spaces'),
+ (r'\(', Punctuation,
+ ('#pop', 'semicolon', 'flag', 'function-param')),
+ (r'', Punctuation, ('#pop', 'semicolon', 'flag')),
+ ],
+
+ 'class': [
+ include('spaces'),
+ (r'', Text, ('#pop', 'class-body', 'bracket-open', 'extends',
+ 'type-param-constraint', 'type-name')),
+ ],
+
+ 'extends': [
+ include('spaces'),
+ (r'(?:extends|implements)\b', Keyword.Declaration, 'type'),
+ (r',', Punctuation), # the comma is made optional here, since haxe2
+ # requires the comma but haxe3 does not allow it
+ (r'', Text, '#pop'),
+ ],
+
+ 'bracket-open': [
+ include('spaces'),
+ (r'\{', Punctuation, '#pop'),
+ ],
+
+ 'bracket-close': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+
+ 'class-body': [
+ include('spaces'),
+ include('meta'),
+ (r'\}', Punctuation, '#pop'),
+ (r'(?:static|public|private|override|dynamic|inline|macro)\b',
+ Keyword.Declaration),
+ (r'', Text, 'class-member'),
+ ],
+
+ 'class-member': [
+ include('spaces'),
+ (r'(var)\b', Keyword.Declaration,
+ ('#pop', 'optional-semicolon', 'prop')),
+ (r'(function)\b', Keyword.Declaration,
+ ('#pop', 'optional-semicolon', 'class-method')),
+ ],
+
+ # local function, anonymous or not
+ 'function-local': [
+ include('spaces'),
+ (r'(' + ident_no_keyword + ')?', Name.Function,
+ ('#pop', 'expr', 'flag', 'function-param',
+ 'parenthesis-open', 'type-param-constraint')),
+ ],
+
+ 'optional-expr': [
+ include('spaces'),
+ include('expr'),
+ (r'', Text, '#pop'),
+ ],
+
+ 'class-method': [
+ include('spaces'),
+ (ident, Name.Function, ('#pop', 'optional-expr', 'flag',
+ 'function-param', 'parenthesis-open',
+ 'type-param-constraint')),
+ ],
+
+ # function arguments
+ 'function-param': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
(r'\?', Punctuation),
+ (ident_no_keyword, Name,
+ ('#pop', 'function-param-sep', 'assign', 'flag')),
+ ],
+
+ 'function-param-sep': [
+ include('spaces'),
(r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'function-param')),
],
- 'typelabel': [
- (r':', Punctuation, 'type'),
+ # class property
+ # eg. var prop(default, null):String;
+ 'prop': [
+ include('spaces'),
+ (ident_no_keyword, Name, ('#pop', 'assign', 'flag', 'prop-get-set')),
],
- 'typedecl': [
- include('whitespace'),
- (typeid, Name.Class),
- (r'<', Punctuation, 'generictypedecl'),
- (r'(?=[{}()=,a-z])', Text,'#pop'),
+
+ 'prop-get-set': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close',
+ 'prop-get-set-opt', 'comma', 'prop-get-set-opt')),
+ (r'', Text, '#pop'),
+ ],
+
+ 'prop-get-set-opt': [
+ include('spaces'),
+ (r'(?:default|null|never|dynamic|get|set)\b', Keyword, '#pop'),
+ (ident_no_keyword, Text, '#pop'), #custom getter/setter
+ ],
+
+ 'expr-statement': [
+ include('spaces'),
+ # makes semicolon optional here, just to avoid checking the last
+ # one is bracket or not.
+ (r'', Text, ('#pop', 'optional-semicolon', 'expr')),
+ ],
+
+ 'expr': [
+ include('spaces'),
+ (r'@', Name.Decorator, ('#pop', 'optional-expr', 'meta-body',
+ 'meta-ident', 'meta-colon')),
+ (r'(?:\+\+|\-\-|~(?!/)|!|\-)', Operator),
+ (r'\(', Punctuation, ('#pop', 'expr-chain', 'parenthesis')),
+ (r'(?:inline)\b', Keyword.Declaration),
+ (r'(?:function)\b', Keyword.Declaration, ('#pop', 'expr-chain',
+ 'function-local')),
+ (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket')),
+ (r'(?:true|false|null)\b', Keyword.Constant, ('#pop', 'expr-chain')),
+ (r'(?:this)\b', Keyword, ('#pop', 'expr-chain')),
+ (r'(?:cast)\b', Keyword, ('#pop', 'expr-chain', 'cast')),
+ (r'(?:try)\b', Keyword, ('#pop', 'catch', 'expr')),
+ (r'(?:var)\b', Keyword.Declaration, ('#pop', 'var')),
+ (r'(?:new)\b', Keyword, ('#pop', 'expr-chain', 'new')),
+ (r'(?:switch)\b', Keyword, ('#pop', 'switch')),
+ (r'(?:if)\b', Keyword, ('#pop', 'if')),
+ (r'(?:do)\b', Keyword, ('#pop', 'do')),
+ (r'(?:while)\b', Keyword, ('#pop', 'while')),
+ (r'(?:for)\b', Keyword, ('#pop', 'for')),
+ (r'(?:untyped|throw)\b', Keyword),
+ (r'(?:return)\b', Keyword, ('#pop', 'optional-expr')),
+ (r'(?:macro)\b', Keyword, ('#pop', 'macro')),
+ (r'(?:continue|break)\b', Keyword, '#pop'),
+ (r'(?:\$\s*[a-z]\b|\$(?!'+ident+'))', Name, ('#pop', 'dollar')),
+ (ident_no_keyword, Name, ('#pop', 'expr-chain')),
+
+ # Float
+ (r'\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, ('#pop', 'expr-chain')),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'expr-chain')),
+ (r'[0-9]+', Number.Integer, ('#pop', 'expr-chain')),
+
+ # String
+ (r"'", String.Single, ('#pop', 'expr-chain', 'string-single-interpol')),
+ (r'"', String.Double, ('#pop', 'expr-chain', 'string-double')),
+
+ # EReg
+ (r'~/(\\\\|\\/|[^/\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')),
+
+ # Array
+ (r'\[', Punctuation, ('#pop', 'expr-chain', 'array-decl')),
+ ],
+
+ 'expr-chain': [
+ include('spaces'),
+ (r'(?:\+\+|\-\-)', Operator),
+ (binop, Operator, ('#pop', 'expr')),
+ (r'(?:in)\b', Keyword, ('#pop', 'expr')),
+ (r'\?', Operator, ('#pop', 'expr', 'ternary', 'expr')),
+ (r'(\.)(' + ident_no_keyword + ')', bygroups(Punctuation, Name)),
+ (r'\[', Punctuation, 'array-access'),
+ (r'\(', Punctuation, 'call'),
+ (r'', Text, '#pop'),
+ ],
+
+ # macro reification
+ 'macro': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'type')),
+ (r'', Text, ('#pop', 'expr')),
+ ],
+
+ # cast can be written as "cast expr" or "cast(expr, type)"
+ 'cast': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close',
+ 'cast-type', 'expr')),
+ (r'', Text, ('#pop', 'expr')),
+ ],
+
+ # optionally give a type as the 2nd argument of cast()
+ 'cast-type': [
+ include('spaces'),
+ (r',', Punctuation, ('#pop', 'type')),
+ (r'', Text, '#pop'),
+ ],
+
+ 'catch': [
+ include('spaces'),
+ (r'(?:catch)\b', Keyword, ('expr', 'function-param',
+ 'parenthesis-open')),
+ (r'', Text, '#pop'),
+ ],
+
+ # do-while loop
+ 'do': [
+ include('spaces'),
+ (r'', Punctuation, ('#pop', 'do-while', 'expr')),
],
+
+ # the while after do
+ 'do-while': [
+ include('spaces'),
+ (r'(?:while)\b', Keyword, ('#pop', 'parenthesis',
+ 'parenthesis-open')),
+ ],
+
+ 'while': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
+ ],
+
+ 'for': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
+ ],
+
+ 'if': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'else', 'optional-semicolon', 'expr',
+ 'parenthesis')),
+ ],
+
+ 'else': [
+ include('spaces'),
+ (r'(?:else)\b', Keyword, ('#pop', 'expr')),
+ (r'', Text, '#pop'),
+ ],
+
+ 'switch': [
+ include('spaces'),
+ (r'', Text, ('#pop', 'switch-body', 'bracket-open', 'expr')),
+ ],
+
+ 'switch-body': [
+ include('spaces'),
+ (r'(?:case|default)\b', Keyword, ('case-block', 'case')),
+ (r'\}', Punctuation, '#pop'),
+ ],
+
+ 'case': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ (r'', Text, ('#pop', 'case-sep', 'case-guard', 'expr')),
+ ],
+
+ 'case-sep': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'case')),
+ ],
+
+ 'case-guard': [
+ include('spaces'),
+ (r'(?:if)\b', Keyword, ('#pop', 'parenthesis', 'parenthesis-open')),
+ (r'', Text, '#pop'),
+ ],
+
+ # optional multiple expr under a case
+ 'case-block': [
+ include('spaces'),
+ (r'(?!(?:case|default)\b|\})', Keyword, 'expr-statement'),
+ (r'', Text, '#pop'),
+ ],
+
+ 'new': [
+ include('spaces'),
+ (r'', Text, ('#pop', 'call', 'parenthesis-open', 'type')),
+ ],
+
+ 'array-decl': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ (r'', Text, ('#pop', 'array-decl-sep', 'expr')),
+ ],
+
+ 'array-decl-sep': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'array-decl')),
+ ],
+
+ 'array-access': [
+ include('spaces'),
+ (r'', Text, ('#pop', 'array-access-close', 'expr')),
+ ],
+
+ 'array-access-close': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ ],
+
+ 'comma': [
+ include('spaces'),
+ (r',', Punctuation, '#pop'),
+ ],
+
+ 'colon': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ ],
+
+ 'semicolon': [
+ include('spaces'),
+ (r';', Punctuation, '#pop'),
+ ],
+
+ 'optional-semicolon': [
+ include('spaces'),
+ (r';', Punctuation, '#pop'),
+ (r'', Text, '#pop'),
+ ],
+
+ # identity that CAN be a Haxe keyword
+ 'ident': [
+ include('spaces'),
+ (ident, Name, '#pop'),
+ ],
+
+ 'dollar': [
+ include('spaces'),
+ (r'\{', Keyword, ('#pop', 'bracket-close', 'expr')),
+ (r'', Text, ('#pop', 'expr-chain')),
+ ],
+
+ 'type-name': [
+ include('spaces'),
+ (typeid, Name, '#pop'),
+ ],
+
+ 'type-full-name': [
+ include('spaces'),
+ (r'\.', Punctuation, 'ident'),
+ (r'', Text, '#pop'),
+ ],
+
'type': [
- include('whitespace'),
- (typeid, Name.Class),
- (r'<', Punctuation, 'generictypedecl'),
- (r'->', Keyword.Type),
- (r'(?=[{}(),;=])', Text, '#pop'),
+ include('spaces'),
+ (r'\?', Punctuation),
+ (ident, Name, ('#pop', 'type-check', 'type-full-name')),
+ (r'\{', Punctuation, ('#pop', 'type-check', 'type-struct')),
+ (r'\(', Punctuation, ('#pop', 'type-check', 'type-parenthesis')),
],
- 'generictypedecl': [
- include('whitespace'),
- (typeid, Name.Class),
- (r'<', Punctuation, '#push'),
+
+ 'type-parenthesis': [
+ include('spaces'),
+ (r'', Text, ('#pop', 'parenthesis-close', 'type')),
+ ],
+
+ 'type-check': [
+ include('spaces'),
+ (r'->', Punctuation, ('#pop', 'type')),
+ (r'<(?!=)', Punctuation, 'type-param'),
+ (r'', Text, '#pop'),
+ ],
+
+ 'type-struct': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r'\?', Punctuation),
+ (r'>', Punctuation, ('comma', 'type')),
+ (ident_no_keyword, Name, ('#pop', 'type-struct-sep', 'type', 'colon')),
+ include('class-body'),
+ ],
+
+ 'type-struct-sep': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-struct')),
+ ],
+
+ # type-param can be a normal type or a constant literal...
+ 'type-param-type': [
+ # Float
+ (r'\.[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+\.[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, '#pop'),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, '#pop'),
+ (r'[0-9]+', Number.Integer, '#pop'),
+
+ # String
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+
+ # EReg
+ (r'~/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex, '#pop'),
+
+ # Array
+ (r'\[', Operator, ('#pop', 'array-decl')),
+
+ include('type'),
+ ],
+
+ # type-param part of a type
+ # ie. the <A,B> path in Map<A,B>
+ 'type-param': [
+ include('spaces'),
+ (r'', Text, ('#pop', 'type-param-sep', 'type-param-type')),
+ ],
+
+ 'type-param-sep': [
+ include('spaces'),
(r'>', Punctuation, '#pop'),
- (r',', Punctuation),
+ (r',', Punctuation, ('#pop', 'type-param')),
],
- # Top level constructs
- 'imports': [
- (r'(package|import|using)(\s+)([^;]+)(;)',
- bygroups(Keyword.Namespace, Text, Name.Namespace,Punctuation)),
+ # optional type-param that may include constraint
+ # ie. <T:Constraint, T2:(ConstraintA,ConstraintB)>
+ 'type-param-constraint': [
+ include('spaces'),
+ (r'<(?!=)', Punctuation, ('#pop', 'type-param-constraint-sep',
+ 'type-param-constraint-flag', 'type-name')),
+ (r'', Text, '#pop'),
],
- 'typedef': [
- (r'typedef', Keyword.Declaration, ('typedefprebody', 'typedecl')),
+
+ 'type-param-constraint-sep': [
+ include('spaces'),
+ (r'>', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-param-constraint-sep',
+ 'type-param-constraint-flag', 'type-name')),
],
- 'typedefprebody': [
- include('whitespace'),
- (r'(=)(\s*)({)', bygroups(Punctuation, Text, Punctuation),
- ('#pop', 'typedefbody')),
+
+ # the optional constraint inside type-param
+ 'type-param-constraint-flag': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'type-param-constraint-flag-type')),
+ (r'', Text, '#pop'),
],
- 'enumdef': [
- (r'enum', Keyword.Declaration, ('enumdefprebody', 'typedecl')),
+
+ 'type-param-constraint-flag-type': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'type-param-constraint-flag-type-sep',
+ 'type')),
+ (r'', Text, ('#pop', 'type')),
],
- 'enumdefprebody': [
- include('whitespace'),
- (r'{', Punctuation, ('#pop','enumdefbody')),
+
+ 'type-param-constraint-flag-type-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, 'type'),
],
- 'classdef': [
- (r'class', Keyword.Declaration, ('classdefprebody', 'typedecl')),
+
+ # a parenthesis expr that contain exactly one expr
+ 'parenthesis': [
+ include('spaces'),
+ (r'', Text, ('#pop', 'parenthesis-close', 'expr')),
],
- 'classdefprebody': [
- include('whitespace'),
- (r'(extends|implements)', Keyword.Declaration,'typedecl'),
- (r'{', Punctuation, ('#pop', 'classdefbody')),
+
+ 'parenthesis-open': [
+ include('spaces'),
+ (r'\(', Punctuation, '#pop'),
],
- 'interfacedef': [
- (r'interface', Keyword.Declaration,
- ('interfacedefprebody', 'typedecl')),
+
+ 'parenthesis-close': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
],
- 'interfacedefprebody': [
- include('whitespace'),
- (r'(extends)', Keyword.Declaration, 'typedecl'),
- (r'{', Punctuation, ('#pop', 'classdefbody')),
- ],
-
- 'typedefbody': [
- include('whitespace'),
- include('instancevardef'),
- include('instancefundef'),
- (r'>', Punctuation, 'typedecl'),
- (r',', Punctuation),
- (r'}', Punctuation, '#pop'),
- ],
- 'enumdefbody': [
- include('whitespace'),
- (ident, Name.Variable.Instance),
- (r'\(', Punctuation, 'funargdecl'),
- (r';', Punctuation),
- (r'}', Punctuation, '#pop'),
- ],
- 'classdefbody': [
- include('whitespace'),
- include('instancevardef'),
- include('instancefundef'),
- (r'}', Punctuation, '#pop'),
- include('codeblock'),
+
+ 'var': [
+ include('spaces'),
+ (ident_no_keyword, Text, ('#pop', 'var-sep', 'assign', 'flag')),
],
+
+ # optional more var decl.
+ 'var-sep': [
+ include('spaces'),
+ (r',', Punctuation, ('#pop', 'var')),
+ (r'', Text, '#pop'),
+ ],
+
+ # optional assignment
+ 'assign': [
+ include('spaces'),
+ (r'=', Operator, ('#pop', 'expr')),
+ (r'', Text, '#pop'),
+ ],
+
+ # optional type flag
+ 'flag': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'type')),
+ (r'', Text, '#pop'),
+ ],
+
+ # colon as part of a ternary operator (?:)
+ 'ternary': [
+ include('spaces'),
+ (r':', Operator, '#pop'),
+ ],
+
+ # function call
+ 'call': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r'', Text, ('#pop', 'call-sep', 'expr')),
+ ],
+
+ # after a call param
+ 'call-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'call')),
+ ],
+
+ # bracket can be block or object
+ 'bracket': [
+ include('spaces'),
+ (r'(?!(?:\$\s*[a-z]\b|\$(?!'+ident+')))' + ident_no_keyword, Name,
+ ('#pop', 'bracket-check')),
+ (r"'", String.Single, ('#pop', 'bracket-check', 'string-single')),
+ (r'"', String.Double, ('#pop', 'bracket-check', 'string-double')),
+ (r'', Text, ('#pop', 'block')),
+ ],
+
+ 'bracket-check': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'object-sep', 'expr')), #is object
+ (r'', Text, ('#pop', 'block', 'optional-semicolon', 'expr-chain')), #is block
+ ],
+
+ # code block
+ 'block': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r'', Text, 'expr-statement'),
+ ],
+
+ # object in key-value pairs
+ 'object': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r'', Text, ('#pop', 'object-sep', 'expr', 'colon', 'ident-or-string'))
+ ],
+
+ # a key of an object
+ 'ident-or-string': [
+ include('spaces'),
+ (ident_no_keyword, Name, '#pop'),
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ ],
+
+ # after a key-value pair in object
+ 'object-sep': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'object')),
+ ],
+
+
+
}
def analyse_text(text):
@@ -1797,7 +2413,7 @@ class CoffeeScriptLexer(RegexLexer):
"""
name = 'CoffeeScript'
- aliases = ['coffee-script', 'coffeescript']
+ aliases = ['coffee-script', 'coffeescript', 'coffee']
filenames = ['*.coffee']
mimetypes = ['text/coffeescript']
diff --git a/pygments/modeline.py b/pygments/modeline.py
new file mode 100644
index 00000000..cba1cab2
--- /dev/null
+++ b/pygments/modeline.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.modeline
+ ~~~~~~~~~~~~~~~~~
+
+ A simple modeline parser (based on pymodeline).
+
+ :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+__all__ = ['get_filetype_from_buffer']
+
+modeline_re = re.compile(r'''
+ (?: vi | vim | ex ) (?: [<=>]? \d* )? :
+ .* (?: ft | filetype | syn | syntax ) = ( [^:\s]+ )
+''', re.VERBOSE)
+
+def get_filetype_from_line(l):
+ m = modeline_re.search(l)
+ if m:
+ return m.group(1)
+
+def get_filetype_from_buffer(buf, max_lines=5):
+ """
+ Scan the buffer for modelines and return filetype if one is found.
+ """
+ lines = buf.splitlines()
+ for l in lines[-1:-max_lines-1:-1]:
+ ret = get_filetype_from_line(l)
+ if ret:
+ return ret
+ for l in lines[max_lines:0:-1]:
+ ret = get_filetype_from_line(l)
+ if ret:
+ return ret
+
+ return None
diff --git a/tests/examplefiles/example.hx b/tests/examplefiles/example.hx
new file mode 100644
index 00000000..fd93bb49
--- /dev/null
+++ b/tests/examplefiles/example.hx
@@ -0,0 +1,142 @@
+/**
+ * This is not really a valid Haxe file, but just a demo...
+ */
+
+package;
+package net.onthewings;
+
+import net.onthewings.Test;
+import net.onthewings.*;
+
+using Lambda;
+using net.onthewings.Test;
+
+#if flash8
+// Haxe code specific for flash player 8
+#elseif flash
+// Haxe code specific for flash platform (any version)
+#elseif js
+// Haxe code specific for javascript platform
+#elseif neko
+// Haxe code specific for neko platform
+#else
+// do something else
+ #error // will display an error "Not implemented on this platform"
+ #error "Custom error message" // will display an error "Custom error message"
+#end
+
+0; // Int
+-134; // Int
+0xFF00; // Int
+
+123.0; // Float
+.14179; // Float
+13e50; // Float
+-1e-99; // Float
+
+"hello"; // String
+"hello \"world\" !"; // String
+'hello "world" !'; // String
+
+true; // Bool
+false; // Bool
+
+null; // Unknown<0>
+
+~/[a-z]+/i; // EReg : regular expression
+
+var point = { "x" : 1, "y" : -5 };
+
+{
+ var x;
+ var y = 3;
+ var z : String;
+ var w : String = "";
+ var a, b : Bool, c : Int = 0;
+}
+
+//haxe3 pattern matching
+switch(e.expr) {
+ case EConst(CString(s)) if (StringTools.startsWith(s, "foo")):
+ "1";
+ case EConst(CString(s)) if (StringTools.startsWith(s, "bar")):
+ "2";
+ case EConst(CInt(i)) if (switch(Std.parseInt(i) * 2) { case 4: true; case _: false; }):
+ "3";
+ case EConst(_):
+ "4";
+ case _:
+ "5";
+}
+
+switch [true, 1, "foo"] {
+ case [true, 1, "foo"]: "0";
+ case [true, 1, _]: "1";
+ case _: "_";
+}
+
+
+class Test <T:Void->Void> {
+ private function new():Void {
+ inline function innerFun(a:Int, b:Int):Int {
+ return readOnlyField = a + b;
+ }
+
+ _innerFun(1, 2.3);
+ }
+
+ static public var instance(get,null):Test;
+ static function get_instance():Test {
+ return instance != null ? instance : instance = new Test();
+ }
+}
+
+@:native("Test") private class Test2 {}
+
+extern class Ext {}
+
+@:macro class M {
+ @:macro static function test(e:Array<Expr>):ExprOf<String> {
+ return macro "ok";
+ }
+}
+
+enum Color {
+ Red;
+ Green;
+ Blue;
+ Grey( v : Int );
+ Rgb( r : Int, g : Int, b : Int );
+ Alpha( a : Int, col : Color );
+}
+
+class Colors {
+ static function toInt( c : Color ) : Int {
+ return switch( c ) {
+ case Red: 0xFF0000;
+ case Green: 0x00FF00;
+ case Blue: 0x0000FF;
+ case Grey(v): (v << 16) | (v << 8) | v;
+ case Rgb(r,g,b): (r << 16) | (g << 8) | b;
+ case Alpha(a,c): (a << 24) | (toInt(c) & 0xFFFFFF);
+ }
+ }
+}
+
+class EvtQueue<T : (Event, EventDispatcher)> {
+ var evt : T;
+}
+
+typedef DS = Dynamic<String>;
+typedef Pt = {
+ var x:Float;
+ var y:Float;
+ @:optional var z:Float; /* optional z */
+ function add(pt:Pt):Void;
+}
+typedef Pt2 = {
+ x:Float,
+ y:Float,
+ ?z:Float, //optional z
+ add : Point -> Void,
+} \ No newline at end of file
diff --git a/tests/examplefiles/example.rexx b/tests/examplefiles/example.rexx
new file mode 100644
index 00000000..ec4da5ad
--- /dev/null
+++ b/tests/examplefiles/example.rexx
@@ -0,0 +1,50 @@
+/* REXX example. */
+
+/* Some basic constructs. */
+almost_pi = 0.1415 + 3
+if almost_pi < 3 then
+ say 'huh?'
+else do
+ say 'almost_pi=' almost_pi || " - ok"
+end
+x = '"' || "'" || '''' || """" /* quotes */
+
+/* A comment
+ * spawning multiple
+ lines. /* / */
+
+/* Built-in functions. */
+line = 'line containing some short text'
+say WordPos(line, 'some')
+say Word(line, 4)
+
+/* Labels and procedures. */
+some_label :
+
+divide: procedure
+ parse arg some other
+ return some / other
+
+call divide(5, 2)
+
+/* Loops */
+do i = 1 to 5
+ do j = -3 to -9 by -3
+ say i '+' j '=' i + j
+ end j
+end i
+
+do forever
+ leave
+end
+
+/* Print a text file on MVS. */
+ADDRESS TSO
+"ALLOC F(TEXTFILE) DSN('some.text.dsn') SHR REU"
+"EXECIO * DISKR TEXTFILE ( FINIS STEM LINES."
+"FREE F(TEXTFILE)"
+I = 1
+DO WHILE I <= LINES.0
+ SAY ' LINE ' I ' : ' LINES.I
+ I = I + 1
+END
diff --git a/tests/examplefiles/garcia-wachs.kk b/tests/examplefiles/garcia-wachs.kk
index f766e051..91a01fbe 100644
--- a/tests/examplefiles/garcia-wachs.kk
+++ b/tests/examplefiles/garcia-wachs.kk
@@ -1,9 +1,25 @@
-/* This is an example in the Koka Language of the Garcia-Wachs algorithm */
-module garcia-wachs
+// Koka language test module
-public fun main()
-{
- test().print
+// This module implements the GarsiaWachs algorithm.
+// It is an adaptation of the algorithm in ML as described by Jean-Christophe Filliâtre:
+// in ''A functional implementation of the Garsia-Wachs algorithm. (functional pearl). ML workshop 2008, pages 91--96''.
+// See: http://www.lri.fr/~filliatr/publis/gwWml08.pdf
+//
+// The algorithm is interesting since it uses mutable references shared between a list and tree but the
+// side effects are not observable from outside. Koka automatically infers that the final algorithm is pure.
+// Note: due to a current limitation in the divergence analysis, koka cannot yet infer that mutually recursive
+// definitions in "insert" and "extract" are terminating and the final algorithm still has a divergence effect.
+// However, koka does infer that no other effect (i.e. an exception due to a partial match) can occur.
+module garcsiaWachs
+
+import test = qualified std/flags
+
+# pre processor test
+
+public function main() {
+ wlist = Cons1(('a',3), [('b',2),('c',1),('d',4),('e',5)])
+ tree = wlist.garsiaWachs()
+ tree.show.println()
}
//----------------------------------------------------
@@ -14,10 +30,9 @@ public type tree<a> {
con Node(left :tree<a>, right :tree<a>)
}
-fun show( t : tree<char> ) : string
-{
+function show( t : tree<char> ) : string {
match(t) {
- Leaf(c) -> Core.show(c)
+ Leaf(c) -> core/show(c)
Node(l,r) -> "Node(" + show(l) + "," + show(r) + ")"
}
}
@@ -30,23 +45,21 @@ public type list1<a> {
Cons1( head : a, tail : list<a> )
}
-
-fun map( xs, f ) {
+function map( xs, f ) {
val Cons1(y,ys) = xs
- return Cons1(f(y), Core.map(ys,f))
+ return Cons1(f(y), core/map(ys,f))
}
-fun zip( xs :list1<a>, ys :list1<b> ) : list1<(a,b)> {
+function zip( xs :list1<a>, ys :list1<b> ) : list1<(a,b)> {
Cons1( (xs.head, ys.head), zip(xs.tail, ys.tail))
}
-
//----------------------------------------------------
// Phase 1
//----------------------------------------------------
-fun insert( after : list<(tree<a>,int)>, t : (tree<a>,int), before : list<(tree<a>,int)> ) : div tree<a>
+function insert( after : list<(tree<a>,int)>, t : (tree<a>,int), before : list<(tree<a>,int)> ) : div tree<a>
{
match(before) {
Nil -> extract( [], Cons1(t,after) )
@@ -60,7 +73,7 @@ fun insert( after : list<(tree<a>,int)>, t : (tree<a>,int), before : list<(tree<
}
}
-fun extract( before : list<(tree<a>,int)>, after : list1<(tree<a>,int)> ) : div tree<a>
+function extract( before : list<(tree<a>,int)>, after : list1<(tree<a>,int)> ) : div tree<a>
{
val Cons1((t1,w1) as x, xs ) = after
match(xs) {
@@ -75,25 +88,24 @@ fun extract( before : list<(tree<a>,int)>, after : list1<(tree<a>,int)> ) : div
}
}
-
-
-fun balance( xs : list1<(tree<a>,int)> ) : div tree<a>
-{
+function balance( xs : list1<(tree<a>,int)> ) : div tree<a> {
extract( [], xs )
}
-fun mark( depth :int, t :tree<(a,ref<h,int>)> ) : <write<h>> ()
-{
+//----------------------------------------------------
+// Phase 2
+//----------------------------------------------------
+
+function mark( depth :int, t :tree<(a,ref<h,int>)> ) : <write<h>> () {
match(t) {
Leaf((_,d)) -> d := depth
Node(l,r) -> { mark(depth+1,l); mark(depth+1,r) }
}
}
-
-fun build( depth :int, xs :list1<(a,ref<h,int>)> ) : <read<h>,div> (tree<a>,list<(a,ref<h,int>)>)
+function build( depth :int, xs :list1<(a,ref<h,int>)> ) : <read<h>,div> (tree<a>,list<(a,ref<h,int>)>)
{
- if (!xs.head.snd == depth) return (Leaf(xs.head.fst), xs.tail)
+ if (!(xs.head.snd) == depth) return (Leaf(xs.head.fst), xs.tail)
l = build(depth+1, xs)
match(l.snd) {
@@ -105,13 +117,11 @@ fun build( depth :int, xs :list1<(a,ref<h,int>)> ) : <read<h>,div> (tree<a>,list
}
}
-public fun test() {
- wlist = Cons1(('a',3), [('b',2),('c',1),('d',4),('e',5)])
- tree = wlist.garciawachs()
- tree.show()
-}
+//----------------------------------------------------
+// Main
+//----------------------------------------------------
-public fun garciawachs( xs : list1<(a,int)> ) : div tree<a>
+public function garsiaWachs( xs : list1<(a,int)> ) : div tree<a>
{
refs = xs.map(fst).map( fun(x) { (x, ref(0)) } )
wleafs = zip( refs.map(Leaf), xs.map(snd) )
diff --git a/tests/examplefiles/objc_example.m b/tests/examplefiles/objc_example.m
index cb5c0975..67b33022 100644
--- a/tests/examplefiles/objc_example.m
+++ b/tests/examplefiles/objc_example.m
@@ -23,3 +23,10 @@ for (key in dictionary) {
NSLog(@"English: %@, Latin: %@", key, [dictionary valueForKey:key]);
}
+// Literals
+NSArray *a = @[ @"1", @"2" ];
+
+NSDictionary *d = @{ @"key": @"value" };
+
+NSNumber *n1 = @( 1 );
+NSNumber *n2 = @( [a length] );
diff --git a/tests/examplefiles/test.ebnf b/tests/examplefiles/test.ebnf
new file mode 100644
index 00000000..a96171b0
--- /dev/null
+++ b/tests/examplefiles/test.ebnf
@@ -0,0 +1,31 @@
+letter = "A" | "B" | "C" | "D" | "E" | "F" | "G"
+ | "H" | "I" | "J" | "K" | "L" | "M" | "N"
+ | "O" | "P" | "Q" | "R" | "S" | "T" | "U"
+ | "V" | "W" | "X" | "Y" | "Z" ;
+digit = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" ;
+symbol = "[" | "]" | "{" | "}" | "(" | ")" | "<" | ">"
+ | "'" | '"' | "=" | "|" | "." | "," | ";" ;
+character = letter | digit | symbol | " " ;
+
+identifier = letter , { letter | digit | " " } ;
+terminal = "'" , character , { character } , "'"
+ | '"' , character , { character } , '"' ;
+
+special = "?" , any , "?" ;
+
+comment = (* this is a comment "" *) "(*" , any-symbol , "*)" ;
+any-symbol = ? any visible character ? ; (* ? ... ? *)
+
+lhs = identifier ;
+rhs = identifier
+ | terminal
+ | comment , rhs
+ | rhs , comment
+ | "[" , rhs , "]"
+ | "{" , rhs , "}"
+ | "(" , rhs , ")"
+ | rhs , "|" , rhs
+ | rhs , "," , rhs ;
+
+rule = lhs , "=" , rhs , ";" | comment ;
+grammar = { rule } ;
diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py
new file mode 100644
index 00000000..d3cfa246
--- /dev/null
+++ b/tests/test_lexers_other.py
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+"""
+ Tests for other lexers
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import glob
+import os
+import unittest
+
+from pygments.lexers import guess_lexer
+from pygments.lexers.other import RexxLexer
+
+
+def _exampleFilePath(filename):
+ return os.path.join(os.path.dirname(__file__), 'examplefiles', filename)
+
+
+class AnalyseTextTest(unittest.TestCase):
+ def _testCanRecognizeAndGuessExampleFiles(self, lexer):
+ assert lexer is not None
+
+ for pattern in lexer.filenames:
+ exampleFilesPattern = _exampleFilePath(pattern)
+ for exampleFilePath in glob.glob(exampleFilesPattern):
+ exampleFile = open(exampleFilePath, 'rb')
+ try:
+ text = exampleFile.read()
+ probability = lexer.analyse_text(text)
+ self.assertTrue(probability > 0,
+ '%s must recognize %r' % (
+ lexer.name, exampleFilePath))
+ guessedLexer = guess_lexer(text)
+ self.assertEqual(guessedLexer.name, lexer.name)
+ finally:
+ exampleFile.close()
+
+ def testCanRecognizeAndGuessExampleFiles(self):
+ self._testCanRecognizeAndGuessExampleFiles(RexxLexer)
+
+
+class RexxLexerTest(unittest.TestCase):
+ def testCanGuessFromText(self):
+ self.assertAlmostEqual(0.01,
+ RexxLexer.analyse_text('/* */'))
+ self.assertAlmostEqual(1.0,
+ RexxLexer.analyse_text('''/* Rexx */
+ say "hello world"'''))
+ self.assertLess(0.5,
+ RexxLexer.analyse_text('/* */\n'
+ 'hello:pRoceduRe\n'
+ ' say "hello world"'))
+ self.assertLess(0.2,
+ RexxLexer.analyse_text('''/* */
+ if 1 > 0 then do
+ say "ok"
+ end
+ else do
+ say "huh?"
+ end'''))
+ self.assertLess(0.2,
+ RexxLexer.analyse_text('''/* */
+ greeting = "hello world!"
+ parse value greeting "hello" name "!"
+ say name'''))