author     Anteru <bitbucket@ca.sh13.net>  2019-04-30 15:47:45 +0000
committer  Anteru <bitbucket@ca.sh13.net>  2019-04-30 15:47:45 +0000
commit     83e159f05ec8dae7d6e52140b23b363d4c3b18f2 (patch)
tree       1fa06af4657f37b48cc798a662a0986fb004ba52 /pygments/lexers
parent     836d98cc163ea0dcb1b60ef1e536fdacb351d78a (diff)
parent     a0fc52727aaed41c8e09c87996de842117872afb (diff)
download   pygments-83e159f05ec8dae7d6e52140b23b363d4c3b18f2.tar.gz
Merged in Praetonus/pygments-main/pony (pull request #627)
Add lexer for the Pony language
Diffstat (limited to 'pygments/lexers')
-rw-r--r--  pygments/lexers/__init__.py  45
-rw-r--r--  pygments/lexers/_asy_builtins.py  2
-rw-r--r--  pygments/lexers/_cl_builtins.py  2
-rw-r--r--  pygments/lexers/_cocoa_builtins.py  15
-rw-r--r--  pygments/lexers/_csound_builtins.py  2990
-rw-r--r--  pygments/lexers/_lasso_builtins.py  9
-rw-r--r--  pygments/lexers/_lua_builtins.py  4
-rw-r--r--  pygments/lexers/_mapping.py  34
-rw-r--r--  pygments/lexers/_mql_builtins.py  2
-rw-r--r--  pygments/lexers/_openedge_builtins.py  2
-rw-r--r--  pygments/lexers/_php_builtins.py  32
-rw-r--r--  pygments/lexers/_postgres_builtins.py  2
-rw-r--r--  pygments/lexers/_scilab_builtins.py  2
-rw-r--r--  pygments/lexers/_sourcemod_builtins.py  2
-rw-r--r--  pygments/lexers/_stan_builtins.py  276
-rw-r--r--  pygments/lexers/_stata_builtins.py  10
-rw-r--r--  pygments/lexers/_tsql_builtins.py  2
-rw-r--r--  pygments/lexers/_vbscript_builtins.py  279
-rw-r--r--  pygments/lexers/_vim_builtins.py  2
-rw-r--r--  pygments/lexers/actionscript.py  6
-rw-r--r--  pygments/lexers/agile.py  2
-rw-r--r--  pygments/lexers/algebra.py  2
-rw-r--r--  pygments/lexers/ambient.py  2
-rw-r--r--  pygments/lexers/ampl.py  6
-rw-r--r--  pygments/lexers/apl.py  10
-rw-r--r--  pygments/lexers/archetype.py  2
-rw-r--r--  pygments/lexers/asm.py  226
-rw-r--r--  pygments/lexers/automation.py  6
-rw-r--r--  pygments/lexers/basic.py  167
-rw-r--r--  pygments/lexers/bibtex.py  20
-rw-r--r--  pygments/lexers/business.py  88
-rw-r--r--  pygments/lexers/c_cpp.py  10
-rw-r--r--  pygments/lexers/c_like.py  30
-rw-r--r--  pygments/lexers/capnproto.py  41
-rw-r--r--  pygments/lexers/chapel.py  33
-rw-r--r--  pygments/lexers/clean.py  378
-rw-r--r--  pygments/lexers/compiled.py  2
-rw-r--r--  pygments/lexers/configs.py  129
-rw-r--r--  pygments/lexers/console.py  2
-rw-r--r--  pygments/lexers/crystal.py  49
-rw-r--r--  pygments/lexers/csound.py  462
-rw-r--r--  pygments/lexers/css.py  25
-rw-r--r--  pygments/lexers/d.py  2
-rw-r--r--  pygments/lexers/dalvik.py  2
-rw-r--r--  pygments/lexers/data.py  14
-rw-r--r--  pygments/lexers/diff.py  2
-rw-r--r--  pygments/lexers/dotnet.py  22
-rw-r--r--  pygments/lexers/dsls.py  14
-rw-r--r--  pygments/lexers/dylan.py  8
-rw-r--r--  pygments/lexers/ecl.py  2
-rw-r--r--  pygments/lexers/eiffel.py  2
-rw-r--r--  pygments/lexers/elm.py  4
-rw-r--r--  pygments/lexers/erlang.py  6
-rw-r--r--  pygments/lexers/esoteric.py  18
-rw-r--r--  pygments/lexers/ezhil.py  7
-rw-r--r--  pygments/lexers/factor.py  2
-rw-r--r--  pygments/lexers/fantom.py  2
-rw-r--r--  pygments/lexers/felix.py  2
-rw-r--r--  pygments/lexers/floscript.py  87
-rw-r--r--  pygments/lexers/forth.py  4
-rw-r--r--  pygments/lexers/fortran.py  7
-rw-r--r--  pygments/lexers/foxpro.py  2
-rw-r--r--  pygments/lexers/functional.py  2
-rw-r--r--  pygments/lexers/go.py  2
-rw-r--r--  pygments/lexers/grammar_notation.py  6
-rw-r--r--  pygments/lexers/graph.py  23
-rw-r--r--  pygments/lexers/graphics.py  278
-rw-r--r--  pygments/lexers/haskell.py  50
-rw-r--r--  pygments/lexers/haxe.py  12
-rw-r--r--  pygments/lexers/hdl.py  2
-rw-r--r--  pygments/lexers/hexdump.py  30
-rw-r--r--  pygments/lexers/html.py  34
-rw-r--r--  pygments/lexers/idl.py  16
-rw-r--r--  pygments/lexers/igor.py  361
-rw-r--r--  pygments/lexers/inferno.py  4
-rw-r--r--  pygments/lexers/installers.py  2
-rw-r--r--  pygments/lexers/int_fiction.py  4
-rw-r--r--  pygments/lexers/iolang.py  4
-rw-r--r--  pygments/lexers/j.py  6
-rw-r--r--  pygments/lexers/javascript.py  99
-rw-r--r--  pygments/lexers/julia.py  14
-rw-r--r--  pygments/lexers/jvm.py  99
-rw-r--r--  pygments/lexers/lisp.py  104
-rw-r--r--  pygments/lexers/make.py  2
-rw-r--r--  pygments/lexers/markup.py  14
-rw-r--r--  pygments/lexers/math.py  2
-rw-r--r--  pygments/lexers/matlab.py  6
-rw-r--r--  pygments/lexers/ml.py  4
-rw-r--r--  pygments/lexers/modeling.py  36
-rw-r--r--  pygments/lexers/modula2.py  2
-rw-r--r--  pygments/lexers/monte.py  7
-rw-r--r--  pygments/lexers/ncl.py  329
-rw-r--r--  pygments/lexers/nimrod.py  2
-rw-r--r--  pygments/lexers/nit.py  2
-rw-r--r--  pygments/lexers/nix.py  2
-rw-r--r--  pygments/lexers/oberon.py  2
-rw-r--r--  pygments/lexers/objective.py  30
-rw-r--r--  pygments/lexers/ooc.py  2
-rw-r--r--  pygments/lexers/other.py  2
-rw-r--r--  pygments/lexers/parasail.py  2
-rw-r--r--  pygments/lexers/parsers.py  14
-rw-r--r--  pygments/lexers/pascal.py  14
-rw-r--r--  pygments/lexers/pawn.py  10
-rw-r--r--  pygments/lexers/perl.py  46
-rw-r--r--  pygments/lexers/php.py  11
-rw-r--r--  pygments/lexers/praat.py  2
-rw-r--r--  pygments/lexers/prolog.py  10
-rw-r--r--  pygments/lexers/python.py  42
-rw-r--r--  pygments/lexers/qvt.py  14
-rw-r--r--  pygments/lexers/r.py  278
-rw-r--r--  pygments/lexers/rdf.py  10
-rw-r--r--  pygments/lexers/rebol.py  16
-rw-r--r--  pygments/lexers/resource.py  2
-rw-r--r--  pygments/lexers/rnc.py  2
-rw-r--r--  pygments/lexers/roboconf.py  2
-rw-r--r--  pygments/lexers/robotframework.py  4
-rw-r--r--  pygments/lexers/ruby.py  14
-rw-r--r--  pygments/lexers/rust.py  73
-rw-r--r--  pygments/lexers/sas.py  14
-rw-r--r--  pygments/lexers/scripting.py  8
-rw-r--r--  pygments/lexers/sgf.py  54
-rw-r--r--  pygments/lexers/shell.py  83
-rw-r--r--  pygments/lexers/slash.py  187
-rw-r--r--  pygments/lexers/smalltalk.py  2
-rw-r--r--  pygments/lexers/smv.py  52
-rw-r--r--  pygments/lexers/snobol.py  2
-rw-r--r--  pygments/lexers/special.py  2
-rw-r--r--  pygments/lexers/sql.py  78
-rw-r--r--  pygments/lexers/stata.py  143
-rw-r--r--  pygments/lexers/supercollider.py  2
-rw-r--r--  pygments/lexers/tcl.py  2
-rw-r--r--  pygments/lexers/templates.py  111
-rw-r--r--  pygments/lexers/testing.py  4
-rw-r--r--  pygments/lexers/text.py  3
-rw-r--r--  pygments/lexers/textedit.py  2
-rw-r--r--  pygments/lexers/textfmts.py  12
-rw-r--r--  pygments/lexers/theorem.py  2
-rw-r--r--  pygments/lexers/trafficscript.py  2
-rw-r--r--  pygments/lexers/typoscript.py  25
-rw-r--r--  pygments/lexers/unicon.py  390
-rw-r--r--  pygments/lexers/urbi.py  2
-rw-r--r--  pygments/lexers/varnish.py  10
-rw-r--r--  pygments/lexers/verification.py  6
-rw-r--r--  pygments/lexers/web.py  2
-rw-r--r--  pygments/lexers/webmisc.py  44
-rw-r--r--  pygments/lexers/whiley.py  15
-rw-r--r--  pygments/lexers/x10.py  2
-rw-r--r--  pygments/lexers/xorg.py  37
148 files changed, 5676 insertions, 3403 deletions
diff --git a/pygments/lexers/__init__.py b/pygments/lexers/__init__.py
index d64f163f..50f39d4e 100644
--- a/pygments/lexers/__init__.py
+++ b/pygments/lexers/__init__.py
@@ -5,7 +5,7 @@
Pygments lexers.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,7 +22,7 @@ from pygments.util import ClassNotFound, itervalues, guess_decode
__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
- 'guess_lexer'] + list(LEXERS)
+ 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS)
_lexer_cache = {}
_pattern_cache = {}
@@ -72,7 +72,7 @@ def find_lexer_class(name):
return cls
-def find_lexer_class_by_name(alias):
+def find_lexer_class_by_name(_alias):
"""Lookup a lexer class by alias.
Like `get_lexer_by_name`, but does not instantiate the class.
@@ -115,6 +115,41 @@ def get_lexer_by_name(_alias, **options):
raise ClassNotFound('no lexer for alias %r found' % _alias)
+def load_lexer_from_file(filename, lexername="CustomLexer", **options):
+ """Load a lexer from a file.
+
+ This method expects a file located relative to the current working
+ directory, which contains a Lexer class. By default, it expects the
+ Lexer to be name CustomLexer; you can specify your own class name
+ as the second argument to this function.
+
+ Users should be very careful with the input, because this method
+ is equivalent to running eval on the input file.
+
+ Raises ClassNotFound if there are any problems importing the Lexer.
+
+ .. versionadded:: 2.2
+ """
+ try:
+ # This empty dict will contain the namespace for the exec'd file
+ custom_namespace = {}
+ with open(filename, 'rb') as f:
+ exec(f.read(), custom_namespace)
+ # Retrieve the class `lexername` from that namespace
+ if lexername not in custom_namespace:
+ raise ClassNotFound('no valid %s class found in %s' %
+ (lexername, filename))
+ lexer_class = custom_namespace[lexername]
+ # And finally instantiate it with the options
+ return lexer_class(**options)
+ except IOError as err:
+ raise ClassNotFound('cannot read %s' % filename)
+ except ClassNotFound as err:
+ raise
+ except Exception as err:
+ raise ClassNotFound('error when loading custom lexer: %s' % err)
+
+
def find_lexer_class_for_filename(_fn, code=None):
"""Get a lexer for a filename.
@@ -149,8 +184,8 @@ def find_lexer_class_for_filename(_fn, code=None):
# gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
# to find lexers which need it overridden.
if code:
- return cls.analyse_text(code) + bonus
- return cls.priority + bonus
+ return cls.analyse_text(code) + bonus, cls.__name__
+ return cls.priority + bonus, cls.__name__
if matches:
matches.sort(key=get_rating)
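
Usage sketch (not part of the patch): the load_lexer_from_file helper added in the hunk above could be driven roughly as follows; the module name my_lexer.py and class name MyLexer are hypothetical placeholders for a user-supplied lexer file.

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import load_lexer_from_file

    # Load the class named "MyLexer" from my_lexer.py (a path relative to the
    # current working directory) and instantiate it with ordinary lexer options.
    lexer = load_lexer_from_file('my_lexer.py', 'MyLexer', stripall=True)
    print(highlight('some source text', lexer, TerminalFormatter()))
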
diff --git a/pygments/lexers/_asy_builtins.py b/pygments/lexers/_asy_builtins.py
index 51716866..1f831cdb 100644
--- a/pygments/lexers/_asy_builtins.py
+++ b/pygments/lexers/_asy_builtins.py
@@ -10,7 +10,7 @@
TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
for function and variable names.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_cl_builtins.py b/pygments/lexers/_cl_builtins.py
index a2243647..ce5ad48e 100644
--- a/pygments/lexers/_cl_builtins.py
+++ b/pygments/lexers/_cl_builtins.py
@@ -5,7 +5,7 @@
ANSI Common Lisp builtins.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_cocoa_builtins.py b/pygments/lexers/_cocoa_builtins.py
index a4f00d9d..f17ea876 100644
--- a/pygments/lexers/_cocoa_builtins.py
+++ b/pygments/lexers/_cocoa_builtins.py
@@ -8,7 +8,7 @@
File may be also used as standalone generator for aboves.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -40,24 +40,25 @@ if __name__ == '__main__': # pragma: no cover
continue
headerFilePath = frameworkHeadersDir + f
- content = open(headerFilePath).read()
- res = re.findall('(?<=@interface )\w+', content)
+ with open(headerFilePath) as f:
+ content = f.read()
+ res = re.findall(r'(?<=@interface )\w+', content)
for r in res:
all_interfaces.add(r)
- res = re.findall('(?<=@protocol )\w+', content)
+ res = re.findall(r'(?<=@protocol )\w+', content)
for r in res:
all_protocols.add(r)
- res = re.findall('(?<=typedef enum )\w+', content)
+ res = re.findall(r'(?<=typedef enum )\w+', content)
for r in res:
all_primitives.add(r)
- res = re.findall('(?<=typedef struct )\w+', content)
+ res = re.findall(r'(?<=typedef struct )\w+', content)
for r in res:
all_primitives.add(r)
- res = re.findall('(?<=typedef const struct )\w+', content)
+ res = re.findall(r'(?<=typedef const struct )\w+', content)
for r in res:
all_primitives.add(r)
diff --git a/pygments/lexers/_csound_builtins.py b/pygments/lexers/_csound_builtins.py
index a88e0a83..56b5a452 100644
--- a/pygments/lexers/_csound_builtins.py
+++ b/pygments/lexers/_csound_builtins.py
@@ -3,1344 +3,1658 @@
pygments.lexers._csound_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2018 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-# Opcodes in Csound 6.05 from
-# csound --list-opcodes
-# except
-# cggoto <http://www.csounds.com/manual/html/cggoto.html>
-# cigoto <http://www.csounds.com/manual/html/cigoto.html>
-# cingoto (undocumented)
-# ckgoto <http://www.csounds.com/manual/html/ckgoto.html>
-# cngoto <http://www.csounds.com/manual/html/cngoto.html>
-# endin <http://www.csounds.com/manual/html/endin.html
-# endop <http://www.csounds.com/manual/html/endop.html
-# goto <http://www.csounds.com/manual/html/goto.html>
-# igoto <http://www.csounds.com/manual/html/igoto.html>
-# instr <http://www.csounds.com/manual/html/instr.html>
-# kgoto <http://www.csounds.com/manual/html/kgoto.html>
-# loop_ge <http://www.csounds.com/manual/html/loop_ge.html>
-# loop_gt <http://www.csounds.com/manual/html/loop_gt.html>
-# loop_le <http://www.csounds.com/manual/html/loop_le.html>
-# loop_lt <http://www.csounds.com/manual/html/loop_lt.html>
-# opcode <http://www.csounds.com/manual/html/opcode.html>
-# return <http://www.csounds.com/manual/html/return.html>
-# rigoto <http://www.csounds.com/manual/html/rigoto.html>
-# tigoto <http://www.csounds.com/manual/html/tigoto.html>
-# timout <http://www.csounds.com/manual/html/timout.html>
-# which are treated as keywords; the scoreline opcodes
-# scoreline <http://www.csounds.com/manual/html/scoreline.html>
-# scoreline_i <http://www.csounds.com/manual/html/scoreline_i.html>
-# which allow Csound Score highlighting; the pyrun opcodes
-# <http://www.csounds.com/manual/html/pyrun.html>
-# pylrun
-# pylruni
-# pylrunt
-# pyrun
-# pyruni
-# pyrunt
-# which allow Python highlighting; and the Lua opcodes
-# lua_exec <http://www.csounds.com/manual/html/lua_exec.html>
-# lua_opdef <http://www.csounds.com/manual/html/lua_opdef.html>
-# which allow Lua highlighting.
-OPCODES = set((
- 'ATSadd',
- 'ATSaddnz',
- 'ATSbufread',
- 'ATScross',
- 'ATSinfo',
- 'ATSinterpread',
- 'ATSpartialtap',
- 'ATSread',
- 'ATSreadnz',
- 'ATSsinnoi',
- 'FLbox',
- 'FLbutBank',
- 'FLbutton',
- 'FLcloseButton',
- 'FLcolor',
- 'FLcolor2',
- 'FLcount',
- 'FLexecButton',
- 'FLgetsnap',
- 'FLgroup',
- 'FLgroupEnd',
- 'FLgroup_end',
- 'FLhide',
- 'FLhvsBox',
- 'FLhvsBoxSetValue',
- 'FLjoy',
- 'FLkeyIn',
- 'FLknob',
- 'FLlabel',
- 'FLloadsnap',
- 'FLmouse',
- 'FLpack',
- 'FLpackEnd',
- 'FLpack_end',
- 'FLpanel',
- 'FLpanelEnd',
- 'FLpanel_end',
- 'FLprintk',
- 'FLprintk2',
- 'FLroller',
- 'FLrun',
- 'FLsavesnap',
- 'FLscroll',
- 'FLscrollEnd',
- 'FLscroll_end',
- 'FLsetAlign',
- 'FLsetBox',
- 'FLsetColor',
- 'FLsetColor2',
- 'FLsetFont',
- 'FLsetPosition',
- 'FLsetSize',
- 'FLsetSnapGroup',
- 'FLsetText',
- 'FLsetTextColor',
- 'FLsetTextSize',
- 'FLsetTextType',
- 'FLsetVal',
- 'FLsetVal_i',
- 'FLsetVali',
- 'FLsetsnap',
- 'FLshow',
- 'FLslidBnk',
- 'FLslidBnk2',
- 'FLslidBnk2Set',
- 'FLslidBnk2Setk',
- 'FLslidBnkGetHandle',
- 'FLslidBnkSet',
- 'FLslidBnkSetk',
- 'FLslider',
- 'FLtabs',
- 'FLtabsEnd',
- 'FLtabs_end',
- 'FLtext',
- 'FLupdate',
- 'FLvalue',
- 'FLvkeybd',
- 'FLvslidBnk',
- 'FLvslidBnk2',
- 'FLxyin',
- 'MixerClear',
- 'MixerGetLevel',
- 'MixerReceive',
- 'MixerSend',
- 'MixerSetLevel',
- 'MixerSetLevel_i',
- 'OSCinit',
- 'OSClisten',
- 'OSCsend',
- 'a',
- 'abs',
- 'active',
- 'adsr',
- 'adsyn',
- 'adsynt',
- 'adsynt2',
- 'aftouch',
- 'alpass',
- 'alwayson',
- 'ampdb',
- 'ampdbfs',
- 'ampmidi',
- 'ampmidid',
- 'areson',
- 'aresonk',
- 'array',
- 'atone',
- 'atonek',
- 'atonex',
- 'babo',
- 'balance',
- 'bamboo',
- 'barmodel',
- 'bbcutm',
- 'bbcuts',
- 'betarand',
- 'bexprnd',
- 'bformdec',
- 'bformdec1',
- 'bformenc',
- 'bformenc1',
- 'binit',
- 'biquad',
- 'biquada',
- 'birnd',
- 'bqrez',
- 'buchla',
- 'butbp',
- 'butbr',
- 'buthp',
- 'butlp',
- 'butterbp',
- 'butterbr',
- 'butterhp',
- 'butterlp',
- 'button',
- 'buzz',
- 'c2r',
- 'cabasa',
- 'cauchy',
- 'cauchyi',
- 'ceil',
- 'cell',
- 'cent',
- 'centroid',
- 'ceps',
- #'cggoto',
- 'chanctrl',
- 'changed',
- 'chani',
- 'chano',
- 'chebyshevpoly',
- 'checkbox',
- 'chn_S',
- 'chn_a',
- 'chn_k',
- 'chnclear',
- 'chnexport',
- 'chnget',
- 'chnmix',
- 'chnparams',
- 'chnset',
- 'chuap',
- #'cigoto',
- #'cingoto',
- #'ckgoto',
- 'clear',
- 'clfilt',
- 'clip',
- 'clockoff',
- 'clockon',
- 'cmplxprod',
- #'cngoto',
- 'comb',
- 'combinv',
- 'compilecsd',
- 'compileorc',
- 'compilestr',
- 'compress',
- 'connect',
- 'control',
- 'convle',
- 'convolve',
- 'copy2ftab',
- 'copy2ttab',
- 'copya2ftab',
- 'copyf2array',
- 'cos',
- 'cosh',
- 'cosinv',
- 'cosseg',
- 'cossegb',
- 'cossegr',
- 'cps2pch',
- 'cpsmidi',
- 'cpsmidib',
- 'cpsmidinn',
- 'cpsoct',
- 'cpspch',
- 'cpstmid',
- 'cpstun',
- 'cpstuni',
- 'cpsxpch',
- 'cpuprc',
- 'cross2',
- 'crossfm',
- 'crossfmi',
- 'crossfmpm',
- 'crossfmpmi',
- 'crosspm',
- 'crosspmi',
- 'crunch',
- 'ctlchn',
- 'ctrl14',
- 'ctrl21',
- 'ctrl7',
- 'ctrlinit',
- 'cuserrnd',
- 'dam',
- 'date',
- 'dates',
- 'db',
- 'dbamp',
- 'dbfsamp',
- 'dcblock',
- 'dcblock2',
- 'dconv',
- 'delay',
- 'delay1',
- 'delayk',
- 'delayr',
- 'delayw',
- 'deltap',
- 'deltap3',
- 'deltapi',
- 'deltapn',
- 'deltapx',
- 'deltapxw',
- 'denorm',
- 'diff',
- 'diskgrain',
- 'diskin',
- 'diskin2',
- 'dispfft',
- 'display',
- 'distort',
- 'distort1',
- 'divz',
- 'doppler',
- 'downsamp',
- 'dripwater',
- 'dumpk',
- 'dumpk2',
- 'dumpk3',
- 'dumpk4',
- 'duserrnd',
- 'dust',
- 'dust2',
- #'endin',
- #'endop',
- 'envlpx',
- 'envlpxr',
- 'ephasor',
- 'eqfil',
- 'evalstr',
- 'event',
- 'event_i',
- 'exciter',
- 'exitnow',
- 'exp',
- 'expcurve',
- 'expon',
- 'exprand',
- 'exprandi',
- 'expseg',
- 'expsega',
- 'expsegb',
- 'expsegba',
- 'expsegr',
- 'fareylen',
- 'fareyleni',
- 'faustaudio',
- 'faustcompile',
- 'faustctl',
- 'faustgen',
- 'fft',
- 'fftinv',
- 'ficlose',
- 'filebit',
- 'filelen',
- 'filenchnls',
- 'filepeak',
- 'filesr',
- 'filevalid',
- 'fillarray',
- 'filter2',
- 'fin',
- 'fini',
- 'fink',
- 'fiopen',
- 'flanger',
- 'flashtxt',
- 'flooper',
- 'flooper2',
- 'floor',
- 'fluidAllOut',
- 'fluidCCi',
- 'fluidCCk',
- 'fluidControl',
- 'fluidEngine',
- 'fluidLoad',
- 'fluidNote',
- 'fluidOut',
- 'fluidProgramSelect',
- 'fluidSetInterpMethod',
- 'fmb3',
- 'fmbell',
- 'fmmetal',
- 'fmpercfl',
- 'fmrhode',
- 'fmvoice',
- 'fmwurlie',
- 'fof',
- 'fof2',
- 'fofilter',
- 'fog',
- 'fold',
- 'follow',
- 'follow2',
- 'foscil',
- 'foscili',
- 'fout',
- 'fouti',
- 'foutir',
- 'foutk',
- 'fprintks',
- 'fprints',
- 'frac',
- 'fractalnoise',
- 'freeverb',
- 'ftchnls',
- 'ftconv',
- 'ftcps',
- 'ftfree',
- 'ftgen',
- 'ftgenonce',
- 'ftgentmp',
- 'ftlen',
- 'ftload',
- 'ftloadk',
- 'ftlptim',
- 'ftmorf',
- 'ftresize',
- 'ftresizei',
- 'ftsave',
- 'ftsavek',
- 'ftsr',
- 'gain',
- 'gainslider',
- 'gauss',
- 'gaussi',
- 'gausstrig',
- 'gbuzz',
- 'genarray',
- 'genarray_i',
- 'gendy',
- 'gendyc',
- 'gendyx',
- 'getcfg',
- 'getcol',
- 'getrow',
- 'gogobel',
- #'goto',
- 'grain',
- 'grain2',
- 'grain3',
- 'granule',
- 'guiro',
- 'harmon',
- 'harmon2',
- 'harmon3',
- 'harmon4',
- 'hdf5read',
- 'hdf5write',
- 'hilbert',
- 'hrtfearly',
- 'hrtfer',
- 'hrtfmove',
- 'hrtfmove2',
- 'hrtfreverb',
- 'hrtfstat',
- 'hsboscil',
- 'hvs1',
- 'hvs2',
- 'hvs3',
- 'i',
- 'iceps',
- #'igoto',
- 'ihold',
- 'imagecreate',
- 'imagefree',
- 'imagegetpixel',
- 'imageload',
- 'imagesave',
- 'imagesetpixel',
- 'imagesize',
- 'in',
- 'in32',
- 'inch',
- 'inh',
- 'init',
- 'initc14',
- 'initc21',
- 'initc7',
- 'inleta',
- 'inletf',
- 'inletk',
- 'inletkid',
- 'inletv',
- 'ino',
- 'inq',
- 'inrg',
- 'ins',
- 'insglobal',
- 'insremot',
- #'instr',
- 'int',
- 'integ',
- 'interp',
- 'invalue',
- 'inx',
- 'inz',
- 'jitter',
- 'jitter2',
- 'jspline',
- 'k',
- #'kgoto',
- 'ktableseg',
- 'lenarray',
- 'lentab',
- 'lfo',
- 'limit',
- 'line',
- 'linen',
- 'linenr',
- 'lineto',
- 'linrand',
- 'linseg',
- 'linsegb',
- 'linsegr',
- 'locsend',
- 'locsig',
- 'log',
- 'log10',
- 'log2',
- 'logbtwo',
- 'logcurve',
- #'loop_ge',
- #'loop_gt',
- #'loop_le',
- #'loop_lt',
- 'loopseg',
- 'loopsegp',
- 'looptseg',
- 'loopxseg',
- 'lorenz',
- 'loscil',
- 'loscil3',
- 'loscilx',
- 'lowpass2',
- 'lowres',
- 'lowresx',
- 'lpf18',
- 'lpform',
- 'lpfreson',
- 'lphasor',
- 'lpinterp',
- 'lposcil',
- 'lposcil3',
- 'lposcila',
- 'lposcilsa',
- 'lposcilsa2',
- 'lpread',
- 'lpreson',
- 'lpshold',
- 'lpsholdp',
- 'lpslot',
- #'lua_exec',
- 'lua_ikopcall',
- #'lua_opdef',
- 'mac',
- 'maca',
- 'madsr',
- 'mags',
- 'mandel',
- 'mandol',
- 'maparray',
- 'maparray_i',
- 'marimba',
- 'massign',
- 'max',
- 'max_k',
- 'maxabs',
- 'maxabsaccum',
- 'maxaccum',
- 'maxalloc',
- 'maxarray',
- 'maxtab',
- 'mclock',
- 'mdelay',
- 'median',
- 'mediank',
- 'metro',
- 'midglobal',
- 'midic14',
- 'midic21',
- 'midic7',
- 'midichannelaftertouch',
- 'midichn',
- 'midicontrolchange',
- 'midictrl',
- 'mididefault',
- 'midifilestatus',
- 'midiin',
- 'midinoteoff',
- 'midinoteoncps',
- 'midinoteonkey',
- 'midinoteonoct',
- 'midinoteonpch',
- 'midion',
- 'midion2',
- 'midiout',
- 'midipgm',
- 'midipitchbend',
- 'midipolyaftertouch',
- 'midiprogramchange',
- 'miditempo',
- 'midremot',
- 'min',
- 'minabs',
- 'minabsaccum',
- 'minaccum',
- 'minarray',
- 'mincer',
- 'mintab',
- 'mirror',
- 'mode',
- 'modmatrix',
- 'monitor',
- 'moog',
- 'moogladder',
- 'moogvcf',
- 'moogvcf2',
- 'moscil',
- 'mp3bitrate',
- 'mp3in',
- 'mp3len',
- 'mp3nchnls',
- 'mp3sr',
- 'mpulse',
- 'mrtmsg',
- 'multitap',
- 'mute',
- 'mxadsr',
- 'nestedap',
- 'nlalp',
- 'nlfilt',
- 'nlfilt2',
- 'noise',
- 'noteoff',
- 'noteon',
- 'noteondur',
- 'noteondur2',
- 'notnum',
- 'nreverb',
- 'nrpn',
- 'nsamp',
- 'nstance',
- 'nstrnum',
- 'ntrpol',
- 'octave',
- 'octcps',
- 'octmidi',
- 'octmidib',
- 'octmidinn',
- 'octpch',
- #'opcode',
- 'oscbnk',
- 'oscil',
- 'oscil1',
- 'oscil1i',
- 'oscil3',
- 'oscili',
- 'oscilikt',
- 'osciliktp',
- 'oscilikts',
- 'osciln',
- 'oscils',
- 'oscilx',
- 'out',
- 'out32',
- 'outc',
- 'outch',
- 'outh',
- 'outiat',
- 'outic',
- 'outic14',
- 'outipat',
- 'outipb',
- 'outipc',
- 'outkat',
- 'outkc',
- 'outkc14',
- 'outkpat',
- 'outkpb',
- 'outkpc',
- 'outleta',
- 'outletf',
- 'outletk',
- 'outletkid',
- 'outletv',
- 'outo',
- 'outq',
- 'outq1',
- 'outq2',
- 'outq3',
- 'outq4',
- 'outrg',
- 'outs',
- 'outs1',
- 'outs2',
- 'outvalue',
- 'outx',
- 'outz',
- 'p',
- 'pan',
- 'pan2',
- 'pareq',
- 'partials',
- 'partikkel',
- 'partikkelget',
- 'partikkelset',
- 'partikkelsync',
- 'passign',
- 'pcauchy',
- 'pchbend',
- 'pchmidi',
- 'pchmidib',
- 'pchmidinn',
- 'pchoct',
- 'pconvolve',
- 'pcount',
- 'pdclip',
- 'pdhalf',
- 'pdhalfy',
- 'peak',
- 'pgmassign',
- 'pgmchn',
- 'phaser1',
- 'phaser2',
- 'phasor',
- 'phasorbnk',
- 'phs',
- 'pindex',
- 'pinker',
- 'pinkish',
- 'pitch',
- 'pitchac',
- 'pitchamdf',
- 'planet',
- 'platerev',
- 'plltrack',
- 'pluck',
- 'poisson',
- 'pol2rect',
- 'polyaft',
- 'polynomial',
- 'pop',
- 'pop_f',
- 'port',
- 'portk',
- 'poscil',
- 'poscil3',
- 'pow',
- 'powershape',
- 'powoftwo',
- 'prealloc',
- 'prepiano',
- 'print',
- 'print_type',
- 'printf',
- 'printf_i',
- 'printk',
- 'printk2',
- 'printks',
- 'printks2',
- 'prints',
- 'product',
- 'pset',
- 'ptable',
- 'ptable3',
- 'ptablei',
- 'ptableiw',
- 'ptablew',
- 'ptrack',
- 'push',
- 'push_f',
- 'puts',
- 'pvadd',
- 'pvbufread',
- 'pvcross',
- 'pvinterp',
- 'pvoc',
- 'pvread',
- 'pvs2array',
- 'pvs2tab',
- 'pvsadsyn',
- 'pvsanal',
- 'pvsarp',
- 'pvsbandp',
- 'pvsbandr',
- 'pvsbin',
- 'pvsblur',
- 'pvsbuffer',
- 'pvsbufread',
- 'pvsbufread2',
- 'pvscale',
- 'pvscent',
- 'pvsceps',
- 'pvscross',
- 'pvsdemix',
- 'pvsdiskin',
- 'pvsdisp',
- 'pvsenvftw',
- 'pvsfilter',
- 'pvsfread',
- 'pvsfreeze',
- 'pvsfromarray',
- 'pvsftr',
- 'pvsftw',
- 'pvsfwrite',
- 'pvsgain',
- 'pvsgendy',
- 'pvshift',
- 'pvsifd',
- 'pvsin',
- 'pvsinfo',
- 'pvsinit',
- 'pvslock',
- 'pvsmaska',
- 'pvsmix',
- 'pvsmooth',
- 'pvsmorph',
- 'pvsosc',
- 'pvsout',
- 'pvspitch',
- 'pvstanal',
- 'pvstencil',
- 'pvsvoc',
- 'pvswarp',
- 'pvsynth',
- 'pwd',
- 'pyassign',
- 'pyassigni',
- 'pyassignt',
- 'pycall',
- 'pycall1',
- 'pycall1i',
- 'pycall1t',
- 'pycall2',
- 'pycall2i',
- 'pycall2t',
- 'pycall3',
- 'pycall3i',
- 'pycall3t',
- 'pycall4',
- 'pycall4i',
- 'pycall4t',
- 'pycall5',
- 'pycall5i',
- 'pycall5t',
- 'pycall6',
- 'pycall6i',
- 'pycall6t',
- 'pycall7',
- 'pycall7i',
- 'pycall7t',
- 'pycall8',
- 'pycall8i',
- 'pycall8t',
- 'pycalli',
- 'pycalln',
- 'pycallni',
- 'pycallt',
- 'pyeval',
- 'pyevali',
- 'pyevalt',
- 'pyexec',
- 'pyexeci',
- 'pyexect',
- 'pyinit',
- 'pylassign',
- 'pylassigni',
- 'pylassignt',
- 'pylcall',
- 'pylcall1',
- 'pylcall1i',
- 'pylcall1t',
- 'pylcall2',
- 'pylcall2i',
- 'pylcall2t',
- 'pylcall3',
- 'pylcall3i',
- 'pylcall3t',
- 'pylcall4',
- 'pylcall4i',
- 'pylcall4t',
- 'pylcall5',
- 'pylcall5i',
- 'pylcall5t',
- 'pylcall6',
- 'pylcall6i',
- 'pylcall6t',
- 'pylcall7',
- 'pylcall7i',
- 'pylcall7t',
- 'pylcall8',
- 'pylcall8i',
- 'pylcall8t',
- 'pylcalli',
- 'pylcalln',
- 'pylcallni',
- 'pylcallt',
- 'pyleval',
- 'pylevali',
- 'pylevalt',
- 'pylexec',
- 'pylexeci',
- 'pylexect',
- #'pylrun',
- #'pylruni',
- #'pylrunt',
- #'pyrun',
- #'pyruni',
- #'pyrunt',
- 'qinf',
- 'qnan',
- 'r2c',
- 'rand',
- 'randh',
- 'randi',
- 'random',
- 'randomh',
- 'randomi',
- 'rbjeq',
- 'readclock',
- 'readf',
- 'readfi',
- 'readk',
- 'readk2',
- 'readk3',
- 'readk4',
- 'readks',
- 'readscore',
- 'readscratch',
- 'rect2pol',
- 'reinit',
- 'release',
- 'remoteport',
- 'remove',
- 'repluck',
- 'reson',
- 'resonk',
- 'resonr',
- 'resonx',
- 'resonxk',
- 'resony',
- 'resonz',
- 'resyn',
- #'return',
- 'reverb',
- 'reverb2',
- 'reverbsc',
- 'rewindscore',
- 'rezzy',
- 'rfft',
- 'rifft',
- #'rigoto',
- 'rireturn',
- 'rms',
- 'rnd',
- 'rnd31',
- 'round',
- 'rspline',
- 'rtclock',
- 's16b14',
- 's32b14',
- 'samphold',
- 'sandpaper',
- 'scale',
- 'scalearray',
- 'scalet',
- 'scanhammer',
- 'scans',
- 'scantable',
- 'scanu',
- 'schedkwhen',
- 'schedkwhennamed',
- 'schedule',
- 'schedwhen',
- #'scoreline',
- #'scoreline_i',
- 'seed',
- 'sekere',
- 'semitone',
- 'sense',
- 'sensekey',
- 'seqtime',
- 'seqtime2',
- 'serialBegin',
- 'serialEnd',
- 'serialFlush',
- 'serialPrint',
- 'serialRead',
- 'serialWrite',
- 'serialWrite_i',
- 'setcol',
- 'setctrl',
- 'setksmps',
- 'setrow',
- 'setscorepos',
- 'sfilist',
- 'sfinstr',
- 'sfinstr3',
- 'sfinstr3m',
- 'sfinstrm',
- 'sfload',
- 'sflooper',
- 'sfpassign',
- 'sfplay',
- 'sfplay3',
- 'sfplay3m',
- 'sfplaym',
- 'sfplist',
- 'sfpreset',
- 'shaker',
- 'shiftin',
- 'shiftout',
- 'signalflowgraph',
- 'signum',
- 'sin',
- 'sinh',
- 'sininv',
- 'sinsyn',
- 'sleighbells',
- 'slicearray',
- 'slider16',
- 'slider16f',
- 'slider16table',
- 'slider16tablef',
- 'slider32',
- 'slider32f',
- 'slider32table',
- 'slider32tablef',
- 'slider64',
- 'slider64f',
- 'slider64table',
- 'slider64tablef',
- 'slider8',
- 'slider8f',
- 'slider8table',
- 'slider8tablef',
- 'sliderKawai',
- 'sndload',
- 'sndloop',
- 'sndwarp',
- 'sndwarpst',
- 'sockrecv',
- 'sockrecvs',
- 'socksend',
- 'socksends',
- 'soundin',
- 'soundout',
- 'soundouts',
- 'space',
- 'spat3d',
- 'spat3di',
- 'spat3dt',
- 'spdist',
- 'specaddm',
- 'specdiff',
- 'specdisp',
- 'specfilt',
- 'spechist',
- 'specptrk',
- 'specscal',
- 'specsum',
- 'spectrum',
- 'splitrig',
- 'sprintf',
- 'sprintfk',
- 'spsend',
- 'sqrt',
- 'stack',
- 'statevar',
- 'stix',
- 'strcat',
- 'strcatk',
- 'strchar',
- 'strchark',
- 'strcmp',
- 'strcmpk',
- 'strcpy',
- 'strcpyk',
- 'strecv',
- 'streson',
- 'strfromurl',
- 'strget',
- 'strindex',
- 'strindexk',
- 'strlen',
- 'strlenk',
- 'strlower',
- 'strlowerk',
- 'strrindex',
- 'strrindexk',
- 'strset',
- 'strsub',
- 'strsubk',
- 'strtod',
- 'strtodk',
- 'strtol',
- 'strtolk',
- 'strupper',
- 'strupperk',
- 'stsend',
- 'subinstr',
- 'subinstrinit',
- 'sum',
- 'sumarray',
- 'sumtab',
- 'svfilter',
- 'syncgrain',
- 'syncloop',
- 'syncphasor',
- 'system',
- 'system_i',
- 'tab',
- 'tab2pvs',
- 'tab_i',
- 'tabgen',
- 'table',
- 'table3',
- 'table3kt',
- 'tablecopy',
- 'tablefilter',
- 'tablefilteri',
- 'tablegpw',
- 'tablei',
- 'tableicopy',
- 'tableigpw',
- 'tableikt',
- 'tableimix',
- 'tableiw',
- 'tablekt',
- 'tablemix',
- 'tableng',
- 'tablera',
- 'tableseg',
- 'tableshuffle',
- 'tableshufflei',
- 'tablew',
- 'tablewa',
- 'tablewkt',
- 'tablexkt',
- 'tablexseg',
- 'tabmap',
- 'tabmap_i',
- 'tabmorph',
- 'tabmorpha',
- 'tabmorphak',
- 'tabmorphi',
- 'tabplay',
- 'tabrec',
- 'tabslice',
- 'tabsum',
- 'tabw',
- 'tabw_i',
- 'tambourine',
- 'tan',
- 'tanh',
- 'taninv',
- 'taninv2',
- 'tb0',
- 'tb0_init',
- 'tb1',
- 'tb10',
- 'tb10_init',
- 'tb11',
- 'tb11_init',
- 'tb12',
- 'tb12_init',
- 'tb13',
- 'tb13_init',
- 'tb14',
- 'tb14_init',
- 'tb15',
- 'tb15_init',
- 'tb1_init',
- 'tb2',
- 'tb2_init',
- 'tb3',
- 'tb3_init',
- 'tb4',
- 'tb4_init',
- 'tb5',
- 'tb5_init',
- 'tb6',
- 'tb6_init',
- 'tb7',
- 'tb7_init',
- 'tb8',
- 'tb8_init',
- 'tb9',
- 'tb9_init',
- 'tbvcf',
- 'tempest',
- 'tempo',
- 'temposcal',
- 'tempoval',
- #'tigoto',
- 'timedseq',
- 'timeinstk',
- 'timeinsts',
- 'timek',
- 'times',
- #'timout',
- 'tival',
- 'tlineto',
- 'tone',
- 'tonek',
- 'tonex',
- 'tradsyn',
- 'trandom',
- 'transeg',
- 'transegb',
- 'transegr',
- 'trcross',
- 'trfilter',
- 'trhighest',
- 'trigger',
- 'trigseq',
- 'trirand',
- 'trlowest',
- 'trmix',
- 'trscale',
- 'trshift',
- 'trsplit',
- 'turnoff',
- 'turnoff2',
- 'turnon',
- 'unirand',
- 'unwrap',
- 'upsamp',
- 'urd',
- 'vactrol',
- 'vadd',
- 'vadd_i',
- 'vaddv',
- 'vaddv_i',
- 'vaget',
- 'valpass',
- 'vaset',
- 'vbap',
- 'vbap16',
- 'vbap4',
- 'vbap4move',
- 'vbap8',
- 'vbap8move',
- 'vbapg',
- 'vbapgmove',
- 'vbaplsinit',
- 'vbapmove',
- 'vbapz',
- 'vbapzmove',
- 'vcella',
- 'vco',
- 'vco2',
- 'vco2ft',
- 'vco2ift',
- 'vco2init',
- 'vcomb',
- 'vcopy',
- 'vcopy_i',
- 'vdel_k',
- 'vdelay',
- 'vdelay3',
- 'vdelayk',
- 'vdelayx',
- 'vdelayxq',
- 'vdelayxs',
- 'vdelayxw',
- 'vdelayxwq',
- 'vdelayxws',
- 'vdivv',
- 'vdivv_i',
- 'vecdelay',
- 'veloc',
- 'vexp',
- 'vexp_i',
- 'vexpseg',
- 'vexpv',
- 'vexpv_i',
- 'vibes',
- 'vibr',
- 'vibrato',
- 'vincr',
- 'vlimit',
- 'vlinseg',
- 'vlowres',
- 'vmap',
- 'vmirror',
- 'vmult',
- 'vmult_i',
- 'vmultv',
- 'vmultv_i',
- 'voice',
- 'vosim',
- 'vphaseseg',
- 'vport',
- 'vpow',
- 'vpow_i',
- 'vpowv',
- 'vpowv_i',
- 'vpvoc',
- 'vrandh',
- 'vrandi',
- 'vsubv',
- 'vsubv_i',
- 'vtaba',
- 'vtabi',
- 'vtabk',
- 'vtable1k',
- 'vtablea',
- 'vtablei',
- 'vtablek',
- 'vtablewa',
- 'vtablewi',
- 'vtablewk',
- 'vtabwa',
- 'vtabwi',
- 'vtabwk',
- 'vwrap',
- 'waveset',
- 'weibull',
- 'wgbow',
- 'wgbowedbar',
- 'wgbrass',
- 'wgclar',
- 'wgflute',
- 'wgpluck',
- 'wgpluck2',
- 'wguide1',
- 'wguide2',
- 'wiiconnect',
- 'wiidata',
- 'wiirange',
- 'wiisend',
- 'window',
- 'wrap',
- 'writescratch',
- 'wterrain',
- 'xadsr',
- 'xin',
- 'xout',
- 'xscanmap',
- 'xscans',
- 'xscansmap',
- 'xscanu',
- 'xtratim',
- 'xyin',
- 'zacl',
- 'zakinit',
- 'zamod',
- 'zar',
- 'zarg',
- 'zaw',
- 'zawm',
- 'zfilter2',
- 'zir',
- 'ziw',
- 'ziwm',
- 'zkcl',
- 'zkmod',
- 'zkr',
- 'zkw',
- 'zkwm'
-))
+# Opcodes in Csound 6.12.0 at commit 6ca322bd31f1ca907c008616b40a5f237ff449db using
+# python -c "
+# import re, subprocess
+# output = subprocess.Popen(['csound', '--list-opcodes0'], stderr=subprocess.PIPE).communicate()[1]
+# opcodes = output[re.search(r'^$', output, re.M).end():re.search(r'^\d+ opcodes$', output, re.M).start()].split()
+# output = subprocess.Popen(['csound', '--list-opcodes2'], stderr=subprocess.PIPE).communicate()[1]
+# all_opcodes = output[re.search(r'^$', output, re.M).end():re.search(r'^\d+ opcodes$', output, re.M).start()].split()
+# deprecated_opcodes = [opcode for opcode in all_opcodes if opcode not in opcodes]
+# print '''OPCODES = set(\'''
+# {}
+# \'''.split())
+#
+# DEPRECATED_OPCODES = set(\'''
+# {}
+# \'''.split())
+# '''.format('\n'.join(opcodes), '\n'.join(deprecated_opcodes))
+# "
+# except for
+# cggoto csound.com/docs/manual/cggoto.html
+# cigoto csound.com/docs/manual/cigoto.html
+# cingoto (undocumented)
+# ckgoto csound.com/docs/manual/ckgoto.html
+# cngoto csound.com/docs/manual/cngoto.html
+# cnkgoto (undocumented)
+# endin csound.com/docs/manual/endin.html
+# endop csound.com/docs/manual/endop.html
+# goto csound.com/docs/manual/goto.html
+# igoto csound.com/docs/manual/igoto.html
+# instr csound.com/docs/manual/instr.html
+# kgoto csound.com/docs/manual/kgoto.html
+# loop_ge csound.com/docs/manual/loop_ge.html
+# loop_gt csound.com/docs/manual/loop_gt.html
+# loop_le csound.com/docs/manual/loop_le.html
+# loop_lt csound.com/docs/manual/loop_lt.html
+# opcode csound.com/docs/manual/opcode.html
+# reinit csound.com/docs/manual/reinit.html
+# return csound.com/docs/manual/return.html
+# rireturn csound.com/docs/manual/rireturn.html
+# rigoto csound.com/docs/manual/rigoto.html
+# tigoto csound.com/docs/manual/tigoto.html
+# timout csound.com/docs/manual/timout.html
+# which are treated as keywords in csound.py.
+
+OPCODES = set('''
+ATSadd
+ATSaddnz
+ATSbufread
+ATScross
+ATSinfo
+ATSinterpread
+ATSpartialtap
+ATSread
+ATSreadnz
+ATSsinnoi
+FLbox
+FLbutBank
+FLbutton
+FLcloseButton
+FLcolor
+FLcolor2
+FLcount
+FLexecButton
+FLgetsnap
+FLgroup
+FLgroupEnd
+FLgroup_end
+FLhide
+FLhvsBox
+FLhvsBoxSetValue
+FLjoy
+FLkeyIn
+FLknob
+FLlabel
+FLloadsnap
+FLmouse
+FLpack
+FLpackEnd
+FLpack_end
+FLpanel
+FLpanelEnd
+FLpanel_end
+FLprintk
+FLprintk2
+FLroller
+FLrun
+FLsavesnap
+FLscroll
+FLscrollEnd
+FLscroll_end
+FLsetAlign
+FLsetBox
+FLsetColor
+FLsetColor2
+FLsetFont
+FLsetPosition
+FLsetSize
+FLsetSnapGroup
+FLsetText
+FLsetTextColor
+FLsetTextSize
+FLsetTextType
+FLsetVal
+FLsetVal_i
+FLsetVali
+FLsetsnap
+FLshow
+FLslidBnk
+FLslidBnk2
+FLslidBnk2Set
+FLslidBnk2Setk
+FLslidBnkGetHandle
+FLslidBnkSet
+FLslidBnkSetk
+FLslider
+FLtabs
+FLtabsEnd
+FLtabs_end
+FLtext
+FLupdate
+FLvalue
+FLvkeybd
+FLvslidBnk
+FLvslidBnk2
+FLxyin
+JackoAudioIn
+JackoAudioInConnect
+JackoAudioOut
+JackoAudioOutConnect
+JackoFreewheel
+JackoInfo
+JackoInit
+JackoMidiInConnect
+JackoMidiOut
+JackoMidiOutConnect
+JackoNoteOut
+JackoOn
+JackoTransport
+K35_hpf
+K35_lpf
+MixerClear
+MixerGetLevel
+MixerReceive
+MixerSend
+MixerSetLevel
+MixerSetLevel_i
+OSCbundle
+OSCcount
+OSCinit
+OSCinitM
+OSClisten
+OSCraw
+OSCsend
+OSCsend_lo
+S
+STKBandedWG
+STKBeeThree
+STKBlowBotl
+STKBlowHole
+STKBowed
+STKBrass
+STKClarinet
+STKDrummer
+STKFlute
+STKFMVoices
+STKHevyMetl
+STKMandolin
+STKModalBar
+STKMoog
+STKPercFlut
+STKPlucked
+STKResonate
+STKRhodey
+STKSaxofony
+STKShakers
+STKSimple
+STKSitar
+STKStifKarp
+STKTubeBell
+STKVoicForm
+STKWhistle
+STKWurley
+a
+abs
+active
+adsr
+adsyn
+adsynt
+adsynt2
+aftouch
+alpass
+alwayson
+ampdb
+ampdbfs
+ampmidi
+ampmidid
+areson
+aresonk
+atone
+atonek
+atonex
+babo
+balance
+balance2
+bamboo
+barmodel
+bbcutm
+bbcuts
+beadsynt
+beosc
+betarand
+bexprnd
+bformdec1
+bformenc1
+binit
+biquad
+biquada
+birnd
+bpf
+bpfcos
+bqrez
+butbp
+butbr
+buthp
+butlp
+butterbp
+butterbr
+butterhp
+butterlp
+button
+buzz
+c2r
+cabasa
+cauchy
+cauchyi
+cbrt
+ceil
+cell
+cent
+centroid
+ceps
+cepsinv
+chanctrl
+changed
+changed2
+chani
+chano
+chebyshevpoly
+checkbox
+chn_S
+chn_a
+chn_k
+chnclear
+chnexport
+chnget
+chngetks
+chnmix
+chnparams
+chnset
+chnsetks
+chuap
+clear
+clfilt
+clip
+clockoff
+clockon
+cmp
+cmplxprod
+comb
+combinv
+compilecsd
+compileorc
+compilestr
+compress
+compress2
+connect
+control
+convle
+convolve
+copya2ftab
+copyf2array
+cos
+cosh
+cosinv
+cosseg
+cossegb
+cossegr
+cps2pch
+cpsmidi
+cpsmidib
+cpsmidinn
+cpsoct
+cpspch
+cpstmid
+cpstun
+cpstuni
+cpsxpch
+cpumeter
+cpuprc
+cross2
+crossfm
+crossfmi
+crossfmpm
+crossfmpmi
+crosspm
+crosspmi
+crunch
+ctlchn
+ctrl14
+ctrl21
+ctrl7
+ctrlinit
+cuserrnd
+dam
+date
+dates
+db
+dbamp
+dbfsamp
+dcblock
+dcblock2
+dconv
+dct
+dctinv
+deinterleave
+delay
+delay1
+delayk
+delayr
+delayw
+deltap
+deltap3
+deltapi
+deltapn
+deltapx
+deltapxw
+denorm
+diff
+diode_ladder
+directory
+diskgrain
+diskin
+diskin2
+dispfft
+display
+distort
+distort1
+divz
+doppler
+dot
+downsamp
+dripwater
+dssiactivate
+dssiaudio
+dssictls
+dssiinit
+dssilist
+dumpk
+dumpk2
+dumpk3
+dumpk4
+duserrnd
+dust
+dust2
+envlpx
+envlpxr
+ephasor
+eqfil
+evalstr
+event
+event_i
+exciter
+exitnow
+exp
+expcurve
+expon
+exprand
+exprandi
+expseg
+expsega
+expsegb
+expsegba
+expsegr
+fareylen
+fareyleni
+faustaudio
+faustcompile
+faustctl
+faustdsp
+faustgen
+faustplay
+fft
+fftinv
+ficlose
+filebit
+filelen
+filenchnls
+filepeak
+filescal
+filesr
+filevalid
+fillarray
+filter2
+fin
+fini
+fink
+fiopen
+flanger
+flashtxt
+flooper
+flooper2
+floor
+fmanal
+fmax
+fmb3
+fmbell
+fmin
+fmmetal
+fmod
+fmpercfl
+fmrhode
+fmvoice
+fmwurlie
+fof
+fof2
+fofilter
+fog
+fold
+follow
+follow2
+foscil
+foscili
+fout
+fouti
+foutir
+foutk
+fprintks
+fprints
+frac
+fractalnoise
+framebuffer
+freeverb
+ftaudio
+ftchnls
+ftconv
+ftcps
+ftfree
+ftgen
+ftgenonce
+ftgentmp
+ftlen
+ftload
+ftloadk
+ftlptim
+ftmorf
+ftom
+ftprint
+ftresize
+ftresizei
+ftsamplebank
+ftsave
+ftsavek
+ftslice
+ftsr
+gain
+gainslider
+gauss
+gaussi
+gausstrig
+gbuzz
+genarray
+genarray_i
+gendy
+gendyc
+gendyx
+getcfg
+getcol
+getftargs
+getrow
+getrowlin
+getseed
+gogobel
+grain
+grain2
+grain3
+granule
+guiro
+harmon
+harmon2
+harmon3
+harmon4
+hdf5read
+hdf5write
+hilbert
+hilbert2
+hrtfearly
+hrtfmove
+hrtfmove2
+hrtfreverb
+hrtfstat
+hsboscil
+hvs1
+hvs2
+hvs3
+hypot
+i
+ihold
+imagecreate
+imagefree
+imagegetpixel
+imageload
+imagesave
+imagesetpixel
+imagesize
+in
+in32
+inch
+inh
+init
+initc14
+initc21
+initc7
+inleta
+inletf
+inletk
+inletkid
+inletv
+ino
+inq
+inrg
+ins
+insglobal
+insremot
+int
+integ
+interleave
+interp
+invalue
+inx
+inz
+jacktransport
+jitter
+jitter2
+joystick
+jspline
+k
+la_i_add_mc
+la_i_add_mr
+la_i_add_vc
+la_i_add_vr
+la_i_assign_mc
+la_i_assign_mr
+la_i_assign_t
+la_i_assign_vc
+la_i_assign_vr
+la_i_conjugate_mc
+la_i_conjugate_mr
+la_i_conjugate_vc
+la_i_conjugate_vr
+la_i_distance_vc
+la_i_distance_vr
+la_i_divide_mc
+la_i_divide_mr
+la_i_divide_vc
+la_i_divide_vr
+la_i_dot_mc
+la_i_dot_mc_vc
+la_i_dot_mr
+la_i_dot_mr_vr
+la_i_dot_vc
+la_i_dot_vr
+la_i_get_mc
+la_i_get_mr
+la_i_get_vc
+la_i_get_vr
+la_i_invert_mc
+la_i_invert_mr
+la_i_lower_solve_mc
+la_i_lower_solve_mr
+la_i_lu_det_mc
+la_i_lu_det_mr
+la_i_lu_factor_mc
+la_i_lu_factor_mr
+la_i_lu_solve_mc
+la_i_lu_solve_mr
+la_i_mc_create
+la_i_mc_set
+la_i_mr_create
+la_i_mr_set
+la_i_multiply_mc
+la_i_multiply_mr
+la_i_multiply_vc
+la_i_multiply_vr
+la_i_norm_euclid_mc
+la_i_norm_euclid_mr
+la_i_norm_euclid_vc
+la_i_norm_euclid_vr
+la_i_norm_inf_mc
+la_i_norm_inf_mr
+la_i_norm_inf_vc
+la_i_norm_inf_vr
+la_i_norm_max_mc
+la_i_norm_max_mr
+la_i_norm1_mc
+la_i_norm1_mr
+la_i_norm1_vc
+la_i_norm1_vr
+la_i_print_mc
+la_i_print_mr
+la_i_print_vc
+la_i_print_vr
+la_i_qr_eigen_mc
+la_i_qr_eigen_mr
+la_i_qr_factor_mc
+la_i_qr_factor_mr
+la_i_qr_sym_eigen_mc
+la_i_qr_sym_eigen_mr
+la_i_random_mc
+la_i_random_mr
+la_i_random_vc
+la_i_random_vr
+la_i_size_mc
+la_i_size_mr
+la_i_size_vc
+la_i_size_vr
+la_i_subtract_mc
+la_i_subtract_mr
+la_i_subtract_vc
+la_i_subtract_vr
+la_i_t_assign
+la_i_trace_mc
+la_i_trace_mr
+la_i_transpose_mc
+la_i_transpose_mr
+la_i_upper_solve_mc
+la_i_upper_solve_mr
+la_i_vc_create
+la_i_vc_set
+la_i_vr_create
+la_i_vr_set
+la_k_a_assign
+la_k_add_mc
+la_k_add_mr
+la_k_add_vc
+la_k_add_vr
+la_k_assign_a
+la_k_assign_f
+la_k_assign_mc
+la_k_assign_mr
+la_k_assign_t
+la_k_assign_vc
+la_k_assign_vr
+la_k_conjugate_mc
+la_k_conjugate_mr
+la_k_conjugate_vc
+la_k_conjugate_vr
+la_k_current_f
+la_k_current_vr
+la_k_distance_vc
+la_k_distance_vr
+la_k_divide_mc
+la_k_divide_mr
+la_k_divide_vc
+la_k_divide_vr
+la_k_dot_mc
+la_k_dot_mc_vc
+la_k_dot_mr
+la_k_dot_mr_vr
+la_k_dot_vc
+la_k_dot_vr
+la_k_f_assign
+la_k_get_mc
+la_k_get_mr
+la_k_get_vc
+la_k_get_vr
+la_k_invert_mc
+la_k_invert_mr
+la_k_lower_solve_mc
+la_k_lower_solve_mr
+la_k_lu_det_mc
+la_k_lu_det_mr
+la_k_lu_factor_mc
+la_k_lu_factor_mr
+la_k_lu_solve_mc
+la_k_lu_solve_mr
+la_k_mc_set
+la_k_mr_set
+la_k_multiply_mc
+la_k_multiply_mr
+la_k_multiply_vc
+la_k_multiply_vr
+la_k_norm_euclid_mc
+la_k_norm_euclid_mr
+la_k_norm_euclid_vc
+la_k_norm_euclid_vr
+la_k_norm_inf_mc
+la_k_norm_inf_mr
+la_k_norm_inf_vc
+la_k_norm_inf_vr
+la_k_norm_max_mc
+la_k_norm_max_mr
+la_k_norm1_mc
+la_k_norm1_mr
+la_k_norm1_vc
+la_k_norm1_vr
+la_k_qr_eigen_mc
+la_k_qr_eigen_mr
+la_k_qr_factor_mc
+la_k_qr_factor_mr
+la_k_qr_sym_eigen_mc
+la_k_qr_sym_eigen_mr
+la_k_random_mc
+la_k_random_mr
+la_k_random_vc
+la_k_random_vr
+la_k_subtract_mc
+la_k_subtract_mr
+la_k_subtract_vc
+la_k_subtract_vr
+la_k_t_assign
+la_k_trace_mc
+la_k_trace_mr
+la_k_upper_solve_mc
+la_k_upper_solve_mr
+la_k_vc_set
+la_k_vr_set
+lenarray
+lfo
+limit
+limit1
+lincos
+line
+linen
+linenr
+lineto
+link_beat_force
+link_beat_get
+link_beat_request
+link_create
+link_enable
+link_is_enabled
+link_metro
+link_peers
+link_tempo_get
+link_tempo_set
+linlin
+linrand
+linseg
+linsegb
+linsegr
+liveconv
+locsend
+locsig
+log
+log10
+log2
+logbtwo
+logcurve
+loopseg
+loopsegp
+looptseg
+loopxseg
+lorenz
+loscil
+loscil3
+loscil3phs
+loscilphs
+loscilx
+lowpass2
+lowres
+lowresx
+lpf18
+lpform
+lpfreson
+lphasor
+lpinterp
+lposcil
+lposcil3
+lposcila
+lposcilsa
+lposcilsa2
+lpread
+lpreson
+lpshold
+lpsholdp
+lpslot
+lua_exec
+lua_iaopcall
+lua_iaopcall_off
+lua_ikopcall
+lua_ikopcall_off
+lua_iopcall
+lua_iopcall_off
+lua_opdef
+mac
+maca
+madsr
+mags
+mandel
+mandol
+maparray
+maparray_i
+marimba
+massign
+max
+max_k
+maxabs
+maxabsaccum
+maxaccum
+maxalloc
+maxarray
+mclock
+mdelay
+median
+mediank
+metro
+mfb
+midglobal
+midiarp
+midic14
+midic21
+midic7
+midichannelaftertouch
+midichn
+midicontrolchange
+midictrl
+mididefault
+midifilestatus
+midiin
+midinoteoff
+midinoteoncps
+midinoteonkey
+midinoteonoct
+midinoteonpch
+midion
+midion2
+midiout
+midiout_i
+midipgm
+midipitchbend
+midipolyaftertouch
+midiprogramchange
+miditempo
+midremot
+min
+minabs
+minabsaccum
+minaccum
+minarray
+mincer
+mirror
+mode
+modmatrix
+monitor
+moog
+moogladder
+moogladder2
+moogvcf
+moogvcf2
+moscil
+mp3bitrate
+mp3in
+mp3len
+mp3nchnls
+mp3scal
+mp3sr
+mpulse
+mrtmsg
+mtof
+mton
+multitap
+mute
+mvchpf
+mvclpf1
+mvclpf2
+mvclpf3
+mvclpf4
+mxadsr
+nchnls_hw
+nestedap
+nlalp
+nlfilt
+nlfilt2
+noise
+noteoff
+noteon
+noteondur
+noteondur2
+notnum
+nreverb
+nrpn
+nsamp
+nstance
+nstrnum
+ntom
+ntrpol
+nxtpow2
+octave
+octcps
+octmidi
+octmidib
+octmidinn
+octpch
+olabuffer
+oscbnk
+oscil
+oscil1
+oscil1i
+oscil3
+oscili
+oscilikt
+osciliktp
+oscilikts
+osciln
+oscils
+oscilx
+out
+out32
+outc
+outch
+outh
+outiat
+outic
+outic14
+outipat
+outipb
+outipc
+outkat
+outkc
+outkc14
+outkpat
+outkpb
+outkpc
+outleta
+outletf
+outletk
+outletkid
+outletv
+outo
+outq
+outq1
+outq2
+outq3
+outq4
+outrg
+outs
+outs1
+outs2
+outvalue
+outx
+outz
+p
+p5gconnect
+p5gdata
+pan
+pan2
+pareq
+part2txt
+partials
+partikkel
+partikkelget
+partikkelset
+partikkelsync
+passign
+paulstretch
+pcauchy
+pchbend
+pchmidi
+pchmidib
+pchmidinn
+pchoct
+pchtom
+pconvolve
+pcount
+pdclip
+pdhalf
+pdhalfy
+peak
+pgmassign
+pgmchn
+phaser1
+phaser2
+phasor
+phasorbnk
+phs
+pindex
+pinker
+pinkish
+pitch
+pitchac
+pitchamdf
+planet
+platerev
+plltrack
+pluck
+poisson
+pol2rect
+polyaft
+polynomial
+port
+portk
+poscil
+poscil3
+pow
+powershape
+powoftwo
+pows
+prealloc
+prepiano
+print
+print_type
+printarray
+printf
+printf_i
+printk
+printk2
+printks
+printks2
+prints
+product
+pset
+ptable
+ptable3
+ptablei
+ptableiw
+ptablew
+ptrack
+puts
+pvadd
+pvbufread
+pvcross
+pvinterp
+pvoc
+pvread
+pvs2array
+pvs2tab
+pvsadsyn
+pvsanal
+pvsarp
+pvsbandp
+pvsbandr
+pvsbin
+pvsblur
+pvsbuffer
+pvsbufread
+pvsbufread2
+pvscale
+pvscent
+pvsceps
+pvscross
+pvsdemix
+pvsdiskin
+pvsdisp
+pvsenvftw
+pvsfilter
+pvsfread
+pvsfreeze
+pvsfromarray
+pvsftr
+pvsftw
+pvsfwrite
+pvsgain
+pvshift
+pvsifd
+pvsin
+pvsinfo
+pvsinit
+pvslock
+pvsmaska
+pvsmix
+pvsmooth
+pvsmorph
+pvsosc
+pvsout
+pvspitch
+pvstanal
+pvstencil
+pvstrace
+pvsvoc
+pvswarp
+pvsynth
+pwd
+pyassign
+pyassigni
+pyassignt
+pycall
+pycall1
+pycall1i
+pycall1t
+pycall2
+pycall2i
+pycall2t
+pycall3
+pycall3i
+pycall3t
+pycall4
+pycall4i
+pycall4t
+pycall5
+pycall5i
+pycall5t
+pycall6
+pycall6i
+pycall6t
+pycall7
+pycall7i
+pycall7t
+pycall8
+pycall8i
+pycall8t
+pycalli
+pycalln
+pycallni
+pycallt
+pyeval
+pyevali
+pyevalt
+pyexec
+pyexeci
+pyexect
+pyinit
+pylassign
+pylassigni
+pylassignt
+pylcall
+pylcall1
+pylcall1i
+pylcall1t
+pylcall2
+pylcall2i
+pylcall2t
+pylcall3
+pylcall3i
+pylcall3t
+pylcall4
+pylcall4i
+pylcall4t
+pylcall5
+pylcall5i
+pylcall5t
+pylcall6
+pylcall6i
+pylcall6t
+pylcall7
+pylcall7i
+pylcall7t
+pylcall8
+pylcall8i
+pylcall8t
+pylcalli
+pylcalln
+pylcallni
+pylcallt
+pyleval
+pylevali
+pylevalt
+pylexec
+pylexeci
+pylexect
+pylrun
+pylruni
+pylrunt
+pyrun
+pyruni
+pyrunt
+qinf
+qnan
+r2c
+rand
+randh
+randi
+random
+randomh
+randomi
+rbjeq
+readclock
+readf
+readfi
+readk
+readk2
+readk3
+readk4
+readks
+readscore
+readscratch
+rect2pol
+release
+remoteport
+remove
+repluck
+reshapearray
+reson
+resonk
+resonr
+resonx
+resonxk
+resony
+resonz
+resyn
+reverb
+reverb2
+reverbsc
+rewindscore
+rezzy
+rfft
+rifft
+rms
+rnd
+rnd31
+round
+rspline
+rtclock
+s16b14
+s32b14
+samphold
+sandpaper
+sc_lag
+sc_lagud
+sc_phasor
+sc_trig
+scale
+scalearray
+scanhammer
+scans
+scantable
+scanu
+schedkwhen
+schedkwhennamed
+schedule
+schedwhen
+scoreline
+scoreline_i
+seed
+sekere
+select
+semitone
+sense
+sensekey
+seqtime
+seqtime2
+serialBegin
+serialEnd
+serialFlush
+serialPrint
+serialRead
+serialWrite
+serialWrite_i
+setcol
+setctrl
+setksmps
+setrow
+setscorepos
+sfilist
+sfinstr
+sfinstr3
+sfinstr3m
+sfinstrm
+sfload
+sflooper
+sfpassign
+sfplay
+sfplay3
+sfplay3m
+sfplaym
+sfplist
+sfpreset
+shaker
+shiftin
+shiftout
+signum
+sin
+sinh
+sininv
+sinsyn
+sleighbells
+slicearray
+slicearray_i
+slider16
+slider16f
+slider16table
+slider16tablef
+slider32
+slider32f
+slider32table
+slider32tablef
+slider64
+slider64f
+slider64table
+slider64tablef
+slider8
+slider8f
+slider8table
+slider8tablef
+sliderKawai
+sndloop
+sndwarp
+sndwarpst
+sockrecv
+sockrecvs
+socksend
+socksends
+sorta
+sortd
+soundin
+space
+spat3d
+spat3di
+spat3dt
+spdist
+splitrig
+sprintf
+sprintfk
+spsend
+sqrt
+squinewave
+statevar
+stix
+strcat
+strcatk
+strchar
+strchark
+strcmp
+strcmpk
+strcpy
+strcpyk
+strecv
+streson
+strfromurl
+strget
+strindex
+strindexk
+strlen
+strlenk
+strlower
+strlowerk
+strrindex
+strrindexk
+strset
+strsub
+strsubk
+strtod
+strtodk
+strtol
+strtolk
+strupper
+strupperk
+stsend
+subinstr
+subinstrinit
+sum
+sumarray
+svfilter
+syncgrain
+syncloop
+syncphasor
+system
+system_i
+tab
+tab2array
+tab2pvs
+tab_i
+tabifd
+table
+table3
+table3kt
+tablecopy
+tablefilter
+tablefilteri
+tablegpw
+tablei
+tableicopy
+tableigpw
+tableikt
+tableimix
+tableiw
+tablekt
+tablemix
+tableng
+tablera
+tableseg
+tableshuffle
+tableshufflei
+tablew
+tablewa
+tablewkt
+tablexkt
+tablexseg
+tabmorph
+tabmorpha
+tabmorphak
+tabmorphi
+tabplay
+tabrec
+tabrowlin
+tabsum
+tabw
+tabw_i
+tambourine
+tan
+tanh
+taninv
+taninv2
+tbvcf
+tempest
+tempo
+temposcal
+tempoval
+timedseq
+timeinstk
+timeinsts
+timek
+times
+tival
+tlineto
+tone
+tonek
+tonex
+tradsyn
+trandom
+transeg
+transegb
+transegr
+trcross
+trfilter
+trhighest
+trigger
+trigseq
+trim
+trim_i
+trirand
+trlowest
+trmix
+trscale
+trshift
+trsplit
+turnoff
+turnoff2
+turnon
+tvconv
+unirand
+unwrap
+upsamp
+urandom
+urd
+vactrol
+vadd
+vadd_i
+vaddv
+vaddv_i
+vaget
+valpass
+vaset
+vbap
+vbapg
+vbapgmove
+vbaplsinit
+vbapmove
+vbapz
+vbapzmove
+vcella
+vco
+vco2
+vco2ft
+vco2ift
+vco2init
+vcomb
+vcopy
+vcopy_i
+vdel_k
+vdelay
+vdelay3
+vdelayk
+vdelayx
+vdelayxq
+vdelayxs
+vdelayxw
+vdelayxwq
+vdelayxws
+vdivv
+vdivv_i
+vecdelay
+veloc
+vexp
+vexp_i
+vexpseg
+vexpv
+vexpv_i
+vibes
+vibr
+vibrato
+vincr
+vlimit
+vlinseg
+vlowres
+vmap
+vmirror
+vmult
+vmult_i
+vmultv
+vmultv_i
+voice
+vosim
+vphaseseg
+vport
+vpow
+vpow_i
+vpowv
+vpowv_i
+vpvoc
+vrandh
+vrandi
+vsubv
+vsubv_i
+vtaba
+vtabi
+vtabk
+vtable1k
+vtablea
+vtablei
+vtablek
+vtablewa
+vtablewi
+vtablewk
+vtabwa
+vtabwi
+vtabwk
+vwrap
+waveset
+websocket
+weibull
+wgbow
+wgbowedbar
+wgbrass
+wgclar
+wgflute
+wgpluck
+wgpluck2
+wguide1
+wguide2
+wiiconnect
+wiidata
+wiirange
+wiisend
+window
+wrap
+writescratch
+wterrain
+xadsr
+xin
+xout
+xscanmap
+xscans
+xscansmap
+xscanu
+xtratim
+xyscale
+zacl
+zakinit
+zamod
+zar
+zarg
+zaw
+zawm
+zdf_1pole
+zdf_1pole_mode
+zdf_2pole
+zdf_2pole_mode
+zdf_ladder
+zfilter2
+zir
+ziw
+ziwm
+zkcl
+zkmod
+zkr
+zkw
+zkwm
+'''.split())
+
+DEPRECATED_OPCODES = set('''
+array
+bformdec
+bformenc
+copy2ftab
+copy2ttab
+hrtfer
+ktableseg
+lentab
+maxtab
+mintab
+pop
+pop_f
+push
+push_f
+scalet
+sndload
+soundout
+soundouts
+specaddm
+specdiff
+specdisp
+specfilt
+spechist
+specptrk
+specscal
+specsum
+spectrum
+stack
+sumtab
+tabgen
+tabmap
+tabmap_i
+tabslice
+tb0
+tb0_init
+tb1
+tb10
+tb10_init
+tb11
+tb11_init
+tb12
+tb12_init
+tb13
+tb13_init
+tb14
+tb14_init
+tb15
+tb15_init
+tb1_init
+tb2
+tb2_init
+tb3
+tb3_init
+tb4
+tb4_init
+tb5
+tb5_init
+tb6
+tb6_init
+tb7
+tb7_init
+tb8
+tb8_init
+tb9
+tb9_init
+vbap16
+vbap4
+vbap4move
+vbap8
+vbap8move
+xyin
+'''.split())
diff --git a/pygments/lexers/_lasso_builtins.py b/pygments/lexers/_lasso_builtins.py
index 7c6fd6d4..d950cbe8 100644
--- a/pygments/lexers/_lasso_builtins.py
+++ b/pygments/lexers/_lasso_builtins.py
@@ -5,7 +5,7 @@
Built-in Lasso types, traits, methods, and members.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -471,6 +471,10 @@ BUILTINS = {
'curl_netrc_ignored',
'curl_netrc_optional',
'curl_netrc_required',
+ 'curl_sslversion_default',
+ 'curl_sslversion_sslv2',
+ 'curl_sslversion_sslv3',
+ 'curl_sslversion_tlsv1',
'curl_version_asynchdns',
'curl_version_debug',
'curl_version_gssnegotiate',
@@ -1102,6 +1106,7 @@ BUILTINS = {
'json_open_array',
'json_open_object',
'json_period',
+ 'json_positive',
'json_quote_double',
'json_rpccall',
'json_serialize',
@@ -1229,6 +1234,7 @@ BUILTINS = {
'lcapi_loadmodules',
'lcapi_updatedatasourceslist',
'ldap_scope_base',
+ 'ldap_scope_children',
'ldap_scope_onelevel',
'ldap_scope_subtree',
'library_once',
@@ -4044,6 +4050,7 @@ MEMBERS = {
'iscntrl',
'isdigit',
'isdir',
+ 'isdirectory',
'isempty',
'isemptyelement',
'isfirststep',
diff --git a/pygments/lexers/_lua_builtins.py b/pygments/lexers/_lua_builtins.py
index 7472b9e6..0561725d 100644
--- a/pygments/lexers/_lua_builtins.py
+++ b/pygments/lexers/_lua_builtins.py
@@ -9,7 +9,7 @@
Do not edit the MODULES dict by hand.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -288,7 +288,7 @@ if __name__ == '__main__': # pragma: no cover
print('>> %s' % full_function_name)
m = get_function_module(full_function_name)
modules.setdefault(m, []).append(full_function_name)
- modules = {k: tuple(v) for k, v in modules.iteritems()}
+ modules = dict((k, tuple(v)) for k, v in modules.iteritems())
regenerate(__file__, modules)
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index 847cc0bf..5fb438d9 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -44,14 +44,16 @@ LEXERS = {
'ArduinoLexer': ('pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)),
'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
+ 'AugeasLexer': ('pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()),
'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
+ 'BBCBasicLexer': ('pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()),
'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
'BSTLexer': ('pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()),
'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
- 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')),
+ 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')),
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
@@ -80,6 +82,7 @@ LEXERS = {
'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
'ChapelLexer': ('pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
+ 'CharmciLexer': ('pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()),
'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
@@ -105,7 +108,7 @@ LEXERS = {
'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)),
'CrystalLexer': ('pygments.lexers.crystal', 'Crystal', ('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)),
'CsoundDocumentLexer': ('pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()),
- 'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc',), ()),
+ 'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()),
'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()),
'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)),
@@ -120,8 +123,9 @@ LEXERS = {
'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
+ 'Dasm16Lexer': ('pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)),
'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()),
- 'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
+ 'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)),
'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
@@ -153,8 +157,10 @@ LEXERS = {
'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
+ 'FennelLexer': ('pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()),
'FishShellLexer': ('pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)),
'FlatlineLexer': ('pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)),
+ 'FloScriptLexer': ('pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()),
'ForthLexer': ('pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)),
'FortranFixedLexer': ('pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()),
'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)),
@@ -174,6 +180,7 @@ LEXERS = {
'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)),
+ 'HLSLShaderLexer': ('pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)),
'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
@@ -181,6 +188,7 @@ LEXERS = {
'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
'HexdumpLexer': ('pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
'HsailLexer': ('pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)),
+ 'HspecLexer': ('pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()),
'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')),
'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
'HtmlLexer': ('pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
@@ -191,6 +199,7 @@ LEXERS = {
'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')),
'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
'IDLLexer': ('pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
+ 'IconLexer': ('pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()),
'IdrisLexer': ('pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)),
'IgorLexer': ('pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
'Inform6Lexer': ('pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()),
@@ -289,7 +298,7 @@ LEXERS = {
'NesCLexer': ('pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')),
'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
- 'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)),
+ 'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)),
'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nim', 'nimrod'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
@@ -368,10 +377,11 @@ LEXERS = {
'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()),
'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')),
- 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust',), ('*.rs', '*.rs.in'), ('text/rust',)),
+ 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust',)),
'SASLexer': ('pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')),
'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
+ 'SarlLexer': ('pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)),
'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
@@ -379,10 +389,13 @@ LEXERS = {
'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
'ShenLexer': ('pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')),
- 'SilverLexer': ('pygments.lexers.verification', 'Silver', ('silver',), ('*.sil',), ()),
+ 'SilverLexer': ('pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()),
+ 'SlashLexer': ('pygments.lexers.slash', 'Slash', ('slash',), ('*.sl',), ()),
'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
+ 'SlurmBashLexer': ('pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()),
'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
+ 'SmartGameFormatLexer': ('pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()),
'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
'SnowballLexer': ('pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
@@ -400,6 +413,7 @@ LEXERS = {
'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
'TAPLexer': ('pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
+ 'TOMLLexer': ('pygments.lexers.configs', 'TOML', ('toml',), ('*.toml',), ()),
'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
'TasmLexer': ('pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)),
'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
@@ -418,11 +432,14 @@ LEXERS = {
'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
'TwigLexer': ('pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)),
- 'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('ts', 'typescript'), ('*.ts',), ('text/x-typescript',)),
+ 'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('ts', 'typescript'), ('*.ts', '*.tsx'), ('text/x-typescript',)),
'TypoScriptCssDataLexer': ('pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()),
'TypoScriptHtmlDataLexer': ('pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()),
- 'TypoScriptLexer': ('pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.ts', '*.txt'), ('text/x-typoscript',)),
+ 'TypoScriptLexer': ('pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)),
+ 'UcodeLexer': ('pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()),
+ 'UniconLexer': ('pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)),
'UrbiscriptLexer': ('pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
+ 'VBScriptLexer': ('pygments.lexers.basic', 'VBScript', (), ('*.vbs', '*.VBS'), ()),
'VCLLexer': ('pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)),
'VCLSnippetLexer': ('pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)),
'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()),
@@ -445,6 +462,7 @@ LEXERS = {
'XmlLexer': ('pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
+ 'XorgLexer': ('pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()),
'XsltLexer': ('pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
'XtlangLexer': ('pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()),
diff --git a/pygments/lexers/_mql_builtins.py b/pygments/lexers/_mql_builtins.py
index 524a2ea2..6eb600c4 100644
--- a/pygments/lexers/_mql_builtins.py
+++ b/pygments/lexers/_mql_builtins.py
@@ -5,7 +5,7 @@
Builtins for the MqlLexer.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
types = (
diff --git a/pygments/lexers/_openedge_builtins.py b/pygments/lexers/_openedge_builtins.py
index 46b6cc42..0fa7d1b2 100644
--- a/pygments/lexers/_openedge_builtins.py
+++ b/pygments/lexers/_openedge_builtins.py
@@ -5,7 +5,7 @@
Builtin list for the OpenEdgeLexer.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_php_builtins.py b/pygments/lexers/_php_builtins.py
index f1b64ced..c6084003 100644
--- a/pygments/lexers/_php_builtins.py
+++ b/pygments/lexers/_php_builtins.py
@@ -12,7 +12,7 @@
internet connection. don't run that at home, use
a server ;-)
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -4688,7 +4688,7 @@ if __name__ == '__main__': # pragma: no cover
PHP_MANUAL_URL = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz'
PHP_MANUAL_DIR = './php-chunked-xhtml/'
PHP_REFERENCE_GLOB = 'ref.*'
- PHP_FUNCTION_RE = '<a href="function\..*?\.html">(.*?)</a>'
+ PHP_FUNCTION_RE = r'<a href="function\..*?\.html">(.*?)</a>'
PHP_MODULE_RE = '<title>(.*?) Functions</title>'
def get_php_functions():
@@ -4698,18 +4698,19 @@ if __name__ == '__main__': # pragma: no cover
for file in get_php_references():
module = ''
- for line in open(file):
- if not module:
- search = module_re.search(line)
- if search:
- module = search.group(1)
- modules[module] = []
+ with open(file) as f:
+ for line in f:
+ if not module:
+ search = module_re.search(line)
+ if search:
+ module = search.group(1)
+ modules[module] = []
- elif 'href="function.' in line:
- for match in function_re.finditer(line):
- fn = match.group(1)
- if '-&gt;' not in fn and '::' not in fn and fn not in modules[module]:
- modules[module].append(fn)
+ elif 'href="function.' in line:
+ for match in function_re.finditer(line):
+ fn = match.group(1)
+ if '-&gt;' not in fn and '::' not in fn and fn not in modules[module]:
+ modules[module].append(fn)
if module:
# These are dummy manual pages, not actual functions
@@ -4726,9 +4727,8 @@ if __name__ == '__main__': # pragma: no cover
def get_php_references():
download = urlretrieve(PHP_MANUAL_URL)
- tar = tarfile.open(download[0])
- tar.extractall()
- tar.close()
+ with tarfile.open(download[0]) as tar:
+ tar.extractall()
for file in glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB)):
yield file
os.remove(download[0])
diff --git a/pygments/lexers/_postgres_builtins.py b/pygments/lexers/_postgres_builtins.py
index 671fa677..a71360f0 100644
--- a/pygments/lexers/_postgres_builtins.py
+++ b/pygments/lexers/_postgres_builtins.py
@@ -5,7 +5,7 @@
Self-updating data files for PostgreSQL lexer.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_scilab_builtins.py b/pygments/lexers/_scilab_builtins.py
index 85c99966..ce0ac67d 100644
--- a/pygments/lexers/_scilab_builtins.py
+++ b/pygments/lexers/_scilab_builtins.py
@@ -5,7 +5,7 @@
Builtin list for the ScilabLexer.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_sourcemod_builtins.py b/pygments/lexers/_sourcemod_builtins.py
index 9ebb1595..f08ea481 100644
--- a/pygments/lexers/_sourcemod_builtins.py
+++ b/pygments/lexers/_sourcemod_builtins.py
@@ -8,7 +8,7 @@
Do not edit the FUNCTIONS list by hand.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_stan_builtins.py b/pygments/lexers/_stan_builtins.py
index 6585ad71..7f1e0ce3 100644
--- a/pygments/lexers/_stan_builtins.py
+++ b/pygments/lexers/_stan_builtins.py
@@ -4,24 +4,23 @@
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file contains the names of functions for Stan used by
- ``pygments.lexers.math.StanLexer. This is for Stan language version 2.8.0.
+ ``pygments.lexers.math.StanLexer``. This is for Stan language version 2.17.0.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2018 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
KEYWORDS = (
+ 'break',
+ 'continue',
'else',
'for',
'if',
'in',
- 'increment_log_prob',
- 'integrate_ode',
- 'lp__',
'print',
'reject',
'return',
- 'while'
+ 'while',
)
TYPES = (
@@ -35,18 +34,18 @@ TYPES = (
'positive_ordered',
'real',
'row_vector',
- 'row_vectormatrix',
'simplex',
'unit_vector',
'vector',
- 'void')
+ 'void',
+)
FUNCTIONS = (
- 'Phi',
- 'Phi_approx',
'abs',
'acos',
'acosh',
+ 'algebra_solver',
+ 'append_array',
'append_col',
'append_row',
'asin',
@@ -54,55 +53,59 @@ FUNCTIONS = (
'atan',
'atan2',
'atanh',
- 'bernoulli_ccdf_log',
'bernoulli_cdf',
- 'bernoulli_cdf_log',
- 'bernoulli_log',
- 'bernoulli_logit_log',
+ 'bernoulli_lccdf',
+ 'bernoulli_lcdf',
+ 'bernoulli_logit_lpmf',
+ 'bernoulli_logit_rng',
+ 'bernoulli_lpmf',
'bernoulli_rng',
'bessel_first_kind',
'bessel_second_kind',
- 'beta_binomial_ccdf_log',
'beta_binomial_cdf',
- 'beta_binomial_cdf_log',
- 'beta_binomial_log',
+ 'beta_binomial_lccdf',
+ 'beta_binomial_lcdf',
+ 'beta_binomial_lpmf',
'beta_binomial_rng',
- 'beta_ccdf_log',
'beta_cdf',
- 'beta_cdf_log',
- 'beta_log',
+ 'beta_lccdf',
+ 'beta_lcdf',
+ 'beta_lpdf',
'beta_rng',
'binary_log_loss',
- 'binomial_ccdf_log',
'binomial_cdf',
- 'binomial_cdf_log',
'binomial_coefficient_log',
- 'binomial_log',
- 'binomial_logit_log',
+ 'binomial_lccdf',
+ 'binomial_lcdf',
+ 'binomial_logit_lpmf',
+ 'binomial_lpmf',
'binomial_rng',
'block',
- 'categorical_log',
- 'categorical_logit_log',
+ 'categorical_logit_lpmf',
+ 'categorical_logit_rng',
+ 'categorical_lpmf',
'categorical_rng',
- 'cauchy_ccdf_log',
'cauchy_cdf',
- 'cauchy_cdf_log',
- 'cauchy_log',
+ 'cauchy_lccdf',
+ 'cauchy_lcdf',
+ 'cauchy_lpdf',
'cauchy_rng',
'cbrt',
'ceil',
- 'chi_square_ccdf_log',
'chi_square_cdf',
- 'chi_square_cdf_log',
- 'chi_square_log',
+ 'chi_square_lccdf',
+ 'chi_square_lcdf',
+ 'chi_square_lpdf',
'chi_square_rng',
'cholesky_decompose',
+ 'choose',
'col',
'cols',
'columns_dot_product',
'columns_dot_self',
'cos',
'cosh',
+ 'cov_exp_quad',
'crossprod',
'csr_extract_u',
'csr_extract_v',
@@ -117,15 +120,15 @@ FUNCTIONS = (
'diagonal',
'digamma',
'dims',
- 'dirichlet_log',
+ 'dirichlet_lpdf',
'dirichlet_rng',
'distance',
'dot_product',
'dot_self',
- 'double_exponential_ccdf_log',
'double_exponential_cdf',
- 'double_exponential_cdf_log',
- 'double_exponential_log',
+ 'double_exponential_lccdf',
+ 'double_exponential_lcdf',
+ 'double_exponential_lpdf',
'double_exponential_rng',
'e',
'eigenvalues_sym',
@@ -134,16 +137,16 @@ FUNCTIONS = (
'erfc',
'exp',
'exp2',
- 'exp_mod_normal_ccdf_log',
'exp_mod_normal_cdf',
- 'exp_mod_normal_cdf_log',
- 'exp_mod_normal_log',
+ 'exp_mod_normal_lccdf',
+ 'exp_mod_normal_lcdf',
+ 'exp_mod_normal_lpdf',
'exp_mod_normal_rng',
'expm1',
- 'exponential_ccdf_log',
'exponential_cdf',
- 'exponential_cdf_log',
- 'exponential_log',
+ 'exponential_lccdf',
+ 'exponential_lcdf',
+ 'exponential_lpdf',
'exponential_rng',
'fabs',
'falling_factorial',
@@ -153,60 +156,65 @@ FUNCTIONS = (
'fmax',
'fmin',
'fmod',
- 'frechet_ccdf_log',
'frechet_cdf',
- 'frechet_cdf_log',
- 'frechet_log',
+ 'frechet_lccdf',
+ 'frechet_lcdf',
+ 'frechet_lpdf',
'frechet_rng',
- 'gamma_ccdf_log',
'gamma_cdf',
- 'gamma_cdf_log',
- 'gamma_log',
+ 'gamma_lccdf',
+ 'gamma_lcdf',
+ 'gamma_lpdf',
'gamma_p',
'gamma_q',
'gamma_rng',
- 'gaussian_dlm_obs_log',
+ 'gaussian_dlm_obs_lpdf',
'get_lp',
- 'gumbel_ccdf_log',
'gumbel_cdf',
- 'gumbel_cdf_log',
- 'gumbel_log',
+ 'gumbel_lccdf',
+ 'gumbel_lcdf',
+ 'gumbel_lpdf',
'gumbel_rng',
'head',
- 'hypergeometric_log',
+ 'hypergeometric_lpmf',
'hypergeometric_rng',
'hypot',
- 'if_else',
+ 'inc_beta',
'int_step',
+ 'integrate_ode',
+ 'integrate_ode_bdf',
+ 'integrate_ode_rk45',
'inv',
- 'inv_chi_square_ccdf_log',
'inv_chi_square_cdf',
- 'inv_chi_square_cdf_log',
- 'inv_chi_square_log',
+ 'inv_chi_square_lccdf',
+ 'inv_chi_square_lcdf',
+ 'inv_chi_square_lpdf',
'inv_chi_square_rng',
'inv_cloglog',
- 'inv_gamma_ccdf_log',
'inv_gamma_cdf',
- 'inv_gamma_cdf_log',
- 'inv_gamma_log',
+ 'inv_gamma_lccdf',
+ 'inv_gamma_lcdf',
+ 'inv_gamma_lpdf',
'inv_gamma_rng',
'inv_logit',
- 'inv_phi',
+ 'inv_Phi',
'inv_sqrt',
'inv_square',
- 'inv_wishart_log',
+ 'inv_wishart_lpdf',
'inv_wishart_rng',
'inverse',
'inverse_spd',
'is_inf',
'is_nan',
'lbeta',
+ 'lchoose',
'lgamma',
- 'lkj_corr_cholesky_log',
+ 'lkj_corr_cholesky_lpdf',
'lkj_corr_cholesky_rng',
- 'lkj_corr_log',
+ 'lkj_corr_lpdf',
'lkj_corr_rng',
'lmgamma',
+ 'lmultiply',
'log',
'log10',
'log1m',
@@ -223,81 +231,87 @@ FUNCTIONS = (
'log_rising_factorial',
'log_softmax',
'log_sum_exp',
- 'logistic_ccdf_log',
'logistic_cdf',
- 'logistic_cdf_log',
- 'logistic_log',
+ 'logistic_lccdf',
+ 'logistic_lcdf',
+ 'logistic_lpdf',
'logistic_rng',
'logit',
- 'lognormal_ccdf_log',
'lognormal_cdf',
- 'lognormal_cdf_log',
- 'lognormal_log',
+ 'lognormal_lccdf',
+ 'lognormal_lcdf',
+ 'lognormal_lpdf',
'lognormal_rng',
'machine_precision',
+ 'matrix_exp',
'max',
+ 'mdivide_left_spd',
'mdivide_left_tri_low',
+ 'mdivide_right_spd',
'mdivide_right_tri_low',
'mean',
'min',
'modified_bessel_first_kind',
'modified_bessel_second_kind',
- 'multi_gp_cholesky_log',
- 'multi_gp_log',
- 'multi_normal_cholesky_log',
+ 'multi_gp_cholesky_lpdf',
+ 'multi_gp_lpdf',
+ 'multi_normal_cholesky_lpdf',
'multi_normal_cholesky_rng',
- 'multi_normal_log',
- 'multi_normal_prec_log',
+ 'multi_normal_lpdf',
+ 'multi_normal_prec_lpdf',
'multi_normal_rng',
- 'multi_student_t_log',
+ 'multi_student_t_lpdf',
'multi_student_t_rng',
- 'multinomial_log',
+ 'multinomial_lpmf',
'multinomial_rng',
'multiply_log',
'multiply_lower_tri_self_transpose',
- 'neg_binomial_2_ccdf_log',
'neg_binomial_2_cdf',
- 'neg_binomial_2_cdf_log',
- 'neg_binomial_2_log',
- 'neg_binomial_2_log_log',
+ 'neg_binomial_2_lccdf',
+ 'neg_binomial_2_lcdf',
+ 'neg_binomial_2_log_lpmf',
'neg_binomial_2_log_rng',
+ 'neg_binomial_2_lpmf',
'neg_binomial_2_rng',
- 'neg_binomial_ccdf_log',
'neg_binomial_cdf',
- 'neg_binomial_cdf_log',
- 'neg_binomial_log',
+ 'neg_binomial_lccdf',
+ 'neg_binomial_lcdf',
+ 'neg_binomial_lpmf',
'neg_binomial_rng',
'negative_infinity',
- 'normal_ccdf_log',
'normal_cdf',
- 'normal_cdf_log',
- 'normal_log',
+ 'normal_lccdf',
+ 'normal_lcdf',
+ 'normal_lpdf',
'normal_rng',
'not_a_number',
'num_elements',
- 'ordered_logistic_log',
+ 'ordered_logistic_lpmf',
'ordered_logistic_rng',
'owens_t',
- 'pareto_ccdf_log',
'pareto_cdf',
- 'pareto_cdf_log',
- 'pareto_log',
+ 'pareto_lccdf',
+ 'pareto_lcdf',
+ 'pareto_lpdf',
'pareto_rng',
- 'pareto_type_2_ccdf_log',
'pareto_type_2_cdf',
- 'pareto_type_2_cdf_log',
- 'pareto_type_2_log',
+ 'pareto_type_2_lccdf',
+ 'pareto_type_2_lcdf',
+ 'pareto_type_2_lpdf',
'pareto_type_2_rng',
+ 'Phi',
+ 'Phi_approx',
'pi',
- 'poisson_ccdf_log',
'poisson_cdf',
- 'poisson_cdf_log',
- 'poisson_log',
- 'poisson_log_log',
+ 'poisson_lccdf',
+ 'poisson_lcdf',
+ 'poisson_log_lpmf',
'poisson_log_rng',
+ 'poisson_lpmf',
'poisson_rng',
'positive_infinity',
'pow',
+ 'print',
'prod',
'qr_Q',
'qr_R',
@@ -305,11 +319,12 @@ FUNCTIONS = (
'quad_form_diag',
'quad_form_sym',
'rank',
- 'rayleigh_ccdf_log',
'rayleigh_cdf',
- 'rayleigh_cdf_log',
- 'rayleigh_log',
+ 'rayleigh_lccdf',
+ 'rayleigh_lcdf',
+ 'rayleigh_lpdf',
'rayleigh_rng',
+ 'reject',
'rep_array',
'rep_matrix',
'rep_row_vector',
@@ -320,10 +335,10 @@ FUNCTIONS = (
'rows',
'rows_dot_product',
'rows_dot_self',
- 'scaled_inv_chi_square_ccdf_log',
'scaled_inv_chi_square_cdf',
- 'scaled_inv_chi_square_cdf_log',
- 'scaled_inv_chi_square_log',
+ 'scaled_inv_chi_square_lccdf',
+ 'scaled_inv_chi_square_lcdf',
+ 'scaled_inv_chi_square_lpdf',
'scaled_inv_chi_square_rng',
'sd',
'segment',
@@ -331,10 +346,10 @@ FUNCTIONS = (
'singular_values',
'sinh',
'size',
- 'skew_normal_ccdf_log',
'skew_normal_cdf',
- 'skew_normal_cdf_log',
- 'skew_normal_log',
+ 'skew_normal_lccdf',
+ 'skew_normal_lcdf',
+ 'skew_normal_lpdf',
'skew_normal_rng',
'softmax',
'sort_asc',
@@ -346,10 +361,10 @@ FUNCTIONS = (
'square',
'squared_distance',
'step',
- 'student_t_ccdf_log',
'student_t_cdf',
- 'student_t_cdf_log',
- 'student_t_log',
+ 'student_t_lccdf',
+ 'student_t_lcdf',
+ 'student_t_lpdf',
'student_t_rng',
'sub_col',
'sub_row',
@@ -357,6 +372,7 @@ FUNCTIONS = (
'tail',
'tan',
'tanh',
+ 'target',
'tcrossprod',
'tgamma',
'to_array_1d',
@@ -369,22 +385,22 @@ FUNCTIONS = (
'trace_quad_form',
'trigamma',
'trunc',
- 'uniform_ccdf_log',
'uniform_cdf',
- 'uniform_cdf_log',
- 'uniform_log',
+ 'uniform_lccdf',
+ 'uniform_lcdf',
+ 'uniform_lpdf',
'uniform_rng',
'variance',
- 'von_mises_log',
+ 'von_mises_lpdf',
'von_mises_rng',
- 'weibull_ccdf_log',
'weibull_cdf',
- 'weibull_cdf_log',
- 'weibull_log',
+ 'weibull_lccdf',
+ 'weibull_lcdf',
+ 'weibull_lpdf',
'weibull_rng',
- 'wiener_log',
- 'wishart_log',
- 'wishart_rng'
+ 'wiener_lpdf',
+ 'wishart_lpdf',
+ 'wishart_rng',
)
DISTRIBUTIONS = (
@@ -438,7 +454,7 @@ DISTRIBUTIONS = (
'von_mises',
'weibull',
'wiener',
- 'wishart'
+ 'wishart',
)
RESERVED = (
@@ -469,19 +485,23 @@ RESERVED = (
'do',
'double',
'dynamic_cast',
+ 'else',
'enum',
'explicit',
'export',
'extern',
'false',
- 'false',
'float',
+ 'for',
'friend',
'fvar',
'goto',
+ 'if',
+ 'in',
'inline',
'int',
'long',
+ 'lp__',
'mutable',
'namespace',
'new',
@@ -498,9 +518,16 @@ RESERVED = (
'register',
'reinterpret_cast',
'repeat',
+ 'return',
'short',
'signed',
'sizeof',
+ 'STAN_MAJOR',
+ 'STAN_MATH_MAJOR',
+ 'STAN_MATH_MINOR',
+ 'STAN_MATH_PATCH',
+ 'STAN_MINOR',
+ 'STAN_PATCH',
'static',
'static_assert',
'static_cast',
@@ -512,7 +539,6 @@ RESERVED = (
'thread_local',
'throw',
'true',
- 'true',
'try',
'typedef',
'typeid',
@@ -526,7 +552,7 @@ RESERVED = (
'void',
'volatile',
'wchar_t',
+ 'while',
'xor',
- 'xor_eq'
+ 'xor_eq',
)
-
diff --git a/pygments/lexers/_stata_builtins.py b/pygments/lexers/_stata_builtins.py
index 424a739f..3e5e75b2 100644
--- a/pygments/lexers/_stata_builtins.py
+++ b/pygments/lexers/_stata_builtins.py
@@ -5,11 +5,15 @@
Builtins for Stata
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+builtins_special = (
+ "if", "in", "using", "replace", "by", "gen", "generate"
+)
+
builtins_base = (
"if", "else", "in", "foreach", "for", "forv", "forva",
"forval", "forvalu", "forvalue", "forvalues", "by", "bys",
@@ -66,7 +70,7 @@ builtins_base = (
"doedit", "dotplot", "dotplot_7", "dprobit", "drawnorm",
"drop", "ds", "ds_util", "dstdize", "duplicates", "durbina",
"dwstat", "dydx", "e", "ed", "edi", "edit", "egen",
- "eivreg", "emdef", "en", "enc", "enco", "encod", "encode",
+ "eivreg", "emdef", "end", "en", "enc", "enco", "encod", "encode",
"eq", "erase", "ereg", "ereg_lf", "ereg_p", "ereg_sw",
"ereghet", "ereghet_glf", "ereghet_glf_sh", "ereghet_gp",
"ereghet_ilf", "ereghet_ilf_sh", "ereghet_ip", "eret",
@@ -415,5 +419,3 @@ builtins_functions = (
"weekly", "wofd", "word", "wordcount", "year", "yearly",
"yh", "ym", "yofd", "yq", "yw"
)
-
-
diff --git a/pygments/lexers/_tsql_builtins.py b/pygments/lexers/_tsql_builtins.py
index 44ad8244..e29ed34b 100644
--- a/pygments/lexers/_tsql_builtins.py
+++ b/pygments/lexers/_tsql_builtins.py
@@ -5,7 +5,7 @@
These are manually translated lists from https://msdn.microsoft.com.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_vbscript_builtins.py b/pygments/lexers/_vbscript_builtins.py
new file mode 100644
index 00000000..7d514790
--- /dev/null
+++ b/pygments/lexers/_vbscript_builtins.py
@@ -0,0 +1,279 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._vbscript_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ These are manually translated lists from
+ http://www.indusoft.com/pdf/VBScript%20Reference.pdf.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+KEYWORDS = [
+ 'ByRef',
+ 'ByVal',
+ # dim: special rule
+ 'call',
+ 'case',
+ 'class',
+ # const: special rule
+ 'do',
+ 'each',
+ 'else',
+ 'elseif',
+ 'end',
+ 'erase',
+ 'execute',
+ 'function',
+ 'exit',
+ 'for',
+ 'function',
+ 'GetRef',
+ 'global',
+ 'if',
+ 'let',
+ 'loop',
+ 'next',
+ 'new',
+ # option: special rule
+ 'private',
+ 'public',
+ 'redim',
+ 'select',
+ 'set',
+ 'sub',
+ 'then',
+ 'wend',
+ 'while',
+ 'with',
+]
+
+BUILTIN_FUNCTIONS = [
+ 'Abs',
+ 'Array',
+ 'Asc',
+ 'Atn',
+ 'CBool',
+ 'CByte',
+ 'CCur',
+ 'CDate',
+ 'CDbl',
+ 'Chr',
+ 'CInt',
+ 'CLng',
+ 'Cos',
+ 'CreateObject',
+ 'CSng',
+ 'CStr',
+ 'Date',
+ 'DateAdd',
+ 'DateDiff',
+ 'DatePart',
+ 'DateSerial',
+ 'DateValue',
+ 'Day',
+ 'Eval',
+ 'Exp',
+ 'Filter',
+ 'Fix',
+ 'FormatCurrency',
+ 'FormatDateTime',
+ 'FormatNumber',
+ 'FormatPercent',
+ 'GetObject',
+ 'GetLocale',
+ 'Hex',
+ 'Hour',
+ 'InStr',
+ 'inStrRev',
+ 'Int',
+ 'IsArray',
+ 'IsDate',
+ 'IsEmpty',
+ 'IsNull',
+ 'IsNumeric',
+ 'IsObject',
+ 'Join',
+ 'LBound',
+ 'LCase',
+ 'Left',
+ 'Len',
+ 'LoadPicture',
+ 'Log',
+ 'LTrim',
+ 'Mid',
+ 'Minute',
+ 'Month',
+ 'MonthName',
+ 'MsgBox',
+ 'Now',
+ 'Oct',
+ 'Randomize',
+ 'RegExp',
+ 'Replace',
+ 'RGB',
+ 'Right',
+ 'Rnd',
+ 'Round',
+ 'RTrim',
+ 'ScriptEngine',
+ 'ScriptEngineBuildVersion',
+ 'ScriptEngineMajorVersion',
+ 'ScriptEngineMinorVersion',
+ 'Second',
+ 'SetLocale',
+ 'Sgn',
+ 'Space',
+ 'Split',
+ 'Sqr',
+ 'StrComp',
+ 'String',
+ 'StrReverse',
+ 'Tan',
+ 'Time',
+ 'Timer',
+ 'TimeSerial',
+ 'TimeValue',
+ 'Trim',
+ 'TypeName',
+ 'UBound',
+ 'UCase',
+ 'VarType',
+ 'Weekday',
+ 'WeekdayName',
+ 'Year',
+]
+
+BUILTIN_VARIABLES = [
+ 'Debug',
+ 'Dictionary',
+ 'Drive',
+ 'Drives',
+ 'Err',
+ 'File',
+ 'Files',
+ 'FileSystemObject',
+ 'Folder',
+ 'Folders',
+ 'Match',
+ 'Matches',
+ 'RegExp',
+ 'Submatches',
+ 'TextStream',
+]
+
+OPERATORS = [
+ '+',
+ '-',
+ '*',
+ '/',
+ '\\',
+ '^',
+ '|',
+ '<',
+ '<=',
+ '>',
+ '>=',
+ '=',
+ '<>',
+ '&',
+ '$',
+]
+
+OPERATOR_WORDS = [
+ 'mod',
+ 'and',
+ 'or',
+ 'xor',
+ 'eqv',
+ 'imp',
+ 'is',
+ 'not',
+]
+
+BUILTIN_CONSTANTS = [
+ 'False',
+ 'True',
+ 'vbAbort',
+ 'vbAbortRetryIgnore',
+ 'vbApplicationModal',
+ 'vbArray',
+ 'vbBinaryCompare',
+ 'vbBlack',
+ 'vbBlue',
+ 'vbBoole',
+ 'vbByte',
+ 'vbCancel',
+ 'vbCr',
+ 'vbCritical',
+ 'vbCrLf',
+ 'vbCurrency',
+ 'vbCyan',
+ 'vbDataObject',
+ 'vbDate',
+ 'vbDefaultButton1',
+ 'vbDefaultButton2',
+ 'vbDefaultButton3',
+ 'vbDefaultButton4',
+ 'vbDouble',
+ 'vbEmpty',
+ 'vbError',
+ 'vbExclamation',
+ 'vbFalse',
+ 'vbFirstFullWeek',
+ 'vbFirstJan1',
+ 'vbFormFeed',
+ 'vbFriday',
+ 'vbGeneralDate',
+ 'vbGreen',
+ 'vbIgnore',
+ 'vbInformation',
+ 'vbInteger',
+ 'vbLf',
+ 'vbLong',
+ 'vbLongDate',
+ 'vbLongTime',
+ 'vbMagenta',
+ 'vbMonday',
+ 'vbMsgBoxHelpButton',
+ 'vbMsgBoxRight',
+ 'vbMsgBoxRtlReading',
+ 'vbMsgBoxSetForeground',
+ 'vbNewLine',
+ 'vbNo',
+ 'vbNull',
+ 'vbNullChar',
+ 'vbNullString',
+ 'vbObject',
+ 'vbObjectError',
+ 'vbOK',
+ 'vbOKCancel',
+ 'vbOKOnly',
+ 'vbQuestion',
+ 'vbRed',
+ 'vbRetry',
+ 'vbRetryCancel',
+ 'vbSaturday',
+ 'vbShortDate',
+ 'vbShortTime',
+ 'vbSingle',
+ 'vbString',
+ 'vbSunday',
+ 'vbSystemModal',
+ 'vbTab',
+ 'vbTextCompare',
+ 'vbThursday',
+ 'vbTrue',
+ 'vbTuesday',
+ 'vbUseDefault',
+ 'vbUseSystem',
+ 'vbVariant',
+ 'vbVerticalTab',
+ 'vbWednesday',
+ 'vbWhite',
+ 'vbYellow',
+ 'vbYes',
+ 'vbYesNo',
+ 'vbYesNoCancel',
+] \ No newline at end of file
diff --git a/pygments/lexers/_vim_builtins.py b/pygments/lexers/_vim_builtins.py
index e9b5fa1e..82586289 100644
--- a/pygments/lexers/_vim_builtins.py
+++ b/pygments/lexers/_vim_builtins.py
@@ -5,7 +5,7 @@
This file is autogenerated by scripts/get_vimkw.py
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/actionscript.py b/pygments/lexers/actionscript.py
index 9c687a57..fc3b90cd 100644
--- a/pygments/lexers/actionscript.py
+++ b/pygments/lexers/actionscript.py
@@ -5,7 +5,7 @@
Lexers for ActionScript and MXML.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -125,7 +125,7 @@ class ActionScript3Lexer(RegexLexer):
'text/actionscript3']
identifier = r'[$a-zA-Z_]\w*'
- typeidentifier = identifier + '(?:\.<\w+>)?'
+ typeidentifier = identifier + r'(?:\.<\w+>)?'
flags = re.DOTALL | re.MULTILINE
tokens = {
@@ -232,7 +232,7 @@ class MxmlLexer(RegexLexer):
(r'/?\s*>', Name.Tag, '#pop'),
],
'attr': [
- ('\s+', Text),
+ (r'\s+', Text),
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
index defa7b6e..cb200b9e 100644
--- a/pygments/lexers/agile.py
+++ b/pygments/lexers/agile.py
@@ -5,7 +5,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/algebra.py b/pygments/lexers/algebra.py
index 79460ad4..15d68842 100644
--- a/pygments/lexers/algebra.py
+++ b/pygments/lexers/algebra.py
@@ -5,7 +5,7 @@
Lexers for computer algebra systems.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/ambient.py b/pygments/lexers/ambient.py
index 7f622fbc..53f3a5e1 100644
--- a/pygments/lexers/ambient.py
+++ b/pygments/lexers/ambient.py
@@ -5,7 +5,7 @@
Lexers for AmbientTalk language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/ampl.py b/pygments/lexers/ampl.py
index c3ca80d4..638d025d 100644
--- a/pygments/lexers/ampl.py
+++ b/pygments/lexers/ampl.py
@@ -3,9 +3,9 @@
pygments.lexers.ampl
~~~~~~~~~~~~~~~~~~~~
- Lexers for the ampl language. <http://ampl.com/>
+ Lexers for the AMPL language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,7 +18,7 @@ __all__ = ['AmplLexer']
class AmplLexer(RegexLexer):
"""
- For AMPL source code.
+ For `AMPL <http://ampl.com/>`_ source code.
.. versionadded:: 2.2
"""
diff --git a/pygments/lexers/apl.py b/pygments/lexers/apl.py
index 61ea4c4b..4bb88ae3 100644
--- a/pygments/lexers/apl.py
+++ b/pygments/lexers/apl.py
@@ -5,7 +5,7 @@
Lexers for APL.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -71,14 +71,14 @@ class APLLexer(RegexLexer):
#
# Numbers
# =======
- (u'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)'
- u'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?',
+ (u'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)'
+ u'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?',
Number),
#
# Operators
# ==========
- (u'[\.\\\/⌿⍀¨⍣⍨⍠⍤∘]', Name.Attribute), # closest token type
- (u'[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]',
+ (u'[\\.\\\\\\/⌿⍀¨⍣⍨⍠⍤∘]', Name.Attribute), # closest token type
+ (u'[+\\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]',
Operator),
#
# Constant
diff --git a/pygments/lexers/archetype.py b/pygments/lexers/archetype.py
index e596b7be..5d4eb9aa 100644
--- a/pygments/lexers/archetype.py
+++ b/pygments/lexers/archetype.py
@@ -14,7 +14,7 @@
Contributed by Thomas Beale <https://github.com/wolandscat>,
<https://bitbucket.org/thomas_beale>.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/asm.py b/pygments/lexers/asm.py
index 2bb3eac9..7100868c 100644
--- a/pygments/lexers/asm.py
+++ b/pygments/lexers/asm.py
@@ -5,7 +5,7 @@
Lexers for assembly languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,7 +20,7 @@ from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
__all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer',
'CObjdumpLexer', 'HsailLexer', 'LlvmLexer', 'NasmLexer',
- 'NasmObjdumpLexer', 'TasmLexer', 'Ca65Lexer']
+ 'NasmObjdumpLexer', 'TasmLexer', 'Ca65Lexer', 'Dasm16Lexer']
class GasLexer(RegexLexer):
@@ -35,7 +35,7 @@ class GasLexer(RegexLexer):
#: optional Comment or Whitespace
string = r'"(\\"|[^"])*"'
char = r'[\w$.@-]'
- identifier = r'(?:[a-zA-Z$_]' + char + '*|\.' + char + '+)'
+ identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
number = r'(?:0[xX][a-zA-Z0-9]+|\d+)'
tokens = {
@@ -53,8 +53,7 @@ class GasLexer(RegexLexer):
('@' + identifier, Name.Attribute),
(number, Number.Integer),
(r'[\r\n]+', Text, '#pop'),
-
- (r'#.*?$', Comment, '#pop'),
+ (r'[;#].*?\n', Comment, '#pop'),
include('punctuation'),
include('whitespace')
@@ -78,14 +77,15 @@ class GasLexer(RegexLexer):
('$'+number, Number.Integer),
(r"$'(.|\\')'", String.Char),
(r'[\r\n]+', Text, '#pop'),
- (r'#.*?$', Comment, '#pop'),
+ (r'[;#].*?\n', Comment, '#pop'),
+
include('punctuation'),
include('whitespace')
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
- (r'#.*?\n', Comment)
+ (r'[;#].*?\n', Comment)
],
'punctuation': [
(r'[-*,.()\[\]!:]+', Punctuation)
@@ -258,7 +258,7 @@ class HsailLexer(RegexLexer):
(r'0[xX][a-fA-F0-9]+', Number.Hex),
(ieeefloat, Number.Float),
(float, Number.Float),
- ('\d+', Number.Integer),
+ (r'\d+', Number.Integer),
(r'[=<>{}\[\]()*.,:;!]|x\b', Punctuation)
],
@@ -267,7 +267,7 @@ class HsailLexer(RegexLexer):
],
'comments': [
(r'/\*.*?\*/', Comment.Multiline),
- (r'//.*?\n', Comment.Singleline),
+ (r'//.*?\n', Comment.Single),
],
'keyword': [
# Types
@@ -281,7 +281,7 @@ class HsailLexer(RegexLexer):
'enabledetectexceptions', 'maxdynamicgroupsize', 'maxflatgridsize',
'maxflatworkgroupsize', 'requireddim', 'requiredgridsize',
'requiredworkgroupsize', 'requirenopartialworkgroups'),
- suffix=r'\b'), Keyword),
+ suffix=r'\b'), Keyword),
# instructions
(roundingMod, Keyword),
@@ -352,7 +352,7 @@ class LlvmLexer(RegexLexer):
include('whitespace'),
# Before keywords, because keywords are valid label names :(...
- (identifier + '\s*:', Name.Label),
+ (identifier + r'\s*:', Name.Label),
include('keyword'),
@@ -377,53 +377,75 @@ class LlvmLexer(RegexLexer):
'keyword': [
# Regular keywords
(words((
- 'begin', 'end', 'true', 'false', 'declare', 'define', 'global',
- 'constant', 'private', 'linker_private', 'internal',
- 'available_externally', 'linkonce', 'linkonce_odr', 'weak',
- 'weak_odr', 'appending', 'dllimport', 'dllexport', 'common',
- 'default', 'hidden', 'protected', 'extern_weak', 'external',
- 'thread_local', 'zeroinitializer', 'undef', 'null', 'to', 'tail',
- 'target', 'triple', 'datalayout', 'volatile', 'nuw', 'nsw', 'nnan',
- 'ninf', 'nsz', 'arcp', 'fast', 'exact', 'inbounds', 'align',
- 'addrspace', 'section', 'alias', 'module', 'asm', 'sideeffect',
- 'gc', 'dbg', 'linker_private_weak', 'attributes', 'blockaddress',
- 'initialexec', 'localdynamic', 'localexec', 'prefix', 'unnamed_addr',
- 'ccc', 'fastcc', 'coldcc', 'x86_stdcallcc', 'x86_fastcallcc',
- 'arm_apcscc', 'arm_aapcscc', 'arm_aapcs_vfpcc', 'ptx_device',
- 'ptx_kernel', 'intel_ocl_bicc', 'msp430_intrcc', 'spir_func',
- 'spir_kernel', 'x86_64_sysvcc', 'x86_64_win64cc', 'x86_thiscallcc',
- 'cc', 'c', 'signext', 'zeroext', 'inreg', 'sret', 'nounwind',
- 'noreturn', 'noalias', 'nocapture', 'byval', 'nest', 'readnone',
- 'readonly', 'inlinehint', 'noinline', 'alwaysinline', 'optsize', 'ssp',
- 'sspreq', 'noredzone', 'noimplicitfloat', 'naked', 'builtin', 'cold',
- 'nobuiltin', 'noduplicate', 'nonlazybind', 'optnone', 'returns_twice',
- 'sanitize_address', 'sanitize_memory', 'sanitize_thread', 'sspstrong',
- 'uwtable', 'returned', 'type', 'opaque', 'eq', 'ne', 'slt', 'sgt',
- 'sle', 'sge', 'ult', 'ugt', 'ule', 'uge', 'oeq', 'one', 'olt', 'ogt',
- 'ole', 'oge', 'ord', 'uno', 'ueq', 'une', 'x', 'acq_rel', 'acquire',
- 'alignstack', 'atomic', 'catch', 'cleanup', 'filter', 'inteldialect',
- 'max', 'min', 'monotonic', 'nand', 'personality', 'release', 'seq_cst',
- 'singlethread', 'umax', 'umin', 'unordered', 'xchg', 'add', 'fadd',
- 'sub', 'fsub', 'mul', 'fmul', 'udiv', 'sdiv', 'fdiv', 'urem', 'srem',
- 'frem', 'shl', 'lshr', 'ashr', 'and', 'or', 'xor', 'icmp', 'fcmp',
- 'phi', 'call', 'trunc', 'zext', 'sext', 'fptrunc', 'fpext', 'uitofp',
- 'sitofp', 'fptoui', 'fptosi', 'inttoptr', 'ptrtoint', 'bitcast',
- 'addrspacecast', 'select', 'va_arg', 'ret', 'br', 'switch', 'invoke',
- 'unwind', 'unreachable', 'indirectbr', 'landingpad', 'resume',
- 'malloc', 'alloca', 'free', 'load', 'store', 'getelementptr',
- 'extractelement', 'insertelement', 'shufflevector', 'getresult',
- 'extractvalue', 'insertvalue', 'atomicrmw', 'cmpxchg', 'fence'),
- suffix=r'\b'), Keyword),
+ 'acq_rel', 'acquire', 'add', 'addrspace', 'addrspacecast', 'afn', 'alias',
+ 'aliasee', 'align', 'alignLog2', 'alignstack', 'alloca', 'allocsize', 'allOnes',
+ 'alwaysinline', 'amdgpu_cs', 'amdgpu_es', 'amdgpu_gs', 'amdgpu_hs',
+ 'amdgpu_kernel', 'amdgpu_ls', 'amdgpu_ps', 'amdgpu_vs', 'and', 'any',
+ 'anyregcc', 'appending', 'arcp', 'argmemonly', 'args', 'arm_aapcs_vfpcc',
+ 'arm_aapcscc', 'arm_apcscc', 'ashr', 'asm', 'atomic', 'atomicrmw', 'attributes',
+ 'available_externally', 'avr_intrcc', 'avr_signalcc', 'bit', 'bitcast',
+ 'bitMask', 'blockaddress', 'br', 'branchFunnel', 'builtin', 'byArg', 'byte',
+ 'byteArray', 'byval', 'c', 'call', 'callee', 'caller', 'calls', 'catch',
+ 'catchpad', 'catchret', 'catchswitch', 'cc', 'ccc', 'cleanup', 'cleanuppad',
+ 'cleanupret', 'cmpxchg', 'cold', 'coldcc', 'comdat', 'common', 'constant',
+ 'contract', 'convergent', 'critical', 'cxx_fast_tlscc', 'datalayout', 'declare',
+ 'default', 'define', 'deplibs', 'dereferenceable', 'dereferenceable_or_null',
+ 'distinct', 'dllexport', 'dllimport', 'double', 'dso_local', 'dso_preemptable',
+ 'dsoLocal', 'eq', 'exact', 'exactmatch', 'extern_weak', 'external',
+ 'externally_initialized', 'extractelement', 'extractvalue', 'fadd', 'false',
+ 'fast', 'fastcc', 'fcmp', 'fdiv', 'fence', 'filter', 'flags', 'float', 'fmul',
+ 'fp128', 'fpext', 'fptosi', 'fptoui', 'fptrunc', 'frem', 'from', 'fsub',
+ 'funcFlags', 'function', 'gc', 'getelementptr', 'ghccc', 'global', 'guid', 'gv',
+ 'half', 'hash', 'hhvm_ccc', 'hhvmcc', 'hidden', 'hot', 'hotness', 'icmp',
+ 'ifunc', 'inaccessiblemem_or_argmemonly', 'inaccessiblememonly', 'inalloca',
+ 'inbounds', 'indir', 'indirectbr', 'info', 'initialexec', 'inline',
+ 'inlineBits', 'inlinehint', 'inrange', 'inreg', 'insertelement', 'insertvalue',
+ 'insts', 'intel_ocl_bicc', 'inteldialect', 'internal', 'inttoptr', 'invoke',
+ 'jumptable', 'kind', 'label', 'landingpad', 'largest', 'linkage', 'linkonce',
+ 'linkonce_odr', 'live', 'load', 'local_unnamed_addr', 'localdynamic',
+ 'localexec', 'lshr', 'max', 'metadata', 'min', 'minsize', 'module', 'monotonic',
+ 'msp430_intrcc', 'mul', 'musttail', 'naked', 'name', 'nand', 'ne', 'nest',
+ 'ninf', 'nnan', 'noalias', 'nobuiltin', 'nocapture', 'nocf_check',
+ 'noduplicate', 'noduplicates', 'noimplicitfloat', 'noinline', 'none',
+ 'nonlazybind', 'nonnull', 'norecurse', 'noRecurse', 'noredzone', 'noreturn',
+ 'notail', 'notEligibleToImport', 'nounwind', 'nsw', 'nsz', 'null', 'nuw', 'oeq',
+ 'offset', 'oge', 'ogt', 'ole', 'olt', 'one', 'opaque', 'optforfuzzing',
+ 'optnone', 'optsize', 'or', 'ord', 'path', 'personality', 'phi', 'ppc_fp128',
+ 'prefix', 'preserve_allcc', 'preserve_mostcc', 'private', 'prologue',
+ 'protected', 'ptrtoint', 'ptx_device', 'ptx_kernel', 'readnone', 'readNone',
+ 'readonly', 'readOnly', 'reassoc', 'refs', 'relbf', 'release', 'resByArg',
+ 'resume', 'ret', 'returnDoesNotAlias', 'returned', 'returns_twice', 'safestack',
+ 'samesize', 'sanitize_address', 'sanitize_hwaddress', 'sanitize_memory',
+ 'sanitize_thread', 'sdiv', 'section', 'select', 'seq_cst', 'sext', 'sge', 'sgt',
+ 'shadowcallstack', 'shl', 'shufflevector', 'sideeffect', 'signext', 'single',
+ 'singleImpl', 'singleImplName', 'sitofp', 'sizeM1', 'sizeM1BitWidth', 'sle',
+ 'slt', 'source_filename', 'speculatable', 'spir_func', 'spir_kernel', 'srem',
+ 'sret', 'ssp', 'sspreq', 'sspstrong', 'store', 'strictfp', 'sub', 'summaries',
+ 'summary', 'swiftcc', 'swifterror', 'swiftself', 'switch', 'syncscope', 'tail',
+ 'target', 'thread_local', 'to', 'token', 'triple', 'true', 'trunc', 'type',
+ 'typeCheckedLoadConstVCalls', 'typeCheckedLoadVCalls', 'typeid', 'typeIdInfo',
+ 'typeTestAssumeConstVCalls', 'typeTestAssumeVCalls', 'typeTestRes', 'typeTests',
+ 'udiv', 'ueq', 'uge', 'ugt', 'uitofp', 'ule', 'ult', 'umax', 'umin', 'undef',
+ 'une', 'uniformRetVal', 'uniqueRetVal', 'unknown', 'unnamed_addr', 'uno',
+ 'unordered', 'unreachable', 'unsat', 'unwind', 'urem', 'uselistorder',
+ 'uselistorder_bb', 'uwtable', 'va_arg', 'variable', 'vFuncId',
+ 'virtualConstProp', 'void', 'volatile', 'weak', 'weak_odr', 'webkit_jscc',
+ 'win64cc', 'within', 'wpdRes', 'wpdResolutions', 'writeonly', 'x',
+ 'x86_64_sysvcc', 'x86_fastcallcc', 'x86_fp80', 'x86_intrcc', 'x86_mmx',
+ 'x86_regcallcc', 'x86_stdcallcc', 'x86_thiscallcc', 'x86_vectorcallcc', 'xchg',
+ 'xor', 'zeroext', 'zeroinitializer', 'zext'),
+ suffix=r'\b'), Keyword),
# Types
(words(('void', 'half', 'float', 'double', 'x86_fp80', 'fp128',
- 'ppc_fp128', 'label', 'metadata')), Keyword.Type),
+ 'ppc_fp128', 'label', 'metadata', 'token')), Keyword.Type),
# Integer types
(r'i[1-9]\d*', Keyword)
]
}
+
class NasmLexer(RegexLexer):
"""
For Nasm (Intel) assembly code.
@@ -628,3 +650,109 @@ class Ca65Lexer(RegexLexer):
# comments in GAS start with "#"
if re.match(r'^\s*;', text, re.MULTILINE):
return 0.9
+
+
+class Dasm16Lexer(RegexLexer):
+ """
+ Simple lexer for DCPU-16 Assembly
+
+ Check http://0x10c.com/doc/dcpu-16.txt
+
+ .. versionadded:: 2.4
+ """
+ name = 'DASM16'
+ aliases = ['dasm16']
+ filenames = ['*.dasm16', '*.dasm']
+ mimetypes = ['text/x-dasm16']
+
+ INSTRUCTIONS = [
+ 'SET',
+ 'ADD', 'SUB',
+ 'MUL', 'MLI',
+ 'DIV', 'DVI',
+ 'MOD', 'MDI',
+ 'AND', 'BOR', 'XOR',
+ 'SHR', 'ASR', 'SHL',
+ 'IFB', 'IFC', 'IFE', 'IFN', 'IFG', 'IFA', 'IFL', 'IFU',
+ 'ADX', 'SBX',
+ 'STI', 'STD',
+ 'JSR',
+ 'INT', 'IAG', 'IAS', 'RFI', 'IAQ', 'HWN', 'HWQ', 'HWI',
+ ]
+
+ REGISTERS = [
+ 'A', 'B', 'C',
+ 'X', 'Y', 'Z',
+ 'I', 'J',
+ 'SP', 'PC', 'EX',
+ 'POP', 'PEEK', 'PUSH'
+ ]
+
+ # Regexes yo
+ char = r'[a-zA-Z$._0-9@]'
+ identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
+ number = r'[+-]?(?:0[xX][a-zA-Z0-9]+|\d+)'
+ binary_number = r'0b[01_]+'
+ instruction = r'(?i)(' + '|'.join(INSTRUCTIONS) + ')'
+ single_char = r"'\\?" + char + "'"
+ string = r'"(\\"|[^"])*"'
+
+ def guess_identifier(lexer, match):
+ ident = match.group(0)
+ klass = Name.Variable if ident.upper() in lexer.REGISTERS else Name.Label
+ yield match.start(), klass, ident
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ (':' + identifier, Name.Label),
+ (identifier + ':', Name.Label),
+ (instruction, Name.Function, 'instruction-args'),
+ (r'\.' + identifier, Name.Function, 'data-args'),
+ (r'[\r\n]+', Text)
+ ],
+
+ 'numeric' : [
+ (binary_number, Number.Integer),
+ (number, Number.Integer),
+ (single_char, String),
+ ],
+
+ 'arg' : [
+ (identifier, guess_identifier),
+ include('numeric')
+ ],
+
+ 'deref' : [
+ (r'\+', Punctuation),
+ (r'\]', Punctuation, '#pop'),
+ include('arg'),
+ include('whitespace')
+ ],
+
+ 'instruction-line' : [
+ (r'[\r\n]+', Text, '#pop'),
+ (r';.*?$', Comment, '#pop'),
+ include('whitespace')
+ ],
+
+ 'instruction-args': [
+ (r',', Punctuation),
+ (r'\[', Punctuation, 'deref'),
+ include('arg'),
+ include('instruction-line')
+ ],
+
+ 'data-args' : [
+ (r',', Punctuation),
+ include('numeric'),
+ (string, String),
+ include('instruction-line')
+ ],
+
+ 'whitespace': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r';.*?\n', Comment)
+ ],
+ }
diff --git a/pygments/lexers/automation.py b/pygments/lexers/automation.py
index 2ebc4d24..3ef42e48 100644
--- a/pygments/lexers/automation.py
+++ b/pygments/lexers/automation.py
@@ -5,7 +5,7 @@
Lexers for automation scripting languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -31,8 +31,8 @@ class AutohotkeyLexer(RegexLexer):
'root': [
(r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline), 'incomment'),
(r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'),
- (r'\s+;.*?$', Comment.Singleline),
- (r'^;.*?$', Comment.Singleline),
+ (r'\s+;.*?$', Comment.Single),
+ (r'^;.*?$', Comment.Single),
(r'[]{}(),;[]', Punctuation),
(r'(in|is|and|or|not)\b', Operator.Word),
(r'\%[a-zA-Z_#@$][\w#@$]*\%', Name.Variable),
diff --git a/pygments/lexers/basic.py b/pygments/lexers/basic.py
index a73ad8b4..4d957c2b 100644
--- a/pygments/lexers/basic.py
+++ b/pygments/lexers/basic.py
@@ -5,18 +5,21 @@
Lexers for BASIC like languages (other than VB.net).
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups, default, words, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
+from pygments.token import Comment, Error, Keyword, Name, Number, \
+ Punctuation, Operator, String, Text, Whitespace
+from pygments.lexers import _vbscript_builtins
+
__all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer',
- 'QBasicLexer']
+ 'QBasicLexer', 'VBScriptLexer', 'BBCBasicLexer']
+
class BlitzMaxLexer(RegexLexer):
@@ -498,3 +501,159 @@ class QBasicLexer(RegexLexer):
def analyse_text(text):
if '$DYNAMIC' in text or '$STATIC' in text:
return 0.9
+
+
+class VBScriptLexer(RegexLexer):
+ """
+ VBScript is a scripting language modeled on Visual Basic.
+
+ .. versionadded:: 2.4
+ """
+ name = 'VBScript'
+ aliases = []
+ filenames = ['*.vbs', '*.VBS']
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r"'[^\n]*", Comment.Single),
+ (r'\s+', Whitespace),
+ ('"', String.Double, 'string'),
+ ('&h[0-9a-f]+', Number.Hex),
+ # Float variant 1, for example: 1., 1.e2, 1.2e3
+ (r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float),
+ (r'\.[0-9]+(e[+-]?[0-9]+)?', Number.Float), # Float variant 2, for example: .1, .1e2
+ (r'[0-9]+e[+-]?[0-9]+', Number.Float), # Float variant 3, for example: 123e45
+ (r'\d+', Number.Integer),
+ ('#.+#', String), # date or time value
+ (r'(dim)(\s+)([a-z_][a-z0-9_]*)',
+ bygroups(Keyword.Declaration, Whitespace, Name.Variable), 'dim_more'),
+ (r'(function|sub)(\s+)([a-z_][a-z0-9_]*)',
+ bygroups(Keyword.Declaration, Whitespace, Name.Function)),
+ (r'(class)(\s+)([a-z_][a-z0-9_]*)', bygroups(Keyword.Declaration, Whitespace, Name.Class)),
+ (r'(const)(\s+)([a-z_][a-z0-9_]*)', bygroups(Keyword.Declaration, Whitespace, Name.Constant)),
+ (r'(end)(\s+)(class|function|if|property|sub|with)', bygroups(Keyword, Whitespace, Keyword)),
+ (r'(on)(\s+)(error)(\s+)(goto)(\s+)(0)',
+ bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword, Whitespace, Number.Integer)),
+ (r'(on)(\s+)(error)(\s+)(resume)(\s+)(next)',
+ bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword, Whitespace, Keyword)),
+ (r'(option)(\s+)(explicit)', bygroups(Keyword, Whitespace, Keyword)),
+ (r'(property)(\s+)(get|let|set)(\s+)([a-z_][a-z0-9_]*)',
+ bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration, Whitespace, Name.Property)),
+ (r'rem\s.*[^\n]*', Comment.Single),
+ (words(_vbscript_builtins.KEYWORDS, suffix=r'\b'), Keyword),
+ (words(_vbscript_builtins.OPERATORS), Operator),
+ (words(_vbscript_builtins.OPERATOR_WORDS, suffix=r'\b'), Operator.Word),
+ (words(_vbscript_builtins.BUILTIN_CONSTANTS, suffix=r'\b'), Name.Constant),
+ (words(_vbscript_builtins.BUILTIN_FUNCTIONS, suffix=r'\b'), Name.Builtin),
+ (words(_vbscript_builtins.BUILTIN_VARIABLES, suffix=r'\b'), Name.Builtin),
+ (r'[a-z_][a-z0-9_]*', Name),
+ (r'\b_\n', Operator),
+ (words(r'(),.:'), Punctuation),
+ ('.+(\n)?', Error)
+ ],
+ 'dim_more': [
+ (r'(\s*)(,)(\s*)([a-z_][a-z0-9]*)', bygroups(Whitespace, Punctuation, Whitespace, Name.Variable)),
+ default('#pop'),
+ ],
+ 'string': [
+ (r'[^"\n]+', String.Double),
+ (r'\"\"', String.Double),
+ (r'"', String.Double, '#pop'),
+ (r'\n', Error, '#pop'), # Unterminated string
+ ],
+ }
+
+
+class BBCBasicLexer(RegexLexer):
+ """
+ BBC Basic was supplied on the BBC Micro, and later Acorn RISC OS.
+ It is also used by BBC Basic For Windows.
+
+ .. versionadded:: 2.4
+ """
+ base_keywords = ['OTHERWISE', 'AND', 'DIV', 'EOR', 'MOD', 'OR', 'ERROR',
+ 'LINE', 'OFF', 'STEP', 'SPC', 'TAB', 'ELSE', 'THEN',
+ 'OPENIN', 'PTR', 'PAGE', 'TIME', 'LOMEM', 'HIMEM', 'ABS',
+ 'ACS', 'ADVAL', 'ASC', 'ASN', 'ATN', 'BGET', 'COS', 'COUNT',
+ 'DEG', 'ERL', 'ERR', 'EVAL', 'EXP', 'EXT', 'FALSE', 'FN',
+ 'GET', 'INKEY', 'INSTR', 'INT', 'LEN', 'LN', 'LOG', 'NOT',
+ 'OPENUP', 'OPENOUT', 'PI', 'POINT', 'POS', 'RAD', 'RND',
+ 'SGN', 'SIN', 'SQR', 'TAN', 'TO', 'TRUE', 'USR', 'VAL',
+ 'VPOS', 'CHR$', 'GET$', 'INKEY$', 'LEFT$', 'MID$',
+ 'RIGHT$', 'STR$', 'STRING$', 'EOF', 'PTR', 'PAGE', 'TIME',
+ 'LOMEM', 'HIMEM', 'SOUND', 'BPUT', 'CALL', 'CHAIN', 'CLEAR',
+ 'CLOSE', 'CLG', 'CLS', 'DATA', 'DEF', 'DIM', 'DRAW', 'END',
+ 'ENDPROC', 'ENVELOPE', 'FOR', 'GOSUB', 'GOTO', 'GCOL', 'IF',
+ 'INPUT', 'LET', 'LOCAL', 'MODE', 'MOVE', 'NEXT', 'ON',
+ 'VDU', 'PLOT', 'PRINT', 'PROC', 'READ', 'REM', 'REPEAT',
+ 'REPORT', 'RESTORE', 'RETURN', 'RUN', 'STOP', 'COLOUR',
+ 'TRACE', 'UNTIL', 'WIDTH', 'OSCLI']
+
+ basic5_keywords = ['WHEN', 'OF', 'ENDCASE', 'ENDIF', 'ENDWHILE', 'CASE',
+ 'CIRCLE', 'FILL', 'ORIGIN', 'POINT', 'RECTANGLE', 'SWAP',
+ 'WHILE', 'WAIT', 'MOUSE', 'QUIT', 'SYS', 'INSTALL',
+ 'LIBRARY', 'TINT', 'ELLIPSE', 'BEATS', 'TEMPO', 'VOICES',
+ 'VOICE', 'STEREO', 'OVERLAY', 'APPEND', 'AUTO', 'CRUNCH',
+ 'DELETE', 'EDIT', 'HELP', 'LIST', 'LOAD', 'LVAR', 'NEW',
+ 'OLD', 'RENUMBER', 'SAVE', 'TEXTLOAD', 'TEXTSAVE',
+ 'TWIN', 'TWINO', 'INSTALL', 'SUM', 'BEAT']
+
+
+ name = 'BBC Basic'
+ aliases = ['bbcbasic']
+ filenames = ['*.bbc']
+
+ tokens = {
+ 'root': [
+ (r"[0-9]+", Name.Label),
+ (r"(\*)([^\n]*)",
+ bygroups(Keyword.Pseudo, Comment.Special)),
+ (r"", Whitespace, 'code'),
+ ],
+
+ 'code': [
+ (r"(REM)([^\n]*)",
+ bygroups(Keyword.Declaration, Comment.Single)),
+ (r'\n', Whitespace, 'root'),
+ (r'\s+', Whitespace),
+ (r':', Comment.Preproc),
+
+ # Some special cases to make functions come out nicer
+ (r'(DEF)(\s*)(FN|PROC)([A-Za-z_@][A-Za-z0-9_@]*)',
+ bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration, Name.Function)),
+ (r'(FN|PROC)([A-Za-z_@][A-Za-z0-9_@]*)',
+ bygroups(Keyword, Name.Function)),
+
+ (r'(GOTO|GOSUB|THEN|RESTORE)(\s*)(\d+)',
+ bygroups(Keyword, Whitespace, Name.Label)),
+
+ (r'(TRUE|FALSE)', Keyword.Constant),
+ (r'(PAGE|LOMEM|HIMEM|TIME|WIDTH|ERL|ERR|REPORT\$|POS|VPOS|VOICES)', Keyword.Pseudo),
+
+ (words(base_keywords), Keyword),
+ (words(basic5_keywords), Keyword),
+
+ ('"', String.Double, 'string'),
+
+ ('%[01]{1,32}', Number.Bin),
+ ('&[0-9a-f]{1,8}', Number.Hex),
+
+ (r'[+-]?[0-9]+\.[0-9]*(E[+-]?[0-9]+)?', Number.Float),
+ (r'[+-]?\.[0-9]+(E[+-]?[0-9]+)?', Number.Float),
+ (r'[+-]?[0-9]+E[+-]?[0-9]+', Number.Float),
+ (r'[+-]?\d+', Number.Integer),
+
+ (r'([A-Za-z_@][A-Za-z0-9_@]*[%$]?)', Name.Variable),
+ (r'([+\-]=|[$!|?+\-*/%^=><();]|>=|<=|<>|<<|>>|>>>|,)', Operator),
+ ],
+ 'string': [
+ (r'[^"\n]+', String.Double),
+ (r'"', String.Double, '#pop'),
+ (r'\n', Error, 'root'), # Unterminated string
+ ],
+ }
+
+ def analyse_text(text):
+ if text.startswith('10REM >') or text.startswith('REM >'):
+ return 0.9
diff --git a/pygments/lexers/bibtex.py b/pygments/lexers/bibtex.py
index cbaedca2..7244ef2f 100644
--- a/pygments/lexers/bibtex.py
+++ b/pygments/lexers/bibtex.py
@@ -5,14 +5,16 @@
Lexers for BibTeX bibliography data and styles
- :copyright: Copyright 2005-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, default, words
-from pygments.token import Name, Comment, String, Error, Number, Text, Keyword, Punctuation
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, default, \
+ words
+from pygments.token import Name, Comment, String, Error, Number, Text, \
+ Keyword, Punctuation
__all__ = ['BibTeXLexer', 'BSTLexer']
@@ -57,7 +59,8 @@ class BibTeXLexer(ExtendedRegexLexer):
('@comment', Comment),
('@preamble', Name.Class, ('closing-brace', 'value', 'opening-brace')),
('@string', Name.Class, ('closing-brace', 'field', 'opening-brace')),
- ('@' + IDENTIFIER, Name.Class, ('closing-brace', 'command-body', 'opening-brace')),
+ ('@' + IDENTIFIER, Name.Class,
+ ('closing-brace', 'command-body', 'opening-brace')),
('.+', Comment),
],
'opening-brace': [
@@ -98,12 +101,12 @@ class BibTeXLexer(ExtendedRegexLexer):
'quoted-string': [
(r'\{', String, 'braced-string'),
('"', String, '#pop'),
- ('[^\{\"]+', String),
+ (r'[^\{\"]+', String),
],
'braced-string': [
(r'\{', String, '#push'),
(r'\}', String, '#pop'),
- ('[^\{\}]+', String),
+ (r'[^\{\}]+', String),
],
'whitespace': [
(r'\s+', Text),
@@ -127,7 +130,8 @@ class BSTLexer(RegexLexer):
'root': [
include('whitespace'),
(words(['read', 'sort']), Keyword),
- (words(['execute', 'integers', 'iterate', 'reverse', 'strings']), Keyword, ('group')),
+ (words(['execute', 'integers', 'iterate', 'reverse', 'strings']),
+ Keyword, ('group')),
(words(['function', 'macro']), Keyword, ('group', 'group')),
(words(['entry']), Keyword, ('group', 'group', 'group')),
],
@@ -150,7 +154,7 @@ class BSTLexer(RegexLexer):
default('#pop'),
],
'whitespace': [
- ('\s+', Text),
+ (r'\s+', Text),
('%.*?$', Comment.SingleLine),
],
}
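
The bibtex hunks above are mostly raw-string conversions; a short sketch (not part of the patch) of the motivation: '\s' and r'\s' are currently the same string only because Python passes unrecognised escapes through, and that fallback is deprecated, so the raw-string spelling is the future-proof one.

    import re

    # Both spellings currently denote the same two-character pattern, but the
    # non-raw form triggers a DeprecationWarning on recent Python versions.
    assert '\s' == r'\s' == '\\s'
    assert re.findall(r'\s+', 'a b\tc') == [' ', '\t']
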
diff --git a/pygments/lexers/business.py b/pygments/lexers/business.py
index 43978690..552f3d9c 100644
--- a/pygments/lexers/business.py
+++ b/pygments/lexers/business.py
@@ -5,7 +5,7 @@
Lexers for "business-oriented" languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -256,6 +256,7 @@ class ABAPLexer(RegexLexer):
(r'\s+', Text),
(r'^\*.*$', Comment.Single),
(r'\".*?\n', Comment.Single),
+ (r'##\w+', Comment.Special),
],
'variable-names': [
(r'<\S+>', Name.Variable),
@@ -264,8 +265,8 @@ class ABAPLexer(RegexLexer):
'root': [
include('common'),
# function calls
- (r'(CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION))(\s+)(\'?\S+\'?)',
- bygroups(Keyword, Text, Name.Function)),
+ (r'CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)',
+ Keyword),
(r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
r'TRANSACTION|TRANSFORMATION))\b',
Keyword),
@@ -285,6 +286,12 @@ class ABAPLexer(RegexLexer):
# call methodnames returning style
(r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function),
+ # text elements
+ (r'(TEXT)(-)(\d{3})',
+ bygroups(Keyword, Punctuation, Number.Integer)),
+ (r'(TEXT)(-)(\w{3})',
+ bygroups(Keyword, Punctuation, Name.Variable)),
+
# keywords with dashes in them.
# these need to be first, because for instance the -ID part
# of MESSAGE-ID wouldn't get highlighted if MESSAGE was
@@ -301,13 +308,13 @@ class ABAPLexer(RegexLexer):
r'OUTPUT-LENGTH|PRINT-CONTROL|'
r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
- r'TYPE-POOL|TYPE-POOLS'
+ r'TYPE-POOL|TYPE-POOLS|NO-DISPLAY'
r')\b', Keyword),
# keyword kombinations
- (r'CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
- r'((PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
- r'(TYPE|LIKE)(\s+(LINE\s+OF|REF\s+TO|'
+ (r'(?<![-\>])(CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
+ r'(PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
+ r'(TYPE|LIKE)\s+((LINE\s+OF|REF\s+TO|'
r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
@@ -343,10 +350,16 @@ class ABAPLexer(RegexLexer):
r'(BEGIN|END)\s+OF|'
r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
r'COMPARING(\s+ALL\s+FIELDS)?|'
- r'INSERT(\s+INITIAL\s+LINE\s+INTO|\s+LINES\s+OF)?|'
+ r'(INSERT|APPEND)(\s+INITIAL\s+LINE\s+(IN)?TO|\s+LINES\s+OF)?|'
r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
r'END-OF-(DEFINITION|PAGE|SELECTION)|'
r'WITH\s+FRAME(\s+TITLE)|'
+ r'(REPLACE|FIND)\s+((FIRST|ALL)\s+OCCURRENCES?\s+OF\s+)?(SUBSTRING|REGEX)?|'
+ r'MATCH\s+(LENGTH|COUNT|LINE|OFFSET)|'
+ r'(RESPECTING|IGNORING)\s+CASE|'
+ r'IN\s+UPDATE\s+TASK|'
+ r'(SOURCE|RESULT)\s+(XML)?|'
+ r'REFERENCE\s+INTO|'
# simple kombinations
r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
@@ -355,39 +368,41 @@ class ABAPLexer(RegexLexer):
r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
- r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE)\b', Keyword),
+ r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE|COMMON\s+PART)\b', Keyword),
# single word keywords.
- (r'(^|(?<=(\s|\.)))(ABBREVIATED|ADD|ALIASES|APPEND|ASSERT|'
- r'ASSIGN(ING)?|AT(\s+FIRST)?|'
+ (r'(^|(?<=(\s|\.)))(ABBREVIATED|ABSTRACT|ADD|ALIASES|ALIGN|ALPHA|'
+ r'ASSERT|AS|ASSIGN(ING)?|AT(\s+FIRST)?|'
r'BACK|BLOCK|BREAK-POINT|'
r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|'
r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
- r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|'
- r'DATA|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
- r'DETAIL|DIRECTORY|DIVIDE|DO|'
- r'ELSE(IF)?|ENDAT|ENDCASE|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
- r'ENDIF|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|'
- r'ENHANCEMENT|EVENTS|EXCEPTIONS|EXIT|EXPORT|EXPORTING|EXTRACT|'
- r'FETCH|FIELDS?|FIND|FOR|FORM|FORMAT|FREE|FROM|'
+ r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|COUNTRY|CURRENCY|'
+ r'DATA|DATE|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
+ r'DETAIL|DIRECTORY|DIVIDE|DO|DUMMY|'
+ r'ELSE(IF)?|ENDAT|ENDCASE|ENDCATCH|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
+ r'ENDIF|ENDINTERFACE|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|ENDWHILE|'
+ r'ENHANCEMENT|EVENTS|EXACT|EXCEPTIONS?|EXIT|EXPONENT|EXPORT|EXPORTING|EXTRACT|'
+ r'FETCH|FIELDS?|FOR|FORM|FORMAT|FREE|FROM|FUNCTION|'
r'HIDE|'
r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
- r'LENGTH|LINES|LOAD|LOCAL|'
+ r'LANGUAGE|LEAVE|LENGTH|LINES|LOAD|LOCAL|'
r'JOIN|'
r'KEY|'
- r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFY|MOVE|MULTIPLY|'
- r'NODES|'
- r'OBLIGATORY|OF|OFF|ON|OVERLAY|'
- r'PACK|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|'
- r'RAISE|RAISING|RANGES|READ|RECEIVE|REFRESH|REJECT|REPORT|RESERVE|'
- r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|'
- r'SCROLL|SEARCH|SELECT|SHIFT|SINGLE|SKIP|SORT|SPLIT|STATICS|STOP|'
- r'SUBMIT|SUBTRACT|SUM|SUMMARY|SUMMING|SUPPLY|'
- r'TABLE|TABLES|TIMES|TITLE|TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
+ r'NEXT|'
+ r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFIER|MODIFY|MOVE|MULTIPLY|'
+ r'NODES|NUMBER|'
+ r'OBLIGATORY|OBJECT|OF|OFF|ON|OTHERS|OVERLAY|'
+ r'PACK|PAD|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|PF\d\d|'
+ r'RAISE|RAISING|RANGES?|READ|RECEIVE|REDEFINITION|REFRESH|REJECT|REPORT|RESERVE|'
+ r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|REPLACE|'
+ r'SCROLL|SEARCH|SELECT|SHIFT|SIGN|SINGLE|SIZE|SKIP|SORT|SPLIT|STATICS|STOP|'
+ r'STYLE|SUBMATCHES|SUBMIT|SUBTRACT|SUM(?!\()|SUMMARY|SUMMING|SUPPLY|'
+ r'TABLE|TABLES|TIMESTAMP|TIMES?|TIMEZONE|TITLE|\??TO|'
+ r'TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
r'ULINE|UNDER|UNPACK|UPDATE|USING|'
- r'VALUE|VALUES|VIA|'
- r'WAIT|WHEN|WHERE|WHILE|WITH|WINDOW|WRITE)\b', Keyword),
+ r'VALUE|VALUES|VIA|VARYING|VARY|'
+ r'WAIT|WHEN|WHERE|WIDTH|WHILE|WITH|WINDOW|WRITE|XSD|ZERO)\b', Keyword),
# builtins
(r'(abs|acos|asin|atan|'
@@ -413,18 +428,21 @@ class ABAPLexer(RegexLexer):
# operators which look like variable names before
# parsing variable names.
- (r'(?<=(\s|.))(AND|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
+ (r'(?<=(\s|.))(AND|OR|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
- r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator),
+ r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator.Word),
include('variable-names'),
- # standard oparators after variable names,
+ # standard operators after variable names,
# because < and > are part of field symbols.
- (r'[?*<>=\-+]', Operator),
+ (r'[?*<>=\-+&]', Operator),
(r"'(''|[^'])*'", String.Single),
(r"`([^`])*`", String.Single),
- (r'[/;:()\[\],.]', Punctuation)
+ (r"([|}])([^{}|]*?)([|{])",
+ bygroups(Punctuation, String.Single, Punctuation)),
+ (r'[/;:()\[\],.]', Punctuation),
+ (r'(!)(\w+)', bygroups(Operator, Name)),
],
}
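
A quick sketch (not part of the patch) of the new string-template rule: the ([|}]) ... ([|{]) pattern added above splits an ABAP template literal such as |Hello { lv_user }!| into Punctuation and String.Single pieces. The snippet and the resulting token stream are illustrative only.

    from pygments.lexers.business import ABAPLexer

    for tok, val in ABAPLexer().get_tokens('lv_msg = |Hello { lv_user }!|.\n'):
        print(tok, repr(val))
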
diff --git a/pygments/lexers/c_cpp.py b/pygments/lexers/c_cpp.py
index 2a2419d4..38f425db 100644
--- a/pygments/lexers/c_cpp.py
+++ b/pygments/lexers/c_cpp.py
@@ -5,7 +5,7 @@
Lexers for C/C++ languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -36,7 +36,7 @@ class CFamilyLexer(RegexLexer):
tokens = {
'whitespace': [
# preprocessor directives: without whitespace
- ('^#if\s+0', Comment.Preproc, 'if0'),
+ (r'^#if\s+0', Comment.Preproc, 'if0'),
('^#', Comment.Preproc, 'macro'),
# or with whitespace
('^(' + _ws1 + r')(#if\s+0)',
@@ -84,7 +84,7 @@ class CFamilyLexer(RegexLexer):
prefix=r'__', suffix=r'\b'), Keyword.Reserved),
(r'(true|false|NULL)\b', Name.Builtin),
(r'([a-zA-Z_]\w*)(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)),
- ('[a-zA-Z_]\w*', Name),
+ (r'[a-zA-Z_]\w*', Name),
],
'root': [
include('whitespace'),
@@ -190,9 +190,9 @@ class CLexer(CFamilyLexer):
priority = 0.1
def analyse_text(text):
- if re.search('^\s*#include [<"]', text, re.MULTILINE):
+ if re.search(r'^\s*#include [<"]', text, re.MULTILINE):
return 0.1
- if re.search('^\s*#ifn?def ', text, re.MULTILINE):
+ if re.search(r'^\s*#ifn?def ', text, re.MULTILINE):
return 0.1
diff --git a/pygments/lexers/c_like.py b/pygments/lexers/c_like.py
index f4a9c299..58372b81 100644
--- a/pygments/lexers/c_like.py
+++ b/pygments/lexers/c_like.py
@@ -5,7 +5,7 @@
Lexers for other C-like languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,7 +20,7 @@ from pygments.lexers.c_cpp import CLexer, CppLexer
from pygments.lexers import _mql_builtins
__all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer',
- 'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer']
+ 'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer', 'CharmciLexer']
class PikeLexer(CppLexer):
@@ -105,7 +105,7 @@ class ClayLexer(RegexLexer):
tokens = {
'root': [
(r'\s', Text),
- (r'//.*?$', Comment.Singleline),
+ (r'//.*?$', Comment.Single),
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
(r'\b(public|private|import|as|record|variant|instance'
r'|define|overload|default|external|alias'
@@ -245,7 +245,7 @@ class ValaLexer(RegexLexer):
'ulong', 'unichar', 'ushort'), suffix=r'\b'),
Keyword.Type),
(r'(true|false|null)\b', Name.Builtin),
- ('[a-zA-Z_]\w*', Name),
+ (r'[a-zA-Z_]\w*', Name),
],
'root': [
include('whitespace'),
@@ -344,7 +344,7 @@ class SwigLexer(CppLexer):
# SWIG directives
(r'(%[a-z_][a-z0-9_]*)', Name.Function),
# Special variables
- ('\$\**\&?\w+', Name),
+ (r'\$\**\&?\w+', Name),
# Stringification / additional preprocessor directives
(r'##*[a-zA-Z_]\w*', Comment.Preproc),
inherit,
@@ -539,3 +539,23 @@ class ArduinoLexer(CppLexer):
yield index, Name.Function, value
else:
yield index, token, value
+
+class CharmciLexer(CppLexer):
+ """
+ For `Charm++ <https://charm.cs.illinois.edu>`_ interface files (.ci).
+ """
+
+ name = 'Charmci'
+ aliases = ['charmci']
+ filenames = ['*.ci']
+
+ mimetypes = []
+
+ tokens = {
+ 'statements': [
+ (r'(module)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (words(('mainmodule', 'mainchare', 'chare', 'array', 'group', 'nodegroup',
+ 'message', 'conditional')), Keyword),
+ (words(('entry', 'aggregate', 'threaded', 'sync', 'exclusive', 'nokeep',
+ 'notrace', 'immediate', 'expedited', 'inline', 'local', 'python',
+ 'accel', 'readwrite', 'writeonly', 'accelblock', 'memcritical',
+ 'packed', 'varsize', 'initproc', 'initnode', 'initcall', 'stacksize',
+ 'createhere', 'createhome', 'reductiontarget', 'iget', 'nocopy',
+ 'mutable', 'migratable', 'readonly')), Keyword),
+ inherit,
+ ],
+ }
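
A usage sketch (not part of the patch): because CharmciLexer derives from CppLexer and its 'statements' state ends with `inherit`, ordinary C++ keeps its normal highlighting while the Charm++ interface keywords above are layered on top. The .ci snippet is invented.

    from pygments.lexers.c_like import CharmciLexer

    ci = 'module hello {\n  mainchare Main {\n    entry Main();\n  };\n};\n'
    for tok, val in CharmciLexer().get_tokens(ci):
        print(tok, repr(val))
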
diff --git a/pygments/lexers/capnproto.py b/pygments/lexers/capnproto.py
index f9c11330..2615dcaf 100644
--- a/pygments/lexers/capnproto.py
+++ b/pygments/lexers/capnproto.py
@@ -5,15 +5,14 @@
Lexers for the Cap'n Proto schema language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal
+from pygments.lexer import RegexLexer, default
+from pygments.token import Text, Comment, Keyword, Name, Literal
__all__ = ['CapnProtoLexer']
@@ -30,7 +29,6 @@ class CapnProtoLexer(RegexLexer):
flags = re.MULTILINE | re.UNICODE
-
tokens = {
'root': [
(r'#.*?$', Comment.Single),
@@ -38,42 +36,43 @@ class CapnProtoLexer(RegexLexer):
(r'=', Literal, 'expression'),
(r':', Name.Class, 'type'),
(r'\$', Name.Attribute, 'annotation'),
- (r'(struct|enum|interface|union|import|using|const|annotation|extends|in|of|on|as|with|from|fixed)\b',
- Keyword),
- (r'[a-zA-Z0-9_.]+', Name),
- (r'[^#@=:$a-zA-Z0-9_]+', Text),
+ (r'(struct|enum|interface|union|import|using|const|annotation|'
+ r'extends|in|of|on|as|with|from|fixed)\b',
+ Keyword),
+ (r'[\w.]+', Name),
+ (r'[^#@=:$\w]+', Text),
],
'type': [
(r'[^][=;,(){}$]+', Name.Class),
- (r'[[(]', Name.Class, 'parentype'),
- (r'', Name.Class, '#pop')
+ (r'[\[(]', Name.Class, 'parentype'),
+ default('#pop'),
],
'parentype': [
(r'[^][;()]+', Name.Class),
- (r'[[(]', Name.Class, '#push'),
+ (r'[\[(]', Name.Class, '#push'),
(r'[])]', Name.Class, '#pop'),
- (r'', Name.Class, '#pop')
+ default('#pop'),
],
'expression': [
(r'[^][;,(){}$]+', Literal),
- (r'[[(]', Literal, 'parenexp'),
- (r'', Literal, '#pop')
+ (r'[\[(]', Literal, 'parenexp'),
+ default('#pop'),
],
'parenexp': [
(r'[^][;()]+', Literal),
- (r'[[(]', Literal, '#push'),
+ (r'[\[(]', Literal, '#push'),
(r'[])]', Literal, '#pop'),
- (r'', Literal, '#pop')
+ default('#pop'),
],
'annotation': [
(r'[^][;,(){}=:]+', Name.Attribute),
- (r'[[(]', Name.Attribute, 'annexp'),
- (r'', Name.Attribute, '#pop')
+ (r'[\[(]', Name.Attribute, 'annexp'),
+ default('#pop'),
],
'annexp': [
(r'[^][;()]+', Name.Attribute),
- (r'[[(]', Name.Attribute, '#push'),
+ (r'[\[(]', Name.Attribute, '#push'),
(r'[])]', Name.Attribute, '#pop'),
- (r'', Name.Attribute, '#pop')
+ default('#pop'),
],
}
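
The hunks above replace empty-pattern rules with `default(...)`, which changes state without consuming any input when nothing else in the state matches. A minimal sketch of the idiom (not part of the patch; the lexer and token choices are invented for illustration):

    from pygments.lexer import RegexLexer, default
    from pygments.token import Name, Punctuation, Text

    class DemoLexer(RegexLexer):
        """Tags the word following a colon as a type name, if one is present."""
        tokens = {
            'root': [
                (r'\s+', Text),
                (r':', Punctuation, 'type'),
                (r'\w+', Name),
            ],
            'type': [
                (r'\s+', Text),
                (r'\w+', Name.Class, '#pop'),
                default('#pop'),  # no name follows the colon: leave the state anyway
            ],
        }

    # DemoLexer().get_tokens('x: Foo bar') -> x: Name, Foo: Name.Class, bar: Name
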
diff --git a/pygments/lexers/chapel.py b/pygments/lexers/chapel.py
index e6507394..16ce720b 100644
--- a/pygments/lexers/chapel.py
+++ b/pygments/lexers/chapel.py
@@ -5,7 +5,7 @@
Lexer for the Chapel language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -42,17 +42,27 @@ class ChapelLexer(RegexLexer):
(r'(bool|complex|imag|int|opaque|range|real|string|uint)\b',
Keyword.Type),
(words((
- 'align', 'as', 'atomic', 'begin', 'break', 'by', 'cobegin',
- 'coforall', 'continue', 'delete', 'dmapped', 'do', 'domain',
- 'else', 'enum', 'except', 'export', 'extern', 'for', 'forall',
- 'if', 'index', 'inline', 'iter', 'label', 'lambda', 'let',
- 'local', 'new', 'noinit', 'on', 'only', 'otherwise', 'pragma',
- 'private', 'public', 'reduce', 'require', 'return', 'scan',
- 'select', 'serial', 'single', 'sparse', 'subdomain', 'sync',
- 'then', 'use', 'when', 'where', 'while', 'with', 'yield',
+ 'align', 'as', 'atomic',
+ 'begin', 'borrowed', 'break', 'by',
+ 'catch', 'cobegin', 'coforall', 'continue',
+ 'delete', 'dmapped', 'do', 'domain',
+ 'else', 'enum', 'except', 'export', 'extern',
+ 'for', 'forall',
+ 'if', 'index', 'inline',
+ 'label', 'lambda', 'let', 'local',
+ 'new', 'noinit',
+ 'on', 'only', 'otherwise', 'override', 'owned',
+ 'pragma', 'private', 'prototype', 'public',
+ 'reduce', 'require', 'return',
+ 'scan', 'select', 'serial', 'shared', 'single', 'sparse', 'subdomain', 'sync',
+ 'then', 'throw', 'throws', 'try',
+ 'unmanaged', 'use',
+ 'when', 'where', 'while', 'with',
+ 'yield',
'zip'), suffix=r'\b'),
Keyword),
- (r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'),
+ (r'(iter)((?:\s)+)', bygroups(Keyword, Text), 'procname'),
+ (r'(proc)((?:\s)+)', bygroups(Keyword, Text), 'procname'),
(r'(class|module|record|union)(\s+)', bygroups(Keyword, Text),
'classname'),
@@ -96,6 +106,7 @@ class ChapelLexer(RegexLexer):
(r'[a-zA-Z_][\w$]*', Name.Class, '#pop'),
],
'procname': [
- (r'[a-zA-Z_][\w$]*', Name.Function, '#pop'),
+ (r'([a-zA-Z_][.\w$]*|\~[a-zA-Z_][.\w$]*|[+*/!~%<>=&^|\-]{1,2})',
+ Name.Function, '#pop'),
],
}
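
A small sketch (not part of the patch) of what the widened 'procname' pattern buys: operator overloads and dotted method names after `proc`/`iter` now come out as Name.Function. The Chapel snippet is invented and the token stream printed by the loop is illustrative.

    from pygments.lexers.chapel import ChapelLexer

    src = 'proc +(a: R, b: R) { }\nproc R.init() { }\niter R.these() { }\n'
    for tok, val in ChapelLexer().get_tokens(src):
        print(tok, repr(val))
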
diff --git a/pygments/lexers/clean.py b/pygments/lexers/clean.py
index b87ff99e..dc973bea 100644
--- a/pygments/lexers/clean.py
+++ b/pygments/lexers/clean.py
@@ -9,10 +9,9 @@
:license: BSD, see LICENSE for details.
"""
-from pygments.lexer import ExtendedRegexLexer, LexerContext, \
- bygroups, words, include, default
-from pygments.token import Comment, Keyword, Literal, Name, Number, Operator, \
- Punctuation, String, Text, Whitespace
+from pygments.lexer import ExtendedRegexLexer, words, include, bygroups
+from pygments.token import Comment, Error, Keyword, Literal, Name, Number, \
+ Operator, Punctuation, String, Whitespace
__all__ = ['CleanLexer']
@@ -28,261 +27,152 @@ class CleanLexer(ExtendedRegexLexer):
aliases = ['clean']
filenames = ['*.icl', '*.dcl']
- def get_tokens_unprocessed(self, text=None, context=None):
- ctx = LexerContext(text, 0)
- ctx.indent = 0
- return ExtendedRegexLexer.get_tokens_unprocessed(self, text, context=ctx)
+ keywords = (
+ 'case', 'ccall', 'class', 'code', 'code inline', 'derive', 'export',
+ 'foreign', 'generic', 'if', 'in', 'infix', 'infixl', 'infixr',
+ 'instance', 'let', 'of', 'otherwise', 'special', 'stdcall', 'where',
+ 'with')
- def check_class_not_import(lexer, match, ctx):
- if match.group(0) == 'import':
- yield match.start(), Keyword.Namespace, match.group(0)
- ctx.stack = ctx.stack[:-1] + ['fromimportfunc']
- else:
- yield match.start(), Name.Class, match.group(0)
- ctx.pos = match.end()
+ modulewords = ('implementation', 'definition', 'system')
- def check_instance_class(lexer, match, ctx):
- if match.group(0) == 'instance' or match.group(0) == 'class':
- yield match.start(), Keyword, match.group(0)
- else:
- yield match.start(), Name.Function, match.group(0)
- ctx.stack = ctx.stack + ['fromimportfunctype']
- ctx.pos = match.end()
-
- @staticmethod
- def indent_len(text):
- # Tabs are four spaces:
- # https://svn.cs.ru.nl/repos/clean-platform/trunk/doc/STANDARDS.txt
- text = text.replace('\n', '')
- return len(text.replace('\t', ' ')), len(text)
-
- def store_indent(lexer, match, ctx):
- ctx.indent, _ = CleanLexer.indent_len(match.group(0))
- ctx.pos = match.end()
- yield match.start(), Text, match.group(0)
-
- def check_indent1(lexer, match, ctx):
- indent, reallen = CleanLexer.indent_len(match.group(0))
- if indent > ctx.indent:
- yield match.start(), Whitespace, match.group(0)
- ctx.pos = match.start() + reallen + 1
- else:
- ctx.indent = 0
- ctx.pos = match.start()
- ctx.stack = ctx.stack[:-1]
- yield match.start(), Whitespace, match.group(0)[1:]
-
- def check_indent2(lexer, match, ctx):
- indent, reallen = CleanLexer.indent_len(match.group(0))
- if indent > ctx.indent:
- yield match.start(), Whitespace, match.group(0)
- ctx.pos = match.start() + reallen + 1
- else:
- ctx.indent = 0
- ctx.pos = match.start()
- ctx.stack = ctx.stack[:-2]
-
- def check_indent3(lexer, match, ctx):
- indent, reallen = CleanLexer.indent_len(match.group(0))
- if indent > ctx.indent:
- yield match.start(), Whitespace, match.group(0)
- ctx.pos = match.start() + reallen + 1
- else:
- ctx.indent = 0
- ctx.pos = match.start()
- ctx.stack = ctx.stack[:-3]
- yield match.start(), Whitespace, match.group(0)[1:]
- if match.group(0) == '\n\n':
- ctx.pos = ctx.pos + 1
-
- def skip(lexer, match, ctx):
- ctx.stack = ctx.stack[:-1]
- ctx.pos = match.end()
- yield match.start(), Comment, match.group(0)
-
- keywords = ('class', 'instance', 'where', 'with', 'let', 'let!',
- 'in', 'case', 'of', 'infix', 'infixr', 'infixl', 'generic',
- 'derive', 'otherwise', 'code', 'inline')
+ lowerId = r'[a-z`][\w\d`]*'
+ upperId = r'[A-Z`][\w\d`]*'
+ funnyId = r'[~@#\$%\^?!+\-*<>\\/|&=:]+'
+ scoreUpperId = r'_' + upperId
+ scoreLowerId = r'_' + lowerId
+ moduleId = r'[a-zA-Z_][a-zA-Z0-9_.`]+'
+ classId = '|'.join([lowerId, upperId, funnyId])
tokens = {
- 'common': [
- (r';', Punctuation, '#pop'),
- (r'//', Comment, 'singlecomment'),
- ],
'root': [
- # Comments
+ include('comments'),
+ include('keywords'),
+ include('module'),
+ include('import'),
+ include('whitespace'),
+ include('literals'),
+ include('operators'),
+ include('delimiters'),
+ include('names'),
+ ],
+ 'whitespace': [
+ (r'\s+', Whitespace),
+ ],
+ 'comments': [
(r'//.*\n', Comment.Single),
- (r'(?s)/\*\*.*?\*/', Comment.Special),
- (r'(?s)/\*.*?\*/', Comment.Multi),
-
- # Modules, imports, etc.
- (r'\b((?:implementation|definition|system)\s+)?(module)(\s+)([\w`\.]+)',
- bygroups(Keyword.Namespace, Keyword.Namespace, Text, Name.Class)),
- (r'(?<=\n)import(?=\s)', Keyword.Namespace, 'import'),
- (r'(?<=\n)from(?=\s)', Keyword.Namespace, 'fromimport'),
-
- # Keywords
- # We cannot use (?s)^|(?<=\s) as prefix, so need to repeat this
- (words(keywords, prefix=r'(?<=\s)', suffix=r'(?=\s)'), Keyword),
- (words(keywords, prefix=r'^', suffix=r'(?=\s)'), Keyword),
-
- # Function definitions
- (r'(?=\{\|)', Whitespace, 'genericfunction'),
- (r'(?<=\n)([ \t]*)([\w`$()=\-<>~*\^|+&%]+)((?:\s+\w)*)(\s*)(::)',
- bygroups(store_indent, Name.Function, Keyword.Type, Whitespace,
- Punctuation),
- 'functiondefargs'),
-
- # Type definitions
- (r'(?<=\n)([ \t]*)(::)', bygroups(store_indent, Punctuation), 'typedef'),
- (r'^([ \t]*)(::)', bygroups(store_indent, Punctuation), 'typedef'),
-
- # Literals
- (r'\'\\?.(?<!\\)\'', String.Char),
- (r'\'\\\d+\'', String.Char),
- (r'\'\\\\\'', String.Char), # (special case for '\\')
- (r'[+\-~]?\s*\d+\.\d+(E[+\-~]?\d+)?\b', Number.Float),
- (r'[+\-~]?\s*0[0-7]\b', Number.Oct),
- (r'[+\-~]?\s*0x[0-9a-fA-F]\b', Number.Hex),
- (r'[+\-~]?\s*\d+\b', Number.Integer),
- (r'"', String.Double, 'doubleqstring'),
- (words(('True', 'False'), prefix=r'(?<=\s)', suffix=r'(?=\s)'),
- Literal),
-
- # Qualified names
- (r'(\')([\w\.]+)(\'\.)',
- bygroups(Punctuation, Name.Namespace, Punctuation)),
-
- # Everything else is some name
- (r'([\w`$%\/\?@]+\.?)*[\w`$%\/\?@]+', Name),
-
- # Punctuation
- (r'[{}()\[\],:;.#]', Punctuation),
- (r'[+\-=!<>|&~*\^/]', Operator),
- (r'\\\\', Operator),
-
- # Lambda expressions
- (r'\\.*?(->|\.|=)', Name.Function),
-
- # Whitespace
- (r'\s', Whitespace),
-
- include('common'),
+ (r'/\*\*', Comment.Special, 'comments.in'),
+ (r'/\*', Comment.Multi, 'comments.in'),
],
- 'fromimport': [
- include('common'),
- (r'([\w`\.]+)', check_class_not_import),
- (r'\n', Whitespace, '#pop'),
- (r'\s', Whitespace),
+ 'comments.in': [
+ (r'\*\/', Comment.Multi, '#pop'),
+ (r'/\*', Comment.Multi, '#push'),
+ (r'[^*/]+', Comment.Multi),
+ (r'\*(?!/)', Comment.Multi),
+ (r'/', Comment.Multi),
],
- 'fromimportfunc': [
- include('common'),
- (r'(::)\s+([^,\s]+)', bygroups(Punctuation, Keyword.Type)),
- (r'([\w`$()=\-<>~*\^|+&%\/]+)', check_instance_class),
- (r',', Punctuation),
- (r'\n', Whitespace, '#pop'),
- (r'\s', Whitespace),
+ 'keywords': [
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
],
- 'fromimportfunctype': [
- include('common'),
- (r'[{(\[]', Punctuation, 'combtype'),
- (r',', Punctuation, '#pop'),
- (r'[:;.#]', Punctuation),
- (r'\n', Whitespace, '#pop:2'),
- (r'[^\S\n]+', Whitespace),
- (r'\S+', Keyword.Type),
+ 'module': [
+ (words(modulewords, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (r'\bmodule\b', Keyword.Namespace, 'module.name'),
],
- 'combtype': [
- include('common'),
- (r'[})\]]', Punctuation, '#pop'),
- (r'[{(\[]', Punctuation, '#pop'),
- (r'[,:;.#]', Punctuation),
- (r'\s+', Whitespace),
- (r'\S+', Keyword.Type),
+ 'module.name': [
+ include('whitespace'),
+ (moduleId, Name.Class, '#pop'),
],
'import': [
- include('common'),
- (words(('from', 'import', 'as', 'qualified'),
- prefix='(?<=\s)', suffix='(?=\s)'), Keyword.Namespace),
- (r'[\w`\.]+', Name.Class),
- (r'\n', Whitespace, '#pop'),
- (r',', Punctuation),
- (r'[^\S\n]+', Whitespace),
- ],
- 'singlecomment': [
- (r'(.)(?=\n)', skip),
- (r'.+(?!\n)', Comment),
- ],
- 'doubleqstring': [
- (r'[^\\"]+', String.Double),
- (r'"', String.Double, '#pop'),
- (r'\\.', String.Double),
- ],
- 'typedef': [
- include('common'),
- (r'[\w`]+', Keyword.Type),
- (r'[:=|(),\[\]{}!*]', Punctuation),
- (r'->', Punctuation),
- (r'\n(?=[^\s|])', Whitespace, '#pop'),
- (r'\s', Whitespace),
- (r'.', Keyword.Type),
+ (r'\b(import)\b(\s*)', bygroups(Keyword, Whitespace), 'import.module'),
+ (r'\b(from)\b(\s*)\b(' + moduleId + r')\b(\s*)\b(import)\b',
+ bygroups(Keyword, Whitespace, Name.Class, Whitespace, Keyword),
+ 'import.what'),
+ ],
+ 'import.module': [
+ (r'\b(qualified)\b(\s*)', bygroups(Keyword, Whitespace)),
+ (r'(\s*)\b(as)\b', bygroups(Whitespace, Keyword), ('#pop', 'import.module.as')),
+ (moduleId, Name.Class),
+ (r'(\s*)(,)(\s*)', bygroups(Whitespace, Punctuation, Whitespace)),
+ (r'\s*', Whitespace, '#pop'),
+ ],
+ 'import.module.as': [
+ include('whitespace'),
+ (lowerId, Name.Class, '#pop'),
+ (upperId, Name.Class, '#pop'),
+ ],
+ 'import.what': [
+ (r'\b(class)\b(\s+)(' + classId + r')',
+ bygroups(Keyword, Whitespace, Name.Class), 'import.what.class'),
+ (r'\b(instance)(\s+)(' + classId + r')(\s+)',
+ bygroups(Keyword, Whitespace, Name.Class, Whitespace), 'import.what.instance'),
+ (r'(::)(\s*)\b(' + upperId + r')\b',
+ bygroups(Punctuation, Whitespace, Name.Class), 'import.what.type'),
+ (r'\b(generic)\b(\s+)\b(' + lowerId + '|' + upperId + r')\b',
+ bygroups(Keyword, Whitespace, Name)),
+ include('names'),
+ (r'(,)(\s+)', bygroups(Punctuation, Whitespace)),
+ (r'$', Whitespace, '#pop'),
+ include('whitespace'),
+ ],
+ 'import.what.class': [
+ (r',', Punctuation, '#pop'),
+ (r'\(', Punctuation, 'import.what.class.members'),
+ (r'$', Whitespace, '#pop:2'),
+ include('whitespace'),
],
- 'genericfunction': [
- include('common'),
- (r'\{\|', Punctuation),
- (r'\|\}', Punctuation, '#pop'),
+ 'import.what.class.members': [
(r',', Punctuation),
- (r'->', Punctuation),
- (r'(\s+of\s+)(\{)', bygroups(Keyword, Punctuation), 'genericftypes'),
- (r'\s', Whitespace),
- (r'[\w`\[\]{}!]+', Keyword.Type),
- (r'[*()]', Punctuation),
+ (r'\.\.', Punctuation),
+ (r'\)', Punctuation, '#pop'),
+ include('names'),
+ ],
+ 'import.what.instance': [
+ (r'[,)]', Punctuation, '#pop'),
+ (r'\(', Punctuation, 'import.what.instance'),
+ (r'$', Whitespace, '#pop:2'),
+ include('whitespace'),
+ include('names'),
+ ],
+ 'import.what.type': [
+ (r',', Punctuation, '#pop'),
+ (r'[({]', Punctuation, 'import.what.type.consesandfields'),
+ (r'$', Whitespace, '#pop:2'),
+ include('whitespace'),
],
- 'genericftypes': [
- include('common'),
- (r'[\w`]+', Keyword.Type),
+ 'import.what.type.consesandfields': [
(r',', Punctuation),
- (r'\s', Whitespace),
- (r'\}', Punctuation, '#pop'),
- ],
- 'functiondefargs': [
- include('common'),
- (r'\n(\s*)', check_indent1),
- (r'[!{}()\[\],:;.#]', Punctuation),
- (r'->', Punctuation, 'functiondefres'),
- (r'^(?=\S)', Whitespace, '#pop'),
- (r'\S', Keyword.Type),
- (r'\s', Whitespace),
- ],
- 'functiondefres': [
- include('common'),
- (r'\n(\s*)', check_indent2),
- (r'^(?=\S)', Whitespace, '#pop:2'),
- (r'[!{}()\[\],:;.#]', Punctuation),
- (r'\|', Punctuation, 'functiondefclasses'),
- (r'\S', Keyword.Type),
- (r'\s', Whitespace),
- ],
- 'functiondefclasses': [
- include('common'),
- (r'\n(\s*)', check_indent3),
- (r'^(?=\S)', Whitespace, '#pop:3'),
- (r'[,&]', Punctuation),
- (r'\[', Punctuation, 'functiondefuniquneq'),
- (r'[\w`$()=\-<>~*\^|+&%\/{}\[\]@]', Name.Function, 'functionname'),
- (r'\s+', Whitespace),
- ],
- 'functiondefuniquneq': [
- include('common'),
- (r'[a-z]+', Keyword.Type),
- (r'\s+', Whitespace),
- (r'<=|,', Punctuation),
- (r'\]', Punctuation, '#pop')
- ],
- 'functionname': [
- include('common'),
- (r'[\w`$()=\-<>~*\^|+&%\/]+', Name.Function),
- (r'(?=\{\|)', Punctuation, 'genericfunction'),
- default('#pop'),
+ (r'\.\.', Punctuation),
+ (r'[)}]', Punctuation, '#pop'),
+ include('names'),
+ ],
+ 'literals': [
+ (r'\'([^\'\\]|\\(x[\da-fA-F]+|\d+|.))\'', Literal.Char),
+ (r'[+~-]?0[0-7]+\b', Number.Oct),
+ (r'[+~-]?\d+\.\d+(E[+-]?\d+)?', Number.Float),
+ (r'[+~-]?\d+\b', Number.Integer),
+ (r'[+~-]?0x[\da-fA-F]+\b', Number.Hex),
+ (r'True|False', Literal),
+ (r'"', String.Double, 'literals.stringd'),
+ ],
+ 'literals.stringd': [
+ (r'[^\\"\n]+', String.Double),
+ (r'"', String.Double, '#pop'),
+ (r'\\.', String.Double),
+ (r'[$\n]', Error, '#pop'),
+ ],
+ 'operators': [
+ (r'[-~@#\$%\^?!+*<>\\/|&=:\.]+', Operator),
+ (r'\b_+\b', Operator),
+ ],
+ 'delimiters': [
+ (r'[,;(){}\[\]]', Punctuation),
+ (r'(\')([\w`.]+)(\')',
+ bygroups(Punctuation, Name.Class, Punctuation)),
+ ],
+ 'names': [
+ (lowerId, Name),
+ (scoreLowerId, Name),
+ (funnyId, Name.Function),
+ (upperId, Name.Class),
+ (scoreUpperId, Name.Class),
]
}
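
A usage sketch (not part of the patch) for the rewritten import handling: the module/from-import states above drive a typical Clean header, giving Keyword.Namespace for `implementation module` and Name.Class for module names. The source is invented and the exact token stream is illustrative.

    from pygments.lexers.clean import CleanLexer

    src = 'implementation module Example\nimport StdEnv\nfrom StdList import map, ++\n'
    for tok, val in CleanLexer().get_tokens(src):
        print(tok, repr(val))
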
diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
index b6673437..ab52a370 100644
--- a/pygments/lexers/compiled.py
+++ b/pygments/lexers/compiled.py
@@ -5,7 +5,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py
index 5b9cbfa2..6f2c7c76 100644
--- a/pygments/lexers/configs.py
+++ b/pygments/lexers/configs.py
@@ -5,7 +5,7 @@
Lexers for configuration file formats.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -15,12 +15,13 @@ from pygments.lexer import RegexLexer, default, words, bygroups, include, using
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace, Literal
from pygments.lexers.shell import BashLexer
+from pygments.lexers.data import JsonLexer
__all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer',
'TerraformLexer', 'TermcapLexer', 'TerminfoLexer',
- 'PkgConfigLexer', 'PacmanConfLexer']
+ 'PkgConfigLexer', 'PacmanConfLexer', 'AugeasLexer', 'TOMLLexer']
class IniLexer(RegexLexer):
@@ -462,7 +463,7 @@ class NginxConfLexer(RegexLexer):
"""
name = 'Nginx configuration file'
aliases = ['nginx']
- filenames = []
+ filenames = ['nginx.conf']
mimetypes = ['text/x-nginx-conf']
tokens = {
@@ -539,20 +540,25 @@ class DockerLexer(RegexLexer):
filenames = ['Dockerfile', '*.docker']
mimetypes = ['text/x-dockerfile-config']
- _keywords = (r'(?:FROM|MAINTAINER|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|'
- r'VOLUME|WORKDIR)')
-
+ _keywords = (r'(?:FROM|MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
+ _bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
+ _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
- (r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,),
- bygroups(Name.Keyword, Whitespace, Keyword)),
- (r'^(%s)\b(.*)' % (_keywords,), bygroups(Keyword, String)),
(r'#.*', Comment),
- (r'RUN', Keyword), # Rest of line falls through
+ (r'(ONBUILD)(%s)' % (_lb,), bygroups(Keyword, using(BashLexer))),
+ (r'(HEALTHCHECK)((%s--\w+=\w+%s)*)' % (_lb, _lb),
+ bygroups(Keyword, using(BashLexer))),
+ (r'(VOLUME|ENTRYPOINT|CMD|SHELL)(%s)(\[.*?\])' % (_lb,),
+ bygroups(Keyword, using(BashLexer), using(JsonLexer))),
+ (r'(LABEL|ENV|ARG)((%s\w+=\w+%s)*)' % (_lb, _lb),
+ bygroups(Keyword, using(BashLexer))),
+ (r'(%s|VOLUME)\b(.*)' % (_keywords), bygroups(Keyword, String)),
+ (r'(%s)' % (_bash_keywords,), Keyword),
(r'(.*\\\n)*.+', using(BashLexer)),
- ],
+ ]
}
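
A sketch (not part of the patch) of what the `_lb` helper above enables: Dockerfile keyword rules can now span backslash-newline continuations, so multi-line ENV/LABEL pairs and JSON-form CMD arguments are handled by the dedicated rules rather than falling through to the generic Bash rule. The Dockerfile and the resulting token stream are illustrative.

    from pygments.lexers.configs import DockerLexer

    df = 'FROM alpine:3.9\nENV APP_USER=web \\\n    APP_GROUP=web\nCMD ["sh"]\n'
    for tok, val in DockerLexer().get_tokens(df):
        print(tok, repr(val))
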
@@ -568,6 +574,8 @@ class TerraformLexer(RegexLexer):
filenames = ['*.tf']
mimetypes = ['application/x-tf', 'application/x-terraform']
+ embedded_keywords = ('ingress', 'egress', 'listener', 'default', 'connection', 'alias', 'tags', 'lifecycle', 'timeouts')
+
tokens = {
'root': [
include('string'),
@@ -584,9 +592,8 @@ class TerraformLexer(RegexLexer):
(r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)),
(words(('variable', 'resource', 'provider', 'provisioner', 'module'),
prefix=r'\b', suffix=r'\b'), Keyword.Reserved, 'function'),
- (words(('ingress', 'egress', 'listener', 'default', 'connection'),
- prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
- ('\$\{', String.Interpol, 'var_builtin'),
+ (words(embedded_keywords, prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
+ (r'\$\{', String.Interpol, 'var_builtin'),
],
'function': [
(r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)),
@@ -831,3 +838,97 @@ class PacmanConfLexer(RegexLexer):
(r'.', Text),
],
}
+
+
+class AugeasLexer(RegexLexer):
+ """
+ Lexer for `Augeas <http://augeas.net>`_.
+
+ .. versionadded:: 2.4
+ """
+ name = 'Augeas'
+ aliases = ['augeas']
+ filenames = ['*.aug']
+
+ tokens = {
+ 'root': [
+ (r'(module)(\s*)([^\s=]+)', bygroups(Keyword.Namespace, Text, Name.Namespace)),
+ (r'(let)(\s*)([^\s=]+)', bygroups(Keyword.Declaration, Text, Name.Variable)),
+ (r'(del|store|value|counter|seq|key|label|autoload|incl|excl|transform|test|get|put)(\s+)', bygroups(Name.Builtin, Text)),
+ (r'(\()([^:]+)(\:)(unit|string|regexp|lens|tree|filter)(\))', bygroups(Punctuation, Name.Variable, Punctuation, Keyword.Type, Punctuation)),
+ (r'\(\*', Comment.Multiline, 'comment'),
+ (r'[*+\-.;=?|]', Operator),
+ (r'[()\[\]{}]', Operator),
+ (r'"', String.Double, 'string'),
+ (r'\/', String.Regex, 'regex'),
+ (r'([A-Z]\w*)(\.)(\w+)', bygroups(Name.Namespace, Punctuation, Name.Variable)),
+ (r'\s', Text),
+ (r'.', Name.Variable),
+ ],
+ 'string': [
+ (r'\\.', String.Escape),
+ (r'[^"]', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'regex': [
+ (r'\\.', String.Escape),
+ (r'[^/]', String.Regex),
+ (r'\/', String.Regex, '#pop'),
+ ],
+ 'comment': [
+ (r'[^*)]', Comment.Multiline),
+ (r'\(\*', Comment.Multiline, '#push'),
+ (r'\*\)', Comment.Multiline, '#pop'),
+ (r'[)*]', Comment.Multiline)
+ ],
+ }
+
+
+class TOMLLexer(RegexLexer):
+ """
+ Lexer for `TOML <https://github.com/toml-lang/toml>`_, a simple language
+ for config files.
+
+ .. versionadded:: 2.4
+ """
+
+ name = 'TOML'
+ aliases = ['toml']
+ filenames = ['*.toml']
+
+ tokens = {
+ 'root': [
+
+ # Basics, comments, strings
+ (r'\s+', Text),
+ (r'#.*?$', Comment.Single),
+ # Basic string
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # Literal string
+ (r'\'\'\'(.*)\'\'\'', String),
+ (r'\'[^\']*\'', String),
+ (r'(true|false)$', Keyword.Constant),
+ (r'[a-zA-Z_][\w\-]*', Name),
+
+ (r'\[.*?\]$', Keyword),
+ # Datetime
+ # TODO this needs to be expanded, as TOML is rather flexible:
+ # https://github.com/toml-lang/toml#offset-date-time
+ (r'\d{4}-\d{2}-\d{2}(?:T| )\d{2}:\d{2}:\d{2}(?:Z|[-+]\d{2}:\d{2})', Number.Integer),
+
+ # Numbers
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
+ # Handle +-inf, +-infinity, +-nan
+ (r'[+-]?(?:(inf(?:inity)?)|nan)', Number.Float),
+ (r'[+-]?\d+', Number.Integer),
+
+ # Punctuation
+ (r'[]{}:(),;[]', Punctuation),
+ (r'\.', Punctuation),
+
+ # Operators
+ (r'=', Operator)
+
+ ]
+ }
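
A usage sketch (not part of the patch) for the new TOMLLexer; the config snippet is invented, and the lexer can equally be obtained through its 'toml' alias once _mapping.py is regenerated.

    from pygments import highlight
    from pygments.formatters import NullFormatter
    from pygments.lexers.configs import TOMLLexer

    doc = '# service config\n[server]\nhost = "localhost"\nport = 8080\nenabled = true\n'
    print(highlight(doc, TOMLLexer(), NullFormatter()))
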
diff --git a/pygments/lexers/console.py b/pygments/lexers/console.py
index 1d89b770..77bb72e5 100644
--- a/pygments/lexers/console.py
+++ b/pygments/lexers/console.py
@@ -5,7 +5,7 @@
Lexers for misc console output.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/crystal.py b/pygments/lexers/crystal.py
index 78c70b61..bea4833f 100644
--- a/pygments/lexers/crystal.py
+++ b/pygments/lexers/crystal.py
@@ -5,25 +5,22 @@
Lexer for Crystal.
- :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \
- bygroups, default, LexerContext, do_insertions, words
+from pygments.lexer import ExtendedRegexLexer, include, \
+ bygroups, default, LexerContext, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Generic
-from pygments.util import shebang_matches
+ Number, Punctuation, Error
__all__ = ['CrystalLexer']
line_re = re.compile('.*?\n')
-
-
CRYSTAL_OPERATORS = [
'!=', '!~', '!', '%', '&&', '&', '**', '*', '+', '-', '/', '<=>', '<<', '<=', '<',
'===', '==', '=~', '=', '>=', '>>', '>', '[]=', '[]?', '[]', '^', '||', '|', '~'
@@ -33,6 +30,8 @@ CRYSTAL_OPERATORS = [
class CrystalLexer(ExtendedRegexLexer):
"""
For `Crystal <http://crystal-lang.org>`_ source code.
+
+ .. versionadded:: 2.2
"""
name = 'Crystal'
@@ -48,9 +47,9 @@ class CrystalLexer(ExtendedRegexLexer):
start = match.start(1)
yield start, Operator, match.group(1) # <<-?
- yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
- yield match.start(3), String.Delimiter, match.group(3) # heredoc name
- yield match.start(4), String.Heredoc, match.group(4) # quote again
+ yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
+ yield match.start(3), String.Delimiter, match.group(3) # heredoc name
+ yield match.start(4), String.Heredoc, match.group(4) # quote again
heredocstack = ctx.__dict__.setdefault('heredocstack', [])
outermost = not bool(heredocstack)
@@ -211,17 +210,21 @@ class CrystalLexer(ExtendedRegexLexer):
# macros
(words('''
debugger record pp assert_responds_to spawn parallel
- getter setter property delegate def_hash def_equals def_equals_and_hash forward_missing_to
+ getter setter property delegate def_hash def_equals def_equals_and_hash
+ forward_missing_to
'''.split(), suffix=r'\b'), Name.Builtin.Pseudo),
(r'getter[!?]|property[!?]|__(DIR|FILE|LINE)__\b', Name.Builtin.Pseudo),
# builtins
# http://crystal-lang.org/api/toplevel.html
(words('''
Object Value Struct Reference Proc Class Nil Symbol Enum Void
- Bool Number Int Int8 Int16 Int32 Int64 UInt8 UInt16 UInt32 UInt64 Float Float32 Float64 Char String
+ Bool Number Int Int8 Int16 Int32 Int64 UInt8 UInt16 UInt32 UInt64
+ Float Float32 Float64 Char String
Pointer Slice Range Exception Regex
- Mutex StaticArray Array Hash Set Tuple Deque Box Process File Dir Time Channel Concurrent Scheduler
- abort at_exit caller delay exit fork future get_stack_top gets lazy loop main p print printf puts
+ Mutex StaticArray Array Hash Set Tuple Deque Box Process File
+ Dir Time Channel Concurrent Scheduler
+ abort at_exit caller delay exit fork future get_stack_top gets
+ lazy loop main p print printf puts
raise rand read_line sleep sprintf system with_color
'''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Name.Builtin),
# normal heredocs
@@ -274,12 +277,16 @@ class CrystalLexer(ExtendedRegexLexer):
bygroups(Number.Hex, Text, Operator)),
(r'(0b[01]+(?:_[01]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
bygroups(Number.Bin, Text, Operator)),
- # 3 separate expressions for floats because any of the 3 optional parts makes it a float
- (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?(?:_?[f][0-9]+)?)(\s*)([/?])?',
+ # 3 separate expressions for floats because any of the 3 optional
+ # parts makes it a float
+ (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?'
+ r'(?:_?f[0-9]+)?)(\s*)([/?])?',
bygroups(Number.Float, Text, Operator)),
- (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)(?:_?[f][0-9]+)?)(\s*)([/?])?',
+ (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)'
+ r'(?:_?f[0-9]+)?)(\s*)([/?])?',
bygroups(Number.Float, Text, Operator)),
- (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?(?:_?[f][0-9]+))(\s*)([/?])?',
+ (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?'
+ r'(?:_?f[0-9]+))(\s*)([/?])?',
bygroups(Number.Float, Text, Operator)),
(r'(0\b|[1-9][\d]*(?:_\d+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
bygroups(Number.Integer, Text, Operator)),
@@ -301,7 +308,8 @@ class CrystalLexer(ExtendedRegexLexer):
(r'\{%', String.Interpol, 'in-macro-control'),
(r'\{\{', String.Interpol, 'in-macro-expr'),
# attributes
- (r'(@\[)(\s*)([A-Z]\w*)', bygroups(Operator, Text, Name.Decorator), 'in-attr'),
+ (r'(@\[)(\s*)([A-Z]\w*)',
+ bygroups(Operator, Text, Name.Decorator), 'in-attr'),
# this is needed because Crystal attributes can look
# like keywords (class) or like this: ` ?!?
(words(CRYSTAL_OPERATORS, prefix=r'(\.|::)'),
@@ -325,7 +333,8 @@ class CrystalLexer(ExtendedRegexLexer):
],
'classname': [
(r'[A-Z_]\w*', Name.Class),
- (r'(\()(\s*)([A-Z_]\w*)(\s*)(\))', bygroups(Punctuation, Text, Name.Class, Text, Punctuation)),
+ (r'(\()(\s*)([A-Z_]\w*)(\s*)(\))',
+ bygroups(Punctuation, Text, Name.Class, Text, Punctuation)),
default('#pop')
],
'in-intp': [
diff --git a/pygments/lexers/csound.py b/pygments/lexers/csound.py
index 95ee73d8..161e1576 100644
--- a/pygments/lexers/csound.py
+++ b/pygments/lexers/csound.py
@@ -3,18 +3,18 @@
pygments.lexers.csound
~~~~~~~~~~~~~~~~~~~~~~
- Lexers for CSound languages.
+ Lexers for Csound languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups, default, include, using, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, \
- String, Text
-from pygments.lexers._csound_builtins import OPCODES
+from pygments.token import Comment, Error, Keyword, Name, Number, Operator, Punctuation, \
+ String, Text, Whitespace
+from pygments.lexers._csound_builtins import OPCODES, DEPRECATED_OPCODES
from pygments.lexers.html import HtmlLexer
from pygments.lexers.python import PythonLexer
from pygments.lexers.scripting import LuaLexer
@@ -25,74 +25,104 @@ newline = (r'((?:(?:;|//).*)*)(\n)', bygroups(Comment.Single, Text))
class CsoundLexer(RegexLexer):
- # Subclasses must define a 'single-line string' state.
tokens = {
'whitespace': [
(r'[ \t]+', Text),
- (r'\\\n', Text),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline)
+ (r'/[*](?:.|\n)*?[*]/', Comment.Multiline),
+ (r'(?:;|//).*$', Comment.Single),
+ (r'(\\)(\n)', bygroups(Whitespace, Text))
],
- 'macro call': [
- (r'(\$\w+\.?)(\()', bygroups(Comment.Preproc, Punctuation),
- 'function macro call'),
- (r'\$\w+(\.|\b)', Comment.Preproc)
- ],
- 'function macro call': [
- (r"((?:\\['\)]|[^'\)])+)(')", bygroups(Comment.Preproc, Punctuation)),
- (r"([^'\)]+)(\))", bygroups(Comment.Preproc, Punctuation), '#pop')
+ 'preprocessor directives': [
+ (r'#(?:e(?:nd(?:if)?|lse)\b|##)|@@?[ \t]*\d+', Comment.Preproc),
+ (r'#include', Comment.Preproc, 'include directive'),
+ (r'#[ \t]*define', Comment.Preproc, 'define directive'),
+ (r'#(?:ifn?def|undef)\b', Comment.Preproc, 'macro directive')
],
- 'whitespace or macro call': [
+ 'include directive': [
include('whitespace'),
- include('macro call')
+ (r'([^ \t]).*?\1', String, '#pop')
],
- 'preprocessor directives': [
- (r'#(e(nd(if)?|lse)|ifn?def|undef)\b|##', Comment.Preproc),
- (r'#include\b', Comment.Preproc, 'include'),
- (r'#[ \t]*define\b', Comment.Preproc, 'macro name'),
- (r'@+[ \t]*\d*', Comment.Preproc)
+ 'define directive': [
+ (r'\n', Text),
+ include('whitespace'),
+ (r'([A-Z_a-z]\w*)(\()', bygroups(Comment.Preproc, Punctuation),
+ ('#pop', 'macro parameter name list')),
+ (r'[A-Z_a-z]\w*', Comment.Preproc, ('#pop', 'before macro body'))
],
-
- 'include': [
+ 'macro parameter name list': [
include('whitespace'),
- (r'"', String, 'single-line string')
+ (r'[A-Z_a-z]\w*', Comment.Preproc),
+ (r"['#]", Punctuation),
+ (r'\)', Punctuation, ('#pop', 'before macro body'))
],
-
- 'macro name': [
+ 'before macro body': [
+ (r'\n', Text),
include('whitespace'),
- (r'(\w+)(\()', bygroups(Comment.Preproc, Text),
- 'function macro argument list'),
- (r'\w+', Comment.Preproc, 'object macro definition after name')
+ (r'#', Punctuation, ('#pop', 'macro body'))
+ ],
+ 'macro body': [
+ (r'(?:\\(?!#)|[^#\\]|\n)+', Comment.Preproc),
+ (r'\\#', Comment.Preproc),
+ (r'(?<!\\)#', Punctuation, '#pop')
],
- 'object macro definition after name': [
+
+ 'macro directive': [
include('whitespace'),
- (r'#', Punctuation, 'object macro replacement text')
+ (r'[A-Z_a-z]\w*', Comment.Preproc, '#pop')
+ ],
+
+ 'macro uses': [
+ (r'(\$[A-Z_a-z]\w*\.?)(\()', bygroups(Comment.Preproc, Punctuation),
+ 'macro parameter value list'),
+ (r'\$[A-Z_a-z]\w*(?:\.|\b)', Comment.Preproc)
],
- 'object macro replacement text': [
- (r'(\\#|[^#])+', Comment.Preproc),
- (r'#', Punctuation, '#pop:3')
+ 'macro parameter value list': [
+ (r'(?:[^\'#"{()]|\{(?!\{))+', Comment.Preproc),
+ (r"['#]", Punctuation),
+ (r'"', String, 'macro parameter value quoted string'),
+ (r'\{\{', String, 'macro parameter value braced string'),
+ (r'\(', Comment.Preproc, 'macro parameter value parenthetical'),
+ (r'\)', Punctuation, '#pop')
],
- 'function macro argument list': [
- (r"(\w+)(['#])", bygroups(Comment.Preproc, Punctuation)),
- (r'(\w+)(\))', bygroups(Comment.Preproc, Punctuation),
- 'function macro definition after name')
+ 'macro parameter value quoted string': [
+ (r"\\[#'()]", Comment.Preproc),
+ (r"[#'()]", Error),
+ include('quoted string')
],
- 'function macro definition after name': [
- (r'[ \t]+', Text),
- (r'#', Punctuation, 'function macro replacement text')
+ 'macro parameter value braced string': [
+ (r"\\[#'()]", Comment.Preproc),
+ (r"[#'()]", Error),
+ include('braced string')
+ ],
+ 'macro parameter value parenthetical': [
+ (r'(?:[^\\()]|\\\))+', Comment.Preproc),
+ (r'\(', Comment.Preproc, '#push'),
+ (r'\)', Comment.Preproc, '#pop')
],
- 'function macro replacement text': [
- (r'(\\#|[^#])+', Comment.Preproc),
- (r'#', Punctuation, '#pop:4')
+
+ 'whitespace and macro uses': [
+ include('whitespace'),
+ include('macro uses')
+ ],
+
+ 'numbers': [
+ (r'\d+[Ee][+-]?\d+|(\d+\.\d*|\d*\.\d+)([Ee][+-]?\d+)?', Number.Float),
+ (r'(0[Xx])([0-9A-Fa-f]+)', bygroups(Keyword.Type, Number.Hex)),
+ (r'\d+', Number.Integer)
+ ],
+
+ 'braced string': [
+ # Do nothing. This must be defined in subclasses.
]
}
class CsoundScoreLexer(CsoundLexer):
"""
- For `Csound <http://csound.github.io>`_ scores.
+ For `Csound <https://csound.github.io>`_ scores.
.. versionadded:: 2.1
"""
@@ -102,47 +132,77 @@ class CsoundScoreLexer(CsoundLexer):
filenames = ['*.sco']
tokens = {
- 'partial statement': [
+ 'root': [
+ (r'\n', Text),
+ include('whitespace and macro uses'),
include('preprocessor directives'),
- (r'\d+e[+-]?\d+|(\d+\.\d*|\d*\.\d+)(e[+-]?\d+)?', Number.Float),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'"', String, 'single-line string'),
- (r'[+\-*/%^!=<>|&#~.]', Operator),
- (r'[]()[]', Punctuation),
- (r'\w+', Comment.Preproc)
- ],
- 'statement': [
- include('whitespace or macro call'),
- newline + ('#pop',),
- include('partial statement')
+ (r'[abCdefiqstvxy]', Keyword),
+ # There is also a w statement that is generated internally and should not be
+ # used; see https://github.com/csound/csound/issues/750.
+
+ (r'z', Keyword.Constant),
+ # z is a constant equal to 800,000,000,000. 800 billion seconds is about
+ # 25,367.8 years. See also
+ # https://csound.github.io/docs/manual/ScoreTop.html and
+ # https://github.com/csound/csound/search?q=stof+path%3AEngine+filename%3Asread.c.
+
+ (r'([nNpP][pP])(\d+)', bygroups(Keyword, Number.Integer)),
+
+ (r'[mn]', Keyword, 'mark statement'),
+
+ include('numbers'),
+ (r'[!+\-*/^%&|<>#~.]', Operator),
+ (r'[()\[\]]', Punctuation),
+ (r'"', String, 'quoted string'),
+ (r'\{', Comment.Preproc, 'loop after left brace'),
],
- 'root': [
- newline,
- include('whitespace or macro call'),
- (r'[{}]', Punctuation, 'statement'),
- (r'[abefimq-tv-z]|[nN][pP]?', Keyword, 'statement')
+ 'mark statement': [
+ include('whitespace and macro uses'),
+ (r'[A-Z_a-z]\w*', Name.Label),
+ (r'\n', Text, '#pop')
],
- 'single-line string': [
+ 'quoted string': [
(r'"', String, '#pop'),
- (r'[^\\"]+', String)
+ (r'[^"$]+', String),
+ include('macro uses'),
+ (r'[$]', String)
+ ],
+
+ 'loop after left brace': [
+ include('whitespace and macro uses'),
+ (r'\d+', Number.Integer, ('#pop', 'loop after repeat count')),
+ ],
+ 'loop after repeat count': [
+ include('whitespace and macro uses'),
+ (r'[A-Z_a-z]\w*', Comment.Preproc, ('#pop', 'loop'))
+ ],
+ 'loop': [
+ (r'\}', Comment.Preproc, '#pop'),
+ include('root')
+ ],
+
+ # Braced strings are not allowed in Csound scores, but this is needed
+ # because the superclass includes it.
+ 'braced string': [
+ (r'\}\}', String, '#pop'),
+ (r'[^}]|\}(?!\})', String)
]
}
class CsoundOrchestraLexer(CsoundLexer):
"""
- For `Csound <http://csound.github.io>`_ orchestras.
+ For `Csound <https://csound.github.io>`_ orchestras.
.. versionadded:: 2.1
"""
name = 'Csound Orchestra'
aliases = ['csound', 'csound-orc']
- filenames = ['*.orc']
+ filenames = ['*.orc', '*.udo']
user_defined_opcodes = set()
@@ -152,159 +212,189 @@ class CsoundOrchestraLexer(CsoundLexer):
yield match.start(), Name.Function, opcode
def name_callback(lexer, match):
- name = match.group(0)
- if re.match('p\d+$', name) or name in OPCODES:
+ name = match.group(1)
+ if name in OPCODES or name in DEPRECATED_OPCODES:
yield match.start(), Name.Builtin, name
+ if match.group(2):
+ yield match.start(2), Punctuation, match.group(2)
+ yield match.start(3), Keyword.Type, match.group(3)
elif name in lexer.user_defined_opcodes:
yield match.start(), Name.Function, name
else:
- nameMatch = re.search(r'^(g?[aikSw])(\w+)', name)
+ nameMatch = re.search(r'^(g?[afikSw])(\w+)', name)
if nameMatch:
yield nameMatch.start(1), Keyword.Type, nameMatch.group(1)
yield nameMatch.start(2), Name, nameMatch.group(2)
else:
yield match.start(), Name, name
+ # If there's a trailing :V, for example, we want to keep this around
+ # and emit it as well, otherwise this lexer will not pass round-trip
+ # testing
+ if match.group(2):
+ yield match.start(2), Punctuation, match.group(2)
+ yield match.start(3), Name, match.group(3)
+
tokens = {
- 'label': [
- (r'\b(\w+)(:)', bygroups(Name.Label, Punctuation))
- ],
+ 'root': [
+ (r'\n', Text),
- 'partial expression': [
+ (r'^([ \t]*)(\w+)(:)(?:[ \t]+|$)', bygroups(Text, Name.Label, Punctuation)),
+
+ include('whitespace and macro uses'),
include('preprocessor directives'),
- (r'\b(0dbfs|k(r|smps)|nchnls(_i)?|sr)\b', Name.Variable.Global),
- (r'\d+e[+-]?\d+|(\d+\.\d*|\d*\.\d+)(e[+-]?\d+)?', Number.Float),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'"', String, 'single-line string'),
- (r'\{\{', String, 'multi-line string'),
- (r'[+\-*/%^!=&|<>#~¬]', Operator),
- (r'[](),?:[]', Punctuation),
+
+ (r'\binstr\b', Keyword.Declaration, 'instrument numbers and identifiers'),
+ (r'\bopcode\b', Keyword.Declaration, 'after opcode keyword'),
+ (r'\b(?:end(?:in|op))\b', Keyword.Declaration),
+
+ include('partial statements')
+ ],
+
+ 'partial statements': [
+ (r'\b(?:0dbfs|A4|k(?:r|smps)|nchnls(?:_i)?|sr)\b', Name.Variable.Global),
+
+ include('numbers'),
+
+ (r'\+=|-=|\*=|/=|<<|>>|<=|>=|==|!=|&&|\|\||[~¬]|[=!+\-*/^%&|<>#?:]', Operator),
+ (r'[(),\[\]]', Punctuation),
+
+ (r'"', String, 'quoted string'),
+ (r'\{\{', String, 'braced string'),
+
(words((
- # Keywords
'do', 'else', 'elseif', 'endif', 'enduntil', 'fi', 'if', 'ithen', 'kthen',
'od', 'then', 'until', 'while',
- # Opcodes that act as control structures
- 'return', 'timout'
), prefix=r'\b', suffix=r'\b'), Keyword),
- (words(('goto', 'igoto', 'kgoto', 'rigoto', 'tigoto'),
- prefix=r'\b', suffix=r'\b'), Keyword, 'goto label'),
- (words(('cggoto', 'cigoto', 'cingoto', 'ckgoto', 'cngoto'),
- prefix=r'\b', suffix=r'\b'), Keyword,
- ('goto label', 'goto expression')),
- (words(('loop_ge', 'loop_gt', 'loop_le', 'loop_lt'),
- prefix=r'\b', suffix=r'\b'), Keyword,
- ('goto label', 'goto expression', 'goto expression', 'goto expression')),
- (r'\bscoreline(_i)?\b', Name.Builtin, 'scoreline opcode'),
- (r'\bpyl?run[it]?\b', Name.Builtin, 'python opcode'),
- (r'\blua_(exec|opdef)\b', Name.Builtin, 'lua opcode'),
- (r'\b[a-zA-Z_]\w*\b', name_callback)
- ],
-
- 'expression': [
- include('whitespace or macro call'),
- newline + ('#pop',),
- include('partial expression')
- ],
+ (words(('return', 'rireturn'), prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
- 'root': [
- newline,
- include('whitespace or macro call'),
- (r'\binstr\b', Keyword, ('instrument block', 'instrument name list')),
- (r'\bopcode\b', Keyword, ('opcode block', 'opcode parameter list',
- 'opcode types', 'opcode types', 'opcode name')),
- include('label'),
- default('expression')
- ],
-
- 'instrument name list': [
- include('whitespace or macro call'),
- (r'\d+|\+?[a-zA-Z_]\w*', Name.Function),
- (r',', Punctuation),
- newline + ('#pop',)
+ (r'\b[ik]?goto\b', Keyword, 'goto label'),
+ (r'\b(r(?:einit|igoto)|tigoto)(\(|\b)', bygroups(Keyword.Pseudo, Punctuation),
+ 'goto label'),
+ (r'\b(c(?:g|in?|k|nk?)goto)(\(|\b)', bygroups(Keyword.Pseudo, Punctuation),
+ ('goto label', 'goto argument')),
+ (r'\b(timout)(\(|\b)', bygroups(Keyword.Pseudo, Punctuation),
+ ('goto label', 'goto argument', 'goto argument')),
+ (r'\b(loop_[gl][et])(\(|\b)', bygroups(Keyword.Pseudo, Punctuation),
+ ('goto label', 'goto argument', 'goto argument', 'goto argument')),
+
+ (r'\bprintk?s\b', Name.Builtin, 'prints opcode'),
+ (r'\b(?:readscore|scoreline(?:_i)?)\b', Name.Builtin, 'Csound score opcode'),
+ (r'\bpyl?run[it]?\b', Name.Builtin, 'Python opcode'),
+ (r'\blua_(?:exec|opdef)\b', Name.Builtin, 'Lua opcode'),
+ (r'\bp\d+\b', Name.Variable.Instance),
+ (r'\b([A-Z_a-z]\w*)(?:(:)([A-Za-z]))?\b', name_callback)
],
- 'instrument block': [
- newline,
- include('whitespace or macro call'),
- (r'\bendin\b', Keyword, '#pop'),
- include('label'),
- default('expression')
+
+ 'instrument numbers and identifiers': [
+ include('whitespace and macro uses'),
+ (r'\d+|[A-Z_a-z]\w*', Name.Function),
+ (r'[+,]', Punctuation),
+ (r'\n', Text, '#pop')
],
- 'opcode name': [
- include('whitespace or macro call'),
- (r'[a-zA-Z_]\w*', opcode_name_callback, '#pop')
+ 'after opcode keyword': [
+ include('whitespace and macro uses'),
+ (r'[A-Z_a-z]\w*', opcode_name_callback, ('#pop', 'opcode type signatures')),
+ (r'\n', Text, '#pop')
],
- 'opcode types': [
- include('whitespace or macro call'),
- (r'0|[]afijkKoOpPStV[]+', Keyword.Type, '#pop'),
- (r',', Punctuation)
+ 'opcode type signatures': [
+ include('whitespace and macro uses'),
+
+ # https://github.com/csound/csound/search?q=XIDENT+path%3AEngine+filename%3Acsound_orc.lex
+ (r'0|[afijkKoOpPStV\[\]]+', Keyword.Type),
+
+ (r',', Punctuation),
+ (r'\n', Text, '#pop')
],
- 'opcode parameter list': [
- include('whitespace or macro call'),
- newline + ('#pop',)
+
+ 'quoted string': [
+ (r'"', String, '#pop'),
+ (r'[^\\"$%)]+', String),
+ include('macro uses'),
+ include('escape sequences'),
+ include('format specifiers'),
+ (r'[\\$%)]', String)
],
- 'opcode block': [
- newline,
- include('whitespace or macro call'),
- (r'\bendop\b', Keyword, '#pop'),
- include('label'),
- default('expression')
+ 'braced string': [
+ (r'\}\}', String, '#pop'),
+ (r'(?:[^\\%)}]|\}(?!\}))+', String),
+ include('escape sequences'),
+ include('format specifiers'),
+ (r'[\\%)]', String)
+ ],
+ 'escape sequences': [
+ # https://github.com/csound/csound/search?q=unquote_string+path%3AEngine+filename%3Acsound_orc_compile.c
+ (r'\\(?:[\\abnrt"]|[0-7]{1,3})', String.Escape)
+ ],
+ # Format specifiers are highlighted in all strings, even though only
+ # fprintks https://csound.github.io/docs/manual/fprintks.html
+ # fprints https://csound.github.io/docs/manual/fprints.html
+ # printf/printf_i https://csound.github.io/docs/manual/printf.html
+ # printks https://csound.github.io/docs/manual/printks.html
+ # prints https://csound.github.io/docs/manual/prints.html
+ # sprintf https://csound.github.io/docs/manual/sprintf.html
+ # sprintfk https://csound.github.io/docs/manual/sprintfk.html
+ # work with strings that contain format specifiers. In addition, these
+ # opcodes’ handling of format specifiers is inconsistent:
+ # - fprintks, fprints, printks, and prints do accept %a and %A
+ # specifiers, but can’t accept %s specifiers.
+ # - printf, printf_i, sprintf, and sprintfk don’t accept %a and %A
+ # specifiers, but can accept %s specifiers.
+ # See https://github.com/csound/csound/issues/747 for more information.
+ 'format specifiers': [
+ (r'%[#0\- +]*\d*(?:\.\d+)?[diuoxXfFeEgGaAcs]', String.Interpol),
+ (r'%%', String.Escape)
+ ],
+
+ 'goto argument': [
+ include('whitespace and macro uses'),
+ (r',', Punctuation, '#pop'),
+ include('partial statements')
],
-
'goto label': [
- include('whitespace or macro call'),
+ include('whitespace and macro uses'),
(r'\w+', Name.Label, '#pop'),
default('#pop')
],
- 'goto expression': [
- include('whitespace or macro call'),
- (r',', Punctuation, '#pop'),
- include('partial expression')
- ],
- 'single-line string': [
- include('macro call'),
- (r'"', String, '#pop'),
- # From https://github.com/csound/csound/blob/develop/Opcodes/fout.c#L1405
- (r'%\d*(\.\d+)?[cdhilouxX]', String.Interpol),
- (r'%[!%nNrRtT]|[~^]|\\([\\aAbBnNrRtT"]|[0-7]{1,3})', String.Escape),
- (r'[^\\"~$%\^\n]+', String),
- (r'[\\"~$%\^\n]', String)
+ 'prints opcode': [
+ include('whitespace and macro uses'),
+ (r'"', String, 'prints quoted string'),
+ default('#pop')
],
- 'multi-line string': [
- (r'\}\}', String, '#pop'),
- (r'[^}]+|\}(?!\})', String)
+ 'prints quoted string': [
+ (r'\\\\[aAbBnNrRtT]', String.Escape),
+ (r'%[!nNrRtT]|[~^]{1,2}', String.Escape),
+ include('quoted string')
],
- 'scoreline opcode': [
- include('whitespace or macro call'),
- (r'\{\{', String, 'scoreline'),
- default('#pop')
+ 'Csound score opcode': [
+ include('whitespace and macro uses'),
+ (r'\{\{', String, 'Csound score'),
+ (r'\n', Text, '#pop')
],
- 'scoreline': [
+ 'Csound score': [
(r'\}\}', String, '#pop'),
(r'([^}]+)|\}(?!\})', using(CsoundScoreLexer))
],
- 'python opcode': [
- include('whitespace or macro call'),
- (r'\{\{', String, 'python'),
- default('#pop')
+ 'Python opcode': [
+ include('whitespace and macro uses'),
+ (r'\{\{', String, 'Python'),
+ (r'\n', Text, '#pop')
],
- 'python': [
+ 'Python': [
(r'\}\}', String, '#pop'),
(r'([^}]+)|\}(?!\})', using(PythonLexer))
],
- 'lua opcode': [
- include('whitespace or macro call'),
- (r'"', String, 'single-line string'),
- (r'\{\{', String, 'lua'),
- (r',', Punctuation),
- default('#pop')
+ 'Lua opcode': [
+ include('whitespace and macro uses'),
+ (r'\{\{', String, 'Lua'),
+ (r'\n', Text, '#pop')
],
- 'lua': [
+ 'Lua': [
(r'\}\}', String, '#pop'),
(r'([^}]+)|\}(?!\})', using(LuaLexer))
]
@@ -313,7 +403,7 @@ class CsoundOrchestraLexer(CsoundLexer):
class CsoundDocumentLexer(RegexLexer):
"""
- For `Csound <http://csound.github.io>`_ documents.
+ For `Csound <https://csound.github.io>`_ documents.
.. versionadded:: 2.1
"""
@@ -331,15 +421,18 @@ class CsoundDocumentLexer(RegexLexer):
# be XML files.
tokens = {
'root': [
- newline,
(r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'[^<&;/]+', Text),
+ (r'(?:;|//).*$', Comment.Single),
+ (r'[^/;<]+|/(?!/)', Text),
+
(r'<\s*CsInstruments', Name.Tag, ('orchestra', 'tag')),
(r'<\s*CsScore', Name.Tag, ('score', 'tag')),
- (r'<\s*[hH][tT][mM][lL]', Name.Tag, ('HTML', 'tag')),
+ (r'<\s*[Hh][Tt][Mm][Ll]', Name.Tag, ('HTML', 'tag')),
+
(r'<\s*[\w:.-]+', Name.Tag, 'tag'),
(r'<\s*/\s*[\w:.-]+\s*>', Name.Tag)
],
+
'orchestra': [
(r'<\s*/\s*CsInstruments\s*>', Name.Tag, '#pop'),
(r'(.|\n)+?(?=<\s*/\s*CsInstruments\s*>)', using(CsoundOrchestraLexer))
@@ -349,9 +442,10 @@ class CsoundDocumentLexer(RegexLexer):
(r'(.|\n)+?(?=<\s*/\s*CsScore\s*>)', using(CsoundScoreLexer))
],
'HTML': [
- (r'<\s*/\s*[hH][tT][mM][lL]\s*>', Name.Tag, '#pop'),
- (r'(.|\n)+?(?=<\s*/\s*[hH][tT][mM][lL]\s*>)', using(HtmlLexer))
+ (r'<\s*/\s*[Hh][Tt][Mm][Ll]\s*>', Name.Tag, '#pop'),
+ (r'(.|\n)+?(?=<\s*/\s*[Hh][Tt][Mm][Ll]\s*>)', using(HtmlLexer))
],
+
'tag': [
(r'\s+', Text),
(r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
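A minimal usage sketch for the reworked Csound lexers (the .csd text below is invented for illustration; as the rules above show, CsoundDocumentLexer hands the orchestra and score sections to CsoundOrchestraLexer and CsoundScoreLexer):

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import CsoundDocumentLexer

    # A tiny, made-up CSD document exercising the CsInstruments/CsScore dispatch.
    csd = (
        "<CsoundSynthesizer>\n"
        "<CsInstruments>\n"
        "instr 1\n"
        "  a1 oscili 0dbfs/4, 440\n"
        "  out a1\n"
        "endin\n"
        "</CsInstruments>\n"
        "<CsScore>\n"
        "i 1 0 1\n"
        "</CsScore>\n"
        "</CsoundSynthesizer>\n"
    )
    print(highlight(csd, CsoundDocumentLexer(), HtmlFormatter()))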
diff --git a/pygments/lexers/css.py b/pygments/lexers/css.py
index e9e642f8..ce97730e 100644
--- a/pygments/lexers/css.py
+++ b/pygments/lexers/css.py
@@ -5,7 +5,7 @@
Lexers for CSS and related stylesheet formats.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -125,7 +125,7 @@ _css_properties = (
'wrap-flow', 'wrap-inside', 'wrap-through', 'writing-mode', 'z-index',
)
-# List of keyword values obtained from:
+# List of keyword values obtained from:
# http://cssvalues.com/
_keyword_values = (
'absolute', 'alias', 'all', 'all-petite-caps', 'all-scroll',
@@ -263,7 +263,7 @@ _time_units = (
's', 'ms',
)
_all_units = _angle_units + _frequency_units + _length_units + \
- _resolution_units + _time_units
+ _resolution_units + _time_units
class CssLexer(RegexLexer):
@@ -322,16 +322,18 @@ class CssLexer(RegexLexer):
include('urls'),
(r'('+r'|'.join(_functional_notation_keyword_values)+r')(\()',
bygroups(Name.Builtin, Punctuation), 'function-start'),
- (r'([a-zA-Z_][\w-]+)(\()', bygroups(Name.Function, Punctuation), 'function-start'),
+ (r'([a-zA-Z_][\w-]+)(\()',
+ bygroups(Name.Function, Punctuation), 'function-start'),
(words(_keyword_values, suffix=r'\b'), Keyword.Constant),
(words(_other_keyword_values, suffix=r'\b'), Keyword.Constant),
(words(_color_keywords, suffix=r'\b'), Keyword.Constant),
- (words(_css_properties, suffix=r'\b'), Keyword), # for transition-property etc.
+ # for transition-property etc.
+ (words(_css_properties, suffix=r'\b'), Keyword),
(r'\!important', Comment.Preproc),
(r'/\*(?:.|\n)*?\*/', Comment),
include('numeric-values'),
-
+
(r'[~^*!%&<>|+=@:./?-]+', Operator),
(r'[\[\](),]+', Punctuation),
(r'"(\\\\|\\"|[^"])*"', String.Double),
@@ -351,7 +353,8 @@ class CssLexer(RegexLexer):
# function-start may be entered recursively
(r'(' + r'|'.join(_functional_notation_keyword_values) + r')(\()',
bygroups(Name.Builtin, Punctuation), 'function-start'),
- (r'([a-zA-Z_][\w-]+)(\()', bygroups(Name.Function, Punctuation), 'function-start'),
+ (r'([a-zA-Z_][\w-]+)(\()',
+ bygroups(Name.Function, Punctuation), 'function-start'),
(r'/\*(?:.|\n)*?\*/', Comment),
include('numeric-values'),
@@ -373,8 +376,8 @@ class CssLexer(RegexLexer):
'numeric-values': [
(r'\#[a-zA-Z0-9]{1,6}', Number.Hex),
(r'[+\-]?[0-9]*[.][0-9]+', Number.Float, 'numeric-end'),
- (r'[+\-]?[0-9]+', Number.Integer, 'numeric-end'),
- ],
+ (r'[+\-]?[0-9]+', Number.Integer, 'numeric-end'),
+ ],
'numeric-end': [
(words(_all_units, suffix=r'\b'), Keyword.Type),
(r'%', Keyword.Type),
@@ -466,9 +469,9 @@ common_sass_tokens = {
],
'string-single': [
- (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double),
+ (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Single),
(r'#\{', String.Interpol, 'interpolation'),
- (r"'", String.Double, '#pop'),
+ (r"'", String.Single, '#pop'),
],
'string-url': [
diff --git a/pygments/lexers/d.py b/pygments/lexers/d.py
index 98e01dcf..09e6fe87 100644
--- a/pygments/lexers/d.py
+++ b/pygments/lexers/d.py
@@ -5,7 +5,7 @@
Lexers for D languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/dalvik.py b/pygments/lexers/dalvik.py
index 2f26fa04..c211f13e 100644
--- a/pygments/lexers/dalvik.py
+++ b/pygments/lexers/dalvik.py
@@ -5,7 +5,7 @@
Pygments lexers for Dalvik VM-related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/data.py b/pygments/lexers/data.py
index 4c39db64..7593b487 100644
--- a/pygments/lexers/data.py
+++ b/pygments/lexers/data.py
@@ -5,7 +5,7 @@
    Lexers for data file formats.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -205,7 +205,7 @@ class YamlLexer(ExtendedRegexLexer):
bygroups(Text, Number), 'ignored-line'),
],
- # the %YAG directive
+ # the %TAG directive
'tag-directive': [
# a tag handle and the corresponding prefix
(r'([ ]+)(!|![\w-]*!)'
@@ -218,7 +218,7 @@ class YamlLexer(ExtendedRegexLexer):
'indentation': [
# trailing whitespaces are ignored
(r'[ ]*$', something(Text), '#pop:2'),
- # whitespaces preceeding block collection indicators
+ # whitespaces preceding block collection indicators
(r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text)),
# block collection indicators
(r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
@@ -232,6 +232,9 @@ class YamlLexer(ExtendedRegexLexer):
(r'[ ]*(?=#|$)', something(Text), '#pop'),
# whitespaces separating tokens
(r'[ ]+', Text),
+ # key with colon
+ (r'''([^,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
+ bygroups(Name.Tag, set_indent(Punctuation, implicit=True))),
# tags, anchors and aliases,
include('descriptors'),
# block collections and scalars
@@ -250,7 +253,7 @@ class YamlLexer(ExtendedRegexLexer):
(r'!<[\w#;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type),
# a tag in the form '!', '!suffix' or '!handle!suffix'
(r'!(?:[\w-]+!)?'
- r'[\w#;/?:@&=+$,.!~*\'()\[\]%-]+', Keyword.Type),
+ r'[\w#;/?:@&=+$,.!~*\'()\[\]%-]*', Keyword.Type),
# an anchor
(r'&[\w-]+', Name.Label),
# an alias
@@ -308,6 +311,9 @@ class YamlLexer(ExtendedRegexLexer):
# a flow mapping indicated by '{' and '}'
'flow-mapping': [
+ # key with colon
+ (r'''([^,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
+ bygroups(Name.Tag, Punctuation)),
# include flow collection rules
include('flow-collection'),
# the closing indicator
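A quick way to see the effect of the new key rules (the YAML document is made up; plain keys in both block and flow mappings should now come out as Token.Name.Tag):

    from pygments.lexers.data import YamlLexer

    sample = "name: pygments\nreleases: {stable: 2.2, dev: 2.3}\n"
    for token, value in YamlLexer().get_tokens(sample):
        if value.strip():
            print(token, repr(value))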
diff --git a/pygments/lexers/diff.py b/pygments/lexers/diff.py
index 726b49ad..f7019440 100644
--- a/pygments/lexers/diff.py
+++ b/pygments/lexers/diff.py
@@ -5,7 +5,7 @@
Lexers for diff/patch formats.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index 11b4573e..27ae77c5 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -5,7 +5,7 @@
Lexers for .net languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
@@ -58,7 +58,7 @@ class CSharpLexer(RegexLexer):
# http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
levels = {
- 'none': '@?[_a-zA-Z]\w*',
+ 'none': r'@?[_a-zA-Z]\w*',
'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
'[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
'Cf', 'Mn', 'Mc') + ']*'),
@@ -171,7 +171,7 @@ class NemerleLexer(RegexLexer):
# http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
levels = {
- 'none': '@?[_a-zA-Z]\w*',
+ 'none': r'@?[_a-zA-Z]\w*',
'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
'[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
'Cf', 'Mn', 'Mc') + ']*'),
@@ -352,13 +352,13 @@ class BooLexer(RegexLexer):
('[*/]', Comment.Multiline)
],
'funcname': [
- ('[a-zA-Z_]\w*', Name.Function, '#pop')
+ (r'[a-zA-Z_]\w*', Name.Function, '#pop')
],
'classname': [
- ('[a-zA-Z_]\w*', Name.Class, '#pop')
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'namespace': [
- ('[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
+ (r'[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
]
}
@@ -413,7 +413,7 @@ class VbNetLexer(RegexLexer):
'Static', 'Step', 'Stop', 'SyncLock', 'Then', 'Throw', 'To',
'True', 'Try', 'TryCast', 'Wend', 'Using', 'When', 'While',
'Widening', 'With', 'WithEvents', 'WriteOnly'),
- prefix='(?<!\.)', suffix=r'\b'), Keyword),
+ prefix=r'(?<!\.)', suffix=r'\b'), Keyword),
(r'(?<!\.)End\b', Keyword, 'end'),
(r'(?<!\.)(Dim|Const)\b', Keyword, 'dim'),
(r'(?<!\.)(Function|Sub|Property)(\s+)',
@@ -574,10 +574,10 @@ class FSharpLexer(RegexLexer):
'virtual', 'volatile',
]
keyopts = [
- '!=', '#', '&&', '&', '\(', '\)', '\*', '\+', ',', '-\.',
- '->', '-', '\.\.', '\.', '::', ':=', ':>', ':', ';;', ';', '<-',
- '<\]', '<', '>\]', '>', '\?\?', '\?', '\[<', '\[\|', '\[', '\]',
- '_', '`', '\{', '\|\]', '\|', '\}', '~', '<@@', '<@', '=', '@>', '@@>',
+ '!=', '#', '&&', '&', r'\(', r'\)', r'\*', r'\+', ',', r'-\.',
+ '->', '-', r'\.\.', r'\.', '::', ':=', ':>', ':', ';;', ';', '<-',
+ r'<\]', '<', r'>\]', '>', r'\?\?', r'\?', r'\[<', r'\[\|', r'\[', r'\]',
+ '_', '`', r'\{', r'\|\]', r'\|', r'\}', '~', '<@@', '<@', '=', '@>', '@@>',
]
operators = r'[!$%&*+\./:<=>?@^|~-]'
diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py
index 312d5f5e..4451b480 100644
--- a/pygments/lexers/dsls.py
+++ b/pygments/lexers/dsls.py
@@ -5,7 +5,7 @@
Lexers for various domain-specific languages.
- :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -36,7 +36,7 @@ class ProtoBufLexer(RegexLexer):
tokens = {
'root': [
(r'[ \t]+', Text),
- (r'[,;{}\[\]()]', Punctuation),
+ (r'[,;{}\[\]()<>]', Punctuation),
(r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
(r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
(words((
@@ -66,7 +66,7 @@ class ProtoBufLexer(RegexLexer):
(r'[+-=]', Operator),
(r'([a-zA-Z_][\w.]*)([ \t]*)(=)',
bygroups(Name.Attribute, Text, Operator)),
- ('[a-zA-Z_][\w.]*', Name),
+ (r'[a-zA-Z_][\w.]*', Name),
],
'package': [
(r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
@@ -300,7 +300,7 @@ class PuppetLexer(RegexLexer):
],
'names': [
- ('[a-zA-Z_]\w*', Name.Attribute),
+ (r'[a-zA-Z_]\w*', Name.Attribute),
(r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation,
String, Punctuation)),
(r'\$\S+', Name.Variable),
@@ -581,7 +581,7 @@ class PanLexer(RegexLexer):
'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final',
'prefix', 'unique', 'object', 'foreach', 'include', 'template',
'function', 'variable', 'structure', 'extensible', 'declaration'),
- prefix=r'\b', suffix=r'\s*\b'),
+ prefix=r'\b', suffix=r'\s*\b'),
Keyword),
(words((
'file_contents', 'format', 'index', 'length', 'match', 'matches',
@@ -593,7 +593,7 @@ class PanLexer(RegexLexer):
'is_number', 'is_property', 'is_resource', 'is_string', 'to_boolean',
'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists',
'path_exists', 'if_exists', 'return', 'value'),
- prefix=r'\b', suffix=r'\s*\b'),
+ prefix=r'\b', suffix=r'\s*\b'),
Name.Builtin),
(r'#.*', Comment),
(r'\\[\w\W]', String.Escape),
@@ -688,7 +688,7 @@ class CrmshLexer(RegexLexer):
(r'([\w#$-]+)(?:(:)(%s))?(?![\w#$-])' % rsc_role_action,
bygroups(Name, Punctuation, Operator.Word)),
# punctuation
- (r'(\\(?=\n)|[[\](){}/:@])', Punctuation),
+ (r'(\\(?=\n)|[\[\](){}/:@])', Punctuation),
(r'\s+|\n', Whitespace),
],
}
diff --git a/pygments/lexers/dylan.py b/pygments/lexers/dylan.py
index 600a78e5..30318f38 100644
--- a/pygments/lexers/dylan.py
+++ b/pygments/lexers/dylan.py
@@ -5,7 +5,7 @@
Lexers for the Dylan language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -179,10 +179,10 @@ class DylanLexer(RegexLexer):
(valid_name + ':', Keyword),
# class names
- (r'<' + valid_name + '>', Name.Class),
+ ('<' + valid_name + '>', Name.Class),
# define variable forms.
- (r'\*' + valid_name + '\*', Name.Variable.Global),
+ (r'\*' + valid_name + r'\*', Name.Variable.Global),
# define constant forms.
(r'\$' + valid_name, Name.Constant),
@@ -260,7 +260,7 @@ class DylanConsoleLexer(Lexer):
mimetypes = ['text/x-dylan-console']
_line_re = re.compile('.*?\n')
- _prompt_re = re.compile('\?| ')
+ _prompt_re = re.compile(r'\?| ')
def get_tokens_unprocessed(self, text):
dylexer = DylanLexer(**self.options)
diff --git a/pygments/lexers/ecl.py b/pygments/lexers/ecl.py
index 95572ba7..bd80ad19 100644
--- a/pygments/lexers/ecl.py
+++ b/pygments/lexers/ecl.py
@@ -5,7 +5,7 @@
Lexers for the ECL language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/eiffel.py b/pygments/lexers/eiffel.py
index 8a244613..a90ab0a5 100644
--- a/pygments/lexers/eiffel.py
+++ b/pygments/lexers/eiffel.py
@@ -5,7 +5,7 @@
Lexer for the Eiffel language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/elm.py b/pygments/lexers/elm.py
index cd1fb98e..22a10bd9 100644
--- a/pygments/lexers/elm.py
+++ b/pygments/lexers/elm.py
@@ -5,7 +5,7 @@
Lexer for the Elm programming language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -27,7 +27,7 @@ class ElmLexer(RegexLexer):
filenames = ['*.elm']
mimetypes = ['text/x-elm']
- validName = r'[a-z_][a-zA-Z_\']*'
+ validName = r'[a-z_][a-zA-Z0-9_\']*'
specialName = r'^main '
diff --git a/pygments/lexers/erlang.py b/pygments/lexers/erlang.py
index 93ddd2c2..0d0d0798 100644
--- a/pygments/lexers/erlang.py
+++ b/pygments/lexers/erlang.py
@@ -5,7 +5,7 @@
Lexers for Erlang.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -344,7 +344,7 @@ class ElixirLexer(RegexLexer):
op1_re = "|".join(re.escape(s) for s in OPERATORS1)
ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re)
punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION)
- alnum = '\w'
+ alnum = r'\w'
name_re = r'(?:\.\.\.|[a-z_]%s*[!?]?)' % alnum
modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum}
complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re)
@@ -495,7 +495,7 @@ class ElixirConsoleLexer(Lexer):
aliases = ['iex']
mimetypes = ['text/x-elixir-shellsession']
- _prompt_re = re.compile('(iex|\.{3})(\(\d+\))?> ')
+ _prompt_re = re.compile(r'(iex|\.{3})(\(\d+\))?> ')
def get_tokens_unprocessed(self, text):
exlexer = ElixirLexer(**self.options)
diff --git a/pygments/lexers/esoteric.py b/pygments/lexers/esoteric.py
index 150d930f..26222c9f 100644
--- a/pygments/lexers/esoteric.py
+++ b/pygments/lexers/esoteric.py
@@ -5,7 +5,7 @@
Lexers for esoteric languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -14,7 +14,7 @@ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error
__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'CAmkESLexer',
- 'CapDLLexer', 'AheuiLexer']
+ 'CapDLLexer', 'AheuiLexer']
class BrainfuckLexer(RegexLexer):
@@ -155,14 +155,15 @@ class CapDLLexer(RegexLexer):
shadow type names, but these instances are currently incorrectly
highlighted as types. Supporting this would need a stateful lexer that is
considered unnecessarily complex for now.
+
+ .. versionadded:: 2.2
"""
name = 'CapDL'
aliases = ['capdl']
filenames = ['*.cdl']
tokens = {
- 'root':[
-
+ 'root': [
# C pre-processor directive
(r'^\s*#.*\n', Comment.Preproc),
@@ -171,7 +172,7 @@ class CapDLLexer(RegexLexer):
(r'/\*(.|\n)*?\*/', Comment),
(r'(//|--).*\n', Comment),
- (r'[<>\[\(\)\{\},:;=\]]', Punctuation),
+ (r'[<>\[(){},:;=\]]', Punctuation),
(r'\.\.', Punctuation),
(words(('arch', 'arm11', 'caps', 'child_of', 'ia32', 'irq', 'maps',
@@ -187,7 +188,7 @@ class CapDLLexer(RegexLexer):
'prio', 'sp', 'R', 'RG', 'RX', 'RW', 'RWG', 'RWX', 'W',
'WG', 'WX', 'level', 'masked', 'master_reply', 'paddr',
'ports', 'reply', 'uncached'), suffix=r'\b'),
- Keyword.Reserved),
+ Keyword.Reserved),
# Literals
(r'0[xX][\da-fA-F]+', Number.Hex),
@@ -197,10 +198,11 @@ class CapDLLexer(RegexLexer):
'ipc_buffer_slot'), suffix=r'\b'), Number),
# Identifiers
- (r'[a-zA-Z_][-_@\.\w]*', Name),
+ (r'[a-zA-Z_][-@\.\w]*', Name),
],
}
+
class RedcodeLexer(RegexLexer):
"""
A simple Redcode lexer based on ICWS'94.
@@ -243,7 +245,7 @@ class AheuiLexer(RegexLexer):
Aheui_ is esoteric language based on Korean alphabets.
- .. _Aheui:: http://aheui.github.io/
+ .. _Aheui: http://aheui.github.io/
"""
diff --git a/pygments/lexers/ezhil.py b/pygments/lexers/ezhil.py
index eea300ad..ce1cdb2d 100644
--- a/pygments/lexers/ezhil.py
+++ b/pygments/lexers/ezhil.py
@@ -4,8 +4,8 @@
~~~~~~~~~~~~~~~~~~~~~
Pygments lexers for Ezhil language.
-
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -16,6 +16,7 @@ from pygments.token import String, Number, Punctuation, Operator
__all__ = ['EzhilLexer']
+
class EzhilLexer(RegexLexer):
"""
Lexer for `Ezhil, a Tamil script-based programming language <http://ezhillang.org>`_
@@ -62,7 +63,7 @@ class EzhilLexer(RegexLexer):
(r'(?u)\d+', Number.Integer),
]
}
-
+
def __init__(self, **options):
super(EzhilLexer, self).__init__(**options)
self.encoding = options.get('encoding', 'utf-8')
diff --git a/pygments/lexers/factor.py b/pygments/lexers/factor.py
index 6a39a1d4..09d85c27 100644
--- a/pygments/lexers/factor.py
+++ b/pygments/lexers/factor.py
@@ -5,7 +5,7 @@
Lexers for the Factor language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/fantom.py b/pygments/lexers/fantom.py
index c20a3f38..3ea2177c 100644
--- a/pygments/lexers/fantom.py
+++ b/pygments/lexers/fantom.py
@@ -5,7 +5,7 @@
Lexer for the Fantom language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/felix.py b/pygments/lexers/felix.py
index 9631bcc1..8f0695b5 100644
--- a/pygments/lexers/felix.py
+++ b/pygments/lexers/felix.py
@@ -5,7 +5,7 @@
Lexer for the Felix language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/floscript.py b/pygments/lexers/floscript.py
new file mode 100644
index 00000000..4f200809
--- /dev/null
+++ b/pygments/lexers/floscript.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.floscript
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for FloScript.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
+ default, words, combined, do_insertions
+from pygments.util import get_bool_opt, shebang_matches
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Other, Error
+from pygments import unistring as uni
+
+__all__ = ['FloScriptLexer',]
+
+class FloScriptLexer(RegexLexer):
+ """
+ For `FloScript <https://github.com/ioflo/ioflo>`_ configuration language source code.
+
+ .. versionadded:: 2.4
+ """
+
+ name = 'FloScript'
+ aliases = ['floscript', 'flo']
+ filenames = ['*.flo']
+
+ def innerstring_rules(ttype):
+ return [
+ # the old style '%s' % (...) string formatting
+ (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[E-GXc-giorsux%]', String.Interpol),
+ # backslashes, quotes and formatting signs must be parsed one at a time
+ (r'[^\\\'"%\n]+', ttype),
+ (r'[\'"\\]', ttype),
+ # unhandled string formatting sign
+ (r'%', ttype),
+ # newlines are an error (use "nl" state)
+ ]
+
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+
+ (r'[]{}:(),;[]', Punctuation),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'(to|by|with|from|per|for|cum|qua|via|as|at|in|of|on|re|is|if|be|into|and|not)\b', Operator.Word),
+ (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
+ (r'(load|init|server|logger|log|loggee|first|over|under|next|done|timeout|repeat|native|benter|enter|recur|exit|precur|renter|rexit|print|put|inc|copy|set|aux|rear|raze|go|let|do|bid|ready|start|stop|run|abort|use|flo|give|take)\b', Name.Builtin),
+ (r'(frame|framer|house)\b', Keyword),
+ ('"', String, 'string'),
+
+ include('name'),
+ include('numbers'),
+        (r'#.+$', Comment.Single),
+ ],
+ 'string': [
+ ('[^"]+', String),
+ ('"', String, '#pop'),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[bB][01]+', Number.Bin),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+j?', Number.Integer)
+ ],
+
+ 'name': [
+ (r'@[\w.]+', Name.Decorator),
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+
+
+
+ }
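A small sketch driving the new lexer directly (the FloScript snippet is invented; importing the class avoids relying on alias registration):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.floscript import FloScriptLexer

    # Made-up FloScript, just enough to exercise keywords and builtins.
    flo = "house example\n\nframer hello be active first hi\n  frame hi\n    do say\n"
    print(highlight(flo, FloScriptLexer(), TerminalFormatter()))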
diff --git a/pygments/lexers/forth.py b/pygments/lexers/forth.py
index eb806ba0..7fecdd52 100644
--- a/pygments/lexers/forth.py
+++ b/pygments/lexers/forth.py
@@ -3,7 +3,9 @@
pygments.lexers.forth
~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ Lexer for the Forth language.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/fortran.py b/pygments/lexers/fortran.py
index e2f95b11..5165bac0 100644
--- a/pygments/lexers/fortran.py
+++ b/pygments/lexers/fortran.py
@@ -5,7 +5,7 @@
Lexers for Fortran languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -156,8 +156,9 @@ class FortranLexer(RegexLexer):
'nums': [
(r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer),
- (r'[+-]?\d*\.\d+(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
- (r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
+ (r'[+-]?\d*\.\d+([ed][-+]?\d+)?(_[a-z]\w+)?', Number.Float),
+ (r'[+-]?\d+\.\d*([ed][-+]?\d+)?(_[a-z]\w+)?', Number.Float),
+ (r'[+-]?\d+(\.\d*)?[ed][-+]?\d+(_[a-z]\w+)?', Number.Float),
],
}
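The widened float rules now also accept Fortran's "d" exponent marker for double-precision literals; a quick check (the sample statement is invented):

    from pygments.lexers.fortran import FortranLexer
    from pygments.token import Number

    for token, value in FortranLexer().get_tokens("x = 1.0d0 + 2.5d-3\n"):
        if token in Number:
            print(token, repr(value))
    # Both literals should now be reported as Token.Literal.Number.Float.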
diff --git a/pygments/lexers/foxpro.py b/pygments/lexers/foxpro.py
index c7f368c7..7c0d2621 100644
--- a/pygments/lexers/foxpro.py
+++ b/pygments/lexers/foxpro.py
@@ -5,7 +5,7 @@
Simple lexer for Microsoft Visual FoxPro source code.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py
index 13c72b1e..254df795 100644
--- a/pygments/lexers/functional.py
+++ b/pygments/lexers/functional.py
@@ -5,7 +5,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/go.py b/pygments/lexers/go.py
index 8bd6c7fb..cc2a6d63 100644
--- a/pygments/lexers/go.py
+++ b/pygments/lexers/go.py
@@ -5,7 +5,7 @@
Lexers for the Google Go language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/grammar_notation.py b/pygments/lexers/grammar_notation.py
index d59cc61c..bc715ffa 100644
--- a/pygments/lexers/grammar_notation.py
+++ b/pygments/lexers/grammar_notation.py
@@ -5,7 +5,7 @@
    Lexers for grammar notations like BNF.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -13,7 +13,7 @@ import re
from pygments.lexer import RegexLexer, bygroups, include, this, using, words
from pygments.token import Comment, Keyword, Literal, Name, Number, \
- Operator, Punctuation, String, Text
+ Operator, Punctuation, String, Text
__all__ = ['BnfLexer', 'AbnfLexer', 'JsgfLexer']
@@ -158,7 +158,7 @@ class JsgfLexer(RegexLexer):
(r'//.*', Comment.Single),
],
'non-comments': [
- ('\A#JSGF[^;]*', Comment.Preproc),
+ (r'\A#JSGF[^;]*', Comment.Preproc),
(r'\s+', Text),
(r';', Punctuation),
(r'[=|()\[\]*+]', Operator),
diff --git a/pygments/lexers/graph.py b/pygments/lexers/graph.py
index 8315898c..6e836bdd 100644
--- a/pygments/lexers/graph.py
+++ b/pygments/lexers/graph.py
@@ -5,7 +5,7 @@
Lexers for graph query languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,9 +22,9 @@ __all__ = ['CypherLexer']
class CypherLexer(RegexLexer):
"""
For `Cypher Query Language
- <http://docs.neo4j.org/chunked/milestone/cypher-query-lang.html>`_
+ <https://neo4j.com/docs/developer-manual/3.3/cypher/>`_
- For the Cypher version in Neo4J 2.0
+ For the Cypher version in Neo4j 3.3
.. versionadded:: 2.0
"""
@@ -49,14 +49,19 @@ class CypherLexer(RegexLexer):
],
'keywords': [
(r'(create|order|match|limit|set|skip|start|return|with|where|'
- r'delete|foreach|not|by)\b', Keyword),
+ r'delete|foreach|not|by|true|false)\b', Keyword),
],
'clauses': [
- # TODO: many missing ones, see http://docs.neo4j.org/refcard/2.0/
- (r'(all|any|as|asc|create|create\s+unique|delete|'
- r'desc|distinct|foreach|in|is\s+null|limit|match|none|'
- r'order\s+by|return|set|skip|single|start|union|where|with)\b',
- Keyword),
+ # based on https://neo4j.com/docs/cypher-refcard/3.3/
+ (r'(all|any|as|asc|ascending|assert|call|case|create|'
+ r'create\s+index|create\s+unique|delete|desc|descending|'
+ r'distinct|drop\s+constraint\s+on|drop\s+index\s+on|end|'
+ r'ends\s+with|fieldterminator|foreach|in|is\s+node\s+key|'
+ r'is\s+null|is\s+unique|limit|load\s+csv\s+from|match|merge|none|'
+ r'not|null|on\s+match|on\s+create|optional\s+match|order\s+by|'
+ r'remove|return|set|skip|single|start|starts\s+with|then|union|'
+ r'union\s+all|unwind|using\s+periodic\s+commit|yield|where|when|'
+ r'with)\b', Keyword),
],
'relations': [
(r'(-\[)(.*?)(\]->)', bygroups(Operator, using(this), Operator)),
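The expanded keyword coverage can be checked against a query that uses some of the newly listed clauses (the query itself is invented):

    from pygments.lexers.graph import CypherLexer
    from pygments.token import Keyword

    query = "match (n) where n.name starts with 'A' return distinct n order by n.name"
    print([value for token, value in CypherLexer().get_tokens(query)
           if token in Keyword])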
diff --git a/pygments/lexers/graphics.py b/pygments/lexers/graphics.py
index b40e0286..30ab2cbc 100644
--- a/pygments/lexers/graphics.py
+++ b/pygments/lexers/graphics.py
@@ -5,7 +5,7 @@
Lexers for computer graphics and plotting related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -15,7 +15,7 @@ from pygments.token import Text, Comment, Operator, Keyword, Name, \
Number, Punctuation, String
__all__ = ['GLShaderLexer', 'PostScriptLexer', 'AsymptoteLexer', 'GnuplotLexer',
- 'PovrayLexer']
+ 'PovrayLexer', 'HLSLShaderLexer']
class GLShaderLexer(RegexLexer):
@@ -46,28 +46,102 @@ class GLShaderLexer(RegexLexer):
(r'0[0-7]*', Number.Oct),
(r'[1-9][0-9]*', Number.Integer),
(words((
- 'attribute', 'const', 'uniform', 'varying', 'centroid', 'break',
- 'continue', 'do', 'for', 'while', 'if', 'else', 'in', 'out',
- 'inout', 'float', 'int', 'void', 'bool', 'true', 'false',
- 'invariant', 'discard', 'return', 'mat2', 'mat3' 'mat4',
- 'mat2x2', 'mat3x2', 'mat4x2', 'mat2x3', 'mat3x3', 'mat4x3',
- 'mat2x4', 'mat3x4', 'mat4x4', 'vec2', 'vec3', 'vec4',
- 'ivec2', 'ivec3', 'ivec4', 'bvec2', 'bvec3', 'bvec4',
- 'sampler1D', 'sampler2D', 'sampler3D' 'samplerCube',
- 'sampler1DShadow', 'sampler2DShadow', 'struct'),
+ # Storage qualifiers
+ 'attribute', 'const', 'uniform', 'varying',
+ 'buffer', 'shared', 'in', 'out',
+ # Layout qualifiers
+ 'layout',
+ # Interpolation qualifiers
+ 'flat', 'smooth', 'noperspective',
+ # Auxiliary qualifiers
+ 'centroid', 'sample', 'patch',
+ # Parameter qualifiers. Some double as Storage qualifiers
+ 'inout',
+ # Precision qualifiers
+ 'lowp', 'mediump', 'highp', 'precision',
+ # Invariance qualifiers
+ 'invariant',
+ # Precise qualifiers
+ 'precise',
+ # Memory qualifiers
+ 'coherent', 'volatile', 'restrict', 'readonly', 'writeonly',
+ # Statements
+ 'break', 'continue', 'do', 'for', 'while', 'switch',
+ 'case', 'default', 'if', 'else', 'subroutine',
+ 'discard', 'return', 'struct'),
prefix=r'\b', suffix=r'\b'),
Keyword),
(words((
- 'asm', 'class', 'union', 'enum', 'typedef', 'template', 'this',
- 'packed', 'goto', 'switch', 'default', 'inline', 'noinline',
- 'volatile', 'public', 'static', 'extern', 'external', 'interface',
- 'long', 'short', 'double', 'half', 'fixed', 'unsigned', 'lowp',
- 'mediump', 'highp', 'precision', 'input', 'output',
- 'hvec2', 'hvec3', 'hvec4', 'dvec2', 'dvec3', 'dvec4',
- 'fvec2', 'fvec3', 'fvec4', 'sampler2DRect', 'sampler3DRect',
- 'sampler2DRectShadow', 'sizeof', 'cast', 'namespace', 'using'),
+ # Boolean values
+ 'true', 'false'),
prefix=r'\b', suffix=r'\b'),
- Keyword), # future use
+ Keyword.Constant),
+ (words((
+ # Miscellaneous types
+ 'void', 'atomic_uint',
+ # Floating-point scalars and vectors
+ 'float', 'vec2', 'vec3', 'vec4',
+ 'double', 'dvec2', 'dvec3', 'dvec4',
+ # Integer scalars and vectors
+ 'int', 'ivec2', 'ivec3', 'ivec4',
+ 'uint', 'uvec2', 'uvec3', 'uvec4',
+ # Boolean scalars and vectors
+ 'bool', 'bvec2', 'bvec3', 'bvec4',
+ # Matrices
+ 'mat2', 'mat3', 'mat4', 'dmat2', 'dmat3', 'dmat4',
+ 'mat2x2', 'mat2x3', 'mat2x4', 'dmat2x2', 'dmat2x3', 'dmat2x4',
+ 'mat3x2', 'mat3x3', 'mat3x4', 'dmat3x2', 'dmat3x3',
+ 'dmat3x4', 'mat4x2', 'mat4x3', 'mat4x4', 'dmat4x2', 'dmat4x3', 'dmat4x4',
+ # Floating-point samplers
+ 'sampler1D', 'sampler2D', 'sampler3D', 'samplerCube',
+ 'sampler1DArray', 'sampler2DArray', 'samplerCubeArray',
+ 'sampler2DRect', 'samplerBuffer',
+ 'sampler2DMS', 'sampler2DMSArray',
+ # Shadow samplers
+ 'sampler1DShadow', 'sampler2DShadow', 'samplerCubeShadow',
+ 'sampler1DArrayShadow', 'sampler2DArrayShadow',
+ 'samplerCubeArrayShadow', 'sampler2DRectShadow',
+ # Signed integer samplers
+ 'isampler1D', 'isampler2D', 'isampler3D', 'isamplerCube',
+ 'isampler1DArray', 'isampler2DArray', 'isamplerCubeArray',
+ 'isampler2DRect', 'isamplerBuffer',
+ 'isampler2DMS', 'isampler2DMSArray',
+ # Unsigned integer samplers
+ 'usampler1D', 'usampler2D', 'usampler3D', 'usamplerCube',
+ 'usampler1DArray', 'usampler2DArray', 'usamplerCubeArray',
+ 'usampler2DRect', 'usamplerBuffer',
+ 'usampler2DMS', 'usampler2DMSArray',
+ # Floating-point image types
+ 'image1D', 'image2D', 'image3D', 'imageCube',
+ 'image1DArray', 'image2DArray', 'imageCubeArray',
+ 'image2DRect', 'imageBuffer',
+ 'image2DMS', 'image2DMSArray',
+ # Signed integer image types
+ 'iimage1D', 'iimage2D', 'iimage3D', 'iimageCube',
+ 'iimage1DArray', 'iimage2DArray', 'iimageCubeArray',
+ 'iimage2DRect', 'iimageBuffer',
+ 'iimage2DMS', 'iimage2DMSArray',
+ # Unsigned integer image types
+ 'uimage1D', 'uimage2D', 'uimage3D', 'uimageCube',
+ 'uimage1DArray', 'uimage2DArray', 'uimageCubeArray',
+ 'uimage2DRect', 'uimageBuffer',
+ 'uimage2DMS', 'uimage2DMSArray'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword.Type),
+ (words((
+ # Reserved for future use.
+ 'common', 'partition', 'active', 'asm', 'class',
+ 'union', 'enum', 'typedef', 'template', 'this',
+ 'resource', 'goto', 'inline', 'noinline', 'public',
+ 'static', 'extern', 'external', 'interface', 'long',
+ 'short', 'half', 'fixed', 'unsigned', 'superp', 'input',
+ 'output', 'hvec2', 'hvec3', 'hvec4', 'fvec2', 'fvec3',
+ 'fvec4', 'sampler3DRect', 'filter', 'sizeof', 'cast',
+ 'namespace', 'using'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ # All names beginning with "gl_" are reserved.
+ (r'gl_\w*', Name.Builtin),
(r'[a-zA-Z_]\w*', Name),
(r'\.', Punctuation),
(r'\s+', Text),
@@ -75,6 +149,160 @@ class GLShaderLexer(RegexLexer):
}
+class HLSLShaderLexer(RegexLexer):
+ """
+ HLSL (Microsoft Direct3D Shader) lexer.
+
+ .. versionadded:: 2.3
+ """
+ name = 'HLSL'
+ aliases = ['hlsl']
+ filenames = ['*.hlsl', '*.hlsli']
+ mimetypes = ['text/x-hlsl']
+
+ tokens = {
+ 'root': [
+ (r'^#.*', Comment.Preproc),
+ (r'//.*', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
+ Operator),
+ (r'[?:]', Operator), # quick hack for ternary
+ (r'\bdefined\b', Operator),
+ (r'[;{}(),.\[\]]', Punctuation),
+ # FIXME when e is present, no decimal point needed
+ (r'[+-]?\d*\.\d+([eE][-+]?\d+)?f?', Number.Float),
+ (r'[+-]?\d+\.\d*([eE][-+]?\d+)?f?', Number.Float),
+ (r'0[xX][0-9a-fA-F]*', Number.Hex),
+ (r'0[0-7]*', Number.Oct),
+ (r'[1-9][0-9]*', Number.Integer),
+ (r'"', String, 'string'),
+ (words((
+ 'asm','asm_fragment','break','case','cbuffer','centroid','class',
+ 'column_major','compile','compile_fragment','const','continue',
+ 'default','discard','do','else','export','extern','for','fxgroup',
+ 'globallycoherent','groupshared','if','in','inline','inout',
+ 'interface','line','lineadj','linear','namespace','nointerpolation',
+ 'noperspective','NULL','out','packoffset','pass','pixelfragment',
+ 'point','precise','return','register','row_major','sample',
+ 'sampler','shared','stateblock','stateblock_state','static',
+ 'struct','switch','tbuffer','technique','technique10',
+ 'technique11','texture','typedef','triangle','triangleadj',
+ 'uniform','vertexfragment','volatile','while'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (words(('true','false'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Constant),
+ (words((
+ 'auto','catch','char','const_cast','delete','dynamic_cast','enum',
+ 'explicit','friend','goto','long','mutable','new','operator',
+ 'private','protected','public','reinterpret_cast','short','signed',
+ 'sizeof','static_cast','template','this','throw','try','typename',
+ 'union','unsigned','using','virtual'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ (words((
+ 'dword','matrix','snorm','string','unorm','unsigned','void','vector',
+ 'BlendState','Buffer','ByteAddressBuffer','ComputeShader',
+ 'DepthStencilState','DepthStencilView','DomainShader',
+ 'GeometryShader','HullShader','InputPatch','LineStream',
+ 'OutputPatch','PixelShader','PointStream','RasterizerState',
+ 'RenderTargetView','RasterizerOrderedBuffer',
+ 'RasterizerOrderedByteAddressBuffer',
+ 'RasterizerOrderedStructuredBuffer','RasterizerOrderedTexture1D',
+ 'RasterizerOrderedTexture1DArray','RasterizerOrderedTexture2D',
+ 'RasterizerOrderedTexture2DArray','RasterizerOrderedTexture3D',
+ 'RWBuffer','RWByteAddressBuffer','RWStructuredBuffer',
+ 'RWTexture1D','RWTexture1DArray','RWTexture2D','RWTexture2DArray',
+ 'RWTexture3D','SamplerState','SamplerComparisonState',
+ 'StructuredBuffer','Texture1D','Texture1DArray','Texture2D',
+ 'Texture2DArray','Texture2DMS','Texture2DMSArray','Texture3D',
+ 'TextureCube','TextureCubeArray','TriangleStream','VertexShader'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword.Type),
+ (words((
+ 'bool','double','float','int','half','min16float','min10float',
+ 'min16int','min12int','min16uint','uint'),
+ prefix=r'\b', suffix=r'([1-4](x[1-4])?)?\b'),
+ Keyword.Type), # vector and matrix types
+ (words((
+ 'abort','abs','acos','all','AllMemoryBarrier',
+ 'AllMemoryBarrierWithGroupSync','any','AppendStructuredBuffer',
+ 'asdouble','asfloat','asin','asint','asuint','asuint','atan',
+ 'atan2','ceil','CheckAccessFullyMapped','clamp','clip',
+ 'CompileShader','ConsumeStructuredBuffer','cos','cosh','countbits',
+ 'cross','D3DCOLORtoUBYTE4','ddx','ddx_coarse','ddx_fine','ddy',
+ 'ddy_coarse','ddy_fine','degrees','determinant',
+ 'DeviceMemoryBarrier','DeviceMemoryBarrierWithGroupSync','distance',
+ 'dot','dst','errorf','EvaluateAttributeAtCentroid',
+ 'EvaluateAttributeAtSample','EvaluateAttributeSnapped','exp',
+ 'exp2','f16tof32','f32tof16','faceforward','firstbithigh',
+ 'firstbitlow','floor','fma','fmod','frac','frexp','fwidth',
+ 'GetRenderTargetSampleCount','GetRenderTargetSamplePosition',
+ 'GlobalOrderedCountIncrement','GroupMemoryBarrier',
+ 'GroupMemoryBarrierWithGroupSync','InterlockedAdd','InterlockedAnd',
+ 'InterlockedCompareExchange','InterlockedCompareStore',
+ 'InterlockedExchange','InterlockedMax','InterlockedMin',
+ 'InterlockedOr','InterlockedXor','isfinite','isinf','isnan',
+ 'ldexp','length','lerp','lit','log','log10','log2','mad','max',
+ 'min','modf','msad4','mul','noise','normalize','pow','printf',
+ 'Process2DQuadTessFactorsAvg','Process2DQuadTessFactorsMax',
+ 'Process2DQuadTessFactorsMin','ProcessIsolineTessFactors',
+ 'ProcessQuadTessFactorsAvg','ProcessQuadTessFactorsMax',
+ 'ProcessQuadTessFactorsMin','ProcessTriTessFactorsAvg',
+ 'ProcessTriTessFactorsMax','ProcessTriTessFactorsMin',
+ 'QuadReadLaneAt','QuadSwapX','QuadSwapY','radians','rcp',
+ 'reflect','refract','reversebits','round','rsqrt','saturate',
+ 'sign','sin','sincos','sinh','smoothstep','sqrt','step','tan',
+ 'tanh','tex1D','tex1D','tex1Dbias','tex1Dgrad','tex1Dlod',
+ 'tex1Dproj','tex2D','tex2D','tex2Dbias','tex2Dgrad','tex2Dlod',
+ 'tex2Dproj','tex3D','tex3D','tex3Dbias','tex3Dgrad','tex3Dlod',
+ 'tex3Dproj','texCUBE','texCUBE','texCUBEbias','texCUBEgrad',
+ 'texCUBElod','texCUBEproj','transpose','trunc','WaveAllBitAnd',
+ 'WaveAllMax','WaveAllMin','WaveAllBitOr','WaveAllBitXor',
+ 'WaveAllEqual','WaveAllProduct','WaveAllSum','WaveAllTrue',
+ 'WaveAnyTrue','WaveBallot','WaveGetLaneCount','WaveGetLaneIndex',
+ 'WaveGetOrderedIndex','WaveIsHelperLane','WaveOnce',
+ 'WavePrefixProduct','WavePrefixSum','WaveReadFirstLane',
+ 'WaveReadLaneAt'),
+ prefix=r'\b', suffix=r'\b'),
+ Name.Builtin), # built-in functions
+ (words((
+ 'SV_ClipDistance','SV_ClipDistance0','SV_ClipDistance1',
+ 'SV_Culldistance','SV_CullDistance0','SV_CullDistance1',
+ 'SV_Coverage','SV_Depth','SV_DepthGreaterEqual',
+ 'SV_DepthLessEqual','SV_DispatchThreadID','SV_DomainLocation',
+ 'SV_GroupID','SV_GroupIndex','SV_GroupThreadID','SV_GSInstanceID',
+ 'SV_InnerCoverage','SV_InsideTessFactor','SV_InstanceID',
+ 'SV_IsFrontFace','SV_OutputControlPointID','SV_Position',
+ 'SV_PrimitiveID','SV_RenderTargetArrayIndex','SV_SampleIndex',
+ 'SV_StencilRef','SV_TessFactor','SV_VertexID',
+ 'SV_ViewportArrayIndex'),
+ prefix=r'\b', suffix=r'\b'),
+ Name.Decorator), # system-value semantics
+ (r'\bSV_Target[0-7]?\b', Name.Decorator),
+ (words((
+ 'allow_uav_condition','branch','call','domain','earlydepthstencil',
+ 'fastopt','flatten','forcecase','instance','loop','maxtessfactor',
+ 'numthreads','outputcontrolpoints','outputtopology','partitioning',
+ 'patchconstantfunc','unroll'),
+ prefix=r'\b', suffix=r'\b'),
+ Name.Decorator), # attributes
+ (r'[a-zA-Z_]\w*', Name),
+ (r'\\$', Comment.Preproc), # backslash at end of line -- usually macro continuation
+ (r'\s+', Text),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
+ r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ }
+
+
class PostScriptLexer(RegexLexer):
"""
Lexer for PostScript files.
@@ -233,8 +461,8 @@ class AsymptoteLexer(RegexLexer):
r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|'
r'picture|position|real|revolution|slice|splitface|ticksgridT|'
r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type),
- ('[a-zA-Z_]\w*:(?!:)', Name.Label),
- ('[a-zA-Z_]\w*', Name),
+ (r'[a-zA-Z_]\w*:(?!:)', Name.Label),
+ (r'[a-zA-Z_]\w*', Name),
],
'root': [
include('whitespace'),
@@ -334,9 +562,9 @@ class GnuplotLexer(RegexLexer):
(_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump',
'she$ll', 'test$'),
Keyword, 'noargs'),
- ('([a-zA-Z_]\w*)(\s*)(=)',
+ (r'([a-zA-Z_]\w*)(\s*)(=)',
bygroups(Name.Variable, Text, Operator), 'genericargs'),
- ('([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)',
+ (r'([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)',
bygroups(Name.Function, Text, Operator), 'genericargs'),
(r'@[a-zA-Z_]\w*', Name.Constant), # macros
(r';', Keyword),
@@ -382,7 +610,7 @@ class GnuplotLexer(RegexLexer):
(r'(\d+\.\d*|\.\d+)', Number.Float),
(r'-?\d+', Number.Integer),
('[,.~!%^&*+=|?:<>/-]', Operator),
- ('[{}()\[\]]', Punctuation),
+ (r'[{}()\[\]]', Punctuation),
(r'(eq|ne)\b', Operator.Word),
(r'([a-zA-Z_]\w*)(\s*)(\()',
bygroups(Name.Function, Text, Punctuation)),
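The new HLSLShaderLexer can be exercised directly; a short sketch (the shader body is made up and only serves to show keyword, type and semantic highlighting):

    from pygments.lexers.graphics import HLSLShaderLexer

    hlsl = "float4 main(float2 uv : TEXCOORD0) : SV_Target { return float4(uv, 0.0, 1.0); }"
    for token, value in HLSLShaderLexer().get_tokens(hlsl):
        if value.strip():
            print(token, repr(value))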
diff --git a/pygments/lexers/haskell.py b/pygments/lexers/haskell.py
index ffc3a3a2..b3884f5c 100644
--- a/pygments/lexers/haskell.py
+++ b/pygments/lexers/haskell.py
@@ -5,19 +5,19 @@
Lexers for Haskell and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
- default, include
+ default, include, inherit
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic
from pygments import unistring as uni
-__all__ = ['HaskellLexer', 'IdrisLexer', 'AgdaLexer', 'CryptolLexer',
+__all__ = ['HaskellLexer', 'HspecLexer', 'IdrisLexer', 'AgdaLexer', 'CryptolLexer',
'LiterateHaskellLexer', 'LiterateIdrisLexer', 'LiterateAgdaLexer',
'LiterateCryptolLexer', 'KokaLexer']
@@ -39,7 +39,7 @@ class HaskellLexer(RegexLexer):
flags = re.MULTILINE | re.UNICODE
reserved = ('case', 'class', 'data', 'default', 'deriving', 'do', 'else',
- 'if', 'in', 'infix[lr]?', 'instance',
+ 'family', 'if', 'in', 'infix[lr]?', 'instance',
'let', 'newtype', 'of', 'then', 'type', 'where', '_')
ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
@@ -63,17 +63,23 @@ class HaskellLexer(RegexLexer):
(r'^[_' + uni.Ll + r'][\w\']*', Name.Function),
(r"'?[_" + uni.Ll + r"][\w']*", Name),
(r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type),
+ (r"(')[" + uni.Lu + r"][\w\']*", Keyword.Type),
+ (r"(')\[[^\]]*\]", Keyword.Type), # tuples and lists get special treatment in GHC
+ (r"(')\([^)]*\)", Keyword.Type), # ..
# Operators
(r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
(r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
(r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
(r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
# Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
+ (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*_*[pP][+-]?\d(_*\d)*', Number.Float),
+ (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*\.[\da-fA-F](_*[\da-fA-F])*(_*[pP][+-]?\d(_*\d)*)?', Number.Float),
+ (r'\d(_*\d)*_*[eE][+-]?\d(_*\d)*', Number.Float),
+ (r'\d(_*\d)*\.\d(_*\d)*(_*[eE][+-]?\d(_*\d)*)?', Number.Float),
+ (r'0[bB]_*[01](_*[01])*', Number.Bin),
+ (r'0[oO]_*[0-7](_*[0-7])*', Number.Oct),
+ (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*', Number.Hex),
+ (r'\d(_*\d)*', Number.Integer),
# Character/String Literals
(r"'", String.Char, 'character'),
(r'"', String, 'string'),
@@ -151,6 +157,28 @@ class HaskellLexer(RegexLexer):
}
+class HspecLexer(HaskellLexer):
+ """
+ A Haskell lexer with support for Hspec constructs.
+
+ .. versionadded:: 2.4.0
+ """
+
+ name = 'Hspec'
+ aliases = ['hspec']
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'(it\s*)("[^"]*")', bygroups(Text, String.Doc)),
+ (r'(describe\s*)("[^"]*")', bygroups(Text, String.Doc)),
+ (r'(context\s*)("[^"]*")', bygroups(Text, String.Doc)),
+ inherit,
+ ],
+ }
+
+
class IdrisLexer(RegexLexer):
"""
A lexer for the dependently typed programming language Idris.
@@ -674,10 +702,10 @@ class KokaLexer(RegexLexer):
symbols = r'[$%&*+@!/\\^~=.:\-?|<>]+'
# symbol boundary: an operator keyword should not be followed by any of these
- sboundary = '(?!'+symbols+')'
+ sboundary = '(?!' + symbols + ')'
# name boundary: a keyword should not be followed by any of these
- boundary = '(?![\w/])'
+ boundary = r'(?![\w/])'
# koka token abstractions
tokenType = Name.Attribute
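The reworked numeric rules cover underscore digit separators as well as binary literals and hexadecimal floats; a quick check (the expression is invented):

    from pygments.lexers.haskell import HaskellLexer
    from pygments.token import Number

    src = "x = 0xff_ff + 0b1010_1010 + 6.022_140e23\n"
    print([(token, value) for token, value in HaskellLexer().get_tokens(src)
           if token in Number])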
diff --git a/pygments/lexers/haxe.py b/pygments/lexers/haxe.py
index e0e15c11..364ad344 100644
--- a/pygments/lexers/haxe.py
+++ b/pygments/lexers/haxe.py
@@ -5,7 +5,7 @@
Lexers for Haxe and related stuff.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -43,7 +43,7 @@ class HaxeLexer(ExtendedRegexLexer):
typeid = r'_*[A-Z]\w*'
# combined ident and dollar and idtype
- ident = r'(?:_*[a-z]\w*|_+[0-9]\w*|' + typeid + '|_+|\$\w+)'
+ ident = r'(?:_*[a-z]\w*|_+[0-9]\w*|' + typeid + r'|_+|\$\w+)'
binop = (r'(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|'
r'!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|'
@@ -182,7 +182,7 @@ class HaxeLexer(ExtendedRegexLexer):
(r'[0-9]+[eE][+\-]?[0-9]+', Number.Float),
(r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float),
(r'[0-9]+\.[0-9]+', Number.Float),
- (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float),
+ (r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float),
# Int
(r'0x[0-9a-fA-F]+', Number.Hex),
@@ -219,7 +219,7 @@ class HaxeLexer(ExtendedRegexLexer):
(r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
(r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
(r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, ('#pop', 'preproc-expr-chain')),
# Int
(r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'preproc-expr-chain')),
@@ -456,7 +456,7 @@ class HaxeLexer(ExtendedRegexLexer):
(r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
(r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
(r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, ('#pop', 'expr-chain')),
# Int
(r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'expr-chain')),
@@ -711,7 +711,7 @@ class HaxeLexer(ExtendedRegexLexer):
(r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, '#pop'),
(r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, '#pop'),
(r'[0-9]+\.[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, '#pop'),
+ (r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, '#pop'),
# Int
(r'0x[0-9a-fA-F]+', Number.Hex, '#pop'),
diff --git a/pygments/lexers/hdl.py b/pygments/lexers/hdl.py
index 04cef14e..57fb7ac9 100644
--- a/pygments/lexers/hdl.py
+++ b/pygments/lexers/hdl.py
@@ -5,7 +5,7 @@
Lexers for hardware descriptor languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/hexdump.py b/pygments/lexers/hexdump.py
index efe16fa7..cba49be7 100644
--- a/pygments/lexers/hexdump.py
+++ b/pygments/lexers/hexdump.py
@@ -5,12 +5,10 @@
Lexers for hexadecimal dumps.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-
from pygments.lexer import RegexLexer, bygroups, include
from pygments.token import Text, Name, Number, String, Punctuation
@@ -36,7 +34,7 @@ class HexdumpLexer(RegexLexer):
* ``od -t x1z FILE``
* ``xxd FILE``
* ``DEBUG.EXE FILE.COM`` and entering ``d`` to the prompt.
-
+
.. versionadded:: 2.1
"""
name = 'Hexdump'
@@ -48,12 +46,17 @@ class HexdumpLexer(RegexLexer):
'root': [
(r'\n', Text),
include('offset'),
- (r'('+hd+r'{2})(\-)('+hd+r'{2})', bygroups(Number.Hex, Punctuation, Number.Hex)),
+ (r'('+hd+r'{2})(\-)('+hd+r'{2})',
+ bygroups(Number.Hex, Punctuation, Number.Hex)),
(hd+r'{2}', Number.Hex),
- (r'(\s{2,3})(\>)(.{16})(\<)$', bygroups(Text, Punctuation, String, Punctuation), 'bracket-strings'),
- (r'(\s{2,3})(\|)(.{16})(\|)$', bygroups(Text, Punctuation, String, Punctuation), 'piped-strings'),
- (r'(\s{2,3})(\>)(.{1,15})(\<)$', bygroups(Text, Punctuation, String, Punctuation)),
- (r'(\s{2,3})(\|)(.{1,15})(\|)$', bygroups(Text, Punctuation, String, Punctuation)),
+ (r'(\s{2,3})(\>)(.{16})(\<)$',
+ bygroups(Text, Punctuation, String, Punctuation), 'bracket-strings'),
+ (r'(\s{2,3})(\|)(.{16})(\|)$',
+ bygroups(Text, Punctuation, String, Punctuation), 'piped-strings'),
+ (r'(\s{2,3})(\>)(.{1,15})(\<)$',
+ bygroups(Text, Punctuation, String, Punctuation)),
+ (r'(\s{2,3})(\|)(.{1,15})(\|)$',
+ bygroups(Text, Punctuation, String, Punctuation)),
(r'(\s{2,3})(.{1,15})$', bygroups(Text, String)),
(r'(\s{2,3})(.{16}|.{20})$', bygroups(Text, String), 'nonpiped-strings'),
(r'\s', Text),
@@ -72,7 +75,8 @@ class HexdumpLexer(RegexLexer):
(r'\n', Text),
include('offset'),
(hd+r'{2}', Number.Hex),
- (r'(\s{2,3})(\|)(.{1,16})(\|)$', bygroups(Text, Punctuation, String, Punctuation)),
+ (r'(\s{2,3})(\|)(.{1,16})(\|)$',
+ bygroups(Text, Punctuation, String, Punctuation)),
(r'\s', Text),
(r'^\*', Punctuation),
],
@@ -80,14 +84,16 @@ class HexdumpLexer(RegexLexer):
(r'\n', Text),
include('offset'),
(hd+r'{2}', Number.Hex),
- (r'(\s{2,3})(\>)(.{1,16})(\<)$', bygroups(Text, Punctuation, String, Punctuation)),
+ (r'(\s{2,3})(\>)(.{1,16})(\<)$',
+ bygroups(Text, Punctuation, String, Punctuation)),
(r'\s', Text),
(r'^\*', Punctuation),
],
'nonpiped-strings': [
(r'\n', Text),
include('offset'),
- (r'('+hd+r'{2})(\-)('+hd+r'{2})', bygroups(Number.Hex, Punctuation, Number.Hex)),
+ (r'('+hd+r'{2})(\-)('+hd+r'{2})',
+ bygroups(Number.Hex, Punctuation, Number.Hex)),
(hd+r'{2}', Number.Hex),
(r'(\s{19,})(.{1,20}?)$', bygroups(Text, String)),
(r'(\s{2,3})(.{1,20})$', bygroups(Text, String)),
diff --git a/pygments/lexers/html.py b/pygments/lexers/html.py
index 24733748..091379ce 100644
--- a/pygments/lexers/html.py
+++ b/pygments/lexers/html.py
@@ -5,7 +5,7 @@
Lexers for HTML, XML and related markup.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -220,7 +220,7 @@ class XmlLexer(RegexLexer):
(r'/?\s*>', Name.Tag, '#pop'),
],
'attr': [
- ('\s+', Text),
+ (r'\s+', Text),
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
@@ -313,7 +313,7 @@ class HamlLexer(ExtendedRegexLexer):
include('css'),
(r'%[\w:-]+', Name.Tag, 'tag'),
(r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
- (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
+ (r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)',
bygroups(Comment, Comment.Special, Comment),
'#pop'),
(r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
@@ -330,8 +330,8 @@ class HamlLexer(ExtendedRegexLexer):
'tag': [
include('css'),
- (r'\{(,\n|' + _dot + ')*?\}', using(RubyLexer)),
- (r'\[' + _dot + '*?\]', using(RubyLexer)),
+ (r'\{(,\n|' + _dot + r')*?\}', using(RubyLexer)),
+ (r'\[' + _dot + r'*?\]', using(RubyLexer)),
(r'\(', Text, 'html-attributes'),
(r'/[ \t]*\n', Punctuation, '#pop:2'),
(r'[<>]{1,2}(?=[ \t=])', Punctuation),
@@ -340,7 +340,7 @@ class HamlLexer(ExtendedRegexLexer):
'plain': [
(r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(' + _dot + '*?)(\})',
+ (r'(#\{)(' + _dot + r'*?)(\})',
bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
(r'\n', Text, 'root'),
],
@@ -373,7 +373,7 @@ class HamlLexer(ExtendedRegexLexer):
'filter-block': [
(r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
- (r'(#\{)(' + _dot + '*?)(\})',
+ (r'(#\{)(' + _dot + r'*?)(\})',
bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
(r'\n', Text, 'root'),
],
@@ -422,7 +422,7 @@ class ScamlLexer(ExtendedRegexLexer):
include('css'),
(r'%[\w:-]+', Name.Tag, 'tag'),
(r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
- (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
+ (r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)',
bygroups(Comment, Comment.Special, Comment),
'#pop'),
(r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
@@ -442,8 +442,8 @@ class ScamlLexer(ExtendedRegexLexer):
'tag': [
include('css'),
- (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
- (r'\[' + _dot + '*?\]', using(ScalaLexer)),
+ (r'\{(,\n|' + _dot + r')*?\}', using(ScalaLexer)),
+ (r'\[' + _dot + r'*?\]', using(ScalaLexer)),
(r'\(', Text, 'html-attributes'),
(r'/[ \t]*\n', Punctuation, '#pop:2'),
(r'[<>]{1,2}(?=[ \t=])', Punctuation),
@@ -452,7 +452,7 @@ class ScamlLexer(ExtendedRegexLexer):
'plain': [
(r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(' + _dot + '*?)(\})',
+ (r'(#\{)(' + _dot + r'*?)(\})',
bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
(r'\n', Text, 'root'),
],
@@ -485,7 +485,7 @@ class ScamlLexer(ExtendedRegexLexer):
'filter-block': [
(r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
- (r'(#\{)(' + _dot + '*?)(\})',
+ (r'(#\{)(' + _dot + r'*?)(\})',
bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
(r'\n', Text, 'root'),
],
@@ -530,7 +530,7 @@ class PugLexer(ExtendedRegexLexer):
'content': [
include('css'),
(r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
- (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
+ (r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)',
bygroups(Comment, Comment.Special, Comment),
'#pop'),
(r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
@@ -551,8 +551,8 @@ class PugLexer(ExtendedRegexLexer):
'tag': [
include('css'),
- (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
- (r'\[' + _dot + '*?\]', using(ScalaLexer)),
+ (r'\{(,\n|' + _dot + r')*?\}', using(ScalaLexer)),
+ (r'\[' + _dot + r'*?\]', using(ScalaLexer)),
(r'\(', Text, 'html-attributes'),
(r'/[ \t]*\n', Punctuation, '#pop:2'),
(r'[<>]{1,2}(?=[ \t=])', Punctuation),
@@ -561,7 +561,7 @@ class PugLexer(ExtendedRegexLexer):
'plain': [
(r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(' + _dot + '*?)(\})',
+ (r'(#\{)(' + _dot + r'*?)(\})',
bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
(r'\n', Text, 'root'),
],
@@ -594,7 +594,7 @@ class PugLexer(ExtendedRegexLexer):
'filter-block': [
(r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
- (r'(#\{)(' + _dot + '*?)(\})',
+ (r'(#\{)(' + _dot + r'*?)(\})',
bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
(r'\n', Text, 'root'),
],
diff --git a/pygments/lexers/idl.py b/pygments/lexers/idl.py
index a0b39492..87cafe6a 100644
--- a/pygments/lexers/idl.py
+++ b/pygments/lexers/idl.py
@@ -5,7 +5,7 @@
Lexers for IDL.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -53,7 +53,7 @@ class IDLLexer(RegexLexer):
'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder',
'bytscl', 'caldat', 'calendar', 'call_external',
'call_function', 'call_method', 'call_procedure', 'canny',
- 'catch', 'cd', 'cdf_\w*', 'ceil', 'chebyshev',
+ 'catch', 'cd', r'cdf_\w*', 'ceil', 'chebyshev',
'check_math',
'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen',
'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts',
@@ -87,7 +87,7 @@ class IDLLexer(RegexLexer):
'dlm_load', 'dlm_register', 'doc_library', 'double',
'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec',
'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn',
- 'eof', 'eos_\w*', 'erase', 'erf', 'erfc', 'erfcx',
+ 'eof', r'eos_\w*', 'erase', 'erf', 'erfc', 'erfcx',
'erode', 'errorplot', 'errplot', 'estimator_filter',
'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint',
'extrac', 'extract_slice', 'factorial', 'fft', 'filepath',
@@ -104,11 +104,11 @@ class IDLLexer(RegexLexer):
'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv',
'getwindows', 'get_drive_list', 'get_dxf_objects',
'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size',
- 'greg2jul', 'grib_\w*', 'grid3', 'griddata',
+ 'greg2jul', r'grib_\w*', 'grid3', 'griddata',
'grid_input', 'grid_tps', 'gs_iter',
- 'h5[adfgirst]_\w*', 'h5_browser', 'h5_close',
+ r'h5[adfgirst]_\w*', 'h5_browser', 'h5_close',
'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse',
- 'hanning', 'hash', 'hdf_\w*', 'heap_free',
+ 'hanning', 'hash', r'hdf_\w*', 'heap_free',
'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save',
'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal',
'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int',
@@ -156,7 +156,7 @@ class IDLLexer(RegexLexer):
'modifyct', 'moment', 'morph_close', 'morph_distance',
'morph_gradient', 'morph_hitormiss', 'morph_open',
'morph_thin', 'morph_tophat', 'multi', 'm_correlate',
- 'ncdf_\w*', 'newton', 'noise_hurl', 'noise_pick',
+ r'ncdf_\w*', 'newton', 'noise_hurl', 'noise_pick',
'noise_scatter', 'noise_slur', 'norm', 'n_elements',
'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy',
'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid',
@@ -249,7 +249,7 @@ class IDLLexer(RegexLexer):
tokens = {
'root': [
- (r'^\s*;.*?\n', Comment.Singleline),
+ (r'^\s*;.*?\n', Comment.Single),
(words(_RESERVED, prefix=r'\b', suffix=r'\b'), Keyword),
(words(_BUILTIN_LIB, prefix=r'\b', suffix=r'\b'), Name.Builtin),
(r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator),
diff --git a/pygments/lexers/igor.py b/pygments/lexers/igor.py
index 17fedf88..e2e2cdfa 100644
--- a/pygments/lexers/igor.py
+++ b/pygments/lexers/igor.py
@@ -5,7 +5,7 @@
Lexers for Igor Pro.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2018 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -49,51 +49,71 @@ class IgorLexer(RegexLexer):
)
operations = (
'Abort', 'AddFIFOData', 'AddFIFOVectData', 'AddMovieAudio', 'AddMovieFrame',
- 'AdoptFiles', 'APMath', 'Append', 'AppendImage', 'AppendLayoutObject',
- 'AppendMatrixContour', 'AppendText', 'AppendToGizmo', 'AppendToGraph',
- 'AppendToLayout', 'AppendToTable', 'AppendXYZContour', 'AutoPositionWindow',
- 'BackgroundInfo', 'Beep', 'BoundingBall', 'BoxSmooth', 'BrowseURL', 'BuildMenu',
- 'Button', 'cd', 'Chart', 'CheckBox', 'CheckDisplayed', 'ChooseColor', 'Close',
- 'CloseHelp', 'CloseMovie', 'CloseProc', 'ColorScale', 'ColorTab2Wave',
- 'Concatenate', 'ControlBar', 'ControlInfo', 'ControlUpdate',
- 'ConvertGlobalStringTextEncoding', 'ConvexHull', 'Convolve', 'CopyFile',
- 'CopyFolder', 'CopyScales', 'Correlate', 'CreateAliasShortcut', 'CreateBrowser',
- 'Cross', 'CtrlBackground', 'CtrlFIFO', 'CtrlNamedBackground', 'Cursor',
- 'CurveFit', 'CustomControl', 'CWT', 'Debugger', 'DebuggerOptions', 'DefaultFont',
- 'DefaultGuiControls', 'DefaultGuiFont', 'DefaultTextEncoding', 'DefineGuide',
- 'DelayUpdate', 'DeleteAnnotations', 'DeleteFile', 'DeleteFolder', 'DeletePoints',
- 'Differentiate', 'dir', 'Display', 'DisplayHelpTopic', 'DisplayProcedure',
- 'DoAlert', 'DoIgorMenu', 'DoUpdate', 'DoWindow', 'DoXOPIdle', 'DPSS',
- 'DrawAction', 'DrawArc', 'DrawBezier', 'DrawLine', 'DrawOval', 'DrawPICT',
- 'DrawPoly', 'DrawRect', 'DrawRRect', 'DrawText', 'DrawUserShape', 'DSPDetrend',
- 'DSPPeriodogram', 'Duplicate', 'DuplicateDataFolder', 'DWT', 'EdgeStats', 'Edit',
- 'ErrorBars', 'EstimatePeakSizes', 'Execute', 'ExecuteScriptText',
+ 'AddWavesToBoxPlot', 'AddWavesToViolinPlot', 'AdoptFiles', 'APMath', 'Append',
+ 'AppendBoxPlot', 'AppendImage', 'AppendLayoutObject', 'AppendMatrixContour',
+ 'AppendText', 'AppendToGizmo', 'AppendToGraph', 'AppendToLayout', 'AppendToTable',
+ 'AppendViolinPlot', 'AppendXYZContour', 'AutoPositionWindow',
+ 'AxonTelegraphFindServers', 'BackgroundInfo', 'Beep', 'BoundingBall', 'BoxSmooth',
+ 'BrowseURL', 'BuildMenu', 'Button', 'cd', 'Chart', 'CheckBox', 'CheckDisplayed',
+ 'ChooseColor', 'Close', 'CloseHelp', 'CloseMovie', 'CloseProc', 'ColorScale',
+ 'ColorTab2Wave', 'Concatenate', 'ControlBar', 'ControlInfo', 'ControlUpdate',
+ 'ConvertGlobalStringTextEncoding', 'ConvexHull', 'Convolve', 'CopyDimLabels',
+ 'CopyFile', 'CopyFolder', 'CopyScales', 'Correlate', 'CreateAliasShortcut',
+ 'CreateBrowser', 'Cross', 'CtrlBackground', 'CtrlFIFO', 'CtrlNamedBackground',
+ 'Cursor', 'CurveFit', 'CustomControl', 'CWT', 'DAQmx_AI_SetupReader',
+ 'DAQmx_AO_SetOutputs', 'DAQmx_CTR_CountEdges', 'DAQmx_CTR_OutputPulse',
+ 'DAQmx_CTR_Period', 'DAQmx_CTR_PulseWidth', 'DAQmx_DIO_Config',
+ 'DAQmx_DIO_WriteNewData', 'DAQmx_Scan', 'DAQmx_WaveformGen', 'Debugger',
+ 'DebuggerOptions', 'DefaultFont', 'DefaultGuiControls', 'DefaultGuiFont',
+ 'DefaultTextEncoding', 'DefineGuide', 'DelayUpdate', 'DeleteAnnotations',
+ 'DeleteFile', 'DeleteFolder', 'DeletePoints', 'Differentiate', 'dir', 'Display',
+ 'DisplayHelpTopic', 'DisplayProcedure', 'DoAlert', 'DoIgorMenu', 'DoUpdate',
+ 'DoWindow', 'DoXOPIdle', 'DPSS', 'DrawAction', 'DrawArc', 'DrawBezier',
+ 'DrawLine', 'DrawOval', 'DrawPICT', 'DrawPoly', 'DrawRect', 'DrawRRect',
+ 'DrawText', 'DrawUserShape', 'DSPDetrend', 'DSPPeriodogram', 'Duplicate',
+ 'DuplicateDataFolder', 'DWT', 'EdgeStats', 'Edit', 'ErrorBars',
+ 'EstimatePeakSizes', 'Execute', 'ExecuteScriptText', 'ExperimentInfo',
'ExperimentModified', 'ExportGizmo', 'Extract', 'FastGaussTransform', 'FastOp',
- 'FBinRead', 'FBinWrite', 'FFT', 'FIFOStatus', 'FIFO2Wave', 'FilterFIR',
+ 'FBinRead', 'FBinWrite', 'FFT', 'FGetPos', 'FIFOStatus', 'FIFO2Wave', 'FilterFIR',
'FilterIIR', 'FindAPeak', 'FindContour', 'FindDuplicates', 'FindLevel',
'FindLevels', 'FindPeak', 'FindPointsInPoly', 'FindRoots', 'FindSequence',
- 'FindValue', 'FPClustering', 'fprintf', 'FReadLine', 'FSetPos', 'FStatus',
- 'FTPCreateDirectory', 'FTPDelete', 'FTPDownload', 'FTPUpload', 'FuncFit',
- 'FuncFitMD', 'GBLoadWave', 'GetAxis', 'GetCamera', 'GetFileFolderInfo',
+ 'FindValue', 'FMaxFlat', 'FPClustering', 'fprintf', 'FReadLine', 'FSetPos',
+ 'FStatus', 'FTPCreateDirectory', 'FTPDelete', 'FTPDownload', 'FTPUpload',
+ 'FuncFit', 'FuncFitMD', 'GBLoadWave', 'GetAxis', 'GetCamera', 'GetFileFolderInfo',
'GetGizmo', 'GetLastUserMenuInfo', 'GetMarquee', 'GetMouse', 'GetSelection',
- 'GetWindow', 'GPIBReadBinaryWave2', 'GPIBReadBinary2', 'GPIBReadWave2',
- 'GPIBRead2', 'GPIBWriteBinaryWave2', 'GPIBWriteBinary2', 'GPIBWriteWave2',
- 'GPIBWrite2', 'GPIB2', 'GraphNormal', 'GraphWaveDraw', 'GraphWaveEdit', 'Grep',
- 'GroupBox', 'Hanning', 'HDF5CloseFile', 'HDF5CloseGroup', 'HDF5ConvertColors',
- 'HDF5CreateFile', 'HDF5CreateGroup', 'HDF5CreateLink', 'HDF5Dump',
- 'HDF5DumpErrors', 'HDF5DumpState', 'HDF5ListAttributes', 'HDF5ListGroup',
- 'HDF5LoadData', 'HDF5LoadGroup', 'HDF5LoadImage', 'HDF5OpenFile', 'HDF5OpenGroup',
- 'HDF5SaveData', 'HDF5SaveGroup', 'HDF5SaveImage', 'HDF5TestOperation',
- 'HDF5UnlinkObject', 'HideIgorMenus', 'HideInfo', 'HideProcedures', 'HideTools',
- 'HilbertTransform', 'Histogram', 'ICA', 'IFFT', 'ImageAnalyzeParticles',
- 'ImageBlend', 'ImageBoundaryToMask', 'ImageEdgeDetection', 'ImageFileInfo',
- 'ImageFilter', 'ImageFocus', 'ImageFromXYZ', 'ImageGenerateROIMask', 'ImageGLCM',
+ 'GetWindow', 'GISCreateVectorLayer', 'GISGetRasterInfo',
+ 'GISGetRegisteredFileInfo', 'GISGetVectorLayerInfo', 'GISLoadRasterData',
+ 'GISLoadVectorData', 'GISRasterizeVectorData', 'GISRegisterFile',
+ 'GISTransformCoords', 'GISUnRegisterFile', 'GISWriteFieldData',
+ 'GISWriteGeometryData', 'GISWriteRaster', 'GPIBReadBinaryWave2',
+ 'GPIBReadBinary2', 'GPIBReadWave2', 'GPIBRead2', 'GPIBWriteBinaryWave2',
+ 'GPIBWriteBinary2', 'GPIBWriteWave2', 'GPIBWrite2', 'GPIB2', 'GraphNormal',
+ 'GraphWaveDraw', 'GraphWaveEdit', 'Grep', 'GroupBox', 'Hanning', 'HDFInfo',
+ 'HDFReadImage', 'HDFReadSDS', 'HDFReadVset', 'HDF5CloseFile', 'HDF5CloseGroup',
+ 'HDF5ConvertColors', 'HDF5CreateFile', 'HDF5CreateGroup', 'HDF5CreateLink',
+ 'HDF5Dump', 'HDF5DumpErrors', 'HDF5DumpState', 'HDF5FlushFile',
+ 'HDF5ListAttributes', 'HDF5ListGroup', 'HDF5LoadData', 'HDF5LoadGroup',
+ 'HDF5LoadImage', 'HDF5OpenFile', 'HDF5OpenGroup', 'HDF5SaveData', 'HDF5SaveGroup',
+ 'HDF5SaveImage', 'HDF5TestOperation', 'HDF5UnlinkObject', 'HideIgorMenus',
+ 'HideInfo', 'HideProcedures', 'HideTools', 'HilbertTransform', 'Histogram', 'ICA',
+ 'IFFT', 'ImageAnalyzeParticles', 'ImageBlend', 'ImageBoundaryToMask',
+ 'ImageComposite', 'ImageEdgeDetection', 'ImageFileInfo', 'ImageFilter',
+ 'ImageFocus', 'ImageFromXYZ', 'ImageGenerateROIMask', 'ImageGLCM',
'ImageHistModification', 'ImageHistogram', 'ImageInterpolate', 'ImageLineProfile',
'ImageLoad', 'ImageMorphology', 'ImageRegistration', 'ImageRemoveBackground',
'ImageRestore', 'ImageRotate', 'ImageSave', 'ImageSeedFill', 'ImageSkeleton3d',
'ImageSnake', 'ImageStats', 'ImageThreshold', 'ImageTransform',
'ImageUnwrapPhase', 'ImageWindow', 'IndexSort', 'InsertPoints', 'Integrate',
'IntegrateODE', 'Integrate2D', 'Interpolate2', 'Interpolate3D', 'Interp3DPath',
+ 'ITCCloseAll2', 'ITCCloseDevice2', 'ITCConfigAllChannels2',
+ 'ITCConfigChannelReset2', 'ITCConfigChannelUpload2', 'ITCConfigChannel2',
+ 'ITCFIFOAvailableAll2', 'ITCFIFOAvailable2', 'ITCGetAllChannelsConfig2',
+ 'ITCGetChannelConfig2', 'ITCGetCurrentDevice2', 'ITCGetDeviceInfo2',
+ 'ITCGetDevices2', 'ITCGetErrorString2', 'ITCGetSerialNumber2', 'ITCGetState2',
+ 'ITCGetVersions2', 'ITCInitialize2', 'ITCOpenDevice2', 'ITCReadADC2',
+ 'ITCReadDigital2', 'ITCReadTimer2', 'ITCSelectDevice2', 'ITCSetDAC2',
+ 'ITCSetGlobals2', 'ITCSetModes2', 'ITCSetState2', 'ITCStartAcq2', 'ITCStopAcq2',
+ 'ITCUpdateFIFOPositionAll2', 'ITCUpdateFIFOPosition2', 'ITCWriteDigital2',
'JCAMPLoadWave', 'JointHistogram', 'KillBackground', 'KillControl',
'KillDataFolder', 'KillFIFO', 'KillFreeAxis', 'KillPath', 'KillPICTs',
'KillStrings', 'KillVariables', 'KillWaves', 'KillWindow', 'KMeans', 'Label',
@@ -104,39 +124,48 @@ class IgorLexer(RegexLexer):
'MatrixFilter', 'MatrixGaussJ', 'MatrixGLM', 'MatrixInverse', 'MatrixLinearSolve',
'MatrixLinearSolveTD', 'MatrixLLS', 'MatrixLUBkSub', 'MatrixLUD', 'MatrixLUDTD',
'MatrixMultiply', 'MatrixOP', 'MatrixSchur', 'MatrixSolve', 'MatrixSVBkSub',
- 'MatrixSVD', 'MatrixTranspose', 'MeasureStyledText', 'MLLoadWave', 'Modify',
- 'ModifyBrowser', 'ModifyCamera', 'ModifyContour', 'ModifyControl',
- 'ModifyControlList', 'ModifyFreeAxis', 'ModifyGizmo', 'ModifyGraph',
- 'ModifyImage', 'ModifyLayout', 'ModifyPanel', 'ModifyTable', 'ModifyWaterfall',
- 'MoveDataFolder', 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow',
- 'MoveVariable', 'MoveWave', 'MoveWindow', 'MultiTaperPSD',
- 'MultiThreadingControl', 'NeuralNetworkRun', 'NeuralNetworkTrain', 'NewCamera',
- 'NewDataFolder', 'NewFIFO', 'NewFIFOChan', 'NewFreeAxis', 'NewGizmo', 'NewImage',
- 'NewLayout', 'NewMovie', 'NewNotebook', 'NewPanel', 'NewPath', 'NewWaterfall',
- 'NI4882', 'Note', 'Notebook', 'NotebookAction', 'Open', 'OpenHelp',
- 'OpenNotebook', 'Optimize', 'ParseOperationTemplate', 'PathInfo', 'PauseForUser',
- 'PauseUpdate', 'PCA', 'PlayMovie', 'PlayMovieAction', 'PlaySound',
- 'PopupContextualMenu', 'PopupMenu', 'Preferences', 'PrimeFactors', 'Print',
- 'printf', 'PrintGraphs', 'PrintLayout', 'PrintNotebook', 'PrintSettings',
- 'PrintTable', 'Project', 'PulseStats', 'PutScrapText', 'pwd', 'Quit',
- 'RatioFromNumber', 'Redimension', 'Remove', 'RemoveContour', 'RemoveFromGizmo',
- 'RemoveFromGraph', 'RemoveFromLayout', 'RemoveFromTable', 'RemoveImage',
- 'RemoveLayoutObjects', 'RemovePath', 'Rename', 'RenameDataFolder', 'RenamePath',
- 'RenamePICT', 'RenameWindow', 'ReorderImages', 'ReorderTraces', 'ReplaceText',
- 'ReplaceWave', 'Resample', 'ResumeUpdate', 'Reverse', 'Rotate', 'Save',
- 'SaveData', 'SaveExperiment', 'SaveGraphCopy', 'SaveNotebook',
+ 'MatrixSVD', 'MatrixTranspose', 'MCC_FindServers', 'MeasureStyledText',
+ 'MFR_CheckForNewBricklets',
+ 'MFR_CloseResultFile', 'MFR_CreateOverviewTable', 'MFR_GetBrickletCount',
+ 'MFR_GetBrickletData', 'MFR_GetBrickletDeployData', 'MFR_GetBrickletMetaData',
+ 'MFR_GetBrickletRawData', 'MFR_GetReportTemplate', 'MFR_GetResultFileMetaData',
+ 'MFR_GetResultFileName', 'MFR_GetVernissageVersion', 'MFR_GetVersion',
+ 'MFR_GetXOPErrorMessage', 'MFR_OpenResultFile',
+ 'MLLoadWave', 'Modify', 'ModifyBoxPlot', 'ModifyBrowser', 'ModifyCamera',
+ 'ModifyContour', 'ModifyControl', 'ModifyControlList', 'ModifyFreeAxis',
+ 'ModifyGizmo', 'ModifyGraph', 'ModifyImage', 'ModifyLayout', 'ModifyPanel',
+ 'ModifyTable', 'ModifyViolinPlot', 'ModifyWaterfall', 'MoveDataFolder',
+ 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow', 'MoveVariable',
+ 'MoveWave', 'MoveWindow', 'MultiTaperPSD', 'MultiThreadingControl',
+ 'NC_CloseFile', 'NC_DumpErrors', 'NC_Inquire', 'NC_ListAttributes',
+ 'NC_ListObjects', 'NC_LoadData', 'NC_OpenFile', 'NeuralNetworkRun',
+ 'NeuralNetworkTrain', 'NewCamera', 'NewDataFolder', 'NewFIFO', 'NewFIFOChan',
+ 'NewFreeAxis', 'NewGizmo', 'NewImage', 'NewLayout', 'NewMovie', 'NewNotebook',
+ 'NewPanel', 'NewPath', 'NewWaterfall', 'NILoadWave', 'NI4882', 'Note', 'Notebook',
+ 'NotebookAction', 'Open', 'OpenHelp', 'OpenNotebook', 'Optimize',
+ 'ParseOperationTemplate', 'PathInfo', 'PauseForUser', 'PauseUpdate', 'PCA',
+ 'PlayMovie', 'PlayMovieAction', 'PlaySound', 'PopupContextualMenu', 'PopupMenu',
+ 'Preferences', 'PrimeFactors', 'Print', 'printf', 'PrintGraphs', 'PrintLayout',
+ 'PrintNotebook', 'PrintSettings', 'PrintTable', 'Project', 'PulseStats',
+ 'PutScrapText', 'pwd', 'Quit', 'RatioFromNumber', 'Redimension', 'Remez',
+ 'Remove', 'RemoveContour', 'RemoveFromGizmo', 'RemoveFromGraph',
+ 'RemoveFromLayout', 'RemoveFromTable', 'RemoveImage', 'RemoveLayoutObjects',
+ 'RemovePath', 'Rename', 'RenameDataFolder', 'RenamePath', 'RenamePICT',
+ 'RenameWindow', 'ReorderImages', 'ReorderTraces', 'ReplaceText', 'ReplaceWave',
+ 'Resample', 'ResumeUpdate', 'Reverse', 'Rotate', 'Save', 'SaveData',
+ 'SaveExperiment', 'SaveGizmoCopy', 'SaveGraphCopy', 'SaveNotebook',
'SavePackagePreferences', 'SavePICT', 'SaveTableCopy', 'SetActiveSubwindow',
'SetAxis', 'SetBackground', 'SetDashPattern', 'SetDataFolder', 'SetDimLabel',
- 'SetDrawEnv', 'SetDrawLayer', 'SetFileFolderInfo', 'SetFormula', 'SetIgorHook',
- 'SetIgorMenuMode', 'SetIgorOption', 'SetMarquee', 'SetProcessSleep',
- 'SetRandomSeed', 'SetScale', 'SetVariable', 'SetWaveLock', 'SetWaveTextEncoding',
- 'SetWindow', 'ShowIgorMenus', 'ShowInfo', 'ShowTools', 'Silent', 'Sleep',
- 'Slider', 'Smooth', 'SmoothCustom', 'Sort', 'SortColumns', 'SoundInRecord',
- 'SoundInSet', 'SoundInStartChart', 'SoundInStatus', 'SoundInStopChart',
- 'SoundLoadWave', 'SoundSaveWave', 'SphericalInterpolate', 'SphericalTriangulate',
- 'SplitString', 'SplitWave', 'sprintf', 'sscanf', 'Stack', 'StackWindows',
- 'StatsAngularDistanceTest', 'StatsANOVA1Test', 'StatsANOVA2NRTest',
- 'StatsANOVA2RMTest', 'StatsANOVA2Test', 'StatsChiTest',
+ 'SetDrawEnv', 'SetDrawLayer', 'SetFileFolderInfo', 'SetFormula', 'SetIdlePeriod',
+ 'SetIgorHook', 'SetIgorMenuMode', 'SetIgorOption', 'SetMarquee',
+ 'SetProcessSleep', 'SetRandomSeed', 'SetScale', 'SetVariable', 'SetWaveLock',
+ 'SetWaveTextEncoding', 'SetWindow', 'ShowIgorMenus', 'ShowInfo', 'ShowTools',
+ 'Silent', 'Sleep', 'Slider', 'Smooth', 'SmoothCustom', 'Sort', 'SortColumns',
+ 'SoundInRecord', 'SoundInSet', 'SoundInStartChart', 'SoundInStatus',
+ 'SoundInStopChart', 'SoundLoadWave', 'SoundSaveWave', 'SphericalInterpolate',
+ 'SphericalTriangulate', 'SplitString', 'SplitWave', 'sprintf', 'SQLHighLevelOp',
+ 'sscanf', 'Stack', 'StackWindows', 'StatsAngularDistanceTest', 'StatsANOVA1Test',
+ 'StatsANOVA2NRTest', 'StatsANOVA2RMTest', 'StatsANOVA2Test', 'StatsChiTest',
'StatsCircularCorrelationTest', 'StatsCircularMeans', 'StatsCircularMoments',
'StatsCircularTwoSampleTest', 'StatsCochranTest', 'StatsContingencyTable',
'StatsDIPTest', 'StatsDunnettTest', 'StatsFriedmanTest', 'StatsFTest',
@@ -148,23 +177,30 @@ class IgorLexer(RegexLexer):
'StatsSignTest', 'StatsSRTest', 'StatsTTest', 'StatsTukeyTest',
'StatsVariancesTest', 'StatsWatsonUSquaredTest', 'StatsWatsonWilliamsTest',
'StatsWheelerWatsonTest', 'StatsWilcoxonRankTest', 'StatsWRCorrelationTest',
- 'String', 'StructGet', 'StructPut', 'SumDimension', 'SumSeries', 'TabControl',
- 'Tag', 'TextBox', 'ThreadGroupPutDF', 'ThreadStart', 'Tile', 'TileWindows',
+ 'STFT', 'String', 'StructFill', 'StructGet', 'StructPut', 'SumDimension',
+ 'SumSeries', 'TabControl', 'Tag', 'TDMLoadData', 'TDMSaveData', 'TextBox',
+ 'ThreadGroupPutDF', 'ThreadStart', 'TickWavesFromAxis', 'Tile', 'TileWindows',
'TitleBox', 'ToCommandLine', 'ToolsGrid', 'Triangulate3d', 'Unwrap', 'URLRequest',
'ValDisplay', 'Variable', 'VDTClosePort2', 'VDTGetPortList2', 'VDTGetStatus2',
'VDTOpenPort2', 'VDTOperationsPort2', 'VDTReadBinaryWave2', 'VDTReadBinary2',
'VDTReadHexWave2', 'VDTReadHex2', 'VDTReadWave2', 'VDTRead2', 'VDTTerminalPort2',
'VDTWriteBinaryWave2', 'VDTWriteBinary2', 'VDTWriteHexWave2', 'VDTWriteHex2',
- 'VDTWriteWave2', 'VDTWrite2', 'VDT2', 'WaveMeanStdv', 'WaveStats',
+ 'VDTWriteWave2', 'VDTWrite2', 'VDT2', 'VISAControl', 'VISARead', 'VISAReadBinary',
+ 'VISAReadBinaryWave', 'VISAReadWave', 'VISAWrite', 'VISAWriteBinary',
+ 'VISAWriteBinaryWave', 'VISAWriteWave', 'WaveMeanStdv', 'WaveStats',
'WaveTransform', 'wfprintf', 'WignerTransform', 'WindowFunction', 'XLLoadWave'
)
functions = (
'abs', 'acos', 'acosh', 'AddListItem', 'AiryA', 'AiryAD', 'AiryB', 'AiryBD',
'alog', 'AnnotationInfo', 'AnnotationList', 'area', 'areaXY', 'asin', 'asinh',
- 'atan', 'atanh', 'atan2', 'AxisInfo', 'AxisList', 'AxisValFromPixel', 'Besseli',
- 'Besselj', 'Besselk', 'Bessely', 'beta', 'betai', 'BinarySearch',
- 'BinarySearchInterp', 'binomial', 'binomialln', 'binomialNoise', 'cabs',
- 'CaptureHistory', 'CaptureHistoryStart', 'ceil', 'cequal', 'char2num',
+ 'atan', 'atanh', 'atan2', 'AxisInfo', 'AxisList', 'AxisValFromPixel',
+ 'AxonTelegraphAGetDataNum', 'AxonTelegraphAGetDataString',
+ 'AxonTelegraphAGetDataStruct', 'AxonTelegraphGetDataNum',
+ 'AxonTelegraphGetDataString', 'AxonTelegraphGetDataStruct',
+ 'AxonTelegraphGetTimeoutMs', 'AxonTelegraphSetTimeoutMs', 'Base64Decode',
+ 'Base64Encode', 'Besseli', 'Besselj', 'Besselk', 'Bessely', 'beta', 'betai',
+ 'BinarySearch', 'BinarySearchInterp', 'binomial', 'binomialln', 'binomialNoise',
+ 'cabs', 'CaptureHistory', 'CaptureHistoryStart', 'ceil', 'cequal', 'char2num',
'chebyshev', 'chebyshevU', 'CheckName', 'ChildWindowList', 'CleanupName', 'cmplx',
'cmpstr', 'conj', 'ContourInfo', 'ContourNameList', 'ContourNameToWaveRef',
'ContourZ', 'ControlNameList', 'ConvertTextEncoding', 'cos', 'cosh',
@@ -172,37 +208,70 @@ class IgorLexer(RegexLexer):
'CreationDate', 'csc', 'csch', 'CsrInfo', 'CsrWave', 'CsrWaveRef', 'CsrXWave',
'CsrXWaveRef', 'CTabList', 'DataFolderDir', 'DataFolderExists',
'DataFolderRefsEqual', 'DataFolderRefStatus', 'date', 'datetime', 'DateToJulian',
- 'date2secs', 'Dawson', 'DDERequestString', 'defined', 'deltax', 'digamma',
- 'dilogarithm', 'DimDelta', 'DimOffset', 'DimSize', 'ei', 'enoise', 'equalWaves',
- 'erf', 'erfc', 'erfcw', 'exists', 'exp', 'ExpConvExp', 'ExpConvExpFit',
- 'ExpConvExpFitBL', 'ExpConvExpFit1Shape', 'ExpConvExpFit1ShapeBL', 'ExpGauss',
- 'ExpGaussFit', 'ExpGaussFitBL', 'ExpGaussFit1Shape', 'ExpGaussFit1ShapeBL',
- 'expInt', 'expIntegralE1', 'expNoise', 'factorial', 'fakedata', 'faverage',
- 'faverageXY', 'FetchURL', 'FindDimLabel', 'FindListItem', 'floor', 'FontList',
- 'FontSizeHeight', 'FontSizeStringWidth', 'FresnelCos', 'FresnelSin',
+ 'date2secs', 'Dawson', 'defined', 'deltax', 'digamma', 'dilogarithm', 'DimDelta',
+ 'DimOffset', 'DimSize', 'ei', 'enoise', 'equalWaves', 'erf', 'erfc', 'erfcw',
+ 'exists', 'exp', 'expInt', 'expIntegralE1', 'expNoise', 'factorial', 'Faddeeva',
+ 'fakedata', 'faverage', 'faverageXY', 'fDAQmx_AI_GetReader',
+ 'fDAQmx_AO_UpdateOutputs', 'fDAQmx_ConnectTerminals', 'fDAQmx_CTR_Finished',
+ 'fDAQmx_CTR_IsFinished', 'fDAQmx_CTR_IsPulseFinished', 'fDAQmx_CTR_ReadCounter',
+ 'fDAQmx_CTR_ReadWithOptions', 'fDAQmx_CTR_SetPulseFrequency', 'fDAQmx_CTR_Start',
+ 'fDAQmx_DeviceNames', 'fDAQmx_DIO_Finished', 'fDAQmx_DIO_PortWidth',
+ 'fDAQmx_DIO_Read', 'fDAQmx_DIO_Write', 'fDAQmx_DisconnectTerminals',
+ 'fDAQmx_ErrorString', 'fDAQmx_ExternalCalDate', 'fDAQmx_NumAnalogInputs',
+ 'fDAQmx_NumAnalogOutputs', 'fDAQmx_NumCounters', 'fDAQmx_NumDIOPorts',
+ 'fDAQmx_ReadChan', 'fDAQmx_ReadNamedChan', 'fDAQmx_ResetDevice',
+ 'fDAQmx_ScanGetAvailable', 'fDAQmx_ScanGetNextIndex', 'fDAQmx_ScanStart',
+ 'fDAQmx_ScanStop', 'fDAQmx_ScanWait', 'fDAQmx_ScanWaitWithTimeout',
+ 'fDAQmx_SelfCalDate', 'fDAQmx_SelfCalibration', 'fDAQmx_WaveformStart',
+ 'fDAQmx_WaveformStop', 'fDAQmx_WF_IsFinished', 'fDAQmx_WF_WaitUntilFinished',
+ 'fDAQmx_WriteChan', 'FetchURL', 'FindDimLabel', 'FindListItem', 'floor',
+ 'FontList', 'FontSizeHeight', 'FontSizeStringWidth', 'FresnelCos', 'FresnelSin',
'FuncRefInfo', 'FunctionInfo', 'FunctionList', 'FunctionPath', 'gamma',
'gammaEuler', 'gammaInc', 'gammaNoise', 'gammln', 'gammp', 'gammq', 'Gauss',
- 'GaussFit', 'GaussFitBL', 'GaussFit1Width', 'GaussFit1WidthBL', 'Gauss1D',
- 'Gauss2D', 'gcd', 'GetBrowserLine', 'GetBrowserSelection', 'GetDataFolder',
- 'GetDataFolderDFR', 'GetDefaultFont', 'GetDefaultFontSize', 'GetDefaultFontStyle',
- 'GetDimLabel', 'GetEnvironmentVariable', 'GetErrMessage', 'GetFormula',
- 'GetIndependentModuleName', 'GetIndexedObjName', 'GetIndexedObjNameDFR',
- 'GetKeyState', 'GetRTErrMessage', 'GetRTError', 'GetRTLocation', 'GetRTLocInfo',
- 'GetRTStackInfo', 'GetScrapText', 'GetUserData', 'GetWavesDataFolder',
- 'GetWavesDataFolderDFR', 'GizmoInfo', 'GizmoScale', 'gnoise', 'GrepList',
- 'GrepString', 'GuideInfo', 'GuideNameList', 'Hash', 'hcsr', 'HDF5AttributeInfo',
+ 'Gauss1D', 'Gauss2D', 'gcd', 'GetBrowserLine', 'GetBrowserSelection',
+ 'GetDataFolder', 'GetDataFolderDFR', 'GetDefaultFont', 'GetDefaultFontSize',
+ 'GetDefaultFontStyle', 'GetDimLabel', 'GetEnvironmentVariable', 'GetErrMessage',
+ 'GetFormula', 'GetIndependentModuleName', 'GetIndexedObjName',
+ 'GetIndexedObjNameDFR', 'GetKeyState', 'GetRTErrMessage', 'GetRTError',
+ 'GetRTLocation', 'GetRTLocInfo', 'GetRTStackInfo', 'GetScrapText', 'GetUserData',
+ 'GetWavesDataFolder', 'GetWavesDataFolderDFR', 'GISGetAllFileFormats',
+ 'GISSRefsAreEqual', 'GizmoInfo', 'GizmoScale', 'gnoise', 'GrepList', 'GrepString',
+ 'GuideInfo', 'GuideNameList', 'Hash', 'hcsr', 'HDF5AttributeInfo',
'HDF5DatasetInfo', 'HDF5LibraryInfo', 'HDF5TypeInfo', 'hermite', 'hermiteGauss',
'HyperGNoise', 'HyperGPFQ', 'HyperG0F1', 'HyperG1F1', 'HyperG2F1', 'IgorInfo',
'IgorVersion', 'imag', 'ImageInfo', 'ImageNameList', 'ImageNameToWaveRef',
- 'IndependentModuleList', 'IndexedDir', 'IndexedFile', 'Inf', 'Integrate1D',
- 'interp', 'Interp2D', 'Interp3D', 'inverseERF', 'inverseERFC', 'ItemsInList',
- 'JacobiCn', 'JacobiSn', 'JulianToDate', 'Laguerre', 'LaguerreA', 'LaguerreGauss',
- 'LambertW', 'LayoutInfo', 'leftx', 'LegendreA', 'limit', 'ListMatch',
- 'ListToTextWave', 'ListToWaveRefWave', 'ln', 'log', 'logNormalNoise',
- 'LorentzianFit', 'LorentzianFitBL', 'LorentzianFit1Width',
- 'LorentzianFit1WidthBL', 'lorentzianNoise', 'LowerStr', 'MacroList', 'magsqr',
- 'MandelbrotPoint', 'MarcumQ', 'MatrixCondition', 'MatrixDet', 'MatrixDot',
- 'MatrixRank', 'MatrixTrace', 'max', 'mean', 'median', 'min', 'mod', 'ModDate',
+ 'IndependentModuleList', 'IndexedDir', 'IndexedFile', 'IndexToScale', 'Inf',
+ 'Integrate1D', 'interp', 'Interp2D', 'Interp3D', 'inverseERF', 'inverseERFC',
+ 'ItemsInList', 'JacobiCn', 'JacobiSn', 'JulianToDate', 'Laguerre', 'LaguerreA',
+ 'LaguerreGauss', 'LambertW', 'LayoutInfo', 'leftx', 'LegendreA', 'limit',
+ 'ListMatch', 'ListToTextWave', 'ListToWaveRefWave', 'ln', 'log', 'logNormalNoise',
+ 'lorentzianNoise', 'LowerStr', 'MacroList', 'magsqr', 'MandelbrotPoint',
+ 'MarcumQ', 'MatrixCondition', 'MatrixDet', 'MatrixDot', 'MatrixRank',
+ 'MatrixTrace', 'max', 'MCC_AutoBridgeBal', 'MCC_AutoFastComp',
+ 'MCC_AutoPipetteOffset', 'MCC_AutoSlowComp', 'MCC_AutoWholeCellComp',
+ 'MCC_GetBridgeBalEnable', 'MCC_GetBridgeBalResist', 'MCC_GetFastCompCap',
+ 'MCC_GetFastCompTau', 'MCC_GetHolding', 'MCC_GetHoldingEnable', 'MCC_GetMode',
+ 'MCC_GetNeutralizationCap', 'MCC_GetNeutralizationEnable',
+ 'MCC_GetOscKillerEnable', 'MCC_GetPipetteOffset', 'MCC_GetPrimarySignalGain',
+ 'MCC_GetPrimarySignalHPF', 'MCC_GetPrimarySignalLPF', 'MCC_GetRsCompBandwidth',
+ 'MCC_GetRsCompCorrection', 'MCC_GetRsCompEnable', 'MCC_GetRsCompPrediction',
+ 'MCC_GetSecondarySignalGain', 'MCC_GetSecondarySignalLPF', 'MCC_GetSlowCompCap',
+ 'MCC_GetSlowCompTau', 'MCC_GetSlowCompTauX20Enable',
+ 'MCC_GetSlowCurrentInjEnable', 'MCC_GetSlowCurrentInjLevel',
+ 'MCC_GetSlowCurrentInjSetlTime', 'MCC_GetWholeCellCompCap',
+ 'MCC_GetWholeCellCompEnable', 'MCC_GetWholeCellCompResist',
+ 'MCC_SelectMultiClamp700B', 'MCC_SetBridgeBalEnable', 'MCC_SetBridgeBalResist',
+ 'MCC_SetFastCompCap', 'MCC_SetFastCompTau', 'MCC_SetHolding',
+ 'MCC_SetHoldingEnable', 'MCC_SetMode', 'MCC_SetNeutralizationCap',
+ 'MCC_SetNeutralizationEnable', 'MCC_SetOscKillerEnable', 'MCC_SetPipetteOffset',
+ 'MCC_SetPrimarySignalGain', 'MCC_SetPrimarySignalHPF', 'MCC_SetPrimarySignalLPF',
+ 'MCC_SetRsCompBandwidth', 'MCC_SetRsCompCorrection', 'MCC_SetRsCompEnable',
+ 'MCC_SetRsCompPrediction', 'MCC_SetSecondarySignalGain',
+ 'MCC_SetSecondarySignalLPF', 'MCC_SetSlowCompCap', 'MCC_SetSlowCompTau',
+ 'MCC_SetSlowCompTauX20Enable', 'MCC_SetSlowCurrentInjEnable',
+ 'MCC_SetSlowCurrentInjLevel', 'MCC_SetSlowCurrentInjSetlTime', 'MCC_SetTimeoutMs',
+ 'MCC_SetWholeCellCompCap', 'MCC_SetWholeCellCompEnable',
+ 'MCC_SetWholeCellCompResist', 'mean', 'median', 'min', 'mod', 'ModDate',
'MPFXEMGPeak', 'MPFXExpConvExpPeak', 'MPFXGaussPeak', 'MPFXLorenzianPeak',
'MPFXVoigtPeak', 'NameOfWave', 'NaN', 'NewFreeDataFolder', 'NewFreeWave', 'norm',
'NormalizeUnicode', 'note', 'NumberByKey', 'numpnts', 'numtype',
@@ -217,9 +286,30 @@ class IgorLexer(RegexLexer):
'SelectNumber', 'SelectString', 'SetEnvironmentVariable', 'sign', 'sin', 'sinc',
'sinh', 'sinIntegral', 'SortList', 'SpecialCharacterInfo', 'SpecialCharacterList',
'SpecialDirPath', 'SphericalBessJ', 'SphericalBessJD', 'SphericalBessY',
- 'SphericalBessYD', 'SphericalHarmonics', 'sqrt', 'StartMSTimer', 'StatsBetaCDF',
- 'StatsBetaPDF', 'StatsBinomialCDF', 'StatsBinomialPDF', 'StatsCauchyCDF',
- 'StatsCauchyPDF', 'StatsChiCDF', 'StatsChiPDF', 'StatsCMSSDCDF',
+ 'SphericalBessYD', 'SphericalHarmonics', 'SQLAllocHandle', 'SQLAllocStmt',
+ 'SQLBinaryWavesToTextWave', 'SQLBindCol', 'SQLBindParameter', 'SQLBrowseConnect',
+ 'SQLBulkOperations', 'SQLCancel', 'SQLCloseCursor', 'SQLColAttributeNum',
+ 'SQLColAttributeStr', 'SQLColumnPrivileges', 'SQLColumns', 'SQLConnect',
+ 'SQLDataSources', 'SQLDescribeCol', 'SQLDescribeParam', 'SQLDisconnect',
+ 'SQLDriverConnect', 'SQLDrivers', 'SQLEndTran', 'SQLError', 'SQLExecDirect',
+ 'SQLExecute', 'SQLFetch', 'SQLFetchScroll', 'SQLForeignKeys', 'SQLFreeConnect',
+ 'SQLFreeEnv', 'SQLFreeHandle', 'SQLFreeStmt', 'SQLGetConnectAttrNum',
+ 'SQLGetConnectAttrStr', 'SQLGetCursorName', 'SQLGetDataNum', 'SQLGetDataStr',
+ 'SQLGetDescFieldNum', 'SQLGetDescFieldStr', 'SQLGetDescRec', 'SQLGetDiagFieldNum',
+ 'SQLGetDiagFieldStr', 'SQLGetDiagRec', 'SQLGetEnvAttrNum', 'SQLGetEnvAttrStr',
+ 'SQLGetFunctions', 'SQLGetInfoNum', 'SQLGetInfoStr', 'SQLGetStmtAttrNum',
+ 'SQLGetStmtAttrStr', 'SQLGetTypeInfo', 'SQLMoreResults', 'SQLNativeSql',
+ 'SQLNumParams', 'SQLNumResultCols', 'SQLNumResultRowsIfKnown',
+ 'SQLNumRowsFetched', 'SQLParamData', 'SQLPrepare', 'SQLPrimaryKeys',
+ 'SQLProcedureColumns', 'SQLProcedures', 'SQLPutData', 'SQLReinitialize',
+ 'SQLRowCount', 'SQLSetConnectAttrNum', 'SQLSetConnectAttrStr', 'SQLSetCursorName',
+ 'SQLSetDescFieldNum', 'SQLSetDescFieldStr', 'SQLSetDescRec', 'SQLSetEnvAttrNum',
+ 'SQLSetEnvAttrStr', 'SQLSetPos', 'SQLSetStmtAttrNum', 'SQLSetStmtAttrStr',
+ 'SQLSpecialColumns', 'SQLStatistics', 'SQLTablePrivileges', 'SQLTables',
+ 'SQLTextWaveToBinaryWaves', 'SQLTextWaveTo2DBinaryWave', 'SQLUpdateBoundValues',
+ 'SQLXOPCheckState', 'SQL2DBinaryWaveToTextWave', 'sqrt', 'StartMSTimer',
+ 'StatsBetaCDF', 'StatsBetaPDF', 'StatsBinomialCDF', 'StatsBinomialPDF',
+ 'StatsCauchyCDF', 'StatsCauchyPDF', 'StatsChiCDF', 'StatsChiPDF', 'StatsCMSSDCDF',
'StatsCorrelation', 'StatsDExpCDF', 'StatsDExpPDF', 'StatsErlangCDF',
'StatsErlangPDF', 'StatsErrorPDF', 'StatsEValueCDF', 'StatsEValuePDF',
'StatsExpCDF', 'StatsExpPDF', 'StatsFCDF', 'StatsFPDF', 'StatsFriedmanCDF',
@@ -250,19 +340,66 @@ class IgorLexer(RegexLexer):
'StopMSTimer', 'StringByKey', 'stringCRC', 'StringFromList', 'StringList',
'stringmatch', 'strlen', 'strsearch', 'StrVarOrDefault', 'str2num', 'StudentA',
'StudentT', 'sum', 'SVAR_Exists', 'TableInfo', 'TagVal', 'TagWaveRef', 'tan',
- 'tanh', 'TextEncodingCode', 'TextEncodingName', 'TextFile', 'ThreadGroupCreate',
+ 'tango_close_device', 'tango_command_inout', 'tango_compute_image_proj',
+ 'tango_get_dev_attr_list', 'tango_get_dev_black_box', 'tango_get_dev_cmd_list',
+ 'tango_get_dev_status', 'tango_get_dev_timeout', 'tango_get_error_stack',
+ 'tango_open_device', 'tango_ping_device', 'tango_read_attribute',
+ 'tango_read_attributes', 'tango_reload_dev_interface',
+ 'tango_resume_attr_monitor', 'tango_set_attr_monitor_period',
+ 'tango_set_dev_timeout', 'tango_start_attr_monitor', 'tango_stop_attr_monitor',
+ 'tango_suspend_attr_monitor', 'tango_write_attribute', 'tango_write_attributes',
+ 'tanh', 'TDMAddChannel', 'TDMAddGroup', 'TDMAppendDataValues',
+ 'TDMAppendDataValuesTime', 'TDMChannelPropertyExists', 'TDMCloseChannel',
+ 'TDMCloseFile', 'TDMCloseGroup', 'TDMCreateChannelProperty', 'TDMCreateFile',
+ 'TDMCreateFileProperty', 'TDMCreateGroupProperty', 'TDMFilePropertyExists',
+ 'TDMGetChannelPropertyNames', 'TDMGetChannelPropertyNum',
+ 'TDMGetChannelPropertyStr', 'TDMGetChannelPropertyTime',
+ 'TDMGetChannelPropertyType', 'TDMGetChannels', 'TDMGetChannelStringPropertyLen',
+ 'TDMGetDataType', 'TDMGetDataValues', 'TDMGetDataValuesTime',
+ 'TDMGetFilePropertyNames', 'TDMGetFilePropertyNum', 'TDMGetFilePropertyStr',
+ 'TDMGetFilePropertyTime', 'TDMGetFilePropertyType', 'TDMGetFileStringPropertyLen',
+ 'TDMGetGroupPropertyNames', 'TDMGetGroupPropertyNum', 'TDMGetGroupPropertyStr',
+ 'TDMGetGroupPropertyTime', 'TDMGetGroupPropertyType', 'TDMGetGroups',
+ 'TDMGetGroupStringPropertyLen', 'TDMGetLibraryErrorDescription',
+ 'TDMGetNumChannelProperties', 'TDMGetNumChannels', 'TDMGetNumDataValues',
+ 'TDMGetNumFileProperties', 'TDMGetNumGroupProperties', 'TDMGetNumGroups',
+ 'TDMGroupPropertyExists', 'TDMOpenFile', 'TDMOpenFileEx', 'TDMRemoveChannel',
+ 'TDMRemoveGroup', 'TDMReplaceDataValues', 'TDMReplaceDataValuesTime',
+ 'TDMSaveFile', 'TDMSetChannelPropertyNum', 'TDMSetChannelPropertyStr',
+ 'TDMSetChannelPropertyTime', 'TDMSetDataValues', 'TDMSetDataValuesTime',
+ 'TDMSetFilePropertyNum', 'TDMSetFilePropertyStr', 'TDMSetFilePropertyTime',
+ 'TDMSetGroupPropertyNum', 'TDMSetGroupPropertyStr', 'TDMSetGroupPropertyTime',
+ 'TextEncodingCode', 'TextEncodingName', 'TextFile', 'ThreadGroupCreate',
'ThreadGroupGetDF', 'ThreadGroupGetDFR', 'ThreadGroupRelease', 'ThreadGroupWait',
'ThreadProcessorCount', 'ThreadReturnValue', 'ticks', 'time', 'TraceFromPixel',
- 'TraceInfo', 'TraceNameList', 'TraceNameToWaveRef', 'trunc', 'UniqueName',
- 'UnPadString', 'UnsetEnvironmentVariable', 'UpperStr', 'URLDecode', 'URLEncode',
- 'VariableList', 'Variance', 'vcsr', 'Voigt', 'VoigtFit', 'VoigtFitBL',
- 'VoigtFit1Shape', 'VoigtFit1ShapeBL', 'VoigtFit1Shape1Width',
- 'VoigtFit1Shape1WidthBL', 'VoigtFunc', 'WaveCRC', 'WaveDims', 'WaveExists',
- 'WaveInfo', 'WaveList', 'WaveMax', 'WaveMin', 'WaveName', 'WaveRefIndexed',
+ 'TraceInfo', 'TraceNameList', 'TraceNameToWaveRef', 'TrimString', 'trunc',
+ 'UniqueName', 'UnPadString', 'UnsetEnvironmentVariable', 'UpperStr', 'URLDecode',
+ 'URLEncode', 'VariableList', 'Variance', 'vcsr', 'viAssertIntrSignal',
+ 'viAssertTrigger', 'viAssertUtilSignal', 'viClear', 'viClose', 'viDisableEvent',
+ 'viDiscardEvents', 'viEnableEvent', 'viFindNext', 'viFindRsrc', 'viGetAttribute',
+ 'viGetAttributeString', 'viGpibCommand', 'viGpibControlATN', 'viGpibControlREN',
+ 'viGpibPassControl', 'viGpibSendIFC', 'viIn8', 'viIn16', 'viIn32', 'viLock',
+ 'viMapAddress', 'viMapTrigger', 'viMemAlloc', 'viMemFree', 'viMoveIn8',
+ 'viMoveIn16', 'viMoveIn32', 'viMoveOut8', 'viMoveOut16', 'viMoveOut32', 'viOpen',
+ 'viOpenDefaultRM', 'viOut8', 'viOut16', 'viOut32', 'viPeek8', 'viPeek16',
+ 'viPeek32', 'viPoke8', 'viPoke16', 'viPoke32', 'viRead', 'viReadSTB',
+ 'viSetAttribute', 'viSetAttributeString', 'viStatusDesc', 'viTerminate',
+ 'viUnlock', 'viUnmapAddress', 'viUnmapTrigger', 'viUsbControlIn',
+ 'viUsbControlOut', 'viVxiCommandQuery', 'viWaitOnEvent', 'viWrite', 'VoigtFunc',
+ 'VoigtPeak', 'WaveCRC', 'WaveDims', 'WaveExists', 'WaveHash', 'WaveInfo',
+ 'WaveList', 'WaveMax', 'WaveMin', 'WaveName', 'WaveRefIndexed',
'WaveRefIndexedDFR', 'WaveRefsEqual', 'WaveRefWaveToList', 'WaveTextEncoding',
'WaveType', 'WaveUnits', 'WhichListItem', 'WinList', 'WinName', 'WinRecreation',
- 'WinType', 'WMFindWholeWord', 'WNoise', 'xcsr', 'XWaveName', 'XWaveRefFromTrace',
- 'x2pnt', 'zcsr', 'ZernikeR', 'zeta'
+ 'WinType', 'wnoise', 'xcsr', 'XWaveName', 'XWaveRefFromTrace', 'x2pnt', 'zcsr',
+ 'ZernikeR', 'zeromq_client_connect', 'zeromq_client_connect',
+ 'zeromq_client_recv', 'zeromq_client_recv', 'zeromq_client_send',
+ 'zeromq_client_send', 'zeromq_handler_start', 'zeromq_handler_start',
+ 'zeromq_handler_stop', 'zeromq_handler_stop', 'zeromq_server_bind',
+ 'zeromq_server_bind', 'zeromq_server_recv', 'zeromq_server_recv',
+ 'zeromq_server_send', 'zeromq_server_send', 'zeromq_set', 'zeromq_set',
+ 'zeromq_stop', 'zeromq_stop', 'zeromq_test_callfunction',
+ 'zeromq_test_callfunction', 'zeromq_test_serializeWave',
+ 'zeromq_test_serializeWave', 'zeta'
)
tokens = {
diff --git a/pygments/lexers/inferno.py b/pygments/lexers/inferno.py
index bfbea571..0d68856d 100644
--- a/pygments/lexers/inferno.py
+++ b/pygments/lexers/inferno.py
@@ -5,7 +5,7 @@
Lexers for Inferno os and all the related stuff.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -64,7 +64,7 @@ class LimboLexer(RegexLexer):
(r'(byte|int|big|real|string|array|chan|list|adt'
r'|fn|ref|of|module|self|type)\b', Keyword.Type),
(r'(con|iota|nil)\b', Keyword.Constant),
- ('[a-zA-Z_]\w*', Name),
+ (r'[a-zA-Z_]\w*', Name),
],
'statement' : [
include('whitespace'),
diff --git a/pygments/lexers/installers.py b/pygments/lexers/installers.py
index c436afed..0323d140 100644
--- a/pygments/lexers/installers.py
+++ b/pygments/lexers/installers.py
@@ -5,7 +5,7 @@
Lexers for installer/packager DSLs and formats.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/int_fiction.py b/pygments/lexers/int_fiction.py
index 724f9b27..57ace259 100644
--- a/pygments/lexers/int_fiction.py
+++ b/pygments/lexers/int_fiction.py
@@ -5,7 +5,7 @@
Lexers for interactive fiction languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -911,7 +911,7 @@ class Tads3Lexer(RegexLexer):
'block?/root': [
(r'\{', Punctuation, ('#pop', 'block')),
include('whitespace'),
- (r'(?=[[\'"<(:])', Text, # It might be a VerbRule macro.
+ (r'(?=[\[\'"<(:])', Text, # It might be a VerbRule macro.
('#pop', 'object-body/no-braces', 'grammar', 'grammar-rules')),
# It might be a macro like DefineAction.
default(('#pop', 'object-body/no-braces'))
diff --git a/pygments/lexers/iolang.py b/pygments/lexers/iolang.py
index e62dd434..26f44e27 100644
--- a/pygments/lexers/iolang.py
+++ b/pygments/lexers/iolang.py
@@ -5,7 +5,7 @@
Lexers for the Io language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -49,7 +49,7 @@ class IoLexer(RegexLexer):
# names
(r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b',
Name.Builtin),
- ('[a-zA-Z_]\w*', Name),
+ (r'[a-zA-Z_]\w*', Name),
# numbers
(r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
(r'\d+', Number.Integer)
diff --git a/pygments/lexers/j.py b/pygments/lexers/j.py
index 1231d597..46037820 100644
--- a/pygments/lexers/j.py
+++ b/pygments/lexers/j.py
@@ -5,7 +5,7 @@
Lexer for the J programming language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -52,13 +52,13 @@ class JLexer(RegexLexer):
Name.Function, 'explicitDefinition'),
# Flow Control
- (words(('for_', 'goto_', 'label_'), suffix=validName+'\.'), Name.Label),
+ (words(('for_', 'goto_', 'label_'), suffix=validName+r'\.'), Name.Label),
(words((
'assert', 'break', 'case', 'catch', 'catchd',
'catcht', 'continue', 'do', 'else', 'elseif',
'end', 'fcase', 'for', 'if', 'return',
'select', 'throw', 'try', 'while', 'whilst',
- ), suffix='\.'), Name.Label),
+ ), suffix=r'\.'), Name.Label),
# Variable Names
(validName, Name.Variable),
diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py
index a23ba184..0507375f 100644
--- a/pygments/lexers/javascript.py
+++ b/pygments/lexers/javascript.py
@@ -5,7 +5,7 @@
Lexers for JavaScript and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -53,7 +53,7 @@ class JavascriptLexer(RegexLexer):
'slashstartsregex': [
include('commentsandwhitespace'),
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
+ r'([gimuy]+\b|\B)', String.Regex, '#pop'),
(r'(?=/)', Text, ('#pop', 'badregex')),
default('#pop')
],
@@ -64,9 +64,14 @@ class JavascriptLexer(RegexLexer):
(r'\A#! ?/.*?\n', Comment.Hashbang), # recognized by node.js
(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
+ (r'(\.\d+|[0-9]+\.[0-9]*)([eE][-+]?[0-9]+)?', Number.Float),
+ (r'0[bB][01]+', Number.Bin),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'\.\.\.|=>', Punctuation),
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|=>|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'\.\.\.', Punctuation),
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
(r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
@@ -84,11 +89,6 @@ class JavascriptLexer(RegexLexer):
r'Error|eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|'
r'document|this|window)\b', Name.Builtin),
(JS_IDENT, Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'`', String.Backtick, 'interp'),
@@ -366,9 +366,10 @@ class DartLexer(RegexLexer):
(r'\b(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|in|is|new|return|super|switch|this|throw|try|while)\b',
Keyword),
- (r'\b(abstract|const|extends|factory|final|get|implements|'
- r'native|operator|set|static|typedef|var)\b', Keyword.Declaration),
- (r'\b(bool|double|Dynamic|int|num|Object|String|void)\b', Keyword.Type),
+ (r'\b(abstract|async|await|const|extends|factory|final|get|'
+ r'implements|native|operator|set|static|sync|typedef|var|with|'
+ r'yield)\b', Keyword.Declaration),
+ (r'\b(bool|double|dynamic|int|num|Object|String|void)\b', Keyword.Type),
(r'\b(false|null|true)\b', Keyword.Constant),
(r'[~!%^&*+=|?:<>/-]|as\b', Operator),
(r'[a-zA-Z_$]\w*:', Name.Label),
@@ -447,11 +448,15 @@ class TypeScriptLexer(RegexLexer):
name = 'TypeScript'
aliases = ['ts', 'typescript']
- filenames = ['*.ts']
+ filenames = ['*.ts', '*.tsx']
mimetypes = ['text/x-typescript']
flags = re.DOTALL | re.MULTILINE
+ # Higher priority than the TypoScriptLexer, as TypeScript is far more
+ # common these days
+ priority = 0.5
+
tokens = {
'commentsandwhitespace': [
(r'\s+', Text),
@@ -511,9 +516,26 @@ class TypeScriptLexer(RegexLexer):
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'`', String.Backtick, 'interp'),
# Match stuff like: Decorators
(r'@\w+', Keyword.Declaration),
- ]
+ ],
+
+ # The 'interp*' rules match those in JavascriptLexer. Changes made
+ # there should be reflected here as well.
+ 'interp': [
+ (r'`', String.Backtick, '#pop'),
+ (r'\\\\', String.Backtick),
+ (r'\\`', String.Backtick),
+ (r'\$\{', String.Interpol, 'interp-inside'),
+ (r'\$', String.Backtick),
+ (r'[^`\\$]+', String.Backtick),
+ ],
+ 'interp-inside': [
+ # TODO: should this include single-line comments and allow nesting strings?
+ (r'\}', String.Interpol, '#pop'),
+ include('root'),
+ ],
}
@@ -545,12 +567,7 @@ class LassoLexer(RegexLexer):
tokens = {
'root': [
(r'^#![ \S]+lasso9\b', Comment.Preproc, 'lasso'),
- (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
- (r'\[noprocess\]', Comment.Preproc, ('delimiters', 'noprocess')),
- (r'\[', Comment.Preproc, ('delimiters', 'squarebrackets')),
- (r'<\?(LassoScript|lasso|=)', Comment.Preproc,
- ('delimiters', 'anglebrackets')),
- (r'<(!--.*?-->)?', Other, 'delimiters'),
+ (r'(?=\[|<)', Other, 'delimiters'),
(r'\s+', Other),
default(('delimiters', 'lassofile')),
],
@@ -558,14 +575,14 @@ class LassoLexer(RegexLexer):
(r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
(r'\[noprocess\]', Comment.Preproc, 'noprocess'),
(r'\[', Comment.Preproc, 'squarebrackets'),
- (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
+ (r'<\?(lasso(script)?|=)', Comment.Preproc, 'anglebrackets'),
(r'<(!--.*?-->)?', Other),
(r'[^[<]+', Other),
],
'nosquarebrackets': [
(r'\[noprocess\]', Comment.Preproc, 'noprocess'),
(r'\[', Other),
- (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
+ (r'<\?(lasso(script)?|=)', Comment.Preproc, 'anglebrackets'),
(r'<(!--.*?-->)?', Other),
(r'[^[<]+', Other),
],
@@ -607,7 +624,7 @@ class LassoLexer(RegexLexer):
# names
(r'\$[a-z_][\w.]*', Name.Variable),
- (r'#([a-z_][\w.]*|\d+)', Name.Variable.Instance),
+ (r'#([a-z_][\w.]*|\d+\b)', Name.Variable.Instance),
(r"(\.\s*)('[a-z_][\w.]*')",
bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
(r"(self)(\s*->\s*)('[a-z_][\w.]*')",
@@ -658,20 +675,20 @@ class LassoLexer(RegexLexer):
r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
- r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|'
- r'Link_FirstRecord|Link_LastGroup|Link_LastRecord|Link_NextGroup|'
- r'Link_NextRecord|Link_PrevGroup|Link_PrevRecord|Log|Loop|'
- r'NoProcess|Output_None|Portal|Private|Protect|Records|Referer|'
- r'Referrer|Repeating|ResultSet|Rows|Search_Args|Search_Arguments|'
- r'Select|Sort_Args|Sort_Arguments|Thread_Atomic|Value_List|While|'
- r'Abort|Case|Else|If_Empty|If_False|If_Null|If_True|Loop_Abort|'
- r'Loop_Continue|Loop_Count|Params|Params_Up|Return|Return_Value|'
- r'Run_Children|SOAP_DefineTag|SOAP_LastRequest|SOAP_LastResponse|'
- r'Tag_Name|ascending|average|by|define|descending|do|equals|'
- r'frozen|group|handle_failure|import|in|into|join|let|match|max|'
- r'min|on|order|parent|protected|provide|public|require|returnhome|'
- r'skip|split_thread|sum|take|thread|to|trait|type|where|with|'
- r'yield|yieldhome)\b',
+ r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|Link_FirstRecord|'
+ r'Link_LastGroup|Link_LastRecord|Link_NextGroup|Link_NextRecord|'
+ r'Link_PrevGroup|Link_PrevRecord|Log|Loop|Output_None|Portal|'
+ r'Private|Protect|Records|Referer|Referrer|Repeating|ResultSet|'
+ r'Rows|Search_Args|Search_Arguments|Select|Sort_Args|'
+ r'Sort_Arguments|Thread_Atomic|Value_List|While|Abort|Case|Else|'
+ r'Fail_If|Fail_IfNot|Fail|If_Empty|If_False|If_Null|If_True|'
+ r'Loop_Abort|Loop_Continue|Loop_Count|Params|Params_Up|Return|'
+ r'Return_Value|Run_Children|SOAP_DefineTag|SOAP_LastRequest|'
+ r'SOAP_LastResponse|Tag_Name|ascending|average|by|define|'
+ r'descending|do|equals|frozen|group|handle_failure|import|in|into|'
+ r'join|let|match|max|min|on|order|parent|protected|provide|public|'
+ r'require|returnhome|skip|split_thread|sum|take|thread|to|trait|'
+ r'type|where|with|yield|yieldhome)\b',
bygroups(Punctuation, Keyword)),
# other
@@ -996,7 +1013,7 @@ class ObjectiveJLexer(RegexLexer):
}
def analyse_text(text):
- if re.search('^\s*@import\s+[<"]', text, re.MULTILINE):
+ if re.search(r'^\s*@import\s+[<"]', text, re.MULTILINE):
# special directive found in most Objective-J files
return True
return False
@@ -1481,8 +1498,10 @@ class JuttleLexer(RegexLexer):
(r'^(?=\s|/)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
(r':\d{2}:\d{2}:\d{2}(\.\d*)?:', String.Moment),
- (r':(now|beginning|end|forever|yesterday|today|tomorrow|(\d+(\.\d*)?|\.\d+)(ms|[smhdwMy])?):', String.Moment),
- (r':\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(\.\d*)?)?(Z|[+-]\d{2}:\d{2}|[+-]\d{4})?:', String.Moment),
+ (r':(now|beginning|end|forever|yesterday|today|tomorrow|'
+ r'(\d+(\.\d*)?|\.\d+)(ms|[smhdwMy])?):', String.Moment),
+ (r':\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(\.\d*)?)?'
+ r'(Z|[+-]\d{2}:\d{2}|[+-]\d{4})?:', String.Moment),
(r':((\d+(\.\d*)?|\.\d+)[ ]+)?(millisecond|second|minute|hour|day|week|month|year)[s]?'
r'(([ ]+and[ ]+(\d+[ ]+)?(millisecond|second|minute|hour|day|week|month|year)[s]?)'
r'|[ ]+(ago|from[ ]+now))*:', String.Moment),
diff --git a/pygments/lexers/julia.py b/pygments/lexers/julia.py
index 95c503a0..84ae1ae7 100644
--- a/pygments/lexers/julia.py
+++ b/pygments/lexers/julia.py
@@ -5,7 +5,7 @@
Lexers for the Julia language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -48,6 +48,7 @@ class JuliaLexer(RegexLexer):
# keywords
(r'in\b', Keyword.Pseudo),
+ (r'isa\b', Keyword.Pseudo),
(r'(true|false)\b', Keyword.Constant),
(r'(local|global|const)\b', Keyword.Declaration),
(words([
@@ -55,7 +56,8 @@ class JuliaLexer(RegexLexer):
'baremodule', 'begin', 'bitstype', 'break', 'catch', 'ccall',
'continue', 'do', 'else', 'elseif', 'end', 'export', 'finally',
'for', 'if', 'import', 'importall', 'let', 'macro', 'module',
- 'quote', 'return', 'try', 'using', 'while'],
+ 'mutable', 'primitive', 'quote', 'return', 'struct', 'try',
+ 'using', 'while'],
suffix=r'\b'), Keyword),
# NOTE
@@ -146,7 +148,7 @@ class JuliaLexer(RegexLexer):
(words([
# prec-assignment
u'=', u':=', u'+=', u'-=', u'*=', u'/=', u'//=', u'.//=', u'.*=', u'./=',
- u'\=', u'.\=', u'^=', u'.^=', u'÷=', u'.÷=', u'%=', u'.%=', u'|=', u'&=',
+ u'\\=', u'.\\=', u'^=', u'.^=', u'÷=', u'.÷=', u'%=', u'.%=', u'|=', u'&=',
u'$=', u'=>', u'<<=', u'>>=', u'>>>=', u'~', u'.+=', u'.-=',
# prec-conditional
u'?',
@@ -181,7 +183,7 @@ class JuliaLexer(RegexLexer):
# prec-dot
u'.',
# unary op
- u'+', u'-', u'!', u'~', u'√', u'∛', u'∜'
+ u'+', u'-', u'!', u'√', u'∛', u'∜'
]), Operator),
# chars
@@ -231,7 +233,7 @@ class JuliaLexer(RegexLexer):
'string': [
(r'"', String, '#pop'),
# FIXME: This escape pattern is not perfect.
- (r'\\([\\"\'\$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
+ (r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
# Interpolation is defined as "$" followed by the shortest full
# expression, which is something we can't parse.
# Include the most common cases here: $word, and $(paren'd expr).
@@ -246,7 +248,7 @@ class JuliaLexer(RegexLexer):
'tqstring': [
(r'"""', String, '#pop'),
- (r'\\([\\"\'\$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
+ (r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
(r'\$' + allowed_variable, String.Interpol),
(r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
(r'.|\s', String),
diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py
index 5d747561..8de6e9f2 100644
--- a/pygments/lexers/jvm.py
+++ b/pygments/lexers/jvm.py
@@ -5,7 +5,7 @@
Pygments lexers for JVM languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,7 +21,7 @@ from pygments import unistring as uni
__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'ClojureScriptLexer',
'KotlinLexer', 'XtendLexer', 'AspectJLexer', 'CeylonLexer',
- 'PigLexer', 'GoloLexer', 'JasminLexer']
+ 'PigLexer', 'GoloLexer', 'JasminLexer', 'SarlLexer']
class JavaLexer(RegexLexer):
@@ -257,7 +257,7 @@ class ScalaLexer(RegexLexer):
u'\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]')
idrest = u'%s(?:%s|[0-9])*(?:(?<=_)%s)?' % (letter, letter, op)
- letter_letter_digit = u'%s(?:%s|\d)*' % (letter, letter)
+ letter_letter_digit = u'%s(?:%s|\\d)*' % (letter, letter)
tokens = {
'root': [
@@ -689,7 +689,7 @@ class IokeLexer(RegexLexer):
# functions
(u'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
u'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
- u'(?![\w!:?])', Name.Function),
+ u'(?![\\w!:?])', Name.Function),
# Numbers
(r'-?0[xX][0-9a-fA-F]+', Number.Hex),
@@ -801,7 +801,7 @@ class ClojureLexer(RegexLexer):
# TODO / should divide keywords/symbols into namespace/rest
# but that's hard, so just pretend / is part of the name
- valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
+ valid_name = r'(?!#)[\w!$%*+<=>?/.#|-]+'
tokens = {
'root': [
@@ -1006,7 +1006,7 @@ class KotlinLexer(RegexLexer):
.. versionadded:: 1.5
"""
-
+
name = 'Kotlin'
aliases = ['kotlin']
filenames = ['*.kt']
@@ -1017,15 +1017,22 @@ class KotlinLexer(RegexLexer):
kt_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
'[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
'Mn', 'Mc') + ']*')
- kt_id = '(' + kt_name + '|`' + kt_name + '`)'
+
+ kt_space_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
+ '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
+ 'Mn', 'Mc', 'Zs') + ',-]*')
+
+ kt_id = '(' + kt_name + '|`' + kt_space_name + '`)'
tokens = {
'root': [
(r'^\s*\[.*?\]', Name.Attribute),
(r'[^\S\n]+', Text),
+ (r'\s+', Text),
(r'\\\n', Text), # line continuation
(r'//.*?\n', Comment.Single),
(r'/[*].*?[*]/', Comment.Multiline),
+ (r'""".*?"""', String),
(r'\n', Text),
(r'::|!!|\?[:.]', Operator),
(r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
@@ -1035,11 +1042,14 @@ class KotlinLexer(RegexLexer):
(r"'\\.'|'[^\\]'", String.Char),
(r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- (r'(class)(\s+)(object)', bygroups(Keyword, Text, Keyword)),
+ (r'(object)(\s+)(:)(\s+)', bygroups(Keyword, Text, Punctuation, Text), 'class'),
+ (r'(companion)(\s+)(object)', bygroups(Keyword, Text, Keyword)),
(r'(class|interface|object)(\s+)', bygroups(Keyword, Text), 'class'),
(r'(package|import)(\s+)', bygroups(Keyword, Text), 'package'),
+ (r'(val|var)(\s+)([(])', bygroups(Keyword, Text, Punctuation), 'property_dec'),
(r'(val|var)(\s+)', bygroups(Keyword, Text), 'property'),
(r'(fun)(\s+)', bygroups(Keyword, Text), 'function'),
+ (r'(inline fun)(\s+)', bygroups(Keyword, Text), 'function'),
(r'(abstract|annotation|as|break|by|catch|class|companion|const|'
r'constructor|continue|crossinline|data|do|dynamic|else|enum|'
r'external|false|final|finally|for|fun|get|if|import|in|infix|'
@@ -1058,9 +1068,26 @@ class KotlinLexer(RegexLexer):
'property': [
(kt_id, Name.Property, '#pop')
],
+ 'property_dec': [
+ (r'(,)(\s*)', bygroups(Punctuation, Text)),
+ (r'(:)(\s*)', bygroups(Punctuation, Text)),
+ (r'<', Punctuation, 'generic'),
+ (r'([)])', Punctuation, '#pop'),
+ (kt_id, Name.Property)
+ ],
'function': [
+ (r'<', Punctuation, 'generic'),
+ (r''+kt_id+'([.])'+kt_id, bygroups(Name.Class, Punctuation, Name.Function), '#pop'),
(kt_id, Name.Function, '#pop')
],
+ 'generic': [
+ (r'(>)(\s*)', bygroups(Punctuation, Text), '#pop'),
+ (r':',Punctuation),
+ (r'(reified|out|in)\b', Keyword),
+ (r',',Text),
+ (r'\s+',Text),
+ (kt_id,Name)
+ ]
}
@@ -1258,7 +1285,7 @@ class GoloLexer(RegexLexer):
(r'-?\d[\d_]*L', Number.Integer.Long),
(r'-?\d[\d_]*', Number.Integer),
- ('`?[a-zA-Z_][\w$]*', Name),
+ (r'`?[a-zA-Z_][\w$]*', Name),
(r'@[a-zA-Z_][\w$.]*', Name.Decorator),
(r'"""', String, combined('stringescape', 'triplestring')),
@@ -1571,3 +1598,57 @@ class JasminLexer(RegexLexer):
re.MULTILINE):
score += 0.6
return score
+
+
+class SarlLexer(RegexLexer):
+ """
+ For `SARL <http://www.sarl.io>`_ source code.
+
+ .. versionadded:: 2.4
+ """
+
+ name = 'SARL'
+ aliases = ['sarl']
+ filenames = ['*.sarl']
+ mimetypes = ['text/x-sarl']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_$][\w$]*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
+ (r'(as|break|case|catch|default|do|else|extends|extension|finally|fires|for|if|implements|instanceof|new|on|requires|return|super|switch|throw|throws|try|typeof|uses|while|with)\b',
+ Keyword),
+ (r'(abstract|def|dispatch|final|native|override|private|protected|public|static|strictfp|synchronized|transient|val|var|volatile)\b', Keyword.Declaration),
+ (r'(boolean|byte|char|double|float|int|long|short|void)\b',
+ Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(false|it|null|occurrence|this|true|void)\b', Keyword.Constant),
+ (r'(agent|annotation|artifact|behavior|capacity|class|enum|event|interface|skill|space)(\s+)', bygroups(Keyword.Declaration, Text),
+ 'class'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'(\\\\|\\'|[^'])*'", String),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
+ }
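A minimal sketch of driving the new SarlLexer, assuming it lives in pygments/lexers/jvm.py next to the other JVM lexers; the SARL agent below is only illustrative:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.jvm import SarlLexer

    code = '''
    agent PingAgent {
        uses Logging
        on Initialize {
            info("Ping agent started")
        }
    }
    '''
    # Renders the snippet with ANSI colours; 'agent', 'uses' and 'on'
    # should come back as Keyword tokens under the rules above.
    print(highlight(code, SarlLexer(), TerminalFormatter()))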
diff --git a/pygments/lexers/lisp.py b/pygments/lexers/lisp.py
index 67d74566..3bfc83a6 100644
--- a/pygments/lexers/lisp.py
+++ b/pygments/lexers/lisp.py
@@ -5,7 +5,7 @@
Lexers for Lispy languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,7 +19,7 @@ from pygments.lexers.python import PythonLexer
__all__ = ['SchemeLexer', 'CommonLispLexer', 'HyLexer', 'RacketLexer',
'NewLispLexer', 'EmacsLispLexer', 'ShenLexer', 'CPSALexer',
- 'XtlangLexer']
+ 'XtlangLexer', 'FennelLexer']
class SchemeLexer(RegexLexer):
@@ -139,7 +139,7 @@ class SchemeLexer(RegexLexer):
(r"(?<=#\()" + valid_name, Name.Variable),
# highlight the builtins
- ("(?<=\()(%s)" % '|'.join(re.escape(entry) + ' ' for entry in builtins),
+ (r"(?<=\()(%s)" % '|'.join(re.escape(entry) + ' ' for entry in builtins),
Name.Builtin),
# the remaining functions
@@ -321,7 +321,7 @@ class CommonLispLexer(RegexLexer):
(r'#\d+#', Operator),
# read-time comment
- (r'#+nil' + terminated + '\s*\(', Comment.Preproc, 'commented-form'),
+ (r'#+nil' + terminated + r'\s*\(', Comment.Preproc, 'commented-form'),
# read-time conditional
(r'#[+-]', Operator),
@@ -333,7 +333,7 @@ class CommonLispLexer(RegexLexer):
(r'(t|nil)' + terminated, Name.Constant),
# functions and variables
- (r'\*' + symbol + '\*', Name.Variable.Global),
+ (r'\*' + symbol + r'\*', Name.Variable.Global),
(symbol, Name.Variable),
# parentheses
@@ -382,7 +382,7 @@ class HyLexer(RegexLexer):
# valid names for identifiers
# strictly speaking, names just cannot consist entirely of numbers,
# but this should be good enough for now
- valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
- valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
+ valid_name = r'(?!#)[\w!$%*+<=>?/.#:-]+'
def _multi_escape(entries):
return words(entries, suffix=' ')
@@ -1249,7 +1249,7 @@ class RacketLexer(RegexLexer):
_opening_parenthesis = r'[([{]'
_closing_parenthesis = r'[)\]}]'
_delimiters = r'()[\]{}",\'`;\s'
- _symbol = r'(?u)(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters
+ _symbol = r'(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters
_exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?'
_exponent = r'(?:[defls][-+]?\d+)'
_inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)'
@@ -1301,16 +1301,16 @@ class RacketLexer(RegexLexer):
(_inexact_simple, _delimiters), Number.Float, '#pop'),
# #b
- (r'(?i)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'),
+ (r'(?iu)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'),
# #o
- (r'(?i)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'),
+ (r'(?iu)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'),
# #x
- (r'(?i)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'),
+ (r'(?iu)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'),
# #i is always inexact, i.e. float
- (r'(?i)(#d)?#i%s' % _symbol, Number.Float, '#pop'),
+ (r'(?iu)(#d)?#i%s' % _symbol, Number.Float, '#pop'),
# Strings and characters
(r'#?"', String.Double, ('#pop', 'string')),
@@ -1323,7 +1323,7 @@ class RacketLexer(RegexLexer):
(r'#(true|false|[tTfF])', Name.Constant, '#pop'),
# Keyword argument names (e.g. #:keyword)
- (r'#:%s' % _symbol, Keyword.Declaration, '#pop'),
+ (r'(?u)#:%s' % _symbol, Keyword.Declaration, '#pop'),
# Reader extensions
(r'(#lang |#!)(\S+)',
@@ -1400,7 +1400,7 @@ class RacketLexer(RegexLexer):
class NewLispLexer(RegexLexer):
"""
- For `newLISP. <www.newlisp.org>`_ source code (version 10.3.0).
+ For `newLISP. <http://www.newlisp.org/>`_ source code (version 10.3.0).
.. versionadded:: 1.5
"""
@@ -2154,7 +2154,7 @@ class EmacsLispLexer(RegexLexer):
(r'(t|nil)' + terminated, Name.Constant),
# functions and variables
- (r'\*' + symbol + '\*', Name.Variable.Global),
+ (r'\*' + symbol + r'\*', Name.Variable.Global),
(symbol, Name.Variable),
# parentheses
@@ -2327,13 +2327,13 @@ class ShenLexer(RegexLexer):
token = Name.Function if token == Literal else token
yield index, token, value
- raise StopIteration
+ return
def _process_signature(self, tokens):
for index, token, value in tokens:
if token == Literal and value == '}':
yield index, Punctuation, value
- raise StopIteration
+ return
elif token in (Literal, Name.Function):
token = Name.Variable if value.istitle() else Keyword.Type
yield index, token, value
@@ -2619,3 +2619,75 @@ class XtlangLexer(RegexLexer):
include('scheme')
],
}
+
+
+class FennelLexer(RegexLexer):
+ """A lexer for the `Fennel programming language <https://fennel-lang.org>`_.
+
+ Fennel compiles to Lua, so all the Lua builtins are recognized as well
+ as the special forms that are particular to the Fennel compiler.
+
+ .. versionadded:: 2.3
+ """
+ name = 'Fennel'
+ aliases = ['fennel', 'fnl']
+ filenames = ['*.fnl']
+
+ # these two lists are taken from fennel-mode.el:
+ # https://gitlab.com/technomancy/fennel-mode
+ # this list is current as of Fennel version 0.1.0.
+ special_forms = (
+ u'require-macros', u'eval-compiler',
+ u'do', u'values', u'if', u'when', u'each', u'for', u'fn', u'lambda',
+ u'λ', u'set', u'global', u'var', u'local', u'let', u'tset', u'doto',
+ u'set-forcibly!', u'defn', u'partial', u'while', u'or', u'and', u'true',
+ u'false', u'nil', u'.', u'+', u'..', u'^', u'-', u'*', u'%', u'/', u'>',
+ u'<', u'>=', u'<=', u'=', u'~=', u'#', u'...', u':', u'->', u'->>',
+ )
+
+ # It might be nicer to reuse the list from _lua_builtins.py, but it is unclear how.
+ builtins = (
+ u'_G', u'_VERSION', u'arg', u'assert', u'bit32', u'collectgarbage',
+ u'coroutine', u'debug', u'dofile', u'error', u'getfenv',
+ u'getmetatable', u'io', u'ipairs', u'load', u'loadfile', u'loadstring',
+ u'math', u'next', u'os', u'package', u'pairs', u'pcall', u'print',
+ u'rawequal', u'rawget', u'rawlen', u'rawset', u'require', u'select',
+ u'setfenv', u'setmetatable', u'string', u'table', u'tonumber',
+ u'tostring', u'type', u'unpack', u'xpcall'
+ )
+
+ # based on the scheme definition, but disallowing leading digits and commas
+ valid_name = r'[a-zA-Z_!$%&*+/:<=>?@^~|-][\w!$%&*+/:<=>?@^~|\.-]*'
+
+ tokens = {
+ 'root': [
+ # the only comment form is a semicolon; goes to the end of the line
+ (r';.*$', Comment.Single),
+
+ (r'[,\s]+', Text),
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'(\\\\|\\'|[^'])*'", String),
+
+ # these are technically strings, but it's worth visually
+ # distinguishing them because their intent is different
+ # from regular strings.
+ (r':' + valid_name, String.Symbol),
+
+ # special forms are keywords
+ (words(special_forms, suffix=' '), Keyword),
+ # lua standard library are builtins
+ (words(builtins, suffix=' '), Name.Builtin),
+ # special-case the vararg symbol
+ (r'\.\.\.', Name.Variable),
+ # regular identifiers
+ (valid_name, Name.Variable),
+
+ # all your normal paired delimiters for your programming enjoyment
+ (r'(\(|\))', Punctuation),
+ (r'(\[|\])', Punctuation),
+ (r'(\{|\})', Punctuation),
+ ]
+ }
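A minimal sketch of the new FennelLexer, assuming it is importable from pygments.lexers.lisp as added above; the Fennel one-liner is illustrative:

    from pygments.lexers.lisp import FennelLexer

    lexer = FennelLexer()
    code = '(fn greet [name] (print (.. "hello " name)))'
    # 'fn' and '..' should come back as Keyword (special forms),
    # 'print' as Name.Builtin, and 'greet'/'name' as Name.Variable.
    for _, token, value in lexer.get_tokens_unprocessed(code):
        print(token, repr(value))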
diff --git a/pygments/lexers/make.py b/pygments/lexers/make.py
index 9b6273d7..b222b672 100644
--- a/pygments/lexers/make.py
+++ b/pygments/lexers/make.py
@@ -5,7 +5,7 @@
Lexers for Makefiles and similar.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/markup.py b/pygments/lexers/markup.py
index bb4ae6c5..6eb55fc4 100644
--- a/pygments/lexers/markup.py
+++ b/pygments/lexers/markup.py
@@ -5,7 +5,7 @@
Lexers for non-HTML markup languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -536,10 +536,9 @@ class MarkdownLexer(RegexLexer):
# no lexer for this language. handle it like it was a code block
if lexer is None:
yield match.start(4), String, code
- return
-
- for item in do_insertions([], lexer.get_tokens_unprocessed(code)):
- yield item
+ else:
+ for item in do_insertions([], lexer.get_tokens_unprocessed(code)):
+ yield item
yield match.start(5), String , match.group(5)
@@ -583,6 +582,11 @@ class MarkdownLexer(RegexLexer):
(r'[@#][\w/:]+', Name.Entity),
# (image?) links eg: ![Image of Yaktocat](https://octodex.github.com/images/yaktocat.png)
(r'(!?\[)([^]]+)(\])(\()([^)]+)(\))', bygroups(Text, Name.Tag, Text, Text, Name.Attribute, Text)),
+ # reference-style links, e.g.:
+ # [an example][id]
+ # [id]: http://example.com/
+ (r'(\[)([^]]+)(\])(\[)([^]]*)(\])', bygroups(Text, Name.Tag, Text, Text, Name.Label, Text)),
+ (r'^(\s*\[)([^]]*)(\]:\s*)(.+)', bygroups(Text, Name.Label, Text, Name.Attribute)),
# general text, must come last!
(r'[^\\\s]+', Text),
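The two reference-link rules added above can be checked in isolation with the re module; a small sketch using the same patterns (the link text and URL are made up):

    import re

    inline_ref = re.compile(r'(\[)([^]]+)(\])(\[)([^]]*)(\])')
    definition = re.compile(r'^(\s*\[)([^]]*)(\]:\s*)(.+)')

    print(inline_ref.match('[an example][id]').groups())
    # ('[', 'an example', ']', '[', 'id', ']')
    print(definition.match('[id]: http://example.com/').groups())
    # ('[', 'id', ']: ', 'http://example.com/')
    # bygroups then maps the link text to Name.Tag, the id to Name.Label
    # and the URL to Name.Attribute.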
diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py
index 7a92f5bb..ea0ebee2 100644
--- a/pygments/lexers/math.py
+++ b/pygments/lexers/math.py
@@ -5,7 +5,7 @@
Just export lexers that were contained in this module.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/matlab.py b/pygments/lexers/matlab.py
index ccb11a5d..1c77b60c 100644
--- a/pygments/lexers/matlab.py
+++ b/pygments/lexers/matlab.py
@@ -5,7 +5,7 @@
Lexers for Matlab and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -134,9 +134,9 @@ class MatlabLexer(RegexLexer):
}
def analyse_text(text):
- if re.match('^\s*%', text, re.M): # comment
+ if re.match(r'^\s*%', text, re.M): # comment
return 0.2
- elif re.match('^!\w+', text, re.M): # system cmd
+ elif re.match(r'^!\w+', text, re.M): # system cmd
return 0.2
diff --git a/pygments/lexers/ml.py b/pygments/lexers/ml.py
index 4f10edd0..0bff9816 100644
--- a/pygments/lexers/ml.py
+++ b/pygments/lexers/ml.py
@@ -5,7 +5,7 @@
Lexers for ML family languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -43,7 +43,7 @@ class SMLLexer(RegexLexer):
symbolicid_reserved = set((
# Core
- ':', '\|', '=', '=>', '->', '#',
+ ':', r'\|', '=', '=>', '->', '#',
# Modules
':>',
))
diff --git a/pygments/lexers/modeling.py b/pygments/lexers/modeling.py
index a6b0cb77..481cce38 100644
--- a/pygments/lexers/modeling.py
+++ b/pygments/lexers/modeling.py
@@ -5,7 +5,7 @@
Lexers for modeling languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -13,7 +13,7 @@ import re
from pygments.lexer import RegexLexer, include, bygroups, using, default
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
+ Number, Punctuation, Whitespace
from pygments.lexers.html import HtmlLexer
from pygments.lexers import _stan_builtins
@@ -284,8 +284,8 @@ class StanLexer(RegexLexer):
"""Pygments Lexer for Stan models.
The Stan modeling language is specified in the *Stan Modeling Language
- User's Guide and Reference Manual, v2.8.0*,
- `pdf <https://github.com/stan-dev/stan/releases/download/v2.8.8/stan-reference-2.8.0.pdf>`__.
+ User's Guide and Reference Manual, v2.17.0*,
+ `pdf <https://github.com/stan-dev/stan/releases/download/v2.17.0/stan-reference-2.17.0.pdf>`__.
.. versionadded:: 1.6
"""
@@ -316,19 +316,24 @@ class StanLexer(RegexLexer):
'parameters', r'transformed\s+parameters',
'model', r'generated\s+quantities')),
bygroups(Keyword.Namespace, Text, Punctuation)),
+ # target keyword
+ (r'target\s*\+=', Keyword),
# Reserved Words
(r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword),
# Truncation
(r'T(?=\s*\[)', Keyword),
# Data types
(r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type),
+ # The '<' of a range constraint should arguably be Punctuation, but elsewhere
+ # there is no way to tell whether '<' opens a range constraint, so Operator is used
+ (r'(<)(\s*)(upper|lower)(\s*)(=)', bygroups(Operator, Whitespace, Keyword, Whitespace, Punctuation)),
+ (r'(,)(\s*)(upper)(\s*)(=)', bygroups(Punctuation, Whitespace, Keyword, Whitespace, Punctuation)),
# Punctuation
- (r"[;:,\[\]()]", Punctuation),
+ (r"[;,\[\]()]", Punctuation),
# Builtin
- (r'(%s)(?=\s*\()'
- % r'|'.join(_stan_builtins.FUNCTIONS
- + _stan_builtins.DISTRIBUTIONS),
- Name.Builtin),
+ (r'(%s)(?=\s*\()' % '|'.join(_stan_builtins.FUNCTIONS), Name.Builtin),
+ (r'(~)(\s*)(%s)(?=\s*\()' % '|'.join(_stan_builtins.DISTRIBUTIONS),
+ bygroups(Operator, Whitespace, Name.Builtin)),
# Special names ending in __, like lp__
(r'[A-Za-z]\w*__\b', Name.Builtin.Pseudo),
(r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved),
@@ -337,17 +342,18 @@ class StanLexer(RegexLexer):
# Regular variable names
(r'[A-Za-z]\w*\b', Name),
# Real Literals
- (r'-?[0-9]+(\.[0-9]+)?[eE]-?[0-9]+', Number.Float),
- (r'-?[0-9]*\.[0-9]*', Number.Float),
+ (r'[0-9]+(\.[0-9]*)?([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\.[0-9]+([eE][+-]?[0-9]+)?', Number.Float),
# Integer Literals
- (r'-?[0-9]+', Number.Integer),
+ (r'[0-9]+', Number.Integer),
# Assignment operators
- # SLexer makes these tokens Operators.
- (r'<-|~', Operator),
+ (r'<-|(?:\+|-|\.?/|\.?\*|=)?=|~', Operator),
# Infix, prefix and postfix operators (and = )
- (r"\+|-|\.?\*|\.?/|\\|'|\^|==?|!=?|<=?|>=?|\|\||&&", Operator),
+ (r"\+|-|\.?\*|\.?/|\\|'|\^|!=?|<=?|>=?|\|\||&&|%|\?|:", Operator),
# Block delimiters
(r'[{}]', Punctuation),
+ # The '|' separating the outcome from the parameters in distribution calls
+ (r'\|', Punctuation)
]
}
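A minimal sketch exercising the new target-increment, range-constraint and distribution rules, assuming StanLexer is importable from pygments.lexers.modeling and that normal_lpdf appears in _stan_builtins.FUNCTIONS:

    from pygments.lexers.modeling import StanLexer

    lexer = StanLexer()
    code = ('real<lower=0, upper=1> theta;\n'
            'target += normal_lpdf(y | mu, sigma);\n')
    # '<lower=' and ', upper=' should now be split into Operator/Punctuation,
    # Keyword and Punctuation pieces, 'target +=' should be a Keyword, and
    # the '|' inside the lpdf call should be Punctuation.
    for _, token, value in lexer.get_tokens_unprocessed(code):
        print(token, repr(value))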
diff --git a/pygments/lexers/modula2.py b/pygments/lexers/modula2.py
index 01771f55..c0a69b40 100644
--- a/pygments/lexers/modula2.py
+++ b/pygments/lexers/modula2.py
@@ -5,7 +5,7 @@
Multi-Dialect Lexer for Modula-2.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/monte.py b/pygments/lexers/monte.py
index aa5c75f7..ed6e20f8 100644
--- a/pygments/lexers/monte.py
+++ b/pygments/lexers/monte.py
@@ -5,7 +5,7 @@
Lexer for the Monte programming language.
- :copyright: Copyright 2016 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -43,8 +43,8 @@ _operators = [
_escape_pattern = (
r'(?:\\x[0-9a-fA-F]{2}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
r'\\["\'\\bftnr])')
-#_char = _escape_chars + [('.', String.Char)]
-_identifier = '[_a-zA-Z][_0-9a-zA-Z]*'
+# _char = _escape_chars + [('.', String.Char)]
+_identifier = r'[_a-zA-Z]\w*'
_constants = [
# Void constants
@@ -75,6 +75,7 @@ _safeScope = [
'makeBrandPair', 'makeLazySlot', 'safeScope', 'simple__quasiParser',
]
+
class MonteLexer(RegexLexer):
"""
Lexer for the `Monte <https://monte.readthedocs.io/>`_ programming language.
diff --git a/pygments/lexers/ncl.py b/pygments/lexers/ncl.py
index 85f46f20..3ca5135c 100644
--- a/pygments/lexers/ncl.py
+++ b/pygments/lexers/ncl.py
@@ -5,7 +5,7 @@
Lexers for NCAR Command Language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -35,7 +35,7 @@ class NCLLexer(RegexLexer):
(r';.*\n', Comment),
include('strings'),
include('core'),
- (r'[a-z][\w$]*', Name),
+ (r'[a-zA-Z_]\w*', Name),
include('nums'),
(r'[\s]+', Text),
],
@@ -43,7 +43,7 @@ class NCLLexer(RegexLexer):
# Statements
(words((
'begin', 'break', 'continue', 'create', 'defaultapp', 'do',
- 'else', 'end', 'external', 'exit', 'False', 'file', 'function',
+ 'else', 'end', 'external', 'exit', 'True', 'False', 'file', 'function',
'getvalues', 'graphic', 'group', 'if', 'list', 'load', 'local',
'new', '_Missing', 'Missing', 'noparent', 'procedure',
'quit', 'QUIT', 'Quit', 'record', 'return', 'setvalues', 'stop',
@@ -59,10 +59,10 @@ class NCLLexer(RegexLexer):
Keyword.Type),
# Operators
- (r'[\^*+\-/<>]', Operator),
+ (r'[\%^*+\-/<>]', Operator),
# punctuation:
- (r'[\[\]():@$.,]', Punctuation),
+ (r'[\[\]():@$!&|.,\\{}]', Punctuation),
(r'[=:]', Punctuation),
# Intrinsics
@@ -589,149 +589,60 @@ class NCLLexer(RegexLexer):
'lgTitleFontQuality', 'lgTitleFontThicknessF', 'lgTitleFuncCode',
'lgTitleJust', 'lgTitleOffsetF', 'lgTitleOn', 'lgTitlePosition',
'lgTitleString', 'lgTopMarginF', 'mpAreaGroupCount',
- 'mpAreaGroupCount_MapPlot', 'mpAreaMaskingOn',
- 'mpAreaMaskingOn_MapPlot', 'mpAreaNames', 'mpAreaNames_MapPlot',
- 'mpAreaTypes', 'mpAreaTypes_MapPlot', 'mpBottomAngleF',
- 'mpBottomAngleF_MapTransformation', 'mpBottomMapPosF',
- 'mpBottomMapPosF_MapTransformation', 'mpBottomNDCF',
- 'mpBottomNDCF_MapTransformation', 'mpBottomNPCF',
- 'mpBottomNPCF_MapTransformation', 'mpBottomPointLatF',
- 'mpBottomPointLatF_MapTransformation', 'mpBottomPointLonF',
- 'mpBottomPointLonF_MapTransformation', 'mpBottomWindowF',
- 'mpBottomWindowF_MapTransformation', 'mpCenterLatF',
- 'mpCenterLatF_MapTransformation', 'mpCenterLonF',
- 'mpCenterLonF_MapTransformation', 'mpCenterRotF',
- 'mpCenterRotF_MapTransformation', 'mpCountyLineColor',
- 'mpCountyLineColor_MapPlot', 'mpCountyLineDashPattern',
- 'mpCountyLineDashPattern_MapPlot', 'mpCountyLineDashSegLenF',
- 'mpCountyLineDashSegLenF_MapPlot', 'mpCountyLineThicknessF',
- 'mpCountyLineThicknessF_MapPlot', 'mpDataBaseVersion',
- 'mpDataBaseVersion_MapPlot', 'mpDataResolution',
- 'mpDataResolution_MapPlot', 'mpDataSetName', 'mpDataSetName_MapPlot',
- 'mpDefaultFillColor', 'mpDefaultFillColor_MapPlot',
- 'mpDefaultFillPattern', 'mpDefaultFillPattern_MapPlot',
- 'mpDefaultFillScaleF', 'mpDefaultFillScaleF_MapPlot',
- 'mpDynamicAreaGroups', 'mpDynamicAreaGroups_MapPlot',
- 'mpEllipticalBoundary', 'mpEllipticalBoundary_MapTransformation',
- 'mpFillAreaSpecifiers', 'mpFillAreaSpecifiers_MapPlot',
- 'mpFillBoundarySets', 'mpFillBoundarySets_MapPlot', 'mpFillColor',
- 'mpFillColor_MapPlot', 'mpFillColors', 'mpFillColors_MapPlot',
- 'mpFillColors-default', 'mpFillDotSizeF', 'mpFillDotSizeF_MapPlot',
- 'mpFillDrawOrder', 'mpFillDrawOrder_MapPlot', 'mpFillOn',
- 'mpFillOn_MapPlot', 'mpFillPatternBackground',
- 'mpFillPatternBackground_MapPlot', 'mpFillPattern',
- 'mpFillPattern_MapPlot', 'mpFillPatterns', 'mpFillPatterns_MapPlot',
- 'mpFillPatterns-default', 'mpFillScaleF', 'mpFillScaleF_MapPlot',
- 'mpFillScales', 'mpFillScales_MapPlot', 'mpFillScales-default',
- 'mpFixedAreaGroups', 'mpFixedAreaGroups_MapPlot',
- 'mpGeophysicalLineColor', 'mpGeophysicalLineColor_MapPlot',
- 'mpGeophysicalLineDashPattern',
- 'mpGeophysicalLineDashPattern_MapPlot',
- 'mpGeophysicalLineDashSegLenF',
- 'mpGeophysicalLineDashSegLenF_MapPlot', 'mpGeophysicalLineThicknessF',
- 'mpGeophysicalLineThicknessF_MapPlot', 'mpGreatCircleLinesOn',
- 'mpGreatCircleLinesOn_MapTransformation', 'mpGridAndLimbDrawOrder',
- 'mpGridAndLimbDrawOrder_MapPlot', 'mpGridAndLimbOn',
- 'mpGridAndLimbOn_MapPlot', 'mpGridLatSpacingF',
- 'mpGridLatSpacingF_MapPlot', 'mpGridLineColor',
- 'mpGridLineColor_MapPlot', 'mpGridLineDashPattern',
- 'mpGridLineDashPattern_MapPlot', 'mpGridLineDashSegLenF',
- 'mpGridLineDashSegLenF_MapPlot', 'mpGridLineThicknessF',
- 'mpGridLineThicknessF_MapPlot', 'mpGridLonSpacingF',
- 'mpGridLonSpacingF_MapPlot', 'mpGridMaskMode',
- 'mpGridMaskMode_MapPlot', 'mpGridMaxLatF', 'mpGridMaxLatF_MapPlot',
- 'mpGridPolarLonSpacingF', 'mpGridPolarLonSpacingF_MapPlot',
- 'mpGridSpacingF', 'mpGridSpacingF_MapPlot', 'mpInlandWaterFillColor',
- 'mpInlandWaterFillColor_MapPlot', 'mpInlandWaterFillPattern',
- 'mpInlandWaterFillPattern_MapPlot', 'mpInlandWaterFillScaleF',
- 'mpInlandWaterFillScaleF_MapPlot', 'mpLabelDrawOrder',
- 'mpLabelDrawOrder_MapPlot', 'mpLabelFontColor',
- 'mpLabelFontColor_MapPlot', 'mpLabelFontHeightF',
- 'mpLabelFontHeightF_MapPlot', 'mpLabelsOn', 'mpLabelsOn_MapPlot',
- 'mpLambertMeridianF', 'mpLambertMeridianF_MapTransformation',
- 'mpLambertParallel1F', 'mpLambertParallel1F_MapTransformation',
- 'mpLambertParallel2F', 'mpLambertParallel2F_MapTransformation',
- 'mpLandFillColor', 'mpLandFillColor_MapPlot', 'mpLandFillPattern',
- 'mpLandFillPattern_MapPlot', 'mpLandFillScaleF',
- 'mpLandFillScaleF_MapPlot', 'mpLeftAngleF',
- 'mpLeftAngleF_MapTransformation', 'mpLeftCornerLatF',
- 'mpLeftCornerLatF_MapTransformation', 'mpLeftCornerLonF',
- 'mpLeftCornerLonF_MapTransformation', 'mpLeftMapPosF',
- 'mpLeftMapPosF_MapTransformation', 'mpLeftNDCF',
- 'mpLeftNDCF_MapTransformation', 'mpLeftNPCF',
- 'mpLeftNPCF_MapTransformation', 'mpLeftPointLatF',
- 'mpLeftPointLatF_MapTransformation', 'mpLeftPointLonF',
- 'mpLeftPointLonF_MapTransformation', 'mpLeftWindowF',
- 'mpLeftWindowF_MapTransformation', 'mpLimbLineColor',
- 'mpLimbLineColor_MapPlot', 'mpLimbLineDashPattern',
- 'mpLimbLineDashPattern_MapPlot', 'mpLimbLineDashSegLenF',
- 'mpLimbLineDashSegLenF_MapPlot', 'mpLimbLineThicknessF',
- 'mpLimbLineThicknessF_MapPlot', 'mpLimitMode',
- 'mpLimitMode_MapTransformation', 'Angle_projection_limits',
- 'mpMaskAreaSpecifiers', 'mpMaskAreaSpecifiers_MapPlot',
- 'mpMaskOutlineSpecifiers', 'mpMaskOutlineSpecifiers_MapPlot',
- 'mpMaxLatF', 'mpMaxLatF_MapTransformation', 'mpMaxLonF',
- 'mpMaxLonF_MapTransformation', 'mpMinLatF',
- 'mpMinLatF_MapTransformation', 'mpMinLonF',
- 'mpMinLonF_MapTransformation', 'mpMonoFillColor',
- 'mpMonoFillColor_MapPlot', 'mpMonoFillPattern',
- 'mpMonoFillPattern_MapPlot', 'mpMonoFillScale',
- 'mpMonoFillScale_MapPlot', 'mpNationalLineColor',
- 'mpNationalLineColor_MapPlot', 'mpNationalLineDashPattern',
- 'mpNationalLineDashPattern_MapPlot',
- 'mpNationalLineDashSegLenF_MapPlot', 'mpNationalLineThicknessF',
- 'mpNationalLineThicknessF_MapPlot', 'mpOceanFillColor',
- 'mpOceanFillColor_MapPlot', 'mpOceanFillPattern',
- 'mpOceanFillPattern_MapPlot', 'mpOceanFillScaleF',
- 'mpOceanFillScaleF_MapPlot', 'mpOutlineBoundarySets',
- 'mpOutlineBoundarySets_MapPlot', 'mpOutlineDrawOrder',
- 'mpOutlineDrawOrder_MapPlot', 'mpOutlineMaskingOn',
- 'mpOutlineMaskingOn_MapPlot', 'mpOutlineOn', 'mpOutlineOn_MapPlot',
- 'mpOutlineSpecifiers', 'mpOutlineSpecifiers_MapPlot',
- 'mpPerimDrawOrder', 'mpPerimDrawOrder_MapPlot', 'mpPerimLineColor',
- 'mpPerimLineColor_MapPlot', 'mpPerimLineDashPattern',
- 'mpPerimLineDashPattern_MapPlot', 'mpPerimLineDashSegLenF',
- 'mpPerimLineDashSegLenF_MapPlot', 'mpPerimLineThicknessF',
- 'mpPerimLineThicknessF_MapPlot', 'mpPerimOn', 'mpPerimOn_MapPlot',
- 'mpPolyMode', 'mpPolyMode_MapTransformation', 'mpProjection',
- 'mpProjection_MapTransformation', 'mpProvincialLineColor',
- 'mpProvincialLineColor_MapPlot', 'mpProvincialLineDashPattern',
- 'mpProvincialLineDashPattern_MapPlot', 'mpProvincialLineDashSegLenF',
- 'mpProvincialLineDashSegLenF_MapPlot', 'mpProvincialLineThicknessF',
- 'mpProvincialLineThicknessF_MapPlot', 'mpRelativeCenterLat',
- 'mpRelativeCenterLat_MapTransformation', 'mpRelativeCenterLon',
- 'mpRelativeCenterLon_MapTransformation', 'mpRightAngleF',
- 'mpRightAngleF_MapTransformation', 'mpRightCornerLatF',
- 'mpRightCornerLatF_MapTransformation', 'mpRightCornerLonF',
- 'mpRightCornerLonF_MapTransformation', 'mpRightMapPosF',
- 'mpRightMapPosF_MapTransformation', 'mpRightNDCF',
- 'mpRightNDCF_MapTransformation', 'mpRightNPCF',
- 'mpRightNPCF_MapTransformation', 'mpRightPointLatF',
- 'mpRightPointLatF_MapTransformation', 'mpRightPointLonF',
- 'mpRightPointLonF_MapTransformation', 'mpRightWindowF',
- 'mpRightWindowF_MapTransformation', 'mpSatelliteAngle1F',
- 'mpSatelliteAngle1F_MapTransformation', 'mpSatelliteAngle2F',
- 'mpSatelliteAngle2F_MapTransformation', 'mpSatelliteDistF',
- 'mpSatelliteDistF_MapTransformation', 'mpShapeMode',
- 'mpShapeMode_MapPlot', 'mpSpecifiedFillColors',
- 'mpSpecifiedFillColors_MapPlot', 'mpSpecifiedFillDirectIndexing',
- 'mpSpecifiedFillDirectIndexing_MapPlot', 'mpSpecifiedFillPatterns',
- 'mpSpecifiedFillPatterns_MapPlot', 'mpSpecifiedFillPriority',
- 'mpSpecifiedFillPriority_MapPlot', 'mpSpecifiedFillScales',
- 'mpSpecifiedFillScales_MapPlot', 'mpTopAngleF',
- 'mpTopAngleF_MapTransformation', 'mpTopMapPosF',
- 'mpTopMapPosF_MapTransformation', 'mpTopNDCF',
- 'mpTopNDCF_MapTransformation', 'mpTopNPCF',
- 'mpTopNPCF_MapTransformation', 'mpTopPointLatF',
- 'mpTopPointLatF_MapTransformation', 'mpTopPointLonF',
- 'mpTopPointLonF_MapTransformation', 'mpTopWindowF',
- 'mpTopWindowF_MapTransformation', 'mpUSStateLineColor',
- 'mpUSStateLineColor_MapPlot', 'mpUSStateLineDashPattern',
- 'mpUSStateLineDashPattern_MapPlot', 'mpUSStateLineDashSegLenF',
- 'mpUSStateLineDashSegLenF_MapPlot', 'mpUSStateLineThicknessF',
- 'mpUSStateLineThicknessF_MapPlot', 'pmAnnoManagers',
- 'pmAnnoViews', 'pmLabelBarDisplayMode', 'pmLabelBarHeightF',
- 'pmLabelBarKeepAspect', 'pmLabelBarOrthogonalPosF',
+ 'mpAreaMaskingOn', 'mpAreaNames', 'mpAreaTypes', 'mpBottomAngleF',
+ 'mpBottomMapPosF', 'mpBottomNDCF', 'mpBottomNPCF',
+ 'mpBottomPointLatF', 'mpBottomPointLonF', 'mpBottomWindowF',
+ 'mpCenterLatF', 'mpCenterLonF', 'mpCenterRotF', 'mpCountyLineColor',
+ 'mpCountyLineDashPattern', 'mpCountyLineDashSegLenF',
+ 'mpCountyLineThicknessF', 'mpDataBaseVersion', 'mpDataResolution',
+ 'mpDataSetName', 'mpDefaultFillColor', 'mpDefaultFillPattern',
+ 'mpDefaultFillScaleF', 'mpDynamicAreaGroups', 'mpEllipticalBoundary',
+ 'mpFillAreaSpecifiers', 'mpFillBoundarySets', 'mpFillColor',
+ 'mpFillColors', 'mpFillColors-default', 'mpFillDotSizeF',
+ 'mpFillDrawOrder', 'mpFillOn', 'mpFillPatternBackground',
+ 'mpFillPattern', 'mpFillPatterns', 'mpFillPatterns-default',
+ 'mpFillScaleF', 'mpFillScales', 'mpFillScales-default',
+ 'mpFixedAreaGroups', 'mpGeophysicalLineColor',
+ 'mpGeophysicalLineDashPattern', 'mpGeophysicalLineDashSegLenF',
+ 'mpGeophysicalLineThicknessF', 'mpGreatCircleLinesOn',
+ 'mpGridAndLimbDrawOrder', 'mpGridAndLimbOn', 'mpGridLatSpacingF',
+ 'mpGridLineColor', 'mpGridLineDashPattern', 'mpGridLineDashSegLenF',
+ 'mpGridLineThicknessF', 'mpGridLonSpacingF', 'mpGridMaskMode',
+ 'mpGridMaxLatF', 'mpGridPolarLonSpacingF', 'mpGridSpacingF',
+ 'mpInlandWaterFillColor', 'mpInlandWaterFillPattern',
+ 'mpInlandWaterFillScaleF', 'mpLabelDrawOrder', 'mpLabelFontColor',
+ 'mpLabelFontHeightF', 'mpLabelsOn', 'mpLambertMeridianF',
+ 'mpLambertParallel1F', 'mpLambertParallel2F', 'mpLandFillColor',
+ 'mpLandFillPattern', 'mpLandFillScaleF', 'mpLeftAngleF',
+ 'mpLeftCornerLatF', 'mpLeftCornerLonF', 'mpLeftMapPosF',
+ 'mpLeftNDCF', 'mpLeftNPCF', 'mpLeftPointLatF',
+ 'mpLeftPointLonF', 'mpLeftWindowF', 'mpLimbLineColor',
+ 'mpLimbLineDashPattern', 'mpLimbLineDashSegLenF',
+ 'mpLimbLineThicknessF', 'mpLimitMode', 'mpMaskAreaSpecifiers',
+ 'mpMaskOutlineSpecifiers', 'mpMaxLatF', 'mpMaxLonF',
+ 'mpMinLatF', 'mpMinLonF', 'mpMonoFillColor', 'mpMonoFillPattern',
+ 'mpMonoFillScale', 'mpNationalLineColor', 'mpNationalLineDashPattern',
+ 'mpNationalLineThicknessF', 'mpOceanFillColor', 'mpOceanFillPattern',
+ 'mpOceanFillScaleF', 'mpOutlineBoundarySets', 'mpOutlineDrawOrder',
+ 'mpOutlineMaskingOn', 'mpOutlineOn', 'mpOutlineSpecifiers',
+ 'mpPerimDrawOrder', 'mpPerimLineColor', 'mpPerimLineDashPattern',
+ 'mpPerimLineDashSegLenF', 'mpPerimLineThicknessF', 'mpPerimOn',
+ 'mpPolyMode', 'mpProjection', 'mpProvincialLineColor',
+ 'mpProvincialLineDashPattern', 'mpProvincialLineDashSegLenF',
+ 'mpProvincialLineThicknessF', 'mpRelativeCenterLat',
+ 'mpRelativeCenterLon', 'mpRightAngleF', 'mpRightCornerLatF',
+ 'mpRightCornerLonF', 'mpRightMapPosF', 'mpRightNDCF',
+ 'mpRightNPCF', 'mpRightPointLatF', 'mpRightPointLonF',
+ 'mpRightWindowF', 'mpSatelliteAngle1F', 'mpSatelliteAngle2F',
+ 'mpSatelliteDistF', 'mpShapeMode', 'mpSpecifiedFillColors',
+ 'mpSpecifiedFillDirectIndexing', 'mpSpecifiedFillPatterns',
+ 'mpSpecifiedFillPriority', 'mpSpecifiedFillScales',
+ 'mpTopAngleF', 'mpTopMapPosF', 'mpTopNDCF', 'mpTopNPCF',
+ 'mpTopPointLatF', 'mpTopPointLonF', 'mpTopWindowF',
+ 'mpUSStateLineColor', 'mpUSStateLineDashPattern',
+ 'mpUSStateLineDashSegLenF', 'mpUSStateLineThicknessF',
+ 'pmAnnoManagers', 'pmAnnoViews', 'pmLabelBarDisplayMode',
+ 'pmLabelBarHeightF', 'pmLabelBarKeepAspect', 'pmLabelBarOrthogonalPosF',
'pmLabelBarParallelPosF', 'pmLabelBarSide', 'pmLabelBarWidthF',
'pmLabelBarZone', 'pmLegendDisplayMode', 'pmLegendHeightF',
'pmLegendKeepAspect', 'pmLegendOrthogonalPosF',
@@ -739,35 +650,14 @@ class NCLLexer(RegexLexer):
'pmLegendZone', 'pmOverlaySequenceIds', 'pmTickMarkDisplayMode',
'pmTickMarkZone', 'pmTitleDisplayMode', 'pmTitleZone',
'prGraphicStyle', 'prPolyType', 'prXArray', 'prYArray',
- 'sfCopyData_MeshScalarField', 'sfCopyData', 'sfCopyData_ScalarField',
- 'sfDataArray_MeshScalarField', 'sfDataArray',
- 'sfDataArray_ScalarField', 'sfDataMaxV_MeshScalarField', 'sfDataMaxV',
- 'sfDataMaxV_ScalarField', 'sfDataMinV_MeshScalarField', 'sfDataMinV',
- 'sfDataMinV_ScalarField', 'sfElementNodes',
- 'sfElementNodes_MeshScalarField', 'sfExchangeDimensions',
- 'sfExchangeDimensions_ScalarField', 'sfFirstNodeIndex',
- 'sfFirstNodeIndex_MeshScalarField', 'sfMissingValueV_MeshScalarField',
- 'sfMissingValueV', 'sfMissingValueV_ScalarField',
- 'sfXArray_MeshScalarField', 'sfXArray', 'sfXArray_ScalarField',
- 'sfXCActualEndF_MeshScalarField', 'sfXCActualEndF',
- 'sfXCActualEndF_ScalarField', 'sfXCActualStartF_MeshScalarField',
- 'sfXCActualStartF', 'sfXCActualStartF_ScalarField', 'sfXCEndIndex',
- 'sfXCEndIndex_ScalarField', 'sfXCEndSubsetV',
- 'sfXCEndSubsetV_ScalarField', 'sfXCEndV', 'sfXCEndV_ScalarField',
- 'sfXCStartIndex', 'sfXCStartIndex_ScalarField', 'sfXCStartSubsetV',
- 'sfXCStartSubsetV_ScalarField', 'sfXCStartV',
- 'sfXCStartV_ScalarField', 'sfXCStride', 'sfXCStride_ScalarField',
- 'sfXCellBounds', 'sfXCellBounds_MeshScalarField',
- 'sfYArray_MeshScalarField', 'sfYArray', 'sfYArray_ScalarField',
- 'sfYCActualEndF_MeshScalarField', 'sfYCActualEndF',
- 'sfYCActualEndF_ScalarField', 'sfYCActualStartF_MeshScalarField',
- 'sfYCActualStartF', 'sfYCActualStartF_ScalarField', 'sfYCEndIndex',
- 'sfYCEndIndex_ScalarField', 'sfYCEndSubsetV',
- 'sfYCEndSubsetV_ScalarField', 'sfYCEndV', 'sfYCEndV_ScalarField',
- 'sfYCStartIndex', 'sfYCStartIndex_ScalarField', 'sfYCStartSubsetV',
- 'sfYCStartSubsetV_ScalarField', 'sfYCStartV',
- 'sfYCStartV_ScalarField', 'sfYCStride', 'sfYCStride_ScalarField',
- 'sfYCellBounds', 'sfYCellBounds_MeshScalarField', 'stArrowLengthF',
+ 'sfCopyData', 'sfDataArray', 'sfDataMaxV', 'sfDataMinV',
+ 'sfElementNodes', 'sfExchangeDimensions', 'sfFirstNodeIndex',
+ 'sfMissingValueV', 'sfXArray', 'sfXCActualEndF', 'sfXCActualStartF',
+ 'sfXCEndIndex', 'sfXCEndSubsetV', 'sfXCEndV', 'sfXCStartIndex',
+ 'sfXCStartSubsetV', 'sfXCStartV', 'sfXCStride', 'sfXCellBounds',
+ 'sfYArray', 'sfYCActualEndF', 'sfYCActualStartF', 'sfYCEndIndex',
+ 'sfYCEndSubsetV', 'sfYCEndV', 'sfYCStartIndex', 'sfYCStartSubsetV',
+ 'sfYCStartV', 'sfYCStride', 'sfYCellBounds', 'stArrowLengthF',
'stArrowStride', 'stCrossoverCheckCount',
'stExplicitLabelBarLabelsOn', 'stLabelBarEndLabelsOn',
'stLabelFormat', 'stLengthCheckCount', 'stLevelColors',
@@ -870,25 +760,12 @@ class NCLLexer(RegexLexer):
'tmYRMinorPerMajor', 'tmYRMinorThicknessF', 'tmYRMinorValues',
'tmYRMode', 'tmYROn', 'tmYRPrecision', 'tmYRStyle', 'tmYRTickEndF',
'tmYRTickSpacingF', 'tmYRTickStartF', 'tmYRValues', 'tmYUseLeft',
- 'trGridType', 'trGridType_Transformation', 'trLineInterpolationOn',
- 'trLineInterpolationOn_Transformation', 'trXAxisType',
- 'trXAxisType_IrregularTransformation', 'trXCoordPoints',
- 'trXCoordPoints_IrregularTransformation', 'trXInterPoints',
- 'trXInterPoints_IrregularTransformation', 'trXLog',
- 'trXLog_LogLinTransformation', 'trXMaxF', 'trXMaxF_Transformation',
- 'trXMinF', 'trXMinF_Transformation', 'trXReverse',
- 'trXReverse_Transformation', 'trXSamples',
- 'trXSamples_IrregularTransformation', 'trXTensionF',
- 'trXTensionF_IrregularTransformation', 'trYAxisType',
- 'trYAxisType_IrregularTransformation', 'trYCoordPoints',
- 'trYCoordPoints_IrregularTransformation', 'trYInterPoints',
- 'trYInterPoints_IrregularTransformation', 'trYLog',
- 'trYLog_LogLinTransformation', 'trYMaxF', 'trYMaxF_Transformation',
- 'trYMinF', 'trYMinF_Transformation', 'trYReverse',
- 'trYReverse_Transformation', 'trYSamples',
- 'trYSamples_IrregularTransformation', 'trYTensionF',
- 'trYTensionF_IrregularTransformation', 'txAngleF',
- 'txBackgroundFillColor', 'txConstantSpacingF', 'txDirection',
+ 'trGridType', 'trLineInterpolationOn',
+ 'trXAxisType', 'trXCoordPoints', 'trXInterPoints', 'trXLog',
+ 'trXMaxF', 'trXMinF', 'trXReverse', 'trXSamples', 'trXTensionF',
+ 'trYAxisType', 'trYCoordPoints', 'trYInterPoints', 'trYLog',
+ 'trYMaxF', 'trYMinF', 'trYReverse', 'trYSamples', 'trYTensionF',
+ 'txAngleF', 'txBackgroundFillColor', 'txConstantSpacingF', 'txDirection',
'txFont', 'HLU-Fonts', 'txFontAspectF', 'txFontColor',
'txFontHeightF', 'txFontOpacityF', 'txFontQuality',
'txFontThicknessF', 'txFuncCode', 'txJust', 'txPerimColor',
@@ -969,53 +846,16 @@ class NCLLexer(RegexLexer):
'vfYCEndSubsetV', 'vfYCEndV', 'vfYCStartIndex', 'vfYCStartSubsetV',
'vfYCStartV', 'vfYCStride', 'vpAnnoManagerId', 'vpClipOn',
'vpHeightF', 'vpKeepAspect', 'vpOn', 'vpUseSegments', 'vpWidthF',
- 'vpXF', 'vpYF', 'wkAntiAlias', 'wkAntiAlias_DocumentWorkstation',
- 'wkAntiAlias_ImageWorkstation', 'wkAntiAlias_XWorkstation',
- 'wkBackgroundColor', 'wkBackgroundColor_Workstation',
- 'wkBackgroundOpacityF', 'wkBackgroundOpacityF_DocumentWorkstation',
- 'wkBackgroundOpacityF_ImageWorkstation',
- 'wkBackgroundOpacityF_XWorkstation', 'wkColorMapLen',
- 'wkColorMapLen_Workstation', 'wkColorMap', 'wkColorMap_Workstation',
- 'wkColorModel', 'wkColorModel_PDFWorkstation',
- 'wkColorModel_PSWorkstation', 'wkDashTableLength',
- 'wkDashTableLength_Workstation', 'wkDefGraphicStyleId',
- 'wkDefGraphicStyleId_Workstation', 'wkDeviceLowerX',
- 'wkDeviceLowerX_DocumentWorkstation', 'wkDeviceLowerX_PDFWorkstation',
- 'wkDeviceLowerX_PSWorkstation', 'wkDeviceLowerY',
- 'wkDeviceLowerY_DocumentWorkstation', 'wkDeviceLowerY_PDFWorkstation',
- 'wkDeviceLowerY_PSWorkstation', 'wkDeviceUpperX',
- 'wkDeviceUpperX_DocumentWorkstation', 'wkDeviceUpperX_PDFWorkstation',
- 'wkDeviceUpperX_PSWorkstation', 'wkDeviceUpperY',
- 'wkDeviceUpperY_DocumentWorkstation', 'wkDeviceUpperY_PDFWorkstation',
- 'wkDeviceUpperY_PSWorkstation', 'wkFileName',
- 'wkFileName_DocumentWorkstation', 'wkFileName_ImageWorkstation',
- 'wkFillTableLength', 'wkFillTableLength_Workstation',
- 'wkForegroundColor', 'wkForegroundColor_Workstation', 'wkFormat',
- 'wkFormat_DocumentWorkstation', 'wkFormat_ImageWorkstation',
- 'wkFullBackground', 'wkFullBackground_PDFWorkstation',
- 'wkFullBackground_PSWorkstation', 'wkGksWorkId',
- 'wkGksWorkId_Workstation', 'wkHeight', 'wkHeight_ImageWorkstation',
- 'wkHeight_XWorkstation', 'wkMarkerTableLength',
- 'wkMarkerTableLength_Workstation', 'wkMetaName',
- 'wkMetaName_NcgmWorkstation', 'wkOrientation',
- 'wkOrientation_PDFWorkstation', 'wkOrientation_PSWorkstation',
- 'wkPDFFileName', 'wkPDFFileName_PDFWorkstation', 'wkPDFFormat',
- 'wkPDFFormat_PDFWorkstation', 'wkPDFResolution',
- 'wkPDFResolution_PDFWorkstation', 'wkPSFileName',
- 'wkPSFileName_PSWorkstation', 'wkPSFormat',
- 'wkPSFormat_PSWorkstation', 'wkPSResolution',
- 'wkPSResolution_PSWorkstation', 'wkPaperHeightF',
- 'wkPaperHeightF_DocumentWorkstation', 'wkPaperHeightF_PDFWorkstation',
- 'wkPaperHeightF_PSWorkstation', 'wkPaperSize',
- 'wkPaperSize_DocumentWorkstation', 'wkPaperSize_PDFWorkstation',
- 'wkPaperSize_PSWorkstation', 'wkPaperWidthF',
- 'wkPaperWidthF_DocumentWorkstation', 'wkPaperWidthF_PDFWorkstation',
- 'wkPaperWidthF_PSWorkstation', 'wkPause', 'wkPause_XWorkstation',
- 'wkTopLevelViews', 'wkTopLevelViews_Workstation', 'wkViews',
- 'wkViews_Workstation', 'wkVisualType', 'wkVisualType_PDFWorkstation',
- 'wkVisualType_PSWorkstation', 'wkWidth', 'wkWidth_ImageWorkstation',
- 'wkWidth_XWorkstation', 'wkWindowId', 'wkWindowId_XWorkstation',
- 'wkXColorMode', 'wkXColorMode_XWorkstation', 'wsCurrentSize',
+ 'vpXF', 'vpYF', 'wkAntiAlias', 'wkBackgroundColor', 'wkBackgroundOpacityF',
+ 'wkColorMapLen', 'wkColorMap', 'wkColorModel', 'wkDashTableLength',
+ 'wkDefGraphicStyleId', 'wkDeviceLowerX', 'wkDeviceLowerY',
+ 'wkDeviceUpperX', 'wkDeviceUpperY', 'wkFileName', 'wkFillTableLength',
+ 'wkForegroundColor', 'wkFormat', 'wkFullBackground', 'wkGksWorkId',
+ 'wkHeight', 'wkMarkerTableLength', 'wkMetaName', 'wkOrientation',
+ 'wkPDFFileName', 'wkPDFFormat', 'wkPDFResolution', 'wkPSFileName',
+ 'wkPSFormat', 'wkPSResolution', 'wkPaperHeightF', 'wkPaperSize',
+ 'wkPaperWidthF', 'wkPause', 'wkTopLevelViews', 'wkViews',
+ 'wkVisualType', 'wkWidth', 'wkWindowId', 'wkXColorMode', 'wsCurrentSize',
'wsMaximumSize', 'wsThresholdSize', 'xyComputeXMax',
'xyComputeXMin', 'xyComputeYMax', 'xyComputeYMin', 'xyCoordData',
'xyCoordDataSpec', 'xyCurveDrawOrder', 'xyDashPattern',
@@ -1037,7 +877,8 @@ class NCLLexer(RegexLexer):
Name.Builtin),
# Booleans
- (r'True|False', Name.Builtin), # Comparing Operators
+ (r'\.(True|False)\.', Name.Builtin),
+ # Comparing Operators
(r'\.(eq|ne|lt|le|gt|ge|not|and|or|xor)\.', Operator.Word),
],
diff --git a/pygments/lexers/nimrod.py b/pygments/lexers/nimrod.py
index e1bbcc03..d438c1bf 100644
--- a/pygments/lexers/nimrod.py
+++ b/pygments/lexers/nimrod.py
@@ -5,7 +5,7 @@
Lexer for the Nim language (formerly known as Nimrod).
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/nit.py b/pygments/lexers/nit.py
index ab59c4e5..21116499 100644
--- a/pygments/lexers/nit.py
+++ b/pygments/lexers/nit.py
@@ -5,7 +5,7 @@
Lexer for the Nit language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/nix.py b/pygments/lexers/nix.py
index 57f08623..e148c919 100644
--- a/pygments/lexers/nix.py
+++ b/pygments/lexers/nix.py
@@ -5,7 +5,7 @@
Lexers for the NixOS Nix language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/oberon.py b/pygments/lexers/oberon.py
index 51dfdab6..3b5fb3e4 100644
--- a/pygments/lexers/oberon.py
+++ b/pygments/lexers/oberon.py
@@ -5,7 +5,7 @@
Lexers for Oberon family languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/objective.py b/pygments/lexers/objective.py
index ba1e0bae..179928e9 100644
--- a/pygments/lexers/objective.py
+++ b/pygments/lexers/objective.py
@@ -5,7 +5,7 @@
Lexers for Objective-C family languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -87,26 +87,26 @@ def objective(baselexer):
],
'oc_classname': [
# interface definition that inherits
- ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)',
+ (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)',
bygroups(Name.Class, Text, Name.Class, Text, Punctuation),
('#pop', 'oc_ivars')),
- ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
+ (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
bygroups(Name.Class, Text, Name.Class), '#pop'),
# interface definition for a category
- ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)',
+ (r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)',
bygroups(Name.Class, Text, Name.Label, Text, Punctuation),
('#pop', 'oc_ivars')),
- ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))',
+ (r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))',
bygroups(Name.Class, Text, Name.Label), '#pop'),
# simple interface / implementation
- ('([a-zA-Z$_][\w$]*)(\s*)(\{)',
+ (r'([a-zA-Z$_][\w$]*)(\s*)(\{)',
bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')),
- ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
+ (r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
],
'oc_forward_classname': [
- ('([a-zA-Z$_][\w$]*)(\s*,\s*)',
+ (r'([a-zA-Z$_][\w$]*)(\s*,\s*)',
bygroups(Name.Class, Text), 'oc_forward_classname'),
- ('([a-zA-Z$_][\w$]*)(\s*;?)',
+ (r'([a-zA-Z$_][\w$]*)(\s*;?)',
bygroups(Name.Class, Text), '#pop')
],
'oc_ivars': [
@@ -244,17 +244,17 @@ class LogosLexer(ObjectiveCppLexer):
inherit,
],
'logos_init_directive': [
- ('\s+', Text),
+ (r'\s+', Text),
(',', Punctuation, ('logos_init_directive', '#pop')),
- ('([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)',
+ (r'([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)',
bygroups(Name.Class, Text, Punctuation, Text, Text)),
- ('([a-zA-Z$_][\w$]*)', Name.Class),
- ('\)', Punctuation, '#pop'),
+ (r'([a-zA-Z$_][\w$]*)', Name.Class),
+ (r'\)', Punctuation, '#pop'),
],
'logos_classname': [
- ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
+ (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
bygroups(Name.Class, Text, Name.Class), '#pop'),
- ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
+ (r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
],
'root': [
(r'(%subclass)(\s+)', bygroups(Keyword, Text),
diff --git a/pygments/lexers/ooc.py b/pygments/lexers/ooc.py
index b4e8c6db..957b72f1 100644
--- a/pygments/lexers/ooc.py
+++ b/pygments/lexers/ooc.py
@@ -5,7 +5,7 @@
Lexers for the Ooc language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
index dd45083c..bfce4c3c 100644
--- a/pygments/lexers/other.py
+++ b/pygments/lexers/other.py
@@ -5,7 +5,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/parasail.py b/pygments/lexers/parasail.py
index 812e2923..53088023 100644
--- a/pygments/lexers/parasail.py
+++ b/pygments/lexers/parasail.py
@@ -5,7 +5,7 @@
Lexer for ParaSail.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/parsers.py b/pygments/lexers/parsers.py
index e1b74dee..43eb6c1f 100644
--- a/pygments/lexers/parsers.py
+++ b/pygments/lexers/parsers.py
@@ -5,7 +5,7 @@
Lexers for parser generators.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -364,13 +364,13 @@ class AntlrLexer(RegexLexer):
# tokensSpec
(r'tokens\b', Keyword, 'tokens'),
# attrScope
- (r'(scope)(\s*)(' + _id + ')(\s*)(\{)',
+ (r'(scope)(\s*)(' + _id + r')(\s*)(\{)',
bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
Punctuation), 'action'),
# exception
(r'(catch|finally)\b', Keyword, 'exception'),
# action
- (r'(@' + _id + ')(\s*)(::)?(\s*)(' + _id + ')(\s*)(\{)',
+ (r'(@' + _id + r')(\s*)(::)?(\s*)(' + _id + r')(\s*)(\{)',
bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
Name.Label, Whitespace, Punctuation), 'action'),
# rule
@@ -405,10 +405,10 @@ class AntlrLexer(RegexLexer):
# L173 ANTLRv3.g from ANTLR book
(r'(scope)(\s+)(\{)', bygroups(Keyword, Whitespace, Punctuation),
'action'),
- (r'(scope)(\s+)(' + _id + ')(\s*)(;)',
+ (r'(scope)(\s+)(' + _id + r')(\s*)(;)',
bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)),
# ruleAction
- (r'(@' + _id + ')(\s*)(\{)',
+ (r'(@' + _id + r')(\s*)(\{)',
bygroups(Name.Label, Whitespace, Punctuation), 'action'),
# finished prelims, go to rule alts!
(r':', Punctuation, '#pop')
@@ -442,7 +442,7 @@ class AntlrLexer(RegexLexer):
include('comments'),
(r'\{', Punctuation),
(r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL
- + ')?(\s*)(;)',
+ + r')?(\s*)(;)',
bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
String, Whitespace, Punctuation)),
(r'\}', Punctuation, '#pop'),
@@ -452,7 +452,7 @@ class AntlrLexer(RegexLexer):
include('comments'),
(r'\{', Punctuation),
(r'(' + _id + r')(\s*)(=)(\s*)(' +
- '|'.join((_id, _STRING_LITERAL, _INT, '\*')) + ')(\s*)(;)',
+ '|'.join((_id, _STRING_LITERAL, _INT, r'\*')) + r')(\s*)(;)',
bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
Text, Whitespace, Punctuation)),
(r'\}', Punctuation, '#pop'),
diff --git a/pygments/lexers/pascal.py b/pygments/lexers/pascal.py
index ce991a77..467a0b2c 100644
--- a/pygments/lexers/pascal.py
+++ b/pygments/lexers/pascal.py
@@ -5,7 +5,7 @@
Lexers for Pascal family languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -44,7 +44,7 @@ class DelphiLexer(Lexer):
"""
name = 'Delphi'
aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
- filenames = ['*.pas']
+ filenames = ['*.pas', '*.dpr']
mimetypes = ['text/x-pascal']
TURBO_PASCAL_KEYWORDS = (
@@ -593,8 +593,8 @@ class AdaLexer(RegexLexer):
],
'end': [
('(if|case|record|loop|select)', Keyword.Reserved),
- ('"[^"]+"|[\w.]+', Name.Function),
- ('\s+', Text),
+ (r'"[^"]+"|[\w.]+', Name.Function),
+ (r'\s+', Text),
(';', Punctuation, '#pop'),
],
'type_def': [
@@ -628,11 +628,11 @@ class AdaLexer(RegexLexer):
],
'package': [
('body', Keyword.Declaration),
- ('is\s+new|renames', Keyword.Reserved),
+ (r'is\s+new|renames', Keyword.Reserved),
('is', Keyword.Reserved, '#pop'),
(';', Punctuation, '#pop'),
- ('\(', Punctuation, 'package_instantiation'),
- ('([\w.]+)', Name.Class),
+ (r'\(', Punctuation, 'package_instantiation'),
+ (r'([\w.]+)', Name.Class),
include('root'),
],
'package_instantiation': [
diff --git a/pygments/lexers/pawn.py b/pygments/lexers/pawn.py
index f32fdbed..0ef28175 100644
--- a/pygments/lexers/pawn.py
+++ b/pygments/lexers/pawn.py
@@ -5,7 +5,7 @@
Lexers for the Pawn languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -36,7 +36,7 @@ class SourcePawnLexer(RegexLexer):
tokens = {
'root': [
# preprocessor directives: without whitespace
- ('^#if\s+0', Comment.Preproc, 'if0'),
+ (r'^#if\s+0', Comment.Preproc, 'if0'),
('^#', Comment.Preproc, 'macro'),
# or with whitespace
('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
@@ -62,7 +62,7 @@ class SourcePawnLexer(RegexLexer):
r'public|return|sizeof|static|decl|struct|switch)\b', Keyword),
(r'(bool|Float)\b', Keyword.Type),
(r'(true|false)\b', Keyword.Constant),
- ('[a-zA-Z_]\w*', Name),
+ (r'[a-zA-Z_]\w*', Name),
],
'string': [
(r'"', String, '#pop'),
@@ -148,7 +148,7 @@ class PawnLexer(RegexLexer):
tokens = {
'root': [
# preprocessor directives: without whitespace
- ('^#if\s+0', Comment.Preproc, 'if0'),
+ (r'^#if\s+0', Comment.Preproc, 'if0'),
('^#', Comment.Preproc, 'macro'),
# or with whitespace
('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
@@ -174,7 +174,7 @@ class PawnLexer(RegexLexer):
r'public|return|sizeof|tagof|state|goto)\b', Keyword),
(r'(bool|Float)\b', Keyword.Type),
(r'(true|false)\b', Keyword.Constant),
- ('[a-zA-Z_]\w*', Name),
+ (r'[a-zA-Z_]\w*', Name),
],
'string': [
(r'"', String, '#pop'),
diff --git a/pygments/lexers/perl.py b/pygments/lexers/perl.py
index 8df3c810..27e3cc79 100644
--- a/pygments/lexers/perl.py
+++ b/pygments/lexers/perl.py
@@ -5,7 +5,7 @@
Lexers for Perl and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -52,7 +52,7 @@ class PerlLexer(RegexLexer):
(words((
'case', 'continue', 'do', 'else', 'elsif', 'for', 'foreach',
'if', 'last', 'my', 'next', 'our', 'redo', 'reset', 'then',
- 'unless', 'until', 'while', 'use', 'print', 'new', 'BEGIN',
+ 'unless', 'until', 'while', 'print', 'new', 'BEGIN',
'CHECK', 'INIT', 'END', 'return'), suffix=r'\b'),
Keyword),
(r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)',
@@ -94,10 +94,10 @@ class PerlLexer(RegexLexer):
'getservbyport', 'getservent', 'getsockname', 'getsockopt', 'glob', 'gmtime',
'goto', 'grep', 'hex', 'import', 'index', 'int', 'ioctl', 'join', 'keys', 'kill', 'last',
'lc', 'lcfirst', 'length', 'link', 'listen', 'local', 'localtime', 'log', 'lstat',
- 'map', 'mkdir', 'msgctl', 'msgget', 'msgrcv', 'msgsnd', 'my', 'next', 'no', 'oct', 'open',
- 'opendir', 'ord', 'our', 'pack', 'package', 'pipe', 'pop', 'pos', 'printf',
+ 'map', 'mkdir', 'msgctl', 'msgget', 'msgrcv', 'msgsnd', 'my', 'next', 'oct', 'open',
+ 'opendir', 'ord', 'our', 'pack', 'pipe', 'pop', 'pos', 'printf',
'prototype', 'push', 'quotemeta', 'rand', 'read', 'readdir',
- 'readline', 'readlink', 'readpipe', 'recv', 'redo', 'ref', 'rename', 'require',
+ 'readline', 'readlink', 'readpipe', 'recv', 'redo', 'ref', 'rename',
'reverse', 'rewinddir', 'rindex', 'rmdir', 'scalar', 'seek', 'seekdir',
'select', 'semctl', 'semget', 'semop', 'send', 'setgrent', 'sethostent', 'setnetent',
'setpgrp', 'setpriority', 'setprotoent', 'setpwent', 'setservent',
@@ -131,8 +131,14 @@ class PerlLexer(RegexLexer):
(r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
(r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
(r'(q|qq|qw|qr|qx)([\W_])(.|\n)*?\2', String.Other),
- (r'package\s+', Keyword, 'modulename'),
- (r'sub\s+', Keyword, 'funcname'),
+ (r'(package)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
+ bygroups(Keyword, Text, Name.Namespace)),
+ (r'(use|require|no)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
+ bygroups(Keyword, Text, Name.Namespace)),
+ (r'(sub)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (words((
+ 'no', 'package', 'require', 'use'), suffix=r'\b'),
+ Keyword),
(r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
r'!~|&&?|\|\||\.{1,3})', Operator),
(r'[-+/*%=<>&^|!\\~]=?', Operator),
@@ -152,14 +158,12 @@ class PerlLexer(RegexLexer):
(r'[\w:]+', Name.Variable, '#pop'),
],
'name': [
- (r'\w+::', Name.Namespace),
+ (r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*(::)?(?=\s*->)', Name.Namespace, '#pop'),
+ (r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*::', Name.Namespace, '#pop'),
(r'[\w:]+', Name, '#pop'),
(r'[A-Z_]+(?=\W)', Name.Constant, '#pop'),
(r'(?=\W)', Text, '#pop'),
],
- 'modulename': [
- (r'[a-zA-Z_]\w*', Name.Namespace, '#pop')
- ],
'funcname': [
(r'[a-zA-Z_]\w*[!?]?', Name.Function),
(r'\s+', Text),
@@ -204,7 +208,7 @@ class PerlLexer(RegexLexer):
def analyse_text(text):
if shebang_matches(text, r'perl'):
return True
- if re.search('(?:my|our)\s+[$@%(]', text):
+ if re.search(r'(?:my|our)\s+[$@%(]', text):
return 0.9
@@ -222,7 +226,7 @@ class Perl6Lexer(ExtendedRegexLexer):
mimetypes = ['text/x-perl6', 'application/x-perl6']
flags = re.MULTILINE | re.DOTALL | re.UNICODE
- PERL6_IDENTIFIER_RANGE = "['\w:-]"
+ PERL6_IDENTIFIER_RANGE = r"['\w:-]"
PERL6_KEYWORDS = (
'BEGIN', 'CATCH', 'CHECK', 'CONTROL', 'END', 'ENTER', 'FIRST', 'INIT',
@@ -485,13 +489,13 @@ class Perl6Lexer(ExtendedRegexLexer):
'common': [
(r'#[`|=](?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + r'])(?P=first_char)*)',
brackets_callback(Comment.Multiline)),
- (r'#[^\n]*$', Comment.Singleline),
+ (r'#[^\n]*$', Comment.Single),
(r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline),
(r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline),
(r'^=.*?\n\s*?\n', Comment.Multiline),
(r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)',
bygroups(Keyword, Name), 'token-sym-brackets'),
- (r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + ')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?',
+ (r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + r')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?',
bygroups(Keyword, Name), 'pre-token'),
# deal with a special case in the Perl 6 grammar (role q { ... })
(r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Text, Name, Text)),
@@ -554,7 +558,7 @@ class Perl6Lexer(ExtendedRegexLexer):
# make sure that '#' characters in quotes aren't treated as comments
(r"(?<!\\)'(\\\\|\\[^\\]|[^'\\])*'", String.Regex),
(r'(?<!\\)"(\\\\|\\[^\\]|[^"\\])*"', String.Regex),
- (r'#.*?$', Comment.Singleline),
+ (r'#.*?$', Comment.Single),
(r'\{', embedded_perl6_callback),
('.+?', String.Regex),
],
@@ -587,21 +591,21 @@ class Perl6Lexer(ExtendedRegexLexer):
rating = False
# check for my/our/has declarations
- if re.search("(?:my|our|has)\s+(?:" + Perl6Lexer.PERL6_IDENTIFIER_RANGE +
- "+\s+)?[$@%&(]", text):
+ if re.search(r"(?:my|our|has)\s+(?:" + Perl6Lexer.PERL6_IDENTIFIER_RANGE +
+ r"+\s+)?[$@%&(]", text):
rating = 0.8
saw_perl_decl = True
for line in lines:
line = re.sub('#.*', '', line)
- if re.match('^\s*$', line):
+ if re.match(r'^\s*$', line):
continue
# match v6; use v6; use v6.0; use v6.0.0;
- if re.match('^\s*(?:use\s+)?v6(?:\.\d(?:\.\d)?)?;', line):
+ if re.match(r'^\s*(?:use\s+)?v6(?:\.\d(?:\.\d)?)?;', line):
return True
# match class, module, role, enum, grammar declarations
- class_decl = re.match('^\s*(?:(?P<scope>my|our)\s+)?(?:module|class|role|enum|grammar)', line)
+ class_decl = re.match(r'^\s*(?:(?P<scope>my|our)\s+)?(?:module|class|role|enum|grammar)', line)
if class_decl:
if saw_perl_decl or class_decl.group('scope') is not None:
return True
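A minimal sketch of the new package/use/require handling in PerlLexer; the module names are illustrative, and with the rules added above the module path after these keywords should come back as Name.Namespace:

    from pygments.lexers.perl import PerlLexer

    lexer = PerlLexer()
    code = "use POSIX;\npackage Foo::Bar;\nsub hello { print \"hi\\n\"; }\n"
    for _, token, value in lexer.get_tokens_unprocessed(code):
        print(token, repr(value))
    # expected highlights: 'use' / 'package' as Keyword,
    # 'POSIX' / 'Foo::Bar' as Name.Namespace, 'hello' as Name.Function.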
diff --git a/pygments/lexers/php.py b/pygments/lexers/php.py
index 1931325a..440d9d81 100644
--- a/pygments/lexers/php.py
+++ b/pygments/lexers/php.py
@@ -5,7 +5,7 @@
Lexers for PHP and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -15,7 +15,8 @@ from pygments.lexer import RegexLexer, include, bygroups, default, using, \
this, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Other
-from pygments.util import get_bool_opt, get_list_opt, iteritems
+from pygments.util import get_bool_opt, get_list_opt, iteritems, \
+ shebang_matches
__all__ = ['ZephirLexer', 'PhpLexer']
@@ -173,7 +174,7 @@ class PhpLexer(RegexLexer):
r'finally)\b', Keyword),
(r'(true|false|null)\b', Keyword.Constant),
include('magicconstants'),
- (r'\$\{\$+' + _ident_inner + '\}', Name.Variable),
+ (r'\$\{\$+' + _ident_inner + r'\}', Name.Variable),
(r'\$+' + _ident_inner, Name.Variable),
(_ident_inner, Name.Other),
(r'(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?', Number.Float),
@@ -214,7 +215,7 @@ class PhpLexer(RegexLexer):
(r'"', String.Double, '#pop'),
(r'[^{$"\\]+', String.Double),
(r'\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})', String.Escape),
- (r'\$' + _ident_inner + '(\[\S+?\]|->' + _ident_inner + ')?',
+ (r'\$' + _ident_inner + r'(\[\S+?\]|->' + _ident_inner + ')?',
String.Interpol),
(r'(\{\$\{)(.*?)(\}\})',
bygroups(String.Interpol, using(this, _startinline=True),
@@ -261,6 +262,8 @@ class PhpLexer(RegexLexer):
yield index, token, value
def analyse_text(text):
+ if shebang_matches(text, r'php'):
+ return True
rv = 0.0
if re.search(r'<\?(?!xml)', text):
rv += 0.3
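A quick sketch of the effect of the new shebang check in PhpLexer.analyse_text; the script content is illustrative. Pygments wraps analyse_text so a True return should surface as the maximum score:

    from pygments.lexers.php import PhpLexer

    script = '#!/usr/bin/env php\n<?php echo "hello"; ?>\n'
    print(PhpLexer.analyse_text(script))  # expected to print 1.0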
diff --git a/pygments/lexers/praat.py b/pygments/lexers/praat.py
index 6679d3db..1a38a9e8 100644
--- a/pygments/lexers/praat.py
+++ b/pygments/lexers/praat.py
@@ -5,7 +5,7 @@
Lexer for Praat
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/prolog.py b/pygments/lexers/prolog.py
index 7d32d7f6..58e762b0 100644
--- a/pygments/lexers/prolog.py
+++ b/pygments/lexers/prolog.py
@@ -5,7 +5,7 @@
Lexers for Prolog and Prolog-like languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -57,15 +57,15 @@ class PrologLexer(RegexLexer):
(r'_', Keyword), # The don't-care variable
(r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
(u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
+ u'[\\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
u'(\\s*)(:-|-->)',
bygroups(Name.Function, Text, Operator)), # function defn
(u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
+ u'[\\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
u'(\\s*)(\\()',
bygroups(Name.Function, Text, Punctuation)),
(u'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
+ u'[\\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
String.Atom), # atom, characters
# This one includes !
(u'[#&*+\\-./:<=>?@\\\\^~\u00a1-\u00bf\u2010-\u303f]+',
@@ -300,7 +300,7 @@ class LogtalkLexer(RegexLexer):
return 1.0
elif ':- category(' in text:
return 1.0
- elif re.search('^:-\s[a-z]', text, re.M):
+ elif re.search(r'^:-\s[a-z]', text, re.M):
return 0.9
else:
return 0.0
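Most of the hunks in this file (and many below) only turn regex literals into raw strings. A small standard-library sketch of why the two spellings match the same text, while the raw form avoids the invalid-escape DeprecationWarning newer Pythons emit for sequences like '\s' in ordinary string literals; the Prolog directive is a made-up sample.

    import re

    sample = ':- dynamic foo/1.'
    print(bool(re.search(r'^:-\s[a-z]', sample, re.M)),
          bool(re.search('^:-\\s[a-z]', sample, re.M)))   # True True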
diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py
index 35635ed1..c87282ca 100644
--- a/pygments/lexers/python.py
+++ b/pygments/lexers/python.py
@@ -5,7 +5,7 @@
Lexers for Python and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -124,10 +124,10 @@ class PythonLexer(RegexLexer):
'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
- 'MemoryError', 'NameError', 'NotImplemented', 'NotImplementedError',
+ 'MemoryError', 'ModuleNotFoundError', 'NameError', 'NotImplemented', 'NotImplementedError',
'OSError', 'OverflowError', 'OverflowWarning', 'PendingDeprecationWarning',
- 'ReferenceError', 'RuntimeError', 'RuntimeWarning', 'StandardError',
- 'StopIteration', 'SyntaxError', 'SyntaxWarning', 'SystemError',
+ 'RecursionError', 'ReferenceError', 'RuntimeError', 'RuntimeWarning', 'StandardError',
+ 'StopIteration', 'StopAsyncIteration', 'SyntaxError', 'SyntaxWarning', 'SystemError',
'SystemExit', 'TabError', 'TypeError', 'UnboundLocalError',
'UnicodeDecodeError', 'UnicodeEncodeError', 'UnicodeError',
'UnicodeTranslateError', 'UnicodeWarning', 'UserWarning',
@@ -180,15 +180,15 @@ class PythonLexer(RegexLexer):
],
'name': [
(r'@[\w.]+', Name.Decorator),
- ('[a-zA-Z_]\w*', Name),
+ (r'[a-zA-Z_]\w*', Name),
],
'funcname': [
include('magicfuncs'),
- ('[a-zA-Z_]\w*', Name.Function, '#pop'),
+ (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
default('#pop'),
],
'classname': [
- ('[a-zA-Z_]\w*', Name.Class, '#pop')
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'import': [
(r'(?:[ \t]|\\\n)+', Text),
@@ -262,13 +262,13 @@ class Python3Lexer(RegexLexer):
return [
# the old style '%s' % (...) string formatting (still valid in Py3)
(r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[E-GXc-giorsux%]', String.Interpol),
+ '[hlL]?[E-GXc-giorsaux%]', String.Interpol),
# the new style '{}'.format(...) string formatting
(r'\{'
- '((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name
- '(\![sra])?' # conversion
- '(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
- '\}', String.Interpol),
+ r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name
+ r'(\![sra])?' # conversion
+ r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
+ r'\}', String.Interpol),
# backslashes, quotes and formatting signs must be parsed one at a time
(r'[^\\\'"%{\n]+', ttype),
@@ -361,11 +361,12 @@ class Python3Lexer(RegexLexer):
Name.Variable.Magic),
]
tokens['numbers'] = [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[bB][01]+', Number.Bin),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+', Number.Integer)
+ (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)([eE][+-]?\d(?:_?\d)*)?', Number.Float),
+ (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float),
+ (r'0[oO](?:_?[0-7])+', Number.Oct),
+ (r'0[bB](?:_?[01])+', Number.Bin),
+ (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex),
+ (r'\d(?:_?\d)*', Number.Integer)
]
tokens['backtick'] = []
tokens['name'] = [
@@ -395,6 +396,7 @@ class Python3Lexer(RegexLexer):
tokens['strings-single'] = innerstring_rules(String.Single)
tokens['strings-double'] = innerstring_rules(String.Double)
+
def analyse_text(text):
return shebang_matches(text, r'pythonw?3(\.\d)?')
@@ -670,10 +672,10 @@ class CythonLexer(RegexLexer):
],
'name': [
(r'@\w+', Name.Decorator),
- ('[a-zA-Z_]\w*', Name),
+ (r'[a-zA-Z_]\w*', Name),
],
'funcname': [
- ('[a-zA-Z_]\w*', Name.Function, '#pop')
+ (r'[a-zA-Z_]\w*', Name.Function, '#pop')
],
'cdef': [
(r'(public|readonly|extern|api|inline)\b', Keyword.Reserved),
@@ -690,7 +692,7 @@ class CythonLexer(RegexLexer):
(r'.', Text),
],
'classname': [
- ('[a-zA-Z_]\w*', Name.Class, '#pop')
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'import': [
(r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
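A quick check, copying the regexes from the numbers hunk above, that the reworked Python3Lexer literals accept PEP 515 underscore separators but reject doubled ones:

    import re

    float_re = re.compile(r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)'
                          r'([eE][+-]?\d(?:_?\d)*)?')
    int_re = re.compile(r'\d(?:_?\d)*')
    print(bool(float_re.fullmatch('1_000.000_1')))   # True
    print(bool(int_re.fullmatch('10_000_000')))      # True
    print(bool(int_re.fullmatch('1__0')))            # False: underscores only between digits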
diff --git a/pygments/lexers/qvt.py b/pygments/lexers/qvt.py
index f30e4887..9b2559b1 100644
--- a/pygments/lexers/qvt.py
+++ b/pygments/lexers/qvt.py
@@ -5,7 +5,7 @@
Lexer for QVT Operational language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,7 +18,7 @@ __all__ = ['QVToLexer']
class QVToLexer(RegexLexer):
- """
+ u"""
For the `QVT Operational Mapping language <http://www.omg.org/spec/QVT/1.1/>`_.
Reference for implementing this: «Meta Object Facility (MOF) 2.0
@@ -126,7 +126,7 @@ class QVToLexer(RegexLexer):
(r'[^\\\'"\n]+', String),
# quotes, percents and backslashes must be parsed one at a time
(r'[\'"\\]', String),
- ],
+ ],
'stringescape': [
(r'\\([\\btnfr"\']|u[0-3][0-7]{2}|u[0-7]{1,2})', String.Escape)
],
@@ -134,15 +134,15 @@ class QVToLexer(RegexLexer):
(r'"', String, '#pop'),
(r'\\\\|\\"', String.Escape),
include('strings')
- ],
+ ],
'sqs': [ # single-quoted string
(r"'", String, '#pop'),
(r"\\\\|\\'", String.Escape),
include('strings')
- ],
+ ],
'name': [
- ('[a-zA-Z_]\w*', Name),
- ],
+ (r'[a-zA-Z_]\w*', Name),
+ ],
# numbers: excerpt taken from the python lexer
'numbers': [
(r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
diff --git a/pygments/lexers/r.py b/pygments/lexers/r.py
index 1a47ca26..66d6402c 100644
--- a/pygments/lexers/r.py
+++ b/pygments/lexers/r.py
@@ -5,13 +5,13 @@
Lexers for the R/S languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import Lexer, RegexLexer, include, words, do_insertions
+from pygments.lexer import Lexer, RegexLexer, include, do_insertions, bygroups
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic
@@ -80,286 +80,25 @@ class SLexer(RegexLexer):
mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
'text/x-R', 'text/x-r-history', 'text/x-r-profile']
- builtins_base = (
- 'Arg', 'Conj', 'Cstack_info', 'Encoding', 'FALSE',
- 'Filter', 'Find', 'I', 'ISOdate', 'ISOdatetime', 'Im', 'Inf',
- 'La.svd', 'Map', 'Math.Date', 'Math.POSIXt', 'Math.data.frame',
- 'Math.difftime', 'Math.factor', 'Mod', 'NA_character_',
- 'NA_complex_', 'NA_real_', 'NCOL', 'NROW', 'NULLNA_integer_', 'NaN',
- 'Negate', 'NextMethod', 'Ops.Date', 'Ops.POSIXt', 'Ops.data.frame',
- 'Ops.difftime', 'Ops.factor', 'Ops.numeric_version', 'Ops.ordered',
- 'Position', 'R.Version', 'R.home', 'R.version', 'R.version.string',
- 'RNGkind', 'RNGversion', 'R_system_version', 'Re', 'Recall',
- 'Reduce', 'Summary.Date', 'Summary.POSIXct', 'Summary.POSIXlt',
- 'Summary.data.frame', 'Summary.difftime', 'Summary.factor',
- 'Summary.numeric_version', 'Summary.ordered', 'Sys.Date',
- 'Sys.chmod', 'Sys.getenv', 'Sys.getlocale', 'Sys.getpid',
- 'Sys.glob', 'Sys.info', 'Sys.localeconv', 'Sys.readlink',
- 'Sys.setFileTime', 'Sys.setenv', 'Sys.setlocale', 'Sys.sleep',
- 'Sys.time', 'Sys.timezone', 'Sys.umask', 'Sys.unsetenv',
- 'Sys.which', 'TRUE', 'UseMethod', 'Vectorize', 'abbreviate', 'abs',
- 'acos', 'acosh', 'addNA', 'addTaskCallback', 'agrep', 'alist',
- 'all', 'all.equal', 'all.equal.POSIXct', 'all.equal.character',
- 'all.equal.default', 'all.equal.factor', 'all.equal.formula',
- 'all.equal.language', 'all.equal.list', 'all.equal.numeric',
- 'all.equal.raw', 'all.names', 'all.vars', 'any', 'anyDuplicated',
- 'anyDuplicated.array', 'anyDuplicated.data.frame',
- 'anyDuplicated.default', 'anyDuplicated.matrix', 'aperm',
- 'aperm.default', 'aperm.table', 'append', 'apply', 'args',
- 'arrayInd', 'as.Date', 'as.Date.POSIXct', 'as.Date.POSIXlt',
- 'as.Date.character', 'as.Date.date', 'as.Date.dates',
- 'as.Date.default', 'as.Date.factor', 'as.Date.numeric',
- 'as.POSIXct', 'as.POSIXct.Date', 'as.POSIXct.POSIXlt',
- 'as.POSIXct.date', 'as.POSIXct.dates', 'as.POSIXct.default',
- 'as.POSIXct.numeric', 'as.POSIXlt', 'as.POSIXlt.Date',
- 'as.POSIXlt.POSIXct', 'as.POSIXlt.character', 'as.POSIXlt.date',
- 'as.POSIXlt.dates', 'as.POSIXlt.default', 'as.POSIXlt.factor',
- 'as.POSIXlt.numeric', 'as.array', 'as.array.default', 'as.call',
- 'as.character', 'as.character.Date', 'as.character.POSIXt',
- 'as.character.condition', 'as.character.default',
- 'as.character.error', 'as.character.factor', 'as.character.hexmode',
- 'as.character.numeric_version', 'as.character.octmode',
- 'as.character.srcref', 'as.complex', 'as.data.frame',
- 'as.data.frame.AsIs', 'as.data.frame.Date', 'as.data.frame.POSIXct',
- 'as.data.frame.POSIXlt', 'as.data.frame.array',
- 'as.data.frame.character', 'as.data.frame.complex',
- 'as.data.frame.data.frame', 'as.data.frame.default',
- 'as.data.frame.difftime', 'as.data.frame.factor',
- 'as.data.frame.integer', 'as.data.frame.list',
- 'as.data.frame.logical', 'as.data.frame.matrix',
- 'as.data.frame.model.matrix', 'as.data.frame.numeric',
- 'as.data.frame.numeric_version', 'as.data.frame.ordered',
- 'as.data.frame.raw', 'as.data.frame.table', 'as.data.frame.ts',
- 'as.data.frame.vector', 'as.difftime', 'as.double',
- 'as.double.POSIXlt', 'as.double.difftime', 'as.environment',
- 'as.expression', 'as.expression.default', 'as.factor',
- 'as.function', 'as.function.default', 'as.hexmode', 'as.integer',
- 'as.list', 'as.list.Date', 'as.list.POSIXct', 'as.list.data.frame',
- 'as.list.default', 'as.list.environment', 'as.list.factor',
- 'as.list.function', 'as.list.numeric_version', 'as.logical',
- 'as.logical.factor', 'as.matrix', 'as.matrix.POSIXlt',
- 'as.matrix.data.frame', 'as.matrix.default', 'as.matrix.noquote',
- 'as.name', 'as.null', 'as.null.default', 'as.numeric',
- 'as.numeric_version', 'as.octmode', 'as.ordered',
- 'as.package_version', 'as.pairlist', 'as.qr', 'as.raw', 'as.single',
- 'as.single.default', 'as.symbol', 'as.table', 'as.table.default',
- 'as.vector', 'as.vector.factor', 'asNamespace', 'asS3', 'asS4',
- 'asin', 'asinh', 'assign', 'atan', 'atan2', 'atanh',
- 'attachNamespace', 'attr', 'attr.all.equal', 'attributes',
- 'autoload', 'autoloader', 'backsolve', 'baseenv', 'basename',
- 'besselI', 'besselJ', 'besselK', 'besselY', 'beta',
- 'bindingIsActive', 'bindingIsLocked', 'bindtextdomain', 'bitwAnd',
- 'bitwNot', 'bitwOr', 'bitwShiftL', 'bitwShiftR', 'bitwXor', 'body',
- 'bquote', 'browser', 'browserCondition', 'browserSetDebug',
- 'browserText', 'builtins', 'by', 'by.data.frame', 'by.default',
- 'bzfile', 'c.Date', 'c.POSIXct', 'c.POSIXlt', 'c.noquote',
- 'c.numeric_version', 'call', 'callCC', 'capabilities', 'casefold',
- 'cat', 'category', 'cbind', 'cbind.data.frame', 'ceiling',
- 'char.expand', 'charToRaw', 'charmatch', 'chartr', 'check_tzones',
- 'chol', 'chol.default', 'chol2inv', 'choose', 'class',
- 'clearPushBack', 'close', 'close.connection', 'close.srcfile',
- 'close.srcfilealias', 'closeAllConnections', 'col', 'colMeans',
- 'colSums', 'colnames', 'commandArgs', 'comment', 'computeRestarts',
- 'conditionCall', 'conditionCall.condition', 'conditionMessage',
- 'conditionMessage.condition', 'conflicts', 'contributors', 'cos',
- 'cosh', 'crossprod', 'cummax', 'cummin', 'cumprod', 'cumsum', 'cut',
- 'cut.Date', 'cut.POSIXt', 'cut.default', 'dQuote', 'data.class',
- 'data.matrix', 'date', 'debug', 'debugonce',
- 'default.stringsAsFactors', 'delayedAssign', 'deparse', 'det',
- 'determinant', 'determinant.matrix', 'dget', 'diag', 'diff',
- 'diff.Date', 'diff.POSIXt', 'diff.default', 'difftime', 'digamma',
- 'dim', 'dim.data.frame', 'dimnames', 'dimnames.data.frame', 'dir',
- 'dir.create', 'dirname', 'do.call', 'dput', 'drop', 'droplevels',
- 'droplevels.data.frame', 'droplevels.factor', 'dump', 'duplicated',
- 'duplicated.POSIXlt', 'duplicated.array', 'duplicated.data.frame',
- 'duplicated.default', 'duplicated.matrix',
- 'duplicated.numeric_version', 'dyn.load', 'dyn.unload', 'eapply',
- 'eigen', 'else', 'emptyenv', 'enc2native', 'enc2utf8',
- 'encodeString', 'enquote', 'env.profile', 'environment',
- 'environmentIsLocked', 'environmentName', 'eval', 'eval.parent',
- 'evalq', 'exists', 'exp', 'expand.grid', 'expm1', 'expression',
- 'factor', 'factorial', 'fifo', 'file', 'file.access', 'file.append',
- 'file.choose', 'file.copy', 'file.create', 'file.exists',
- 'file.info', 'file.link', 'file.path', 'file.remove', 'file.rename',
- 'file.show', 'file.symlink', 'find.package', 'findInterval',
- 'findPackageEnv', 'findRestart', 'floor', 'flush',
- 'flush.connection', 'force', 'formals', 'format',
- 'format.AsIs', 'format.Date', 'format.POSIXct', 'format.POSIXlt',
- 'format.data.frame', 'format.default', 'format.difftime',
- 'format.factor', 'format.hexmode', 'format.info',
- 'format.libraryIQR', 'format.numeric_version', 'format.octmode',
- 'format.packageInfo', 'format.pval', 'format.summaryDefault',
- 'formatC', 'formatDL', 'forwardsolve', 'gamma', 'gc', 'gc.time',
- 'gcinfo', 'gctorture', 'gctorture2', 'get', 'getAllConnections',
- 'getCallingDLL', 'getCallingDLLe', 'getConnection',
- 'getDLLRegisteredRoutines', 'getDLLRegisteredRoutines.DLLInfo',
- 'getDLLRegisteredRoutines.character', 'getElement',
- 'getExportedValue', 'getHook', 'getLoadedDLLs', 'getNamespace',
- 'getNamespaceExports', 'getNamespaceImports', 'getNamespaceInfo',
- 'getNamespaceName', 'getNamespaceUsers', 'getNamespaceVersion',
- 'getNativeSymbolInfo', 'getOption', 'getRversion', 'getSrcLines',
- 'getTaskCallbackNames', 'geterrmessage', 'gettext', 'gettextf',
- 'getwd', 'gl', 'globalenv', 'gregexpr', 'grep', 'grepRaw', 'grepl',
- 'gsub', 'gzcon', 'gzfile', 'head', 'iconv', 'iconvlist',
- 'icuSetCollate', 'identical', 'identity', 'ifelse', 'importIntoEnv',
- 'in', 'inherits', 'intToBits', 'intToUtf8', 'interaction', 'interactive',
- 'intersect', 'inverse.rle', 'invisible', 'invokeRestart',
- 'invokeRestartInteractively', 'is.R', 'is.array', 'is.atomic',
- 'is.call', 'is.character', 'is.complex', 'is.data.frame',
- 'is.double', 'is.element', 'is.environment', 'is.expression',
- 'is.factor', 'is.finite', 'is.function', 'is.infinite',
- 'is.integer', 'is.language', 'is.list', 'is.loaded', 'is.logical',
- 'is.matrix', 'is.na', 'is.na.POSIXlt', 'is.na.data.frame',
- 'is.na.numeric_version', 'is.name', 'is.nan', 'is.null',
- 'is.numeric', 'is.numeric.Date', 'is.numeric.POSIXt',
- 'is.numeric.difftime', 'is.numeric_version', 'is.object',
- 'is.ordered', 'is.package_version', 'is.pairlist', 'is.primitive',
- 'is.qr', 'is.raw', 'is.recursive', 'is.single', 'is.symbol',
- 'is.table', 'is.unsorted', 'is.vector', 'isBaseNamespace',
- 'isIncomplete', 'isNamespace', 'isOpen', 'isRestart', 'isS4',
- 'isSeekable', 'isSymmetric', 'isSymmetric.matrix', 'isTRUE',
- 'isatty', 'isdebugged', 'jitter', 'julian', 'julian.Date',
- 'julian.POSIXt', 'kappa', 'kappa.default', 'kappa.lm', 'kappa.qr',
- 'kronecker', 'l10n_info', 'labels', 'labels.default', 'lapply',
- 'lazyLoad', 'lazyLoadDBexec', 'lazyLoadDBfetch', 'lbeta', 'lchoose',
- 'length', 'length.POSIXlt', 'letters', 'levels', 'levels.default',
- 'lfactorial', 'lgamma', 'library.dynam', 'library.dynam.unload',
- 'licence', 'license', 'list.dirs', 'list.files', 'list2env', 'load',
- 'loadNamespace', 'loadedNamespaces', 'loadingNamespaceInfo',
- 'local', 'lockBinding', 'lockEnvironment', 'log', 'log10', 'log1p',
- 'log2', 'logb', 'lower.tri', 'ls', 'make.names', 'make.unique',
- 'makeActiveBinding', 'mapply', 'margin.table', 'mat.or.vec',
- 'match', 'match.arg', 'match.call', 'match.fun', 'max', 'max.col',
- 'mean', 'mean.Date', 'mean.POSIXct', 'mean.POSIXlt', 'mean.default',
- 'mean.difftime', 'mem.limits', 'memCompress', 'memDecompress',
- 'memory.profile', 'merge', 'merge.data.frame', 'merge.default',
- 'message', 'mget', 'min', 'missing', 'mode', 'month.abb',
- 'month.name', 'months', 'months.Date', 'months.POSIXt',
- 'months.abb', 'months.nameletters', 'names', 'names.POSIXlt',
- 'namespaceExport', 'namespaceImport', 'namespaceImportClasses',
- 'namespaceImportFrom', 'namespaceImportMethods', 'nargs', 'nchar',
- 'ncol', 'new.env', 'ngettext', 'nlevels', 'noquote', 'norm',
- 'normalizePath', 'nrow', 'numeric_version', 'nzchar', 'objects',
- 'oldClass', 'on.exit', 'open', 'open.connection', 'open.srcfile',
- 'open.srcfilealias', 'open.srcfilecopy', 'options', 'order',
- 'ordered', 'outer', 'packBits', 'packageEvent',
- 'packageHasNamespace', 'packageStartupMessage', 'package_version',
- 'pairlist', 'parent.env', 'parent.frame', 'parse',
- 'parseNamespaceFile', 'paste', 'paste0', 'path.expand',
- 'path.package', 'pipe', 'pmatch', 'pmax', 'pmax.int', 'pmin',
- 'pmin.int', 'polyroot', 'pos.to.env', 'pretty', 'pretty.default',
- 'prettyNum', 'print', 'print.AsIs', 'print.DLLInfo',
- 'print.DLLInfoList', 'print.DLLRegisteredRoutines', 'print.Date',
- 'print.NativeRoutineList', 'print.POSIXct', 'print.POSIXlt',
- 'print.by', 'print.condition', 'print.connection',
- 'print.data.frame', 'print.default', 'print.difftime',
- 'print.factor', 'print.function', 'print.hexmode',
- 'print.libraryIQR', 'print.listof', 'print.noquote',
- 'print.numeric_version', 'print.octmode', 'print.packageInfo',
- 'print.proc_time', 'print.restart', 'print.rle',
- 'print.simple.list', 'print.srcfile', 'print.srcref',
- 'print.summary.table', 'print.summaryDefault', 'print.table',
- 'print.warnings', 'prmatrix', 'proc.time', 'prod', 'prop.table',
- 'provideDimnames', 'psigamma', 'pushBack', 'pushBackLength', 'q',
- 'qr', 'qr.Q', 'qr.R', 'qr.X', 'qr.coef', 'qr.default', 'qr.fitted',
- 'qr.qty', 'qr.qy', 'qr.resid', 'qr.solve', 'quarters',
- 'quarters.Date', 'quarters.POSIXt', 'quit', 'quote', 'range',
- 'range.default', 'rank', 'rapply', 'raw', 'rawConnection',
- 'rawConnectionValue', 'rawShift', 'rawToBits', 'rawToChar', 'rbind',
- 'rbind.data.frame', 'rcond', 'read.dcf', 'readBin', 'readChar',
- 'readLines', 'readRDS', 'readRenviron', 'readline', 'reg.finalizer',
- 'regexec', 'regexpr', 'registerS3method', 'registerS3methods',
- 'regmatches', 'remove', 'removeTaskCallback', 'rep', 'rep.Date',
- 'rep.POSIXct', 'rep.POSIXlt', 'rep.factor', 'rep.int',
- 'rep.numeric_version', 'rep_len', 'replace', 'replicate',
- 'requireNamespace', 'restartDescription', 'restartFormals',
- 'retracemem', 'rev', 'rev.default', 'rle', 'rm', 'round',
- 'round.Date', 'round.POSIXt', 'row', 'row.names',
- 'row.names.data.frame', 'row.names.default', 'rowMeans', 'rowSums',
- 'rownames', 'rowsum', 'rowsum.data.frame', 'rowsum.default',
- 'sQuote', 'sample', 'sample.int', 'sapply', 'save', 'save.image',
- 'saveRDS', 'scale', 'scale.default', 'scan', 'search',
- 'searchpaths', 'seek', 'seek.connection', 'seq', 'seq.Date',
- 'seq.POSIXt', 'seq.default', 'seq.int', 'seq_along', 'seq_len',
- 'sequence', 'serialize', 'set.seed', 'setHook', 'setNamespaceInfo',
- 'setSessionTimeLimit', 'setTimeLimit', 'setdiff', 'setequal',
- 'setwd', 'shQuote', 'showConnections', 'sign', 'signalCondition',
- 'signif', 'simpleCondition', 'simpleError', 'simpleMessage',
- 'simpleWarning', 'simplify2array', 'sin', 'single',
- 'sinh', 'sink', 'sink.number', 'slice.index', 'socketConnection',
- 'socketSelect', 'solve', 'solve.default', 'solve.qr', 'sort',
- 'sort.POSIXlt', 'sort.default', 'sort.int', 'sort.list', 'split',
- 'split.Date', 'split.POSIXct', 'split.data.frame', 'split.default',
- 'sprintf', 'sqrt', 'srcfile', 'srcfilealias', 'srcfilecopy',
- 'srcref', 'standardGeneric', 'stderr', 'stdin', 'stdout', 'stop',
- 'stopifnot', 'storage.mode', 'strftime', 'strptime', 'strsplit',
- 'strtoi', 'strtrim', 'structure', 'strwrap', 'sub', 'subset',
- 'subset.data.frame', 'subset.default', 'subset.matrix',
- 'substitute', 'substr', 'substring', 'sum', 'summary',
- 'summary.Date', 'summary.POSIXct', 'summary.POSIXlt',
- 'summary.connection', 'summary.data.frame', 'summary.default',
- 'summary.factor', 'summary.matrix', 'summary.proc_time',
- 'summary.srcfile', 'summary.srcref', 'summary.table',
- 'suppressMessages', 'suppressPackageStartupMessages',
- 'suppressWarnings', 'svd', 'sweep', 'sys.call', 'sys.calls',
- 'sys.frame', 'sys.frames', 'sys.function', 'sys.load.image',
- 'sys.nframe', 'sys.on.exit', 'sys.parent', 'sys.parents',
- 'sys.save.image', 'sys.source', 'sys.status', 'system',
- 'system.file', 'system.time', 'system2', 't', 't.data.frame',
- 't.default', 'table', 'tabulate', 'tail', 'tan', 'tanh', 'tapply',
- 'taskCallbackManager', 'tcrossprod', 'tempdir', 'tempfile',
- 'testPlatformEquivalence', 'textConnection', 'textConnectionValue',
- 'toString', 'toString.default', 'tolower', 'topenv', 'toupper',
- 'trace', 'traceback', 'tracemem', 'tracingState', 'transform',
- 'transform.data.frame', 'transform.default', 'trigamma', 'trunc',
- 'trunc.Date', 'trunc.POSIXt', 'truncate', 'truncate.connection',
- 'try', 'tryCatch', 'typeof', 'unclass', 'undebug', 'union',
- 'unique', 'unique.POSIXlt', 'unique.array', 'unique.data.frame',
- 'unique.default', 'unique.matrix', 'unique.numeric_version',
- 'units', 'units.difftime', 'unix.time', 'unlink', 'unlist',
- 'unloadNamespace', 'unlockBinding', 'unname', 'unserialize',
- 'unsplit', 'untrace', 'untracemem', 'unz', 'upper.tri', 'url',
- 'utf8ToInt', 'vapply', 'version', 'warning', 'warnings', 'weekdays',
- 'weekdays.Date', 'weekdays.POSIXt', 'which', 'which.max',
- 'which.min', 'with', 'with.default', 'withCallingHandlers',
- 'withRestarts', 'withVisible', 'within', 'within.data.frame',
- 'within.list', 'write', 'write.dcf', 'writeBin', 'writeChar',
- 'writeLines', 'xor', 'xor.hexmode', 'xor.octmode',
- 'xpdrows.data.frame', 'xtfrm', 'xtfrm.AsIs', 'xtfrm.Date',
- 'xtfrm.POSIXct', 'xtfrm.POSIXlt', 'xtfrm.Surv', 'xtfrm.default',
- 'xtfrm.difftime', 'xtfrm.factor', 'xtfrm.numeric_version', 'xzfile',
- 'zapsmall'
- )
-
+ valid_name = r'(?:`[^`\\]*(?:\\.[^`\\]*)*`)|(?:(?:[a-zA-Z]|[_.][^0-9])[\w_.]*)'
tokens = {
'comments': [
(r'#.*$', Comment.Single),
],
'valid_name': [
- (r'[a-zA-Z][\w.]*', Text),
- # can begin with ., but not if that is followed by a digit
- (r'\.[a-zA-Z_][\w.]*', Text),
+ (valid_name, Name),
],
'punctuation': [
(r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
],
'keywords': [
- (words(builtins_base, suffix=r'(?![\w. =])'),
- Keyword.Pseudo),
(r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
r'(?![\w.])',
Keyword.Reserved),
- (r'(array|category|character|complex|double|function|integer|list|'
- r'logical|matrix|numeric|vector|data.frame|c)'
- r'(?![\w.])',
- Keyword.Type),
- (r'(library|require|attach|detach|source)'
- r'(?![\w.])',
- Keyword.Namespace)
],
'operators': [
(r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
- (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator)
+ (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator),
],
'builtin_symbols': [
(r'(NULL|NA(_(integer|real|complex|character)_)?|'
@@ -379,17 +118,18 @@ class SLexer(RegexLexer):
include('comments'),
# whitespaces
(r'\s+', Text),
- (r'`.*?`', String.Backtick),
(r'\'', String, 'string_squote'),
(r'\"', String, 'string_dquote'),
include('builtin_symbols'),
+ include('valid_name'),
include('numbers'),
include('keywords'),
include('punctuation'),
include('operators'),
- include('valid_name'),
],
'root': [
+ # calls:
+ (r'(%s)\s*(?=\()' % valid_name, Name.Function),
include('statements'),
# blocks:
(r'\{|\}', Punctuation),
@@ -421,7 +161,7 @@ class RdLexer(RegexLexer):
This is a very minimal implementation, highlighting little more
than the macros. A description of Rd syntax is found in `Writing R
Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
- and `Parsing Rd files <developer.r-project.org/parseRd.pdf>`_.
+ and `Parsing Rd files <http://developer.r-project.org/parseRd.pdf>`_.
.. versionadded:: 1.6
"""
diff --git a/pygments/lexers/rdf.py b/pygments/lexers/rdf.py
index 6dd6e8b9..27bbe154 100644
--- a/pygments/lexers/rdf.py
+++ b/pygments/lexers/rdf.py
@@ -5,7 +5,7 @@
Lexers for semantic web and RDF query languages and markup.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -97,7 +97,7 @@ class SparqlLexer(RegexLexer):
'root': [
(r'\s+', Text),
# keywords ::
- (r'((?i)select|construct|describe|ask|where|filter|group\s+by|minus|'
+ (r'(?i)(select|construct|describe|ask|where|filter|group\s+by|minus|'
r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|'
r'offset|bindings|load|clear|drop|create|add|move|copy|'
r'insert\s+data|delete\s+data|delete\s+where|delete|insert|'
@@ -111,10 +111,10 @@ class SparqlLexer(RegexLexer):
# # variables ::
('[?$]' + VARNAME, Name.Variable),
# prefixed names ::
- (r'(' + PN_PREFIX + ')?(\:)(' + PN_LOCAL + ')?',
+ (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
bygroups(Name.Namespace, Punctuation, Name.Tag)),
# function names ::
- (r'((?i)str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
+ (r'(?i)(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
r'hours|minutes|seconds|timezone|tz|now|md5|sha1|sha256|sha384|'
@@ -125,7 +125,7 @@ class SparqlLexer(RegexLexer):
# boolean literals ::
(r'(true|false)', Keyword.Constant),
# double literals ::
- (r'[+\-]?(\d+\.\d*' + EXPONENT + '|\.?\d+' + EXPONENT + ')', Number.Float),
+ (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
# decimal literals ::
(r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
# integer literals ::
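The two SPARQL hunks only move the (?i) flag to the front of each pattern. A short sketch of why that matters: CPython rejects a global inline flag that is not at the start of the expression (an error on 3.11+, a DeprecationWarning on 3.7-3.10):

    import re

    re.compile(r'(?i)(select|ask)\b')        # fine: flag at the very start
    try:
        re.compile(r'((?i)select|ask)\b')    # the old spelling
    except re.error as exc:                  # raised on Python 3.11 and later
        print('rejected:', exc)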
diff --git a/pygments/lexers/rebol.py b/pygments/lexers/rebol.py
index b844ad96..e58e01fa 100644
--- a/pygments/lexers/rebol.py
+++ b/pygments/lexers/rebol.py
@@ -5,7 +5,7 @@
Lexers for the REBOL and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -102,12 +102,12 @@ class RebolLexer(RegexLexer):
yield match.start(), Generic.Heading, word
elif re.match("to-.*", word):
yield match.start(), Keyword, word
- elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
+ elif re.match(r'(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
word):
yield match.start(), Operator, word
- elif re.match(".*\?$", word):
+ elif re.match(r".*\?$", word):
yield match.start(), Keyword, word
- elif re.match(".*\!$", word):
+ elif re.match(r".*\!$", word):
yield match.start(), Keyword.Type, word
elif re.match("'.*", word):
yield match.start(), Name.Variable.Instance, word # lit-word
@@ -239,7 +239,7 @@ class RebolLexer(RegexLexer):
if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
# The code starts with REBOL header
return 1.0
- elif re.search(r'\s*REBOL\s*[', text, re.IGNORECASE):
+ elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE):
# The code contains REBOL header but also some text before it
return 0.5
@@ -297,10 +297,10 @@ class RedLexer(RegexLexer):
yield match.start(), Keyword.Namespace, word
elif re.match("to-.*", word):
yield match.start(), Keyword, word
- elif re.match('(\+|-\*\*|-|\*\*|//|/|\*|and|or|xor|=\?|===|==|=|<>|<=|>=|'
- '<<<|>>>|<<|>>|<|>%)$', word):
+ elif re.match(r'(\+|-\*\*|-|\*\*|//|/|\*|and|or|xor|=\?|===|==|=|<>|<=|>=|'
+ r'<<<|>>>|<<|>>|<|>%)$', word):
yield match.start(), Operator, word
- elif re.match(".*\!$", word):
+ elif re.match(r".*\!$", word):
yield match.start(), Keyword.Type, word
elif re.match("'.*", word):
yield match.start(), Name.Variable.Instance, word # lit-word
diff --git a/pygments/lexers/resource.py b/pygments/lexers/resource.py
index 40429a3c..f7494904 100644
--- a/pygments/lexers/resource.py
+++ b/pygments/lexers/resource.py
@@ -5,7 +5,7 @@
Lexer for resource definition files.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/rnc.py b/pygments/lexers/rnc.py
index f60141e8..2f2aacdd 100644
--- a/pygments/lexers/rnc.py
+++ b/pygments/lexers/rnc.py
@@ -5,7 +5,7 @@
Lexer for Relax-NG Compact syntax
- :copyright: Copyright 2016 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/roboconf.py b/pygments/lexers/roboconf.py
index 59755a68..8c7df83d 100644
--- a/pygments/lexers/roboconf.py
+++ b/pygments/lexers/roboconf.py
@@ -5,7 +5,7 @@
Lexers for Roboconf DSL.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/robotframework.py b/pygments/lexers/robotframework.py
index eab06efe..5bacffa3 100644
--- a/pygments/lexers/robotframework.py
+++ b/pygments/lexers/robotframework.py
@@ -5,7 +5,7 @@
Lexer for Robot Framework.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -161,7 +161,7 @@ class RowTokenizer(object):
class RowSplitter(object):
_space_splitter = re.compile('( {2,})')
- _pipe_splitter = re.compile('((?:^| +)\|(?: +|$))')
+ _pipe_splitter = re.compile(r'((?:^| +)\|(?: +|$))')
def split(self, row):
splitter = (row.startswith('| ') and self._split_from_pipes
diff --git a/pygments/lexers/ruby.py b/pygments/lexers/ruby.py
index f16416d3..ce2fc7a7 100644
--- a/pygments/lexers/ruby.py
+++ b/pygments/lexers/ruby.py
@@ -5,7 +5,7 @@
Lexers for Ruby and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -403,8 +403,8 @@ class RubyConsoleLexer(Lexer):
aliases = ['rbcon', 'irb']
mimetypes = ['text/x-ruby-shellsession']
- _prompt_re = re.compile('irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
- '|>> |\?> ')
+ _prompt_re = re.compile(r'irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
+ r'|>> |\?> ')
def get_tokens_unprocessed(self, text):
rblexer = RubyLexer(**self.options)
@@ -498,11 +498,11 @@ class FancyLexer(RegexLexer):
(r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
# operators, must be below functions
(r'[-+*/~,<>=&!?%^\[\].$]+', Operator),
- ('[A-Z]\w*', Name.Constant),
- ('@[a-zA-Z_]\w*', Name.Variable.Instance),
- ('@@[a-zA-Z_]\w*', Name.Variable.Class),
+ (r'[A-Z]\w*', Name.Constant),
+ (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
+ (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
('@@?', Operator),
- ('[a-zA-Z_]\w*', Name),
+ (r'[a-zA-Z_]\w*', Name),
# numbers - / checks are necessary to avoid mismarking regexes,
# see comment in RubyLexer
(r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
diff --git a/pygments/lexers/rust.py b/pygments/lexers/rust.py
index d3d98ee8..10097fba 100644
--- a/pygments/lexers/rust.py
+++ b/pygments/lexers/rust.py
@@ -5,7 +5,7 @@
Lexers for the Rust language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -24,9 +24,38 @@ class RustLexer(RegexLexer):
"""
name = 'Rust'
filenames = ['*.rs', '*.rs.in']
- aliases = ['rust']
+ aliases = ['rust', 'rs']
mimetypes = ['text/rust']
+ keyword_types = (
+ words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64',
+ 'usize', 'isize', 'f32', 'f64', 'str', 'bool'),
+ suffix=r'\b'),
+ Keyword.Type)
+
+ builtin_types = (words((
+ # Reexported core operators
+ 'Copy', 'Send', 'Sized', 'Sync',
+ 'Drop', 'Fn', 'FnMut', 'FnOnce',
+
+ # Reexported types and traits
+ 'Box',
+ 'ToOwned',
+ 'Clone',
+ 'PartialEq', 'PartialOrd', 'Eq', 'Ord',
+ 'AsRef', 'AsMut', 'Into', 'From',
+ 'Default',
+ 'Iterator', 'Extend', 'IntoIterator',
+ 'DoubleEndedIterator', 'ExactSizeIterator',
+ 'Option',
+ 'Some', 'None',
+ 'Result',
+ 'Ok', 'Err',
+ 'SliceConcatExt',
+ 'String', 'ToString',
+ 'Vec'), suffix=r'\b'),
+ Name.Builtin)
+
tokens = {
'root': [
# rust allows a file to start with a shebang, but if the first line
@@ -64,36 +93,14 @@ class RustLexer(RegexLexer):
(r'fn\b', Keyword, 'funcname'),
(r'(struct|enum|type|union)\b', Keyword, 'typename'),
(r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Text, Keyword)),
- (words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64', 'usize',
- 'isize', 'f32', 'f64', 'str', 'bool'), suffix=r'\b'),
- Keyword.Type),
+ keyword_types,
(r'self\b', Name.Builtin.Pseudo),
# Prelude (taken from Rust’s src/libstd/prelude.rs)
- (words((
- # Reexported core operators
- 'Copy', 'Send', 'Sized', 'Sync',
- 'Drop', 'Fn', 'FnMut', 'FnOnce',
-
- # Reexported functions
- 'drop',
-
- # Reexported types and traits
- 'Box',
- 'ToOwned',
- 'Clone',
- 'PartialEq', 'PartialOrd', 'Eq', 'Ord',
- 'AsRef', 'AsMut', 'Into', 'From',
- 'Default',
- 'Iterator', 'Extend', 'IntoIterator',
- 'DoubleEndedIterator', 'ExactSizeIterator',
- 'Option',
- 'Some', 'None',
- 'Result',
- 'Ok', 'Err',
- 'SliceConcatExt',
- 'String', 'ToString',
- 'Vec'), suffix=r'\b'),
- Name.Builtin),
+ builtin_types,
+ # Path separators, so types don't catch them.
+ (r'::\b', Text),
+ # Types in positions.
+ (r'(?::|->)', Text, 'typename'),
# Labels
(r'(break|continue)(\s*)(\'[A-Za-z_]\w*)?',
bygroups(Keyword, Text.Whitespace, Name.Label)),
@@ -112,7 +119,8 @@ class RustLexer(RegexLexer):
(r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
# Decimal Literal
(r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float, 'number_lit'),
+ r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float,
+ 'number_lit'),
(r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
# String Literal
(r'b"', String, 'bytestring'),
@@ -164,6 +172,9 @@ class RustLexer(RegexLexer):
],
'typename': [
(r'\s+', Text),
+ (r'&', Keyword.Pseudo),
+ builtin_types,
+ keyword_types,
(r'[a-zA-Z_]\w*', Name.Class, '#pop'),
default('#pop'),
],
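Two user-visible effects of the Rust changes above, assuming the patched lexer is importable: the new 'rs' alias, and the new typename state entered after '->' or ':' so that builtin and primitive types are highlighted in those positions. The sample function is made up.

    from pygments.lexers.rust import RustLexer

    print(RustLexer.aliases)            # ['rust', 'rs'] with this change applied
    for token, value in RustLexer().get_tokens('fn f() -> Option<u32> { None }\n'):
        if value.strip():
            print(token, repr(value))   # Option -> Name.Builtin, u32 -> Keyword.Type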
diff --git a/pygments/lexers/sas.py b/pygments/lexers/sas.py
index c91ea319..3747ed9a 100644
--- a/pygments/lexers/sas.py
+++ b/pygments/lexers/sas.py
@@ -5,7 +5,7 @@
Lexer for SAS.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -16,6 +16,7 @@ from pygments.token import Comment, Keyword, Name, Number, String, Text, \
__all__ = ['SASLexer']
+
class SASLexer(RegexLexer):
"""
For `SAS <http://www.sas.com/>`_ files.
@@ -136,7 +137,7 @@ class SASLexer(RegexLexer):
],
# Special highlight for proc, data, quit, run
'proc-data': [
- (r'(^|;)\s*(proc [a-zA-Z0-9_]+|data|run|quit)[\s;\n]',
+ (r'(^|;)\s*(proc \w+|data|run|quit)[\s;]',
Keyword.Reserved),
],
# Special highlight cards and datalines
@@ -154,7 +155,6 @@ class SASLexer(RegexLexer):
(r'NOTE(:|-).*', Generic, '#pop'),
(r'WARNING(:|-).*', Generic.Emph, '#pop'),
(r'ERROR(:|-).*', Generic.Error, '#pop'),
- (r'(?!(WARNING|NOTE|ERROR))+', Text, '#pop'),
include('general'),
],
'general': [
@@ -188,8 +188,8 @@ class SASLexer(RegexLexer):
],
# Strings and user-defined variables and macros (order matters)
'vars-strings': [
- (r'&[a-zA-Z_][a-zA-Z0-9_]{0,31}\.?', Name.Variable),
- (r'%[a-zA-Z_][a-zA-Z0-9_]{0,31}', Name.Function),
+ (r'&[a-z_]\w{0,31}\.?', Name.Variable),
+ (r'%[a-z_]\w{0,31}', Name.Function),
(r'\'', String, 'string_squote'),
(r'"', String, 'string_dquote'),
],
@@ -209,11 +209,11 @@ class SASLexer(RegexLexer):
(r'[$"\\]', String),
],
'validvar': [
- (r'[a-zA-Z_][a-zA-Z0-9_]{0,31}\.?', Name.Variable, '#pop'),
+ (r'[a-z_]\w{0,31}\.?', Name.Variable, '#pop'),
],
# SAS numbers and special variables
'numbers': [
- (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[i]?\b',
+ (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)(E[+-]?[0-9]+)?i?\b',
Number),
],
'special': [
diff --git a/pygments/lexers/scripting.py b/pygments/lexers/scripting.py
index 5849161b..28c37db8 100644
--- a/pygments/lexers/scripting.py
+++ b/pygments/lexers/scripting.py
@@ -5,7 +5,7 @@
Lexer for scripting and embedded languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -104,7 +104,7 @@ class LuaLexer(RegexLexer):
(r'%s(?=%s*[.:])' % (_name, _s), Name.Class),
(_name, Name.Function, '#pop'),
# inline function
- ('\(', Punctuation, '#pop'),
+ (r'\(', Punctuation, '#pop'),
],
'goto': [
@@ -696,8 +696,8 @@ class AppleScriptLexer(RegexLexer):
(r'[-+]?\d+', Number.Integer),
],
'comment': [
- ('\(\*', Comment.Multiline, '#push'),
- ('\*\)', Comment.Multiline, '#pop'),
+ (r'\(\*', Comment.Multiline, '#push'),
+ (r'\*\)', Comment.Multiline, '#pop'),
('[^*(]+', Comment.Multiline),
('[*(]', Comment.Multiline),
],
diff --git a/pygments/lexers/sgf.py b/pygments/lexers/sgf.py
new file mode 100644
index 00000000..aa934b49
--- /dev/null
+++ b/pygments/lexers/sgf.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.sgf
+ ~~~~~~~~~~~~~~~~~~~~
+
+     Lexer for Smart Game Format (sgf) files.
+
+ The format is used to store game records of board games for two players
+     (mainly the game of Go).
+ For more information about the definition of the format, see:
+ https://www.red-bean.com/sgf/
+
+ :copyright: Copyright 2006-2018 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+
+ .. versionadded:: 2.4
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import *
+
+__all__ = ["SmartGameFormatLexer"]
+
+
+class SmartGameFormatLexer(RegexLexer):
+ name = 'SmartGameFormat'
+ aliases = ['sgf']
+ filenames = ['*.sgf']
+
+ tokens = {
+ 'root': [
+ (r'[\s():;]', Punctuation),
+ # tokens:
+ (r'(A[BW]|AE|AN|AP|AR|AS|[BW]L|BM|[BW]R|[BW]S|[BW]T|CA|CH|CP|CR|DD|DM|DO|DT|EL|EV|EX|FF|FG|G[BW]|GC|GM|GN|HA|HO|ID|IP|IT|IY|KM|KO|L|LB|LN|LT|M|MA|MN|N|OB|OM|ON|OP|OT|OV|P[BW]|PC|PL|PM|RE|RG|RO|RU|SO|SC|SE|SI|SL|SO|SQ|ST|SU|SZ|T[BW]|TC|TE|TM|TR|UC|US|V|VW|[BW]|C)',
+ Name.Builtin),
+ # number:
+ (r'(\[)([0-9.]+)(\])',
+ bygroups(Punctuation, Literal.Number, Punctuation)),
+ # date:
+ (r'(\[)([0-9]{4}-[0-9]{2}-[0-9]{2})(\])',
+ bygroups(Punctuation, Literal.Date, Punctuation)),
+ # point:
+ (r'(\[)([a-z]{2})(\])',
+ bygroups(Punctuation, String, Punctuation)),
+ # double points:
+ (r'(\[)([a-z]{2})(:)([a-z]{2})(\])',
+ bygroups(Punctuation, String, Punctuation, String, Punctuation)),
+
+ (r'(\[)([\w\s#()+,\-.:?]+)(\])',
+ bygroups(Punctuation, String, Punctuation)),
+ (r'(\[)(\s.*)(\])',
+ bygroups(Punctuation, Text, Punctuation)),
+ ],
+ }
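Assuming the new module is importable, a minimal usage sketch of the SGF lexer with a made-up game record; property identifiers come out as Name.Builtin, the date value as Literal.Date and the board points as String:

    from pygments.lexers.sgf import SmartGameFormatLexer

    record = "(;FF[4]GM[1]SZ[19]DT[2019-04-30];B[pd];W[dp])"
    for token, value in SmartGameFormatLexer().get_tokens(record):
        if value.strip():
            print(token, repr(value))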
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
index a5933afb..31bc7e94 100644
--- a/pygments/lexers/shell.py
+++ b/pygments/lexers/shell.py
@@ -5,7 +5,7 @@
Lexers for various shells.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,7 +19,7 @@ from pygments.util import shebang_matches
__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
- 'MSDOSSessionLexer', 'PowerShellLexer',
+ 'SlurmBashLexer', 'MSDOSSessionLexer', 'PowerShellLexer',
'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer']
line_re = re.compile('.*?\n')
@@ -27,17 +27,18 @@ line_re = re.compile('.*?\n')
class BashLexer(RegexLexer):
"""
- Lexer for (ba|k|)sh shell scripts.
+ Lexer for (ba|k|z|)sh shell scripts.
.. versionadded:: 0.6
"""
name = 'Bash'
- aliases = ['bash', 'sh', 'ksh', 'shell']
+ aliases = ['bash', 'sh', 'ksh', 'zsh', 'shell']
filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
- '*.exheres-0', '*.exlib',
- '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD']
- mimetypes = ['application/x-sh', 'application/x-shellscript']
+ '*.exheres-0', '*.exlib', '*.zsh',
+ '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc',
+ 'PKGBUILD']
+ mimetypes = ['application/x-sh', 'application/x-shellscript', 'text/x-shellscript']
tokens = {
'root': [
@@ -50,7 +51,7 @@ class BashLexer(RegexLexer):
(r'\$\(\(', Keyword, 'math'),
(r'\$\(', Keyword, 'paren'),
(r'\$\{#?', String.Interpol, 'curly'),
- (r'\$[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable), # user variable
+ (r'\$[a-zA-Z_]\w*', Name.Variable), # user variable
(r'\$(?:\d+|[#$?!_*@-])', Name.Variable), # builtin
(r'\$', Text),
],
@@ -75,7 +76,7 @@ class BashLexer(RegexLexer):
(r'&&|\|\|', Operator),
],
'data': [
- (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
+ (r'(?s)\$?"(\\.|[^"\\$])*"', String.Double),
(r'"', String.Double, 'string'),
(r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r"(?s)'.*?'", String.Single),
@@ -125,6 +126,28 @@ class BashLexer(RegexLexer):
return 0.2
+class SlurmBashLexer(BashLexer):
+ """
+ Lexer for (ba|k|z|)sh Slurm scripts.
+
+ .. versionadded:: 2.4
+ """
+
+ name = 'Slurm'
+ aliases = ['slurm', 'sbatch']
+ filenames = ['*.sl']
+ mimetypes = []
+ EXTRA_KEYWORDS = {'srun'}
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in BashLexer.get_tokens_unprocessed(self, text):
+ if token is Text and value in self.EXTRA_KEYWORDS:
+ yield index, Name.Builtin, value
+ elif token is Comment.Single and 'SBATCH' in value:
+ yield index, Keyword.Pseudo, value
+ else:
+ yield index, token, value
+
class ShellSessionBaseLexer(Lexer):
"""
Base lexer for simplistic shell sessions.
@@ -478,13 +501,16 @@ class BatchLexer(RegexLexer):
using(this, state='variable')), '#pop'),
(r'(exist%s)(%s%s)' % (_token_terminator, _space, _stoken),
bygroups(Keyword, using(this, state='text')), '#pop'),
- (r'(%s%s?)(==)(%s?%s)' % (_stoken, _space, _space, _stoken),
- bygroups(using(this, state='text'), Operator,
- using(this, state='text')), '#pop'),
(r'(%s%s)(%s)(%s%s)' % (_number, _space, _opword, _space, _number),
bygroups(using(this, state='arithmetic'), Operator.Word,
using(this, state='arithmetic')), '#pop'),
- (r'(%s%s)(%s)(%s%s)' % (_stoken, _space, _opword, _space, _stoken),
+ (_stoken, using(this, state='text'), ('#pop', 'if2')),
+ ],
+ 'if2': [
+ (r'(%s?)(==)(%s?%s)' % (_space, _space, _stoken),
+ bygroups(using(this, state='text'), Operator,
+ using(this, state='text')), '#pop'),
+ (r'(%s)(%s)(%s%s)' % (_space, _opword, _space, _stoken),
bygroups(using(this, state='text'), Operator.Word,
using(this, state='text')), '#pop')
],
@@ -634,13 +660,29 @@ class PowerShellLexer(RegexLexer):
'wildcard').split()
verbs = (
- 'write where wait use update unregister undo trace test tee take '
- 'suspend stop start split sort skip show set send select scroll resume '
- 'restore restart resolve resize reset rename remove register receive '
- 'read push pop ping out new move measure limit join invoke import '
- 'group get format foreach export expand exit enter enable disconnect '
- 'disable debug cxnew copy convertto convertfrom convert connect '
- 'complete compare clear checkpoint aggregate add').split()
+ 'write where watch wait use update unregister unpublish unprotect '
+ 'unlock uninstall undo unblock trace test tee take sync switch '
+ 'suspend submit stop step start split sort skip show set send select '
+ 'search scroll save revoke resume restore restart resolve resize '
+ 'reset request repair rename remove register redo receive read push '
+ 'publish protect pop ping out optimize open new move mount merge '
+ 'measure lock limit join invoke install initialize import hide group '
+ 'grant get format foreach find export expand exit enter enable edit '
+ 'dismount disconnect disable deny debug cxnew copy convertto '
+ 'convertfrom convert connect confirm compress complete compare close '
+ 'clear checkpoint block backup assert approve aggregate add').split()
+
+ aliases_ = (
+ 'ac asnp cat cd cfs chdir clc clear clhy cli clp cls clv cnsn '
+ 'compare copy cp cpi cpp curl cvpa dbp del diff dir dnsn ebp echo epal '
+ 'epcsv epsn erase etsn exsn fc fhx fl foreach ft fw gal gbp gc gci gcm '
+ 'gcs gdr ghy gi gjb gl gm gmo gp gps gpv group gsn gsnp gsv gu gv gwmi '
+ 'h history icm iex ihy ii ipal ipcsv ipmo ipsn irm ise iwmi iwr kill lp '
+ 'ls man md measure mi mount move mp mv nal ndr ni nmo npssc nsn nv ogv '
+ 'oh popd ps pushd pwd r rbp rcjb rcsn rd rdr ren ri rjb rm rmdir rmo '
+ 'rni rnp rp rsn rsnp rujb rv rvpa rwmi sajb sal saps sasv sbp sc select '
+ 'set shcm si sl sleep sls sort sp spjb spps spsv start sujb sv swmi tee '
+ 'trcm type wget where wjb write').split()
commenthelp = (
'component description example externalhelp forwardhelpcategory '
@@ -668,6 +710,7 @@ class PowerShellLexer(RegexLexer):
(r'(%s)\b' % '|'.join(keywords), Keyword),
(r'-(%s)\b' % '|'.join(operators), Operator),
(r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin),
+ (r'(%s)\s' % '|'.join(aliases_), Name.Builtin),
(r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
(r'-[a-z_]\w*', Name),
(r'\w+', Name),
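Two small demonstrations of the shell changes above, assuming the patched module is importable: zsh now resolves to the Bash lexer, and the new SlurmBashLexer promotes #SBATCH directives and the srun command; the two-line job script is a made-up sample.

    from pygments.lexers.shell import BashLexer, SlurmBashLexer

    print('zsh' in BashLexer.aliases)          # True
    job = "#SBATCH --ntasks=4\nsrun hostname\n"
    for token, value in SlurmBashLexer().get_tokens(job):
        if value.strip():
            print(token, repr(value))
    # the #SBATCH comment is re-emitted as Keyword.Pseudo and srun as Name.Builtin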
diff --git a/pygments/lexers/slash.py b/pygments/lexers/slash.py
new file mode 100644
index 00000000..bd73d463
--- /dev/null
+++ b/pygments/lexers/slash.py
@@ -0,0 +1,187 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.slash
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the `Slash <https://github.com/arturadib/Slash-A>`_ programming
+ language.
+
+ :copyright: Copyright 2012 by GitHub, Inc
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import ExtendedRegexLexer, bygroups, DelegatingLexer
+from pygments.token import Name, Number, String, Comment, Punctuation, \
+ Other, Keyword, Operator, Whitespace
+
+__all__ = ['SlashLexer']
+
+
+class SlashLanguageLexer(ExtendedRegexLexer):
+ _nkw = r'(?=[^a-zA-Z_0-9])'
+
+ def move_state(new_state):
+ return ("#pop", new_state)
+
+ def right_angle_bracket(lexer, match, ctx):
+ if len(ctx.stack) > 1 and ctx.stack[-2] == "string":
+ ctx.stack.pop()
+ yield match.start(), String.Interpol, "}"
+ ctx.pos = match.end()
+ pass
+
+ tokens = {
+ "root": [
+ (r"<%=", Comment.Preproc, move_state("slash")),
+ (r"<%!!", Comment.Preproc, move_state("slash")),
+ (r"<%#.*?%>", Comment.Multiline),
+ (r"<%", Comment.Preproc, move_state("slash")),
+ (r".|\n", Other),
+ ],
+ "string": [
+ (r"\\", String.Escape, move_state("string_e")),
+ (r"\"", String, move_state("slash")),
+ (r"#\{", String.Interpol, "slash"),
+ (r'.|\n', String),
+ ],
+ "string_e": [
+ (r'n', String.Escape, move_state("string")),
+ (r't', String.Escape, move_state("string")),
+ (r'r', String.Escape, move_state("string")),
+ (r'e', String.Escape, move_state("string")),
+ (r'x[a-fA-F0-9]{2}', String.Escape, move_state("string")),
+ (r'.', String.Escape, move_state("string")),
+ ],
+ "regexp": [
+ (r'}[a-z]*', String.Regex, move_state("slash")),
+ (r'\\(.|\n)', String.Regex),
+ (r'{', String.Regex, "regexp_r"),
+ (r'.|\n', String.Regex),
+ ],
+ "regexp_r": [
+ (r'}[a-z]*', String.Regex, "#pop"),
+ (r'\\(.|\n)', String.Regex),
+ (r'{', String.Regex, "regexp_r"),
+ ],
+ "slash": [
+ (r"%>", Comment.Preproc, move_state("root")),
+ (r"\"", String, move_state("string")),
+ (r"'[a-zA-Z0-9_]+", String),
+ (r'%r{', String.Regex, move_state("regexp")),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r"(#|//).*?\n", Comment.Single),
+ (r'-?[0-9]+e[+-]?[0-9]+', Number.Float),
+ (r'-?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?', Number.Float),
+ (r'-?[0-9]+', Number.Integer),
+ (r'nil'+_nkw, Name.Builtin),
+ (r'true'+_nkw, Name.Builtin),
+ (r'false'+_nkw, Name.Builtin),
+ (r'self'+_nkw, Name.Builtin),
+ (r'(class)(\s+)([A-Z][a-zA-Z0-9_\']*)',
+ bygroups(Keyword, Whitespace, Name.Class)),
+ (r'class'+_nkw, Keyword),
+ (r'extends'+_nkw, Keyword),
+ (r'(def)(\s+)(self)(\s*)(\.)(\s*)([a-z_][a-zA-Z0-9_\']*=?|<<|>>|==|<=>|<=|<|>=|>|\+|-(self)?|~(self)?|\*|/|%|^|&&|&|\||\[\]=?)',
+ bygroups(Keyword, Whitespace, Name.Builtin, Whitespace, Punctuation, Whitespace, Name.Function)),
+ (r'(def)(\s+)([a-z_][a-zA-Z0-9_\']*=?|<<|>>|==|<=>|<=|<|>=|>|\+|-(self)?|~(self)?|\*|/|%|^|&&|&|\||\[\]=?)',
+ bygroups(Keyword, Whitespace, Name.Function)),
+ (r'def'+_nkw, Keyword),
+ (r'if'+_nkw, Keyword),
+ (r'elsif'+_nkw, Keyword),
+ (r'else'+_nkw, Keyword),
+ (r'unless'+_nkw, Keyword),
+ (r'for'+_nkw, Keyword),
+ (r'in'+_nkw, Keyword),
+ (r'while'+_nkw, Keyword),
+ (r'until'+_nkw, Keyword),
+ (r'and'+_nkw, Keyword),
+ (r'or'+_nkw, Keyword),
+ (r'not'+_nkw, Keyword),
+ (r'lambda'+_nkw, Keyword),
+ (r'try'+_nkw, Keyword),
+ (r'catch'+_nkw, Keyword),
+ (r'return'+_nkw, Keyword),
+ (r'next'+_nkw, Keyword),
+ (r'last'+_nkw, Keyword),
+ (r'throw'+_nkw, Keyword),
+ (r'use'+_nkw, Keyword),
+ (r'switch'+_nkw, Keyword),
+ (r'\\', Keyword),
+ (r'λ', Keyword),
+ (r'__FILE__'+_nkw, Name.Builtin.Pseudo),
+ (r'__LINE__'+_nkw, Name.Builtin.Pseudo),
+ (r'[A-Z][a-zA-Z0-9_\']*'+_nkw, Name.Constant),
+ (r'[a-z_][a-zA-Z0-9_\']*'+_nkw, Name),
+ (r'@[a-z_][a-zA-Z0-9_\']*'+_nkw, Name.Variable.Instance),
+ (r'@@[a-z_][a-zA-Z0-9_\']*'+_nkw, Name.Variable.Class),
+ (r'\(', Punctuation),
+ (r'\)', Punctuation),
+ (r'\[', Punctuation),
+ (r'\]', Punctuation),
+ (r'\{', Punctuation),
+ (r'\}', right_angle_bracket),
+ (r';', Punctuation),
+ (r',', Punctuation),
+ (r'<<=', Operator),
+ (r'>>=', Operator),
+ (r'<<', Operator),
+ (r'>>', Operator),
+ (r'==', Operator),
+ (r'!=', Operator),
+ (r'=>', Operator),
+ (r'=', Operator),
+ (r'<=>', Operator),
+ (r'<=', Operator),
+ (r'>=', Operator),
+ (r'<', Operator),
+ (r'>', Operator),
+ (r'\+\+', Operator),
+ (r'\+=', Operator),
+ (r'-=', Operator),
+ (r'\*\*=', Operator),
+ (r'\*=', Operator),
+ (r'\*\*', Operator),
+ (r'\*', Operator),
+ (r'/=', Operator),
+ (r'\+', Operator),
+ (r'-', Operator),
+ (r'/', Operator),
+ (r'%=', Operator),
+ (r'%', Operator),
+ (r'^=', Operator),
+ (r'&&=', Operator),
+ (r'&=', Operator),
+ (r'&&', Operator),
+ (r'&', Operator),
+ (r'\|\|=', Operator),
+ (r'\|=', Operator),
+ (r'\|\|', Operator),
+ (r'\|', Operator),
+ (r'!', Operator),
+ (r'\.\.\.', Operator),
+ (r'\.\.', Operator),
+ (r'\.', Operator),
+ (r'::', Operator),
+ (r':', Operator),
+ (r'(\s|\n)+', Whitespace),
+ (r'[a-z_][a-zA-Z0-9_\']*', Name.Variable),
+ ],
+ }
+
+
+class SlashLexer(DelegatingLexer):
+ """
+ Lexer for the Slash programming language.
+
+ .. versionadded:: 2.4
+ """
+
+ name = 'Slash'
+ aliases = ['slash']
+ filenames = ['*.sl']
+
+ def __init__(self, **options):
+ from pygments.lexers.web import HtmlLexer
+ super(SlashLexer, self).__init__(HtmlLexer, SlashLanguageLexer, **options)
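Because SlashLexer is a DelegatingLexer, everything outside <% ... %> is handed to HtmlLexer while the embedded code goes through SlashLanguageLexer; a tiny sketch with a made-up template:

    from pygments.lexers.slash import SlashLexer

    page = "<p><%= 1 + 2 %></p>\n"
    for token, value in SlashLexer().get_tokens(page):
        if value.strip():
            print(token, repr(value))
    # <p> and </p> are lexed as HTML tags, 1 and 2 as Number.Integer, + as Operator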
diff --git a/pygments/lexers/smalltalk.py b/pygments/lexers/smalltalk.py
index ebeb6320..79078b66 100644
--- a/pygments/lexers/smalltalk.py
+++ b/pygments/lexers/smalltalk.py
@@ -5,7 +5,7 @@
Lexers for Smalltalk and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/smv.py b/pygments/lexers/smv.py
index 15fc9381..380a3b70 100644
--- a/pygments/lexers/smv.py
+++ b/pygments/lexers/smv.py
@@ -5,7 +5,7 @@
Lexers for the SMV languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,6 +19,8 @@ __all__ = ['NuSMVLexer']
class NuSMVLexer(RegexLexer):
"""
Lexer for the NuSMV language.
+
+ .. versionadded:: 2.2
"""
name = 'NuSMV'
@@ -33,43 +35,45 @@ class NuSMVLexer(RegexLexer):
(r'--.*\n', Comment),
# Reserved
- (words(('MODULE','DEFINE','MDEFINE','CONSTANTS','VAR','IVAR',
- 'FROZENVAR','INIT','TRANS','INVAR','SPEC','CTLSPEC','LTLSPEC',
- 'PSLSPEC','COMPUTE','NAME','INVARSPEC','FAIRNESS','JUSTICE',
- 'COMPASSION','ISA','ASSIGN','CONSTRAINT','SIMPWFF','CTLWFF',
- 'LTLWFF','PSLWFF','COMPWFF','IN','MIN','MAX','MIRROR','PRED',
- 'PREDICATES'), suffix=r'(?![\w$#-])'), Keyword.Declaration),
+ (words(('MODULE', 'DEFINE', 'MDEFINE', 'CONSTANTS', 'VAR', 'IVAR',
+ 'FROZENVAR', 'INIT', 'TRANS', 'INVAR', 'SPEC', 'CTLSPEC',
+ 'LTLSPEC', 'PSLSPEC', 'COMPUTE', 'NAME', 'INVARSPEC',
+ 'FAIRNESS', 'JUSTICE', 'COMPASSION', 'ISA', 'ASSIGN',
+ 'CONSTRAINT', 'SIMPWFF', 'CTLWFF', 'LTLWFF', 'PSLWFF',
+ 'COMPWFF', 'IN', 'MIN', 'MAX', 'MIRROR', 'PRED',
+ 'PREDICATES'), suffix=r'(?![\w$#-])'),
+ Keyword.Declaration),
(r'process(?![\w$#-])', Keyword),
- (words(('array','of','boolean','integer','real','word'),
- suffix=r'(?![\w$#-])'), Keyword.Type),
- (words(('case','esac'), suffix=r'(?![\w$#-])'), Keyword),
- (words(('word1','bool','signed','unsigned','extend','resize',
- 'sizeof','uwconst','swconst','init','self','count','abs','max',
- 'min'), suffix=r'(?![\w$#-])'), Name.Builtin),
- (words(('EX','AX','EF','AF','EG','AG','E','F','O','G','H','X','Y',
- 'Z','A','U','S','V','T','BU','EBF','ABF','EBG','ABG','next',
- 'mod','union','in','xor','xnor'), suffix=r'(?![\w$#-])'),
+ (words(('array', 'of', 'boolean', 'integer', 'real', 'word'),
+ suffix=r'(?![\w$#-])'), Keyword.Type),
+ (words(('case', 'esac'), suffix=r'(?![\w$#-])'), Keyword),
+ (words(('word1', 'bool', 'signed', 'unsigned', 'extend', 'resize',
+ 'sizeof', 'uwconst', 'swconst', 'init', 'self', 'count',
+ 'abs', 'max', 'min'), suffix=r'(?![\w$#-])'),
+ Name.Builtin),
+ (words(('EX', 'AX', 'EF', 'AF', 'EG', 'AG', 'E', 'F', 'O', 'G',
+ 'H', 'X', 'Y', 'Z', 'A', 'U', 'S', 'V', 'T', 'BU', 'EBF',
+ 'ABF', 'EBG', 'ABG', 'next', 'mod', 'union', 'in', 'xor',
+ 'xnor'), suffix=r'(?![\w$#-])'),
Operator.Word),
- (words(('TRUE','FALSE'), suffix=r'(?![\w$#-])'), Keyword.Constant),
+ (words(('TRUE', 'FALSE'), suffix=r'(?![\w$#-])'), Keyword.Constant),
# Names
(r'[a-zA-Z_][\w$#-]*', Name.Variable),
-
+
# Operators
(r':=', Operator),
- (r'[&\|\+\-\*/<>!=]', Operator),
+ (r'[-&|+*/<>!=]', Operator),
# Literals
(r'\-?\d+\b', Number.Integer),
(r'0[su][bB]\d*_[01_]+', Number.Bin),
- (r'0[su][oO]\d*_[01234567_]+', Number.Oct),
+ (r'0[su][oO]\d*_[0-7_]+', Number.Oct),
(r'0[su][dD]\d*_[\d_]+', Number.Dec),
(r'0[su][hH]\d*_[\da-fA-F_]+', Number.Hex),
# Whitespace, punctuation and the rest
(r'\s+', Text.Whitespace),
- (r'[\(\)\[\]\{\};\?:\.,]', Punctuation),
- (r'.', Generic.Error),
- ]
+ (r'[()\[\]{};?:.,]', Punctuation),
+ ],
}
-
diff --git a/pygments/lexers/snobol.py b/pygments/lexers/snobol.py
index e4178f9c..f6e12fd2 100644
--- a/pygments/lexers/snobol.py
+++ b/pygments/lexers/snobol.py
@@ -5,7 +5,7 @@
Lexers for the SNOBOL language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/special.py b/pygments/lexers/special.py
index e66a01cf..6e076b0c 100644
--- a/pygments/lexers/special.py
+++ b/pygments/lexers/special.py
@@ -5,7 +5,7 @@
Special lexers.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py
index e225a66e..8884db22 100644
--- a/pygments/lexers/sql.py
+++ b/pygments/lexers/sql.py
@@ -34,7 +34,7 @@
The ``tests/examplefiles`` contains a few test files with data to be
parsed by these lexers.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -59,7 +59,14 @@ line_re = re.compile('.*?\n')
language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
-do_re = re.compile(r'\bDO\b', re.IGNORECASE)
+do_re = re.compile(r'\bDO\b', re.IGNORECASE)
+
+# Regular expressions for analyse_text()
+name_between_bracket_re = re.compile(r'\[[a-zA-Z_]\w*\]')
+name_between_backtick_re = re.compile(r'`[a-zA-Z_]\w*`')
+tsql_go_re = re.compile(r'\bgo\b', re.IGNORECASE)
+tsql_declare_re = re.compile(r'\bdeclare\s+@', re.IGNORECASE)
+tsql_variable_re = re.compile(r'@[a-zA-Z_]\w*\b')
def language_callback(lexer, match):
@@ -82,7 +89,7 @@ def language_callback(lexer, match):
lexer.text[max(0, match.start()-25):match.start()]))
if m:
l = lexer._get_lexer('plpgsql')
-
+
# 1 = $, 2 = delimiter, 3 = $
yield (match.start(1), String, match.group(1))
yield (match.start(2), String.Delimiter, match.group(2))
@@ -155,7 +162,7 @@ class PostgresLexer(PostgresBase, RegexLexer):
(r'\s+', Text),
(r'--.*\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'(' + '|'.join(s.replace(" ", "\s+")
+ (r'(' + '|'.join(s.replace(" ", r"\s+")
for s in DATATYPES + PSEUDO_TYPES)
+ r')\b', Name.Builtin),
(words(KEYWORDS, suffix=r'\b'), Keyword),
@@ -308,14 +315,7 @@ class PostgresConsoleLexer(Lexer):
# and continue until the end of command is detected
curcode = ''
insertions = []
- while 1:
- try:
- line = next(lines)
- except StopIteration:
- # allow the emission of partially collected items
- # the repl loop will be broken below
- break
-
+ for line in lines:
# Identify a shell prompt in case of psql commandline example
if line.startswith('$') and not curcode:
lexer = get_lexer_by_name('console', **self.options)
@@ -346,8 +346,7 @@ class PostgresConsoleLexer(Lexer):
# Emit the output lines
out_token = Generic.Output
- while 1:
- line = next(lines)
+ for line in lines:
mprompt = re_prompt.match(line)
if mprompt is not None:
# push the line back to have it processed by the prompt
@@ -363,6 +362,8 @@ class PostgresConsoleLexer(Lexer):
yield (mmsg.start(2), out_token, mmsg.group(2))
else:
yield (0, out_token, line)
+ else:
+ return
class SqlLexer(RegexLexer):
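Note: the loops rewritten above lean on Python's for/else semantics: iterating the generator directly avoids a bare next() call that could leak StopIteration, and the else branch runs only when the iterator is exhausted without a break, which is what the added "else: return" relies on. A minimal sketch of the pattern, using an invented stand-in generator rather than the lexer's real line iterator:

    def output_lines():
        # Invented stand-in for the remaining psql session lines.
        yield " ?column?"
        yield "----------"
        yield "(1 row)"

    for line in output_lines():
        if line.startswith("regression=#"):
            break          # a new prompt: hand the line back to the code loop
    else:
        # Reached only when the iterator is exhausted without a break;
        # this is the case the added `else: return` handles in the lexer.
        print("session ended")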
@@ -480,6 +481,9 @@ class SqlLexer(RegexLexer):
]
}
+ def analyse_text(text):
+ return 0.01
+
class TransactSqlLexer(RegexLexer):
"""
@@ -499,7 +503,7 @@ class TransactSqlLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Whitespace),
- (r'--(?m).*?$\n?', Comment.Single),
+ (r'(?m)--.*?$\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(words(_tsql_builtins.OPERATORS), Operator),
(words(_tsql_builtins.OPERATOR_WORDS, suffix=r'\b'), Operator.Word),
@@ -536,6 +540,33 @@ class TransactSqlLexer(RegexLexer):
]
}
+ def analyse_text(text):
+ rating = 0
+ if tsql_declare_re.search(text):
+ # Found T-SQL variable declaration.
+ rating = 1.0
+ else:
+ name_between_backtick_count = len(
+ name_between_backtick_re.findall((text)))
+ name_between_bracket_count = len(
+ name_between_bracket_re.findall(text))
+ # We need to check if there are any names using
+ # backticks or brackets, as otherwise both are 0
+ # and 0 >= 2 * 0, so we would always assume it's true
+ dialect_name_count = name_between_backtick_count + name_between_bracket_count
+ if dialect_name_count >= 1 and name_between_bracket_count >= 2 * name_between_backtick_count:
+ # Found at least twice as many [name] as `name`.
+ rating += 0.5
+ elif name_between_bracket_count > name_between_backtick_count:
+ rating += 0.2
+ elif name_between_bracket_count > 0:
+ rating += 0.1
+ if tsql_variable_re.search(text) is not None:
+ rating += 0.1
+ if tsql_go_re.search(text) is not None:
+ rating += 0.1
+ return rating
+
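Note: because Pygments wraps analyse_text into a static method on the lexer class, the heuristic above can be exercised directly. A rough check with an invented snippet; the DECLARE @... line alone drives the rating to 1.0, and the wrapper clamps the final value into the 0.0-1.0 range:

    from pygments.lexers.sql import TransactSqlLexer

    sample = """
    DECLARE @count int;
    SELECT [id], [name] FROM [dbo].[users];
    GO
    """
    print(TransactSqlLexer.analyse_text(sample))  # 1.0 after clamping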
class MySqlLexer(RegexLexer):
"""
@@ -609,6 +640,23 @@ class MySqlLexer(RegexLexer):
]
}
+ def analyse_text(text):
+ rating = 0
+ name_between_backtick_count = len(
+ name_between_backtick_re.findall((text)))
+ name_between_bracket_count = len(
+ name_between_bracket_re.findall(text))
+ # Same logic as above in the TSQL analysis
+ dialect_name_count = name_between_backtick_count + name_between_bracket_count
+ if dialect_name_count >= 1 and name_between_backtick_count >= 2 * name_between_bracket_count:
+ # Found at least twice as many `name` as [name].
+ rating += 0.5
+ elif name_between_backtick_count > name_between_bracket_count:
+ rating += 0.2
+ elif name_between_backtick_count > 0:
+ rating += 0.1
+ return rating
+
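Note: the MySQL variant mirrors the same counting logic with backticks and brackets swapped, so the two heuristics pull in opposite directions on dialect-specific quoting. A quick contrast, again with an invented one-liner:

    from pygments.lexers.sql import MySqlLexer, TransactSqlLexer

    sample = "SELECT `id`, `name` FROM `users` WHERE `id` = 1;"
    # Backtick-quoted names dominate: about 0.5 for MySQL, 0.0 for Transact-SQL.
    print(MySqlLexer.analyse_text(sample))
    print(TransactSqlLexer.analyse_text(sample))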
class SqliteConsoleLexer(Lexer):
"""
diff --git a/pygments/lexers/stata.py b/pygments/lexers/stata.py
index b2be64d2..9566d12a 100644
--- a/pygments/lexers/stata.py
+++ b/pygments/lexers/stata.py
@@ -5,10 +5,11 @@
Lexer for Stata
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+import re
from pygments.lexer import RegexLexer, include, words
from pygments.token import Comment, Keyword, Name, Number, \
String, Text, Operator
@@ -17,6 +18,7 @@ from pygments.lexers._stata_builtins import builtins_base, builtins_functions
__all__ = ['StataLexer']
+
class StataLexer(RegexLexer):
"""
For `Stata <http://www.stata.com/>`_ do files.
@@ -32,55 +34,118 @@ class StataLexer(RegexLexer):
aliases = ['stata', 'do']
filenames = ['*.do', '*.ado']
mimetypes = ['text/x-stata', 'text/stata', 'application/x-stata']
+ flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
include('comments'),
- include('vars-strings'),
+ include('strings'),
+ include('macros'),
include('numbers'),
include('keywords'),
+ include('operators'),
+ include('format'),
(r'.', Text),
],
- # Global and local macros; regular and special strings
- 'vars-strings': [
- (r'\$[a-zA-Z_0-9\{]', Name.Variable.Global, 'var_validglobal'),
- (r'`[a-zA-Z_0-9]{0,31}\'', Name.Variable),
- (r'"', String, 'string_dquote'),
- (r'`"', String, 'string_mquote'),
- ],
- # For either string type, highlight macros as macros
- 'string_dquote': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape),
- (r'\$', Name.Variable.Global, 'var_validglobal'),
- (r'`', Name.Variable, 'var_validlocal'),
- (r'[^$\$`"\\]+', String),
- (r'[$"\\]', String),
- ],
- 'string_mquote': [
+ # Comments are a complicated beast in Stata because they can be
+ # nested and there are a few corner cases with that. See:
+ # - github.com/kylebarron/language-stata/issues/90
+ # - statalist.org/forums/forum/general-stata-discussion/general/1448244
+ 'comments': [
+ (r'(^//|(?<=\s)//)(?!/)', Comment.Single, 'comments-double-slash'),
+ (r'^\s*\*', Comment.Single, 'comments-star'),
+ (r'/\*', Comment.Multiline, 'comments-block'),
+ (r'(^///|(?<=\s)///)', Comment.Special, 'comments-triple-slash')
+ ],
+ 'comments-block': [
+ (r'/\*', Comment.Multiline, '#push'),
+            # this ends and restarts a comment block, but we need to catch it
+            # so that it doesn't start _another_ level of comment blocks
+ (r'\*/\*', Comment.Multiline),
+ (r'(\*/\s+\*(?!/)[^\n]*)|(\*/)', Comment.Multiline, '#pop'),
+ # Match anything else as a character inside the comment
+ (r'.', Comment.Multiline),
+ ],
+ 'comments-star': [
+ (r'///.*?\n', Comment.Single,
+ ('#pop', 'comments-triple-slash')),
+ (r'(^//|(?<=\s)//)(?!/)', Comment.Single,
+ ('#pop', 'comments-double-slash')),
+ (r'/\*', Comment.Multiline, 'comments-block'),
+ (r'.(?=\n)', Comment.Single, '#pop'),
+ (r'.', Comment.Single),
+ ],
+ 'comments-triple-slash': [
+ (r'\n', Comment.Special, '#pop'),
+ # A // breaks out of a comment for the rest of the line
+ (r'//.*?(?=\n)', Comment.Single, '#pop'),
+ (r'.', Comment.Special),
+ ],
+ 'comments-double-slash': [
+ (r'\n', Text, '#pop'),
+ (r'.', Comment.Single),
+ ],
+ # `"compound string"' and regular "string"; note the former are
+ # nested.
+ 'strings': [
+ (r'`"', String, 'string-compound'),
+ (r'(?<!`)"', String, 'string-regular'),
+ ],
+ 'string-compound': [
+ (r'`"', String, '#push'),
(r'"\'', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape),
- (r'\$', Name.Variable.Global, 'var_validglobal'),
- (r'`', Name.Variable, 'var_validlocal'),
- (r'[^$\$`"\\]+', String),
- (r'[$"\\]', String),
+ (r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape),
+ include('macros'),
+ (r'.', String)
],
- 'var_validglobal': [
- (r'\{?[a-zA-Z0-9_]{0,32}\}?', Name.Variable.Global, '#pop'),
+ 'string-regular': [
+ (r'(")(?!\')|(?=\n)', String, '#pop'),
+ (r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape),
+ include('macros'),
+ (r'.', String)
],
- 'var_validlocal': [
- (r'[a-zA-Z0-9_]{0,31}\'', Name.Variable, '#pop'),
+ # A local is usually
+ # `\w{0,31}'
+ # `:extended macro'
+ # `=expression'
+ # `[rsen](results)'
+ # `(++--)scalar(++--)'
+ #
+            # However, there are all sorts of weird rules for the edge
+            # cases. Instead of writing 27 exceptions, treat anything
+            # inside `' as a local.
+            #
+            # A global is more restricted, so we do follow the rules. Note
+            # that only globals explicitly enclosed in ${} can be nested.
+ 'macros': [
+ (r'\$(\{|(?=[\$`]))', Name.Variable.Global, 'macro-global-nested'),
+ (r'\$', Name.Variable.Global, 'macro-global-name'),
+ (r'`', Name.Variable, 'macro-local'),
],
- # * only OK at line start, // OK anywhere
- 'comments': [
- (r'^\s*\*.*$', Comment),
- (r'//.*', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ 'macro-local': [
+ (r'`', Name.Variable, '#push'),
+ (r"'", Name.Variable, '#pop'),
+ (r'\$(\{|(?=[\$`]))', Name.Variable.Global, 'macro-global-nested'),
+ (r'\$', Name.Variable.Global, 'macro-global-name'),
+ (r'.', Name.Variable), # fallback
+ ],
+ 'macro-global-nested': [
+ (r'\$(\{|(?=[\$`]))', Name.Variable.Global, '#push'),
+ (r'\}', Name.Variable.Global, '#pop'),
+ (r'\$', Name.Variable.Global, 'macro-global-name'),
+ (r'`', Name.Variable, 'macro-local'),
+ (r'\w', Name.Variable.Global), # fallback
+ (r'(?!\w)', Name.Variable.Global, '#pop'),
+ ],
+ 'macro-global-name': [
+            (r'\$(\{|(?=[\$`]))', Name.Variable.Global, ('#pop', 'macro-global-nested')),
+            (r'\$', Name.Variable.Global, ('#pop', 'macro-global-name')),
+            (r'`', Name.Variable, ('#pop', 'macro-local')),
+ (r'\w{1,32}', Name.Variable.Global, '#pop'),
],
# Built in functions and statements
'keywords': [
- (words(builtins_functions, prefix = r'\b', suffix = r'\('),
+ (words(builtins_functions, prefix = r'\b', suffix = r'(?=\()'),
Name.Function),
(words(builtins_base, prefix = r'(^\s*|\s)', suffix = r'\b'),
Keyword),
@@ -98,9 +163,9 @@ class StataLexer(RegexLexer):
],
# Stata formats
'format': [
- (r'%-?\d{1,2}(\.\d{1,2})?[gfe]c?', Name.Variable),
- (r'%(21x|16H|16L|8H|8L)', Name.Variable),
- (r'%-?(tc|tC|td|tw|tm|tq|th|ty|tg).{0,32}', Name.Variable),
- (r'%[-~]?\d{1,4}s', Name.Variable),
+ (r'%-?\d{1,2}(\.\d{1,2})?[gfe]c?', Name.Other),
+ (r'%(21x|16H|16L|8H|8L)', Name.Other),
+ (r'%-?(tc|tC|td|tw|tm|tq|th|ty|tg)\S{0,32}', Name.Other),
+ (r'%[-~]?\d{1,4}s', Name.Other),
]
}
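Note: the comment, string and macro states added above are easiest to inspect by dumping the token stream for a small do-file snippet that mixes a nested block comment, a compound string and local/global macros. A rough sketch; the snippet itself is invented:

    from pygments.lexers.stata import StataLexer

    code = '''\
    /* outer /* nested */ still one comment */
    local msg `"hello `x' and ${path}"'
    display "`msg'"
    '''
    for token, value in StataLexer().get_tokens(code):
        if value.strip():
            print(token, repr(value))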
diff --git a/pygments/lexers/supercollider.py b/pygments/lexers/supercollider.py
index 137b753c..40ff0aeb 100644
--- a/pygments/lexers/supercollider.py
+++ b/pygments/lexers/supercollider.py
@@ -5,7 +5,7 @@
Lexer for SuperCollider
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/tcl.py b/pygments/lexers/tcl.py
index 96feb7a8..1d1be033 100644
--- a/pygments/lexers/tcl.py
+++ b/pygments/lexers/tcl.py
@@ -5,7 +5,7 @@
Lexers for Tcl and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py
index e6eeaa25..8000deba 100644
--- a/pygments/lexers/templates.py
+++ b/pygments/lexers/templates.py
@@ -5,7 +5,7 @@
Lexers for various template engines' markup.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -187,13 +187,13 @@ class SmartyLexer(RegexLexer):
def analyse_text(text):
rv = 0.0
- if re.search('\{if\s+.*?\}.*?\{/if\}', text):
+ if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
rv += 0.15
- if re.search('\{include\s+file=.*?\}', text):
+ if re.search(r'\{include\s+file=.*?\}', text):
rv += 0.15
- if re.search('\{foreach\s+.*?\}.*?\{/foreach\}', text):
+ if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
rv += 0.15
- if re.search('\{\$.*?\}', text):
+ if re.search(r'\{\$.*?\}', text):
rv += 0.01
return rv
@@ -375,7 +375,7 @@ class DjangoLexer(RegexLexer):
(r'\.\w+', Name.Variable),
(r':?"(\\\\|\\"|[^"])*"', String.Double),
(r":?'(\\\\|\\'|[^'])*'", String.Single),
- (r'([{}()\[\]+\-*/,:~]|[><=]=?)', Operator),
+ (r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
],
@@ -421,18 +421,18 @@ class MyghtyLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Text),
- (r'(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
+ (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
bygroups(Name.Tag, Text, Name.Function, Name.Tag,
using(this), Name.Tag)),
- (r'(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
+ (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
bygroups(Name.Tag, Name.Function, Name.Tag,
using(PythonLexer), Name.Tag)),
(r'(<&[^|])(.*?)(,.*?)?(&>)',
bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
- (r'(<&\|)(.*?)(,.*?)?(&>)(?s)',
+ (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
(r'</&>', Name.Tag),
- (r'(<%!?)(.*?)(%>)(?s)',
+ (r'(?s)(<%!?)(.*?)(%>)',
bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
(r'(?<=^)#[^\n]*(\n|\Z)', Comment),
(r'(?<=^)(%)([^\n]*)(\n|\Z)',
@@ -538,20 +538,20 @@ class MasonLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Text),
- (r'(<%doc>)(.*?)(</%doc>)(?s)',
+ (r'(?s)(<%doc>)(.*?)(</%doc>)',
bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
- (r'(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
+ (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
bygroups(Name.Tag, Text, Name.Function, Name.Tag,
using(this), Name.Tag)),
- (r'(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
+ (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
bygroups(Name.Tag, Name.Function, Name.Tag,
using(PerlLexer), Name.Tag)),
- (r'(<&[^|])(.*?)(,.*?)?(&>)(?s)',
+ (r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
- (r'(<&\|)(.*?)(,.*?)?(&>)(?s)',
+ (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
(r'</&>', Name.Tag),
- (r'(<%!?)(.*?)(%>)(?s)',
+ (r'(?s)(<%!?)(.*?)(%>)',
bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
(r'(?<=^)#[^\n]*(\n|\Z)', Comment),
(r'(?<=^)(%)([^\n]*)(\n|\Z)',
@@ -607,7 +607,7 @@ class MakoLexer(RegexLexer):
(r'(</%)([\w.:]+)(>)',
bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
(r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
- (r'(<%(?:!?))(.*?)(%>)(?s)',
+ (r'(?s)(<%(?:!?))(.*?)(%>)',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'(\$\{)(.*?)(\})',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
@@ -759,7 +759,7 @@ class CheetahLexer(RegexLexer):
# TODO support other Python syntax like $foo['bar']
(r'(\$)([a-zA-Z_][\w.]*\w)',
bygroups(Comment.Preproc, using(CheetahPythonLexer))),
- (r'(\$\{!?)(.*?)(\})(?s)',
+ (r'(?s)(\$\{!?)(.*?)(\})',
bygroups(Comment.Preproc, using(CheetahPythonLexer),
Comment.Preproc)),
(r'''(?sx)
@@ -942,9 +942,9 @@ class HtmlGenshiLexer(DelegatingLexer):
def analyse_text(text):
rv = 0.0
- if re.search('\$\{.*?\}', text) is not None:
+ if re.search(r'\$\{.*?\}', text) is not None:
rv += 0.2
- if re.search('py:(.*?)=["\']', text) is not None:
+ if re.search(r'py:(.*?)=["\']', text) is not None:
rv += 0.2
return rv + HtmlLexer.analyse_text(text) - 0.01
@@ -967,9 +967,9 @@ class GenshiLexer(DelegatingLexer):
def analyse_text(text):
rv = 0.0
- if re.search('\$\{.*?\}', text) is not None:
+ if re.search(r'\$\{.*?\}', text) is not None:
rv += 0.2
- if re.search('py:(.*?)=["\']', text) is not None:
+ if re.search(r'py:(.*?)=["\']', text) is not None:
rv += 0.2
return rv + XmlLexer.analyse_text(text) - 0.01
@@ -1627,7 +1627,7 @@ class SspLexer(DelegatingLexer):
def analyse_text(text):
rv = 0.0
- if re.search('val \w+\s*:', text):
+ if re.search(r'val \w+\s*:', text):
rv += 0.6
if looks_like_xml(text):
rv += 0.2
@@ -1814,8 +1814,9 @@ class HandlebarsLexer(RegexLexer):
(r'\}\}', Comment.Preproc, '#pop'),
# Handlebars
- (r'([#/]*)(each|if|unless|else|with|log|in)', bygroups(Keyword,
+ (r'([#/]*)(each|if|unless|else|with|log|in(line)?)', bygroups(Keyword,
Keyword)),
+ (r'#\*inline', Keyword),
# General {{#block}}
(r'([#/])([\w-]+)', bygroups(Name.Function, Name.Function)),
@@ -1823,11 +1824,37 @@ class HandlebarsLexer(RegexLexer):
# {{opt=something}}
(r'([\w-]+)(=)', bygroups(Name.Attribute, Operator)),
+ # Partials {{> ...}}
+ (r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
+ (r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
+ (r'(>)(\s*)(\()', bygroups(Keyword, Text, Punctuation),
+ 'dynamic-partial'),
+
+ include('generic'),
+ ],
+ 'dynamic-partial': [
+ (r'\s+', Text),
+ (r'\)', Punctuation, '#pop'),
+
+ (r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
+ Name.Variable, Text)),
+ (r'(lookup)(\s+)(\S+)', bygroups(Keyword, Text,
+ using(this, state='variable'))),
+ (r'[\w-]+', Name.Function),
+
+ include('generic'),
+ ],
+ 'variable': [
+ (r'[a-zA-Z][\w-]*', Name.Variable),
+ (r'\.[\w-]+', Name.Variable),
+ (r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
+ ],
+ 'generic': [
+ include('variable'),
+
# borrowed from DjangoLexer
(r':?"(\\\\|\\"|[^"])*"', String.Double),
(r":?'(\\\\|\\'|[^'])*'", String.Single),
- (r'[a-zA-Z][\w-]*', Name.Variable),
- (r'\.[\w-]+', Name.Variable),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
]
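Note: the partial rules added above ({{> name}}, {{#> name}}, @partial-block and dynamic partials via lookup) can be spot-checked by feeding a fragment straight to HandlebarsLexer, which lexes only the mustache expressions and passes the rest through. The template below is illustrative only:

    from pygments.lexers.templates import HandlebarsLexer

    template = "{{> userCard}} {{#> layout}}body{{/layout}} {{> (lookup . 'section')}}"
    for token, value in HandlebarsLexer().get_tokens(template):
        if value.strip():
            print(token, repr(value))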
@@ -1928,7 +1955,7 @@ class LiquidLexer(RegexLexer):
'output': [
include('whitespace'),
- ('\}\}', Punctuation, '#pop'), # end of output
+ (r'\}\}', Punctuation, '#pop'), # end of output
(r'\|', Punctuation, 'filters')
],
@@ -2175,16 +2202,18 @@ class TwigHtmlLexer(DelegatingLexer):
def __init__(self, **options):
super(TwigHtmlLexer, self).__init__(HtmlLexer, TwigLexer, **options)
-
+
class Angular2Lexer(RegexLexer):
"""
- Generic `angular2 <http://victorsavkin.com/post/119943127151/angular-2-template-syntax>` template lexer.
+ Generic
+ `angular2 <http://victorsavkin.com/post/119943127151/angular-2-template-syntax>`_
+ template lexer.
- Highlights only the Angular template tags (stuff between `{{` and `}}` and
+ Highlights only the Angular template tags (stuff between `{{` and `}}` and
special attributes: '(event)=', '[property]=', '[(twoWayBinding)]=').
Everything else is left for a delegating lexer.
- .. versionadded:: 2.1a0
+ .. versionadded:: 2.1
"""
name = "Angular2"
@@ -2196,37 +2225,39 @@ class Angular2Lexer(RegexLexer):
# {{meal.name}}
(r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'ngExpression'),
-
+
# (click)="deleteOrder()"; [value]="test"; [(twoWayTest)]="foo.bar"
(r'([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)',
- bygroups(Punctuation, Name.Attribute, Punctuation, Text, Operator, Text), 'attr'),
+ bygroups(Punctuation, Name.Attribute, Punctuation, Text, Operator, Text),
+ 'attr'),
(r'([([]+)([\w:.-]+)([\])]+)(\s*)',
bygroups(Punctuation, Name.Attribute, Punctuation, Text)),
-
+
# *ngIf="..."; #f="ngForm"
(r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
bygroups(Punctuation, Name.Attribute, Punctuation, Operator), 'attr'),
(r'([*#])([\w:.-]+)(\s*)',
bygroups(Punctuation, Name.Attribute, Punctuation)),
],
-
+
'ngExpression': [
(r'\s+(\|\s+)?', Text),
(r'\}\}', Comment.Preproc, '#pop'),
-
+
# Literals
(r':?(true|false)', String.Boolean),
(r':?"(\\\\|\\"|[^"])*"', String.Double),
(r":?'(\\\\|\\'|[^'])*'", String.Single),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
-
+
# Variabletext
(r'[a-zA-Z][\w-]*(\(.*\))?', Name.Variable),
(r'\.[\w-]+(\(.*\))?', Name.Variable),
-
+
# inline If
- (r'(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)', bygroups(Operator, Text, String, Text, Operator, Text, String, Text)),
+ (r'(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)',
+ bygroups(Operator, Text, String, Text, Operator, Text, String, Text)),
],
'attr': [
('".*?"', String, '#pop'),
@@ -2249,4 +2280,4 @@ class Angular2HtmlLexer(DelegatingLexer):
filenames = ['*.ng2']
def __init__(self, **options):
- super(Angular2HtmlLexer, self).__init__(HtmlLexer, Angular2Lexer, **options) \ No newline at end of file
+ super(Angular2HtmlLexer, self).__init__(HtmlLexer, Angular2Lexer, **options)
diff --git a/pygments/lexers/testing.py b/pygments/lexers/testing.py
index be8b6f71..86e60f25 100644
--- a/pygments/lexers/testing.py
+++ b/pygments/lexers/testing.py
@@ -5,7 +5,7 @@
Lexers for testing languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -29,7 +29,7 @@ class GherkinLexer(RegexLexer):
feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
examples_keywords = u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
- step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
+ step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\\* )'
tokens = {
'comments': [
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index 4bec5ec8..bb1dccf2 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -5,7 +5,7 @@
Lexers for non-source code file types.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,6 +18,7 @@ from pygments.lexers.markup import BBCodeLexer, MoinWikiLexer, RstLexer, \
from pygments.lexers.installers import DebianControlLexer, SourcesListLexer
from pygments.lexers.make import MakefileLexer, BaseMakefileLexer, CMakeLexer
from pygments.lexers.haxe import HxmlLexer
+from pygments.lexers.sgf import SmartGameFormatLexer
from pygments.lexers.diff import DiffLexer, DarcsPatchLexer
from pygments.lexers.data import YamlLexer
from pygments.lexers.textfmts import IrcLogsLexer, GettextLexer, HttpLexer
diff --git a/pygments/lexers/textedit.py b/pygments/lexers/textedit.py
index 89417216..e8856dbd 100644
--- a/pygments/lexers/textedit.py
+++ b/pygments/lexers/textedit.py
@@ -5,7 +5,7 @@
Lexers for languages related to text processing.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/textfmts.py b/pygments/lexers/textfmts.py
index cab9add5..b70c2ad6 100644
--- a/pygments/lexers/textfmts.py
+++ b/pygments/lexers/textfmts.py
@@ -5,7 +5,7 @@
Lexers for various text formats.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -266,7 +266,7 @@ class TodotxtLexer(RegexLexer):
# 5. Leading project
(project_regex, Project, 'incomplete'),
# 6. Non-whitespace catch-all
- ('\S+', IncompleteTaskText, 'incomplete'),
+ (r'\S+', IncompleteTaskText, 'incomplete'),
],
# Parse a complete task
@@ -277,9 +277,9 @@ class TodotxtLexer(RegexLexer):
(context_regex, Context),
(project_regex, Project),
# Tokenize non-whitespace text
- ('\S+', CompleteTaskText),
+ (r'\S+', CompleteTaskText),
# Tokenize whitespace not containing a newline
- ('\s+', CompleteTaskText),
+ (r'\s+', CompleteTaskText),
],
# Parse an incomplete task
@@ -290,8 +290,8 @@ class TodotxtLexer(RegexLexer):
(context_regex, Context),
(project_regex, Project),
# Tokenize non-whitespace text
- ('\S+', IncompleteTaskText),
+ (r'\S+', IncompleteTaskText),
# Tokenize whitespace not containing a newline
- ('\s+', IncompleteTaskText),
+ (r'\s+', IncompleteTaskText),
],
}
diff --git a/pygments/lexers/theorem.py b/pygments/lexers/theorem.py
index 6f16d030..e84a398b 100644
--- a/pygments/lexers/theorem.py
+++ b/pygments/lexers/theorem.py
@@ -5,7 +5,7 @@
Lexers for theorem-proving languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/trafficscript.py b/pygments/lexers/trafficscript.py
index 03ab6a06..42542280 100644
--- a/pygments/lexers/trafficscript.py
+++ b/pygments/lexers/trafficscript.py
@@ -5,7 +5,7 @@
Lexer for RiverBed's TrafficScript (RTS) language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/typoscript.py b/pygments/lexers/typoscript.py
index 407847ed..f75a6f02 100644
--- a/pygments/lexers/typoscript.py
+++ b/pygments/lexers/typoscript.py
@@ -14,17 +14,15 @@
`TypoScriptHtmlDataLexer`
Lexer that highlights markers, constants and registers within html tags.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using
-from pygments.token import Keyword, Text, Comment, Name, String, Number, \
+from pygments.token import Text, Comment, Name, String, Number, \
Operator, Punctuation
-from pygments.lexer import DelegatingLexer
-from pygments.lexers.web import HtmlLexer, CssLexer
__all__ = ['TypoScriptLexer', 'TypoScriptCssDataLexer', 'TypoScriptHtmlDataLexer']
@@ -110,11 +108,14 @@ class TypoScriptLexer(RegexLexer):
name = 'TypoScript'
aliases = ['typoscript']
- filenames = ['*.ts', '*.txt']
+ filenames = ['*.typoscript']
mimetypes = ['text/x-typoscript']
flags = re.DOTALL | re.MULTILINE
+ # Slightly higher than TypeScript (which is 0).
+    priority = 0.1
+
tokens = {
'root': [
include('comment'),
@@ -131,7 +132,7 @@ class TypoScriptLexer(RegexLexer):
],
'keywords': [
# Conditions
- (r'(\[)(?i)(browser|compatVersion|dayofmonth|dayofweek|dayofyear|'
+ (r'(?i)(\[)(browser|compatVersion|dayofmonth|dayofweek|dayofyear|'
r'device|ELSE|END|GLOBAL|globalString|globalVar|hostname|hour|IP|'
r'language|loginUser|loginuser|minute|month|page|PIDinRootline|'
r'PIDupinRootline|system|treeLevel|useragent|userFunc|usergroup|'
@@ -168,14 +169,14 @@ class TypoScriptLexer(RegexLexer):
'whitespace': [
(r'\s+', Text),
],
- 'html':[
+ 'html': [
(r'<\S[^\n>]*>', using(TypoScriptHtmlDataLexer)),
(r'&[^;\n]*;', String),
- (r'(_CSS_DEFAULT_STYLE)(\s*)(\()(?s)(.*(?=\n\)))',
- bygroups(Name.Class, Text, String.Symbol, using(TypoScriptCssDataLexer))),
+ (r'(?s)(_CSS_DEFAULT_STYLE)(\s*)(\()(.*(?=\n\)))',
+ bygroups(Name.Class, Text, String.Symbol, using(TypoScriptCssDataLexer))),
],
'literal': [
- (r'0x[0-9A-Fa-f]+t?',Number.Hex),
+ (r'0x[0-9A-Fa-f]+t?', Number.Hex),
# (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?\s*(?:[^=])', Number.Float),
(r'[0-9]+', Number.Integer),
(r'(###\w+###)', Name.Constant),
@@ -219,7 +220,3 @@ class TypoScriptLexer(RegexLexer):
(r'[\w"\-!/&;]+', Text),
],
}
-
- def analyse_text(text):
- if '<INCLUDE_TYPOSCRIPT:' in text:
- return 1.0
diff --git a/pygments/lexers/unicon.py b/pygments/lexers/unicon.py
new file mode 100644
index 00000000..6301a88b
--- /dev/null
+++ b/pygments/lexers/unicon.py
@@ -0,0 +1,390 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.unicon
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Icon and Unicon languages, including ucode VM.
+
+ :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, words, \
+ using, this, default
+from pygments.util import get_bool_opt, get_list_opt
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+from pygments.scanner import Scanner
+
+__all__ = ['IconLexer', 'UcodeLexer', 'UniconLexer']
+
+class UniconLexer(RegexLexer):
+ """
+ For Unicon source code.
+
+ .. versionadded:: 2.4
+ """
+
+ name = 'Unicon'
+ aliases = ['unicon']
+ filenames = ['*.icn']
+ mimetypes = ['text/unicon']
+
+ flags = re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'#.*?\n', Comment.Single),
+ (r'[^\S\n]+', Text),
+ (r'class|method|procedure', Keyword.Declaration, 'subprogram'),
+ (r'(record)(\s+)(\w+)',
+ bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
+ (r'(#line|\$C|\$Cend|\$define|\$else|\$endif|\$error|\$ifdef|'
+ r'\$ifndef|\$include|\$line|\$undef)\b', Keyword.PreProc),
+ (r'(&null|&fail)\b', Keyword.Constant),
+ (r'&allocated|&ascii|&clock|&collections|&column|&col|&control|'
+ r'&cset|&current|&dateline|&date|&digits|&dump|'
+ r'&errno|&errornumber|&errortext|&errorvalue|&error|&errout|'
+ r'&eventcode|&eventvalue|&eventsource|&e|'
+ r'&features|&file|&host|&input|&interval|&lcase|&letters|'
+ r'&level|&line|&ldrag|&lpress|&lrelease|'
+ r'&main|&mdrag|&meta|&mpress|&mrelease|&now|&output|'
+ r'&phi|&pick|&pi|&pos|&progname|'
+ r'&random|&rdrag|&regions|&resize|&row|&rpress|&rrelease|'
+ r'&shift|&source|&storage|&subject|'
+ r'&time|&trace|&ucase|&version|'
+ r'&window|&x|&y', Keyword.Reserved),
+ (r'(by|of|not|to)\b', Keyword.Reserved),
+ (r'(global|local|static|abstract)\b', Keyword.Reserved),
+ (r'package|link|import', Keyword.Declaration),
+ (words((
+ 'break', 'case', 'create', 'critical', 'default', 'end', 'all',
+ 'do', 'else', 'every', 'fail', 'if', 'import', 'initial',
+ 'initially', 'invocable', 'next',
+ 'repeat', 'return', 'suspend',
+ 'then', 'thread', 'until', 'while'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ (words((
+ 'Abort', 'abs', 'acos', 'Active', 'Alert', 'any', 'Any', 'Arb',
+ 'Arbno', 'args', 'array', 'asin', 'atan', 'atanh', 'Attrib',
+ 'Bal', 'bal', 'Bg', 'Break', 'Breakx',
+ 'callout', 'center', 'char', 'chdir', 'chmod', 'chown', 'chroot',
+ 'classname', 'Clip', 'Clone', 'close', 'cofail', 'collect',
+ 'Color', 'ColorValue', 'condvar', 'constructor', 'copy',
+ 'CopyArea', 'cos', 'Couple', 'crypt', 'cset', 'ctime',
+ 'dbcolumns', 'dbdriver', 'dbkeys', 'dblimits', 'dbproduct',
+ 'dbtables', 'delay', 'delete', 'detab', 'display', 'DrawArc',
+ 'DrawCircle', 'DrawCube', 'DrawCurve', 'DrawCylinder',
+ 'DrawDisk', 'DrawImage', 'DrawLine', 'DrawPoint', 'DrawPolygon',
+ 'DrawRectangle', 'DrawSegment', 'DrawSphere', 'DrawString',
+ 'DrawTorus', 'dtor',
+ 'entab', 'EraseArea', 'errorclear', 'Event', 'eventmask',
+ 'EvGet', 'EvSend', 'exec', 'exit', 'exp', 'Eye',
+ 'Fail', 'fcntl', 'fdup', 'Fence', 'fetch', 'Fg', 'fieldnames',
+ 'filepair', 'FillArc', 'FillCircle', 'FillPolygon',
+ 'FillRectangle', 'find', 'flock', 'flush', 'Font', 'fork',
+ 'FreeColor', 'FreeSpace', 'function',
+ 'get', 'getch', 'getche', 'getegid', 'getenv', 'geteuid',
+ 'getgid', 'getgr', 'gethost', 'getpgrp', 'getpid', 'getppid',
+ 'getpw', 'getrusage', 'getserv', 'GetSpace', 'gettimeofday',
+ 'getuid', 'globalnames', 'GotoRC', 'GotoXY', 'gtime', 'hardlink',
+ 'iand', 'icom', 'IdentityMatrix', 'image', 'InPort', 'insert',
+ 'Int86', 'integer', 'ioctl', 'ior', 'ishift', 'istate', 'ixor',
+ 'kbhit', 'key', 'keyword', 'kill',
+ 'left', 'Len', 'list', 'load', 'loadfunc', 'localnames',
+ 'lock', 'log', 'Lower', 'lstat',
+ 'many', 'map', 'match', 'MatrixMode', 'max', 'member',
+ 'membernames', 'methodnames', 'methods', 'min', 'mkdir', 'move',
+ 'MultMatrix', 'mutex',
+ 'name', 'NewColor', 'Normals', 'NotAny', 'numeric',
+ 'open', 'opencl', 'oprec', 'ord', 'OutPort',
+ 'PaletteChars', 'PaletteColor', 'PaletteKey', 'paramnames',
+ 'parent', 'Pattern', 'Peek', 'Pending', 'pipe', 'Pixel',
+ 'PlayAudio', 'Poke', 'pop', 'PopMatrix', 'Pos', 'pos',
+ 'proc', 'pull', 'push', 'PushMatrix', 'PushRotate', 'PushScale',
+ 'PushTranslate', 'put',
+ 'QueryPointer',
+ 'Raise', 'read', 'ReadImage', 'readlink', 'reads', 'ready',
+ 'real', 'receive', 'Refresh', 'Rem', 'remove', 'rename',
+ 'repl', 'reverse', 'right', 'rmdir', 'Rotate', 'Rpos',
+ 'Rtab', 'rtod', 'runerr',
+ 'save', 'Scale', 'seek', 'select', 'send', 'seq',
+ 'serial', 'set', 'setenv', 'setgid', 'setgrent',
+ 'sethostent', 'setpgrp', 'setpwent', 'setservent',
+ 'setuid', 'signal', 'sin', 'sort', 'sortf', 'Span',
+ 'spawn', 'sql', 'sqrt', 'stat', 'staticnames', 'stop',
+ 'StopAudio', 'string', 'structure', 'Succeed', 'Swi',
+ 'symlink', 'sys_errstr', 'system', 'syswrite',
+ 'Tab', 'tab', 'table', 'tan',
+ 'Texcoord', 'Texture', 'TextWidth', 'Translate',
+ 'trap', 'trim', 'truncate', 'trylock', 'type',
+ 'umask', 'Uncouple', 'unlock', 'upto', 'utime',
+ 'variable', 'VAttrib',
+ 'wait', 'WAttrib', 'WDefault', 'WFlush', 'where',
+ 'WinAssociate', 'WinButton', 'WinColorDialog', 'WindowContents',
+ 'WinEditRegion', 'WinFontDialog', 'WinMenuBar', 'WinOpenDialog',
+ 'WinPlayMedia', 'WinSaveDialog', 'WinScrollBar', 'WinSelectDialog',
+ 'write', 'WriteImage', 'writes', 'WSection',
+ 'WSync'), prefix=r'\b', suffix=r'\b'),
+ Name.Function),
+ include('numbers'),
+ (r'<@|<<@|>@|>>@|\.>|\->', Operator),
+ (r'\*\*|\+\+|\-\-|\.|\=|\~\=|<\=|>\=|\=\=|\~\=\=|<<|<<\=|>>|>>\=', Operator),
+ (r':\=|:\=:|\->|<\->|\+:\=|\|', Operator),
+ (r'\=\=\=|\~\=\=\=', Operator),
+ (r'"(?:[^\\"]|\\.)*"', String),
+ (r"'(?:[^\\']|\\.)*'", String.Character),
+ (r'[*<>+=/&!?@~\\-]', Operator),
+ (r'\^', Operator),
+ (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
+ (r"([\[\]])", Punctuation),
+ (r"(<>|=>|[()|:;,.'`]|[{}]|[%]|[&?])", Punctuation),
+ (r'\n+', Text),
+ ],
+ 'numbers': [
+ (r'\b([+-]?([2-9]|[12][0-9]|3[0-6])[rR][0-9a-zA-Z]+)\b', Number.Hex),
+ (r'[+-]?[0-9]*\.([0-9]*)([Ee][+-]?[0-9]*)?', Number.Float),
+ (r'\b([+-]?[0-9]+[KMGTPkmgtp]?)\b', Number.Integer),
+ ],
+ 'subprogram': [
+ (r'\(', Punctuation, ('#pop', 'formal_part')),
+ (r';', Punctuation, '#pop'),
+ (r'"[^"]+"|\w+', Name.Function),
+ include('root'),
+ ],
+ 'type_def': [
+ (r'\(', Punctuation, 'formal_part'),
+ ],
+ 'formal_part': [
+ (r'\)', Punctuation, '#pop'),
+ (r'\w+', Name.Variable),
+ (r',', Punctuation),
+ (r'(:string|:integer|:real)\b', Keyword.Reserved),
+ include('root'),
+ ],
+ }
+
+class IconLexer(RegexLexer):
+ """
+ Lexer for Icon
+
+ .. versionadded:: 1.6
+ """
+ name = 'Icon'
+ aliases = ['icon']
+ filenames = ['*.icon', '*.ICON']
+ mimetypes = []
+ flags = re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'#.*?\n', Comment.Single),
+ (r'[^\S\n]+', Text),
+ (r'class|method|procedure', Keyword.Declaration, 'subprogram'),
+ (r'(record)(\s+)(\w+)',
+ bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
+ (r'(#line|\$C|\$Cend|\$define|\$else|\$endif|\$error|\$ifdef|'
+ r'\$ifndef|\$include|\$line|\$undef)\b', Keyword.PreProc),
+ (r'(&null|&fail)\b', Keyword.Constant),
+ (r'&allocated|&ascii|&clock|&collections|&column|&col|&control|'
+ r'&cset|&current|&dateline|&date|&digits|&dump|'
+ r'&errno|&errornumber|&errortext|&errorvalue|&error|&errout|'
+ r'&eventcode|&eventvalue|&eventsource|&e|'
+ r'&features|&file|&host|&input|&interval|&lcase|&letters|'
+ r'&level|&line|&ldrag|&lpress|&lrelease|'
+ r'&main|&mdrag|&meta|&mpress|&mrelease|&now|&output|'
+ r'&phi|&pick|&pi|&pos|&progname|'
+ r'&random|&rdrag|&regions|&resize|&row|&rpress|&rrelease|'
+ r'&shift|&source|&storage|&subject|'
+ r'&time|&trace|&ucase|&version|'
+ r'&window|&x|&y', Keyword.Reserved),
+ (r'(by|of|not|to)\b', Keyword.Reserved),
+ (r'(global|local|static)\b', Keyword.Reserved),
+ (r'link', Keyword.Declaration),
+ (words((
+ 'break', 'case', 'create', 'default', 'end', 'all',
+ 'do', 'else', 'every', 'fail', 'if', 'initial',
+ 'invocable', 'next',
+ 'repeat', 'return', 'suspend',
+ 'then', 'until', 'while'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ (words((
+ 'abs', 'acos', 'Active', 'Alert', 'any',
+ 'args', 'array', 'asin', 'atan', 'atanh', 'Attrib',
+ 'bal', 'Bg',
+ 'callout', 'center', 'char', 'chdir', 'chmod', 'chown', 'chroot',
+ 'Clip', 'Clone', 'close', 'cofail', 'collect',
+ 'Color', 'ColorValue', 'condvar', 'copy',
+ 'CopyArea', 'cos', 'Couple', 'crypt', 'cset', 'ctime',
+ 'delay', 'delete', 'detab', 'display', 'DrawArc',
+ 'DrawCircle', 'DrawCube', 'DrawCurve', 'DrawCylinder',
+ 'DrawDisk', 'DrawImage', 'DrawLine', 'DrawPoint', 'DrawPolygon',
+ 'DrawRectangle', 'DrawSegment', 'DrawSphere', 'DrawString',
+ 'DrawTorus', 'dtor',
+ 'entab', 'EraseArea', 'errorclear', 'Event', 'eventmask',
+ 'EvGet', 'EvSend', 'exec', 'exit', 'exp', 'Eye',
+ 'fcntl', 'fdup', 'fetch', 'Fg', 'fieldnames',
+ 'FillArc', 'FillCircle', 'FillPolygon',
+ 'FillRectangle', 'find', 'flock', 'flush', 'Font',
+ 'FreeColor', 'FreeSpace', 'function',
+ 'get', 'getch', 'getche', 'getenv',
+ 'GetSpace', 'gettimeofday',
+ 'getuid', 'globalnames', 'GotoRC', 'GotoXY', 'gtime', 'hardlink',
+ 'iand', 'icom', 'IdentityMatrix', 'image', 'InPort', 'insert',
+ 'Int86', 'integer', 'ioctl', 'ior', 'ishift', 'istate', 'ixor',
+ 'kbhit', 'key', 'keyword', 'kill',
+ 'left', 'Len', 'list', 'load', 'loadfunc', 'localnames',
+ 'lock', 'log', 'Lower', 'lstat',
+ 'many', 'map', 'match', 'MatrixMode', 'max', 'member',
+ 'membernames', 'methodnames', 'methods', 'min', 'mkdir', 'move',
+ 'MultMatrix', 'mutex',
+ 'name', 'NewColor', 'Normals', 'numeric',
+ 'open', 'opencl', 'oprec', 'ord', 'OutPort',
+ 'PaletteChars', 'PaletteColor', 'PaletteKey', 'paramnames',
+ 'parent', 'Pattern', 'Peek', 'Pending', 'pipe', 'Pixel',
+ 'Poke', 'pop', 'PopMatrix', 'Pos', 'pos',
+ 'proc', 'pull', 'push', 'PushMatrix', 'PushRotate', 'PushScale',
+ 'PushTranslate', 'put',
+ 'QueryPointer',
+ 'Raise', 'read', 'ReadImage', 'readlink', 'reads', 'ready',
+ 'real', 'receive', 'Refresh', 'Rem', 'remove', 'rename',
+ 'repl', 'reverse', 'right', 'rmdir', 'Rotate', 'Rpos',
+ 'rtod', 'runerr',
+ 'save', 'Scale', 'seek', 'select', 'send', 'seq',
+ 'serial', 'set', 'setenv',
+ 'setuid', 'signal', 'sin', 'sort', 'sortf',
+ 'spawn', 'sql', 'sqrt', 'stat', 'staticnames', 'stop',
+ 'string', 'structure', 'Swi',
+ 'symlink', 'sys_errstr', 'system', 'syswrite',
+ 'tab', 'table', 'tan',
+ 'Texcoord', 'Texture', 'TextWidth', 'Translate',
+ 'trap', 'trim', 'truncate', 'trylock', 'type',
+ 'umask', 'Uncouple', 'unlock', 'upto', 'utime',
+ 'variable',
+ 'wait', 'WAttrib', 'WDefault', 'WFlush', 'where',
+ 'WinAssociate', 'WinButton', 'WinColorDialog', 'WindowContents',
+ 'WinEditRegion', 'WinFontDialog', 'WinMenuBar', 'WinOpenDialog',
+ 'WinPlayMedia', 'WinSaveDialog', 'WinScrollBar', 'WinSelectDialog',
+ 'write', 'WriteImage', 'writes', 'WSection',
+ 'WSync'), prefix=r'\b', suffix=r'\b'),
+ Name.Function),
+ include('numbers'),
+ (r'\*\*|\+\+|\-\-|\.|\=|\~\=|<\=|>\=|\=\=|\~\=\=|<<|<<\=|>>|>>\=', Operator),
+ (r':\=|:\=:|<\-|<\->|\+:\=|\||\|\|', Operator),
+ (r'\=\=\=|\~\=\=\=', Operator),
+ (r'"(?:[^\\"]|\\.)*"', String),
+ (r"'(?:[^\\']|\\.)*'", String.Character),
+ (r'[*<>+=/&!?@~\\-]', Operator),
+ (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
+ (r"([\[\]])", Punctuation),
+ (r"(<>|=>|[()|:;,.'`]|[{}]|[%^]|[&?])", Punctuation),
+ (r'\n+', Text),
+ ],
+ 'numbers': [
+ (r'\b([+-]?([2-9]|[12][0-9]|3[0-6])[rR][0-9a-zA-Z]+)\b', Number.Hex),
+ (r'[+-]?[0-9]*\.([0-9]*)([Ee][+-]?[0-9]*)?', Number.Float),
+ (r'\b([+-]?[0-9]+[KMGTPkmgtp]?)\b', Number.Integer),
+ ],
+ 'subprogram': [
+ (r'\(', Punctuation, ('#pop', 'formal_part')),
+ (r';', Punctuation, '#pop'),
+ (r'"[^"]+"|\w+', Name.Function),
+ include('root'),
+ ],
+ 'type_def': [
+ (r'\(', Punctuation, 'formal_part'),
+ ],
+ 'formal_part': [
+ (r'\)', Punctuation, '#pop'),
+ (r'\w+', Name.Variable),
+ (r',', Punctuation),
+ (r'(:string|:integer|:real)\b', Keyword.Reserved),
+ include('root'),
+ ],
+ }
+
+class UcodeLexer(RegexLexer):
+ """
+ Lexer for Icon ucode files
+
+ .. versionadded:: 2.4
+ """
+ name = 'ucode'
+ aliases = ['ucode']
+ filenames = ['*.u', '*.u1', '*.u2']
+ mimetypes = []
+ flags = re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'(#.*\n)', Comment),
+ (words((
+ 'con', 'declend', 'end',
+ 'global',
+ 'impl', 'invocable',
+ 'lab', 'link', 'local',
+ 'record',
+ 'uid', 'unions',
+ 'version'),
+ prefix=r'\b', suffix=r'\b'),
+ Name.Function),
+ (words((
+ 'colm', 'filen', 'line', 'synt'),
+ prefix=r'\b', suffix=r'\b'),
+ Comment),
+ (words((
+ 'asgn',
+ 'bang', 'bscan',
+ 'cat', 'ccase', 'chfail',
+ 'coact', 'cofail', 'compl',
+ 'coret', 'create', 'cset',
+ 'diff', 'div', 'dup',
+ 'efail', 'einit', 'end', 'eqv', 'eret',
+ 'error', 'escan', 'esusp',
+ 'field',
+ 'goto',
+ 'init', 'int', 'inter',
+ 'invoke',
+ 'keywd',
+ 'lconcat', 'lexeq', 'lexge',
+ 'lexgt', 'lexle', 'lexlt', 'lexne',
+ 'limit', 'llist', 'lsusp',
+ 'mark', 'mark0', 'minus', 'mod', 'mult',
+ 'neg', 'neqv', 'nonnull', 'noop', 'null',
+ 'number', 'numeq', 'numge', 'numgt',
+ 'numle', 'numlt', 'numne',
+ 'pfail', 'plus', 'pnull', 'pop', 'power',
+ 'pret', 'proc', 'psusp', 'push1', 'pushn1',
+ 'random', 'rasgn', 'rcv', 'rcvbk', 'real',
+ 'refresh', 'rswap',
+ 'sdup', 'sect', 'size', 'snd', 'sndbk',
+ 'str', 'subsc', 'swap',
+ 'tabmat', 'tally', 'toby', 'trace',
+ 'unmark',
+ 'value', 'var'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Declaration),
+ (words((
+ 'any',
+ 'case',
+ 'endcase', 'endevery', 'endif',
+ 'endifelse', 'endrepeat', 'endsuspend',
+ 'enduntil', 'endwhile', 'every',
+ 'if', 'ifelse',
+ 'repeat',
+ 'suspend',
+ 'until',
+ 'while'),
+ prefix=r'\b', suffix=r'\b'),
+ Name.Constant),
+ (r'\d+(\s*|\.$|$)', Number.Integer),
+ (r'[+-]?\d*\.\d+(E[-+]?\d+)?', Number.Float),
+ (r'[+-]?\d+\.\d*(E[-+]?\d+)?', Number.Float),
+ (r"(<>|=>|[()|:;,.'`]|[{}]|[%^]|[&?])", Punctuation),
+ (r'\s+\b', Text),
+ (r'[\w-]+', Text),
+ ],
+    }
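Note: once this module is importable, the quickest smoke test is to highlight a few lines of Unicon by hand; the procedure below is a made-up example, and TerminalFormatter is just one convenient output target:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.unicon import UniconLexer

    source = '''\
    procedure main()
       every i := 1 to 5 do
          write("square of ", i, " is ", i ^ 2)
    end
    '''
    print(highlight(source, UniconLexer(), TerminalFormatter()))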
diff --git a/pygments/lexers/urbi.py b/pygments/lexers/urbi.py
index 558a21fb..7aaba90c 100644
--- a/pygments/lexers/urbi.py
+++ b/pygments/lexers/urbi.py
@@ -5,7 +5,7 @@
Lexers for UrbiScript language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/varnish.py b/pygments/lexers/varnish.py
index de8e598b..f3b37d60 100644
--- a/pygments/lexers/varnish.py
+++ b/pygments/lexers/varnish.py
@@ -5,7 +5,7 @@
Lexers for Varnish configuration
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -36,7 +36,7 @@ class VCLLexer(RegexLexer):
# Skip over comments and blank lines
# This is accurate enough that returning 0.9 is reasonable.
# Almost no VCL files start without some comments.
- elif '\nvcl 4\.0;' in text[:1000]:
+ elif '\nvcl 4.0;' in text[:1000]:
return 0.9
tokens = {
@@ -120,14 +120,14 @@ class VCLLexer(RegexLexer):
r'([a-zA-Z_]\w*)'
r'(\s*\(.*\))',
bygroups(Name.Function, Punctuation, Name.Function, using(this))),
- ('[a-zA-Z_]\w*', Name),
- ],
+ (r'[a-zA-Z_]\w*', Name),
+ ],
'comment': [
(r'[^*/]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
- ],
+ ],
'comments': [
(r'#.*$', Comment),
(r'/\*', Comment.Multiline, 'comment'),
diff --git a/pygments/lexers/verification.py b/pygments/lexers/verification.py
index 3e77e04a..5322e17f 100644
--- a/pygments/lexers/verification.py
+++ b/pygments/lexers/verification.py
@@ -5,7 +5,7 @@
Lexer for Intermediate Verification Languages (IVLs).
- :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -69,7 +69,7 @@ class SilverLexer(RegexLexer):
"""
name = 'Silver'
aliases = ['silver']
- filenames = ['*.sil']
+ filenames = ['*.sil', '*.vpr']
tokens = {
'root': [
@@ -89,7 +89,7 @@ class SilverLexer(RegexLexer):
'constraining', 'Seq', 'Set', 'Multiset', 'union', 'intersection',
'setminus', 'subset', 'unfolding', 'in', 'old', 'forall', 'exists',
'acc', 'wildcard', 'write', 'none', 'epsilon', 'perm', 'unique',
- 'apply', 'package', 'folding', 'label'),
+ 'apply', 'package', 'folding', 'label', 'forperm'),
suffix=r'\b'), Keyword),
(words(('Int', 'Perm', 'Bool', 'Ref'), suffix=r'\b'), Keyword.Type),
include('numbers'),
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
index 59fbf2fc..6e9c4f92 100644
--- a/pygments/lexers/web.py
+++ b/pygments/lexers/web.py
@@ -5,7 +5,7 @@
Just export previously exported lexers.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py
index 551846c2..67aefe23 100644
--- a/pygments/lexers/webmisc.py
+++ b/pygments/lexers/webmisc.py
@@ -5,7 +5,7 @@
Lexers for misc. web stuff.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -358,8 +358,10 @@ class XQueryLexer(ExtendedRegexLexer):
bygroups(Keyword, Text, Keyword), 'itemtype'),
(r'(treat)(\s+)(as)\b',
bygroups(Keyword, Text, Keyword), 'itemtype'),
- (r'(case)(\s+)(' + stringdouble + ')', bygroups(Keyword, Text, String.Double), 'itemtype'),
- (r'(case)(\s+)(' + stringsingle + ')', bygroups(Keyword, Text, String.Single), 'itemtype'),
+ (r'(case)(\s+)(' + stringdouble + ')',
+ bygroups(Keyword, Text, String.Double), 'itemtype'),
+ (r'(case)(\s+)(' + stringsingle + ')',
+ bygroups(Keyword, Text, String.Single), 'itemtype'),
(r'(case|as)\b', Keyword, 'itemtype'),
(r'(\))(\s*)(as)',
bygroups(Punctuation, Text, Keyword), 'itemtype'),
@@ -367,7 +369,8 @@ class XQueryLexer(ExtendedRegexLexer):
(r'(for|let|previous|next)(\s+)(\$)',
bygroups(Keyword, Text, Name.Variable), 'varname'),
(r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
- bygroups(Keyword, Text, Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
+ bygroups(Keyword, Text, Keyword, Text, Keyword, Text, Name.Variable),
+ 'varname'),
# (r'\)|\?|\]', Punctuation, '#push'),
(r'\)|\?|\]', Punctuation),
(r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
@@ -417,21 +420,25 @@ class XQueryLexer(ExtendedRegexLexer):
(r'preserve|no-preserve', Keyword),
(r',', Punctuation),
],
- 'annotationname':[
+ 'annotationname': [
(r'\(:', Comment, 'comment'),
(qname, Name.Decorator),
(r'(\()(' + stringdouble + ')', bygroups(Punctuation, String.Double)),
(r'(\()(' + stringsingle + ')', bygroups(Punctuation, String.Single)),
- (r'(\,)(\s+)(' + stringdouble + ')', bygroups(Punctuation, Text, String.Double)),
- (r'(\,)(\s+)(' + stringsingle + ')', bygroups(Punctuation, Text, String.Single)),
+ (r'(\,)(\s+)(' + stringdouble + ')',
+ bygroups(Punctuation, Text, String.Double)),
+ (r'(\,)(\s+)(' + stringsingle + ')',
+ bygroups(Punctuation, Text, String.Single)),
(r'\)', Punctuation),
(r'(\s+)(\%)', bygroups(Text, Name.Decorator), 'annotationname'),
- (r'(\s+)(variable)(\s+)(\$)', bygroups(Text, Keyword.Declaration, Text, Name.Variable), 'varname'),
- (r'(\s+)(function)(\s+)', bygroups(Text, Keyword.Declaration, Text), 'root')
+ (r'(\s+)(variable)(\s+)(\$)',
+ bygroups(Text, Keyword.Declaration, Text, Name.Variable), 'varname'),
+ (r'(\s+)(function)(\s+)',
+ bygroups(Text, Keyword.Declaration, Text), 'root')
],
'varname': [
(r'\(:', Comment, 'comment'),
- (r'(' + qname + ')(\()?', bygroups(Name, Punctuation), 'operator'),
+ (r'(' + qname + r')(\()?', bygroups(Name, Punctuation), 'operator'),
],
'singletype': [
include('whitespace'),
@@ -473,8 +480,10 @@ class XQueryLexer(ExtendedRegexLexer):
bygroups(Keyword, Text, Keyword), 'singletype'),
(r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
(r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
- (r'(case)(\s+)(' + stringdouble + ')', bygroups(Keyword, Text, String.Double), 'itemtype'),
- (r'(case)(\s+)(' + stringsingle + ')', bygroups(Keyword, Text, String.Single), 'itemtype'),
+ (r'(case)(\s+)(' + stringdouble + ')',
+ bygroups(Keyword, Text, String.Double), 'itemtype'),
+ (r'(case)(\s+)(' + stringsingle + ')',
+ bygroups(Keyword, Text, String.Single), 'itemtype'),
(r'case|as', Keyword, 'itemtype'),
(r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
(ncname + r':\*', Keyword.Type, 'operator'),
@@ -634,9 +643,9 @@ class XQueryLexer(ExtendedRegexLexer):
bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
(r'(declare)(\s+)(context)(\s+)(item)',
bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
- (ncname + ':\*', Name, 'operator'),
- ('\*:'+ncname, Name.Tag, 'operator'),
- ('\*', Name.Tag, 'operator'),
+ (ncname + r':\*', Name, 'operator'),
+ (r'\*:'+ncname, Name.Tag, 'operator'),
+ (r'\*', Name.Tag, 'operator'),
(stringdouble, String.Double, 'operator'),
(stringsingle, String.Single, 'operator'),
@@ -652,7 +661,8 @@ class XQueryLexer(ExtendedRegexLexer):
# NAMESPACE KEYWORD
(r'(declare)(\s+)(default)(\s+)(element|function)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'namespacekeyword'),
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration),
+ 'namespacekeyword'),
(r'(import)(\s+)(schema|module)',
bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
(r'(declare)(\s+)(copy-namespaces)',
@@ -852,7 +862,7 @@ class QmlLexer(RegexLexer):
class CirruLexer(RegexLexer):
- """
+ r"""
Syntax rules of Cirru can be found at:
http://cirru.org/
diff --git a/pygments/lexers/whiley.py b/pygments/lexers/whiley.py
index 0795a030..0d0e8ab8 100644
--- a/pygments/lexers/whiley.py
+++ b/pygments/lexers/whiley.py
@@ -5,7 +5,7 @@
Lexers for the Whiley language.
- :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,6 +19,8 @@ __all__ = ['WhileyLexer']
class WhileyLexer(RegexLexer):
"""
Lexer for the Whiley programming language.
+
+ .. versionadded:: 2.2
"""
name = 'Whiley'
filenames = ['*.whiley']
@@ -47,12 +49,10 @@ class WhileyLexer(RegexLexer):
'requires', 'ensures', 'where', 'assert', 'assume',
'all', 'no', 'some', 'in', 'is', 'new',
'throw', 'try', 'catch', 'debug', 'skip', 'fail',
- 'finite', 'total',
- ), suffix=r'\b'), Keyword.Reserved),
+ 'finite', 'total'), suffix=r'\b'), Keyword.Reserved),
(words((
'function', 'method', 'public', 'private', 'protected',
- 'export', 'native',
- ), suffix=r'\b'), Keyword.Declaration),
+ 'export', 'native'), suffix=r'\b'), Keyword.Declaration),
# "constant" & "type" are not keywords unless used in declarations
(r'(constant|type)(\s+)([a-zA-Z_]\w*)(\s+)(is)\b',
bygroups(Keyword.Declaration, Text, Name, Text, Keyword.Reserved)),
@@ -73,8 +73,7 @@ class WhileyLexer(RegexLexer):
'uint', 'nat',
# whiley.lang.Any
- 'toString',
- ), suffix=r'\b'), Name.Builtin),
+ 'toString'), suffix=r'\b'), Name.Builtin),
# byte literal
(r'[01]+b', Number.Bin),
@@ -99,7 +98,7 @@ class WhileyLexer(RegexLexer):
# operators and punctuation
(r'[{}()\[\],.;]', Punctuation),
(u'[+\\-*/%&|<>^!~@=:?'
- # unicode operators
+ # unicode operators
u'\u2200\u2203\u2205\u2282\u2286\u2283\u2287'
u'\u222A\u2229\u2264\u2265\u2208\u2227\u2228'
u']', Operator),
diff --git a/pygments/lexers/x10.py b/pygments/lexers/x10.py
index ea75ab71..1c63326d 100644
--- a/pygments/lexers/x10.py
+++ b/pygments/lexers/x10.py
@@ -5,7 +5,7 @@
Lexers for the X10 programming language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/xorg.py b/pygments/lexers/xorg.py
new file mode 100644
index 00000000..3bba930f
--- /dev/null
+++ b/pygments/lexers/xorg.py
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.xorg
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Xorg configs.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Comment, String, Name, Text
+
+__all__ = ['XorgLexer']
+
+
+class XorgLexer(RegexLexer):
+ """Lexer for xorg.conf file."""
+ name = 'Xorg'
+ aliases = ['xorg.conf']
+ filenames = ['xorg.conf']
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*$', Comment),
+
+ (r'((?:Sub)?Section)(\s+)("\w+")',
+ bygroups(String.Escape, Text, String.Escape)),
+ (r'(End(|Sub)Section)', String.Escape),
+
+ (r'(\w+)(\s+)([^\n#]+)',
+ bygroups(Name.Builtin, Text, Name.Constant)),
+ ],
+ }
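Note: a similar quick check for the new Xorg lexer; tokenizing a small invented xorg.conf fragment shows Section/EndSection picked up by the String.Escape rules and option lines split into a builtin name and a constant value:

    from pygments.lexers.xorg import XorgLexer

    conf = '''\
    # sample fragment
    Section "Device"
        Identifier "Card0"
        Driver     "modesetting"
    EndSection
    '''
    for token, value in XorgLexer().get_tokens(conf):
        if value.strip():
            print(token, repr(value))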