| author    | Anteru <bitbucket@ca.sh13.net>                            | 2019-04-30 15:52:31 +0000 |
|-----------|-----------------------------------------------------------|---------------------------|
| committer | Anteru <bitbucket@ca.sh13.net>                            | 2019-04-30 15:52:31 +0000 |
| commit    | 28a21bdafa1b73d5515de18bd263797af61c2471 (patch)          |                           |
| tree      | 3436c730359d89153f04ef572368abfcde1d3fcb /pygments/lexers |                           |
| parent    | e3872ea8f65ac2c3c40046a7a3acf4f0541f0e56 (diff)           |                           |
| parent    | 601e6e86e45f31e23a7b29e1265d68aeba5e1366 (diff)           |                           |
| download  | pygments-28a21bdafa1b73d5515de18bd263797af61c2471.tar.gz  |                           |
Merged in andrescarrasco/pygments-main/boa (pull request #756)
Add a lexer for the Boa Domain-Specific Language.
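For context, a minimal sketch of driving the newly added lexer through the standard Pygments API. The alias `'boa'` and the one-line sample program are assumptions for illustration, not taken from this commit:

```python
# Sketch: highlight a Boa snippet with the lexer added by this pull request.
# ASSUMPTION: the lexer registers under the alias 'boa'; check its `aliases`
# attribute if this differs. The sample string is a placeholder, not real data.
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name

code = 'p: Project = input;  # placeholder Boa source'
print(highlight(code, get_lexer_by_name('boa'), HtmlFormatter()))
```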
Diffstat (limited to 'pygments/lexers')
84 files changed, 5076 insertions, 2767 deletions
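The diff below opens with a hunk to `load_lexer_from_file` in `pygments/lexers/__init__.py`, which now reads the lexer file through a context manager before exec'ing it. A minimal sketch of how that helper is called (the file name `my_lexer.py` and its `CustomLexer` class are hypothetical; the function signature is the one shown in the hunk):

```python
# Sketch: load a custom lexer class from a plain Python file and use it.
# 'my_lexer.py' / 'CustomLexer' are hypothetical names for illustration.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import load_lexer_from_file

lexer = load_lexer_from_file('my_lexer.py', lexername='CustomLexer')
print(highlight('some source text', lexer, TerminalFormatter()))
```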
diff --git a/pygments/lexers/__init__.py b/pygments/lexers/__init__.py index 328e072c..50f39d4e 100644 --- a/pygments/lexers/__init__.py +++ b/pygments/lexers/__init__.py @@ -133,7 +133,8 @@ def load_lexer_from_file(filename, lexername="CustomLexer", **options): try: # This empty dict will contain the namespace for the exec'd file custom_namespace = {} - exec(open(filename, 'rb').read(), custom_namespace) + with open(filename, 'rb') as f: + exec(f.read(), custom_namespace) # Retrieve the class `lexername` from that namespace if lexername not in custom_namespace: raise ClassNotFound('no valid %s class found in %s' % diff --git a/pygments/lexers/_cocoa_builtins.py b/pygments/lexers/_cocoa_builtins.py index 064167ff..f17ea876 100644 --- a/pygments/lexers/_cocoa_builtins.py +++ b/pygments/lexers/_cocoa_builtins.py @@ -40,24 +40,25 @@ if __name__ == '__main__': # pragma: no cover continue headerFilePath = frameworkHeadersDir + f - content = open(headerFilePath).read() - res = re.findall('(?<=@interface )\w+', content) + with open(headerFilePath) as f: + content = f.read() + res = re.findall(r'(?<=@interface )\w+', content) for r in res: all_interfaces.add(r) - res = re.findall('(?<=@protocol )\w+', content) + res = re.findall(r'(?<=@protocol )\w+', content) for r in res: all_protocols.add(r) - res = re.findall('(?<=typedef enum )\w+', content) + res = re.findall(r'(?<=typedef enum )\w+', content) for r in res: all_primitives.add(r) - res = re.findall('(?<=typedef struct )\w+', content) + res = re.findall(r'(?<=typedef struct )\w+', content) for r in res: all_primitives.add(r) - res = re.findall('(?<=typedef const struct )\w+', content) + res = re.findall(r'(?<=typedef const struct )\w+', content) for r in res: all_primitives.add(r) diff --git a/pygments/lexers/_csound_builtins.py b/pygments/lexers/_csound_builtins.py index e5a9aaf7..56b5a452 100644 --- a/pygments/lexers/_csound_builtins.py +++ b/pygments/lexers/_csound_builtins.py @@ -3,1344 +3,1658 @@ pygments.lexers._csound_builtins ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2018 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -# Opcodes in Csound 6.05 from -# csound --list-opcodes -# except -# cggoto <http://www.csounds.com/manual/html/cggoto.html> -# cigoto <http://www.csounds.com/manual/html/cigoto.html> -# cingoto (undocumented) -# ckgoto <http://www.csounds.com/manual/html/ckgoto.html> -# cngoto <http://www.csounds.com/manual/html/cngoto.html> -# endin <http://www.csounds.com/manual/html/endin.html -# endop <http://www.csounds.com/manual/html/endop.html -# goto <http://www.csounds.com/manual/html/goto.html> -# igoto <http://www.csounds.com/manual/html/igoto.html> -# instr <http://www.csounds.com/manual/html/instr.html> -# kgoto <http://www.csounds.com/manual/html/kgoto.html> -# loop_ge <http://www.csounds.com/manual/html/loop_ge.html> -# loop_gt <http://www.csounds.com/manual/html/loop_gt.html> -# loop_le <http://www.csounds.com/manual/html/loop_le.html> -# loop_lt <http://www.csounds.com/manual/html/loop_lt.html> -# opcode <http://www.csounds.com/manual/html/opcode.html> -# return <http://www.csounds.com/manual/html/return.html> -# rigoto <http://www.csounds.com/manual/html/rigoto.html> -# tigoto <http://www.csounds.com/manual/html/tigoto.html> -# timout <http://www.csounds.com/manual/html/timout.html> -# which are treated as keywords; the scoreline opcodes -# scoreline <http://www.csounds.com/manual/html/scoreline.html> -# scoreline_i <http://www.csounds.com/manual/html/scoreline_i.html> -# which allow Csound Score highlighting; the pyrun opcodes -# <http://www.csounds.com/manual/html/pyrun.html> -# pylrun -# pylruni -# pylrunt -# pyrun -# pyruni -# pyrunt -# which allow Python highlighting; and the Lua opcodes -# lua_exec <http://www.csounds.com/manual/html/lua_exec.html> -# lua_opdef <http://www.csounds.com/manual/html/lua_opdef.html> -# which allow Lua highlighting. 
-OPCODES = set(( - 'ATSadd', - 'ATSaddnz', - 'ATSbufread', - 'ATScross', - 'ATSinfo', - 'ATSinterpread', - 'ATSpartialtap', - 'ATSread', - 'ATSreadnz', - 'ATSsinnoi', - 'FLbox', - 'FLbutBank', - 'FLbutton', - 'FLcloseButton', - 'FLcolor', - 'FLcolor2', - 'FLcount', - 'FLexecButton', - 'FLgetsnap', - 'FLgroup', - 'FLgroupEnd', - 'FLgroup_end', - 'FLhide', - 'FLhvsBox', - 'FLhvsBoxSetValue', - 'FLjoy', - 'FLkeyIn', - 'FLknob', - 'FLlabel', - 'FLloadsnap', - 'FLmouse', - 'FLpack', - 'FLpackEnd', - 'FLpack_end', - 'FLpanel', - 'FLpanelEnd', - 'FLpanel_end', - 'FLprintk', - 'FLprintk2', - 'FLroller', - 'FLrun', - 'FLsavesnap', - 'FLscroll', - 'FLscrollEnd', - 'FLscroll_end', - 'FLsetAlign', - 'FLsetBox', - 'FLsetColor', - 'FLsetColor2', - 'FLsetFont', - 'FLsetPosition', - 'FLsetSize', - 'FLsetSnapGroup', - 'FLsetText', - 'FLsetTextColor', - 'FLsetTextSize', - 'FLsetTextType', - 'FLsetVal', - 'FLsetVal_i', - 'FLsetVali', - 'FLsetsnap', - 'FLshow', - 'FLslidBnk', - 'FLslidBnk2', - 'FLslidBnk2Set', - 'FLslidBnk2Setk', - 'FLslidBnkGetHandle', - 'FLslidBnkSet', - 'FLslidBnkSetk', - 'FLslider', - 'FLtabs', - 'FLtabsEnd', - 'FLtabs_end', - 'FLtext', - 'FLupdate', - 'FLvalue', - 'FLvkeybd', - 'FLvslidBnk', - 'FLvslidBnk2', - 'FLxyin', - 'MixerClear', - 'MixerGetLevel', - 'MixerReceive', - 'MixerSend', - 'MixerSetLevel', - 'MixerSetLevel_i', - 'OSCinit', - 'OSClisten', - 'OSCsend', - 'a', - 'abs', - 'active', - 'adsr', - 'adsyn', - 'adsynt', - 'adsynt2', - 'aftouch', - 'alpass', - 'alwayson', - 'ampdb', - 'ampdbfs', - 'ampmidi', - 'ampmidid', - 'areson', - 'aresonk', - 'array', - 'atone', - 'atonek', - 'atonex', - 'babo', - 'balance', - 'bamboo', - 'barmodel', - 'bbcutm', - 'bbcuts', - 'betarand', - 'bexprnd', - 'bformdec', - 'bformdec1', - 'bformenc', - 'bformenc1', - 'binit', - 'biquad', - 'biquada', - 'birnd', - 'bqrez', - 'buchla', - 'butbp', - 'butbr', - 'buthp', - 'butlp', - 'butterbp', - 'butterbr', - 'butterhp', - 'butterlp', - 'button', - 'buzz', - 'c2r', - 'cabasa', - 'cauchy', - 'cauchyi', - 'ceil', - 'cell', - 'cent', - 'centroid', - 'ceps', - #'cggoto', - 'chanctrl', - 'changed', - 'chani', - 'chano', - 'chebyshevpoly', - 'checkbox', - 'chn_S', - 'chn_a', - 'chn_k', - 'chnclear', - 'chnexport', - 'chnget', - 'chnmix', - 'chnparams', - 'chnset', - 'chuap', - #'cigoto', - #'cingoto', - #'ckgoto', - 'clear', - 'clfilt', - 'clip', - 'clockoff', - 'clockon', - 'cmplxprod', - #'cngoto', - 'comb', - 'combinv', - 'compilecsd', - 'compileorc', - 'compilestr', - 'compress', - 'connect', - 'control', - 'convle', - 'convolve', - 'copy2ftab', - 'copy2ttab', - 'copya2ftab', - 'copyf2array', - 'cos', - 'cosh', - 'cosinv', - 'cosseg', - 'cossegb', - 'cossegr', - 'cps2pch', - 'cpsmidi', - 'cpsmidib', - 'cpsmidinn', - 'cpsoct', - 'cpspch', - 'cpstmid', - 'cpstun', - 'cpstuni', - 'cpsxpch', - 'cpuprc', - 'cross2', - 'crossfm', - 'crossfmi', - 'crossfmpm', - 'crossfmpmi', - 'crosspm', - 'crosspmi', - 'crunch', - 'ctlchn', - 'ctrl14', - 'ctrl21', - 'ctrl7', - 'ctrlinit', - 'cuserrnd', - 'dam', - 'date', - 'dates', - 'db', - 'dbamp', - 'dbfsamp', - 'dcblock', - 'dcblock2', - 'dconv', - 'delay', - 'delay1', - 'delayk', - 'delayr', - 'delayw', - 'deltap', - 'deltap3', - 'deltapi', - 'deltapn', - 'deltapx', - 'deltapxw', - 'denorm', - 'diff', - 'diskgrain', - 'diskin', - 'diskin2', - 'dispfft', - 'display', - 'distort', - 'distort1', - 'divz', - 'doppler', - 'downsamp', - 'dripwater', - 'dumpk', - 'dumpk2', - 'dumpk3', - 'dumpk4', - 'duserrnd', - 'dust', - 'dust2', - #'endin', - #'endop', - 'envlpx', - 'envlpxr', - 
'ephasor', - 'eqfil', - 'evalstr', - 'event', - 'event_i', - 'exciter', - 'exitnow', - 'exp', - 'expcurve', - 'expon', - 'exprand', - 'exprandi', - 'expseg', - 'expsega', - 'expsegb', - 'expsegba', - 'expsegr', - 'fareylen', - 'fareyleni', - 'faustaudio', - 'faustcompile', - 'faustctl', - 'faustgen', - 'fft', - 'fftinv', - 'ficlose', - 'filebit', - 'filelen', - 'filenchnls', - 'filepeak', - 'filesr', - 'filevalid', - 'fillarray', - 'filter2', - 'fin', - 'fini', - 'fink', - 'fiopen', - 'flanger', - 'flashtxt', - 'flooper', - 'flooper2', - 'floor', - 'fluidAllOut', - 'fluidCCi', - 'fluidCCk', - 'fluidControl', - 'fluidEngine', - 'fluidLoad', - 'fluidNote', - 'fluidOut', - 'fluidProgramSelect', - 'fluidSetInterpMethod', - 'fmb3', - 'fmbell', - 'fmmetal', - 'fmpercfl', - 'fmrhode', - 'fmvoice', - 'fmwurlie', - 'fof', - 'fof2', - 'fofilter', - 'fog', - 'fold', - 'follow', - 'follow2', - 'foscil', - 'foscili', - 'fout', - 'fouti', - 'foutir', - 'foutk', - 'fprintks', - 'fprints', - 'frac', - 'fractalnoise', - 'freeverb', - 'ftchnls', - 'ftconv', - 'ftcps', - 'ftfree', - 'ftgen', - 'ftgenonce', - 'ftgentmp', - 'ftlen', - 'ftload', - 'ftloadk', - 'ftlptim', - 'ftmorf', - 'ftresize', - 'ftresizei', - 'ftsave', - 'ftsavek', - 'ftsr', - 'gain', - 'gainslider', - 'gauss', - 'gaussi', - 'gausstrig', - 'gbuzz', - 'genarray', - 'genarray_i', - 'gendy', - 'gendyc', - 'gendyx', - 'getcfg', - 'getcol', - 'getrow', - 'gogobel', - #'goto', - 'grain', - 'grain2', - 'grain3', - 'granule', - 'guiro', - 'harmon', - 'harmon2', - 'harmon3', - 'harmon4', - 'hdf5read', - 'hdf5write', - 'hilbert', - 'hrtfearly', - 'hrtfer', - 'hrtfmove', - 'hrtfmove2', - 'hrtfreverb', - 'hrtfstat', - 'hsboscil', - 'hvs1', - 'hvs2', - 'hvs3', - 'i', - 'iceps', - #'igoto', - 'ihold', - 'imagecreate', - 'imagefree', - 'imagegetpixel', - 'imageload', - 'imagesave', - 'imagesetpixel', - 'imagesize', - 'in', - 'in32', - 'inch', - 'inh', - 'init', - 'initc14', - 'initc21', - 'initc7', - 'inleta', - 'inletf', - 'inletk', - 'inletkid', - 'inletv', - 'ino', - 'inq', - 'inrg', - 'ins', - 'insglobal', - 'insremot', - #'instr', - 'int', - 'integ', - 'interp', - 'invalue', - 'inx', - 'inz', - 'jitter', - 'jitter2', - 'jspline', - 'k', - #'kgoto', - 'ktableseg', - 'lenarray', - 'lentab', - 'lfo', - 'limit', - 'line', - 'linen', - 'linenr', - 'lineto', - 'linrand', - 'linseg', - 'linsegb', - 'linsegr', - 'locsend', - 'locsig', - 'log', - 'log10', - 'log2', - 'logbtwo', - 'logcurve', - #'loop_ge', - #'loop_gt', - #'loop_le', - #'loop_lt', - 'loopseg', - 'loopsegp', - 'looptseg', - 'loopxseg', - 'lorenz', - 'loscil', - 'loscil3', - 'loscilx', - 'lowpass2', - 'lowres', - 'lowresx', - 'lpf18', - 'lpform', - 'lpfreson', - 'lphasor', - 'lpinterp', - 'lposcil', - 'lposcil3', - 'lposcila', - 'lposcilsa', - 'lposcilsa2', - 'lpread', - 'lpreson', - 'lpshold', - 'lpsholdp', - 'lpslot', - #'lua_exec', - 'lua_ikopcall', - #'lua_opdef', - 'mac', - 'maca', - 'madsr', - 'mags', - 'mandel', - 'mandol', - 'maparray', - 'maparray_i', - 'marimba', - 'massign', - 'max', - 'max_k', - 'maxabs', - 'maxabsaccum', - 'maxaccum', - 'maxalloc', - 'maxarray', - 'maxtab', - 'mclock', - 'mdelay', - 'median', - 'mediank', - 'metro', - 'midglobal', - 'midic14', - 'midic21', - 'midic7', - 'midichannelaftertouch', - 'midichn', - 'midicontrolchange', - 'midictrl', - 'mididefault', - 'midifilestatus', - 'midiin', - 'midinoteoff', - 'midinoteoncps', - 'midinoteonkey', - 'midinoteonoct', - 'midinoteonpch', - 'midion', - 'midion2', - 'midiout', - 'midipgm', - 'midipitchbend', - 
'midipolyaftertouch', - 'midiprogramchange', - 'miditempo', - 'midremot', - 'min', - 'minabs', - 'minabsaccum', - 'minaccum', - 'minarray', - 'mincer', - 'mintab', - 'mirror', - 'mode', - 'modmatrix', - 'monitor', - 'moog', - 'moogladder', - 'moogvcf', - 'moogvcf2', - 'moscil', - 'mp3bitrate', - 'mp3in', - 'mp3len', - 'mp3nchnls', - 'mp3sr', - 'mpulse', - 'mrtmsg', - 'multitap', - 'mute', - 'mxadsr', - 'nestedap', - 'nlalp', - 'nlfilt', - 'nlfilt2', - 'noise', - 'noteoff', - 'noteon', - 'noteondur', - 'noteondur2', - 'notnum', - 'nreverb', - 'nrpn', - 'nsamp', - 'nstance', - 'nstrnum', - 'ntrpol', - 'octave', - 'octcps', - 'octmidi', - 'octmidib', - 'octmidinn', - 'octpch', - #'opcode', - 'oscbnk', - 'oscil', - 'oscil1', - 'oscil1i', - 'oscil3', - 'oscili', - 'oscilikt', - 'osciliktp', - 'oscilikts', - 'osciln', - 'oscils', - 'oscilx', - 'out', - 'out32', - 'outc', - 'outch', - 'outh', - 'outiat', - 'outic', - 'outic14', - 'outipat', - 'outipb', - 'outipc', - 'outkat', - 'outkc', - 'outkc14', - 'outkpat', - 'outkpb', - 'outkpc', - 'outleta', - 'outletf', - 'outletk', - 'outletkid', - 'outletv', - 'outo', - 'outq', - 'outq1', - 'outq2', - 'outq3', - 'outq4', - 'outrg', - 'outs', - 'outs1', - 'outs2', - 'outvalue', - 'outx', - 'outz', - 'p', - 'pan', - 'pan2', - 'pareq', - 'partials', - 'partikkel', - 'partikkelget', - 'partikkelset', - 'partikkelsync', - 'passign', - 'pcauchy', - 'pchbend', - 'pchmidi', - 'pchmidib', - 'pchmidinn', - 'pchoct', - 'pconvolve', - 'pcount', - 'pdclip', - 'pdhalf', - 'pdhalfy', - 'peak', - 'pgmassign', - 'pgmchn', - 'phaser1', - 'phaser2', - 'phasor', - 'phasorbnk', - 'phs', - 'pindex', - 'pinker', - 'pinkish', - 'pitch', - 'pitchac', - 'pitchamdf', - 'planet', - 'platerev', - 'plltrack', - 'pluck', - 'poisson', - 'pol2rect', - 'polyaft', - 'polynomial', - 'pop', - 'pop_f', - 'port', - 'portk', - 'poscil', - 'poscil3', - 'pow', - 'powershape', - 'powoftwo', - 'prealloc', - 'prepiano', - 'print', - 'print_type', - 'printf', - 'printf_i', - 'printk', - 'printk2', - 'printks', - 'printks2', - 'prints', - 'product', - 'pset', - 'ptable', - 'ptable3', - 'ptablei', - 'ptableiw', - 'ptablew', - 'ptrack', - 'push', - 'push_f', - 'puts', - 'pvadd', - 'pvbufread', - 'pvcross', - 'pvinterp', - 'pvoc', - 'pvread', - 'pvs2array', - 'pvs2tab', - 'pvsadsyn', - 'pvsanal', - 'pvsarp', - 'pvsbandp', - 'pvsbandr', - 'pvsbin', - 'pvsblur', - 'pvsbuffer', - 'pvsbufread', - 'pvsbufread2', - 'pvscale', - 'pvscent', - 'pvsceps', - 'pvscross', - 'pvsdemix', - 'pvsdiskin', - 'pvsdisp', - 'pvsenvftw', - 'pvsfilter', - 'pvsfread', - 'pvsfreeze', - 'pvsfromarray', - 'pvsftr', - 'pvsftw', - 'pvsfwrite', - 'pvsgain', - 'pvsgendy', - 'pvshift', - 'pvsifd', - 'pvsin', - 'pvsinfo', - 'pvsinit', - 'pvslock', - 'pvsmaska', - 'pvsmix', - 'pvsmooth', - 'pvsmorph', - 'pvsosc', - 'pvsout', - 'pvspitch', - 'pvstanal', - 'pvstencil', - 'pvsvoc', - 'pvswarp', - 'pvsynth', - 'pwd', - 'pyassign', - 'pyassigni', - 'pyassignt', - 'pycall', - 'pycall1', - 'pycall1i', - 'pycall1t', - 'pycall2', - 'pycall2i', - 'pycall2t', - 'pycall3', - 'pycall3i', - 'pycall3t', - 'pycall4', - 'pycall4i', - 'pycall4t', - 'pycall5', - 'pycall5i', - 'pycall5t', - 'pycall6', - 'pycall6i', - 'pycall6t', - 'pycall7', - 'pycall7i', - 'pycall7t', - 'pycall8', - 'pycall8i', - 'pycall8t', - 'pycalli', - 'pycalln', - 'pycallni', - 'pycallt', - 'pyeval', - 'pyevali', - 'pyevalt', - 'pyexec', - 'pyexeci', - 'pyexect', - 'pyinit', - 'pylassign', - 'pylassigni', - 'pylassignt', - 'pylcall', - 'pylcall1', - 'pylcall1i', - 'pylcall1t', - 
'pylcall2', - 'pylcall2i', - 'pylcall2t', - 'pylcall3', - 'pylcall3i', - 'pylcall3t', - 'pylcall4', - 'pylcall4i', - 'pylcall4t', - 'pylcall5', - 'pylcall5i', - 'pylcall5t', - 'pylcall6', - 'pylcall6i', - 'pylcall6t', - 'pylcall7', - 'pylcall7i', - 'pylcall7t', - 'pylcall8', - 'pylcall8i', - 'pylcall8t', - 'pylcalli', - 'pylcalln', - 'pylcallni', - 'pylcallt', - 'pyleval', - 'pylevali', - 'pylevalt', - 'pylexec', - 'pylexeci', - 'pylexect', - #'pylrun', - #'pylruni', - #'pylrunt', - #'pyrun', - #'pyruni', - #'pyrunt', - 'qinf', - 'qnan', - 'r2c', - 'rand', - 'randh', - 'randi', - 'random', - 'randomh', - 'randomi', - 'rbjeq', - 'readclock', - 'readf', - 'readfi', - 'readk', - 'readk2', - 'readk3', - 'readk4', - 'readks', - 'readscore', - 'readscratch', - 'rect2pol', - 'reinit', - 'release', - 'remoteport', - 'remove', - 'repluck', - 'reson', - 'resonk', - 'resonr', - 'resonx', - 'resonxk', - 'resony', - 'resonz', - 'resyn', - #'return', - 'reverb', - 'reverb2', - 'reverbsc', - 'rewindscore', - 'rezzy', - 'rfft', - 'rifft', - #'rigoto', - 'rireturn', - 'rms', - 'rnd', - 'rnd31', - 'round', - 'rspline', - 'rtclock', - 's16b14', - 's32b14', - 'samphold', - 'sandpaper', - 'scale', - 'scalearray', - 'scalet', - 'scanhammer', - 'scans', - 'scantable', - 'scanu', - 'schedkwhen', - 'schedkwhennamed', - 'schedule', - 'schedwhen', - #'scoreline', - #'scoreline_i', - 'seed', - 'sekere', - 'semitone', - 'sense', - 'sensekey', - 'seqtime', - 'seqtime2', - 'serialBegin', - 'serialEnd', - 'serialFlush', - 'serialPrint', - 'serialRead', - 'serialWrite', - 'serialWrite_i', - 'setcol', - 'setctrl', - 'setksmps', - 'setrow', - 'setscorepos', - 'sfilist', - 'sfinstr', - 'sfinstr3', - 'sfinstr3m', - 'sfinstrm', - 'sfload', - 'sflooper', - 'sfpassign', - 'sfplay', - 'sfplay3', - 'sfplay3m', - 'sfplaym', - 'sfplist', - 'sfpreset', - 'shaker', - 'shiftin', - 'shiftout', - 'signalflowgraph', - 'signum', - 'sin', - 'sinh', - 'sininv', - 'sinsyn', - 'sleighbells', - 'slicearray', - 'slider16', - 'slider16f', - 'slider16table', - 'slider16tablef', - 'slider32', - 'slider32f', - 'slider32table', - 'slider32tablef', - 'slider64', - 'slider64f', - 'slider64table', - 'slider64tablef', - 'slider8', - 'slider8f', - 'slider8table', - 'slider8tablef', - 'sliderKawai', - 'sndload', - 'sndloop', - 'sndwarp', - 'sndwarpst', - 'sockrecv', - 'sockrecvs', - 'socksend', - 'socksends', - 'soundin', - 'soundout', - 'soundouts', - 'space', - 'spat3d', - 'spat3di', - 'spat3dt', - 'spdist', - 'specaddm', - 'specdiff', - 'specdisp', - 'specfilt', - 'spechist', - 'specptrk', - 'specscal', - 'specsum', - 'spectrum', - 'splitrig', - 'sprintf', - 'sprintfk', - 'spsend', - 'sqrt', - 'stack', - 'statevar', - 'stix', - 'strcat', - 'strcatk', - 'strchar', - 'strchark', - 'strcmp', - 'strcmpk', - 'strcpy', - 'strcpyk', - 'strecv', - 'streson', - 'strfromurl', - 'strget', - 'strindex', - 'strindexk', - 'strlen', - 'strlenk', - 'strlower', - 'strlowerk', - 'strrindex', - 'strrindexk', - 'strset', - 'strsub', - 'strsubk', - 'strtod', - 'strtodk', - 'strtol', - 'strtolk', - 'strupper', - 'strupperk', - 'stsend', - 'subinstr', - 'subinstrinit', - 'sum', - 'sumarray', - 'sumtab', - 'svfilter', - 'syncgrain', - 'syncloop', - 'syncphasor', - 'system', - 'system_i', - 'tab', - 'tab2pvs', - 'tab_i', - 'tabgen', - 'table', - 'table3', - 'table3kt', - 'tablecopy', - 'tablefilter', - 'tablefilteri', - 'tablegpw', - 'tablei', - 'tableicopy', - 'tableigpw', - 'tableikt', - 'tableimix', - 'tableiw', - 'tablekt', - 'tablemix', - 'tableng', - 'tablera', - 
'tableseg', - 'tableshuffle', - 'tableshufflei', - 'tablew', - 'tablewa', - 'tablewkt', - 'tablexkt', - 'tablexseg', - 'tabmap', - 'tabmap_i', - 'tabmorph', - 'tabmorpha', - 'tabmorphak', - 'tabmorphi', - 'tabplay', - 'tabrec', - 'tabslice', - 'tabsum', - 'tabw', - 'tabw_i', - 'tambourine', - 'tan', - 'tanh', - 'taninv', - 'taninv2', - 'tb0', - 'tb0_init', - 'tb1', - 'tb10', - 'tb10_init', - 'tb11', - 'tb11_init', - 'tb12', - 'tb12_init', - 'tb13', - 'tb13_init', - 'tb14', - 'tb14_init', - 'tb15', - 'tb15_init', - 'tb1_init', - 'tb2', - 'tb2_init', - 'tb3', - 'tb3_init', - 'tb4', - 'tb4_init', - 'tb5', - 'tb5_init', - 'tb6', - 'tb6_init', - 'tb7', - 'tb7_init', - 'tb8', - 'tb8_init', - 'tb9', - 'tb9_init', - 'tbvcf', - 'tempest', - 'tempo', - 'temposcal', - 'tempoval', - #'tigoto', - 'timedseq', - 'timeinstk', - 'timeinsts', - 'timek', - 'times', - #'timout', - 'tival', - 'tlineto', - 'tone', - 'tonek', - 'tonex', - 'tradsyn', - 'trandom', - 'transeg', - 'transegb', - 'transegr', - 'trcross', - 'trfilter', - 'trhighest', - 'trigger', - 'trigseq', - 'trirand', - 'trlowest', - 'trmix', - 'trscale', - 'trshift', - 'trsplit', - 'turnoff', - 'turnoff2', - 'turnon', - 'unirand', - 'unwrap', - 'upsamp', - 'urd', - 'vactrol', - 'vadd', - 'vadd_i', - 'vaddv', - 'vaddv_i', - 'vaget', - 'valpass', - 'vaset', - 'vbap', - 'vbap16', - 'vbap4', - 'vbap4move', - 'vbap8', - 'vbap8move', - 'vbapg', - 'vbapgmove', - 'vbaplsinit', - 'vbapmove', - 'vbapz', - 'vbapzmove', - 'vcella', - 'vco', - 'vco2', - 'vco2ft', - 'vco2ift', - 'vco2init', - 'vcomb', - 'vcopy', - 'vcopy_i', - 'vdel_k', - 'vdelay', - 'vdelay3', - 'vdelayk', - 'vdelayx', - 'vdelayxq', - 'vdelayxs', - 'vdelayxw', - 'vdelayxwq', - 'vdelayxws', - 'vdivv', - 'vdivv_i', - 'vecdelay', - 'veloc', - 'vexp', - 'vexp_i', - 'vexpseg', - 'vexpv', - 'vexpv_i', - 'vibes', - 'vibr', - 'vibrato', - 'vincr', - 'vlimit', - 'vlinseg', - 'vlowres', - 'vmap', - 'vmirror', - 'vmult', - 'vmult_i', - 'vmultv', - 'vmultv_i', - 'voice', - 'vosim', - 'vphaseseg', - 'vport', - 'vpow', - 'vpow_i', - 'vpowv', - 'vpowv_i', - 'vpvoc', - 'vrandh', - 'vrandi', - 'vsubv', - 'vsubv_i', - 'vtaba', - 'vtabi', - 'vtabk', - 'vtable1k', - 'vtablea', - 'vtablei', - 'vtablek', - 'vtablewa', - 'vtablewi', - 'vtablewk', - 'vtabwa', - 'vtabwi', - 'vtabwk', - 'vwrap', - 'waveset', - 'weibull', - 'wgbow', - 'wgbowedbar', - 'wgbrass', - 'wgclar', - 'wgflute', - 'wgpluck', - 'wgpluck2', - 'wguide1', - 'wguide2', - 'wiiconnect', - 'wiidata', - 'wiirange', - 'wiisend', - 'window', - 'wrap', - 'writescratch', - 'wterrain', - 'xadsr', - 'xin', - 'xout', - 'xscanmap', - 'xscans', - 'xscansmap', - 'xscanu', - 'xtratim', - 'xyin', - 'zacl', - 'zakinit', - 'zamod', - 'zar', - 'zarg', - 'zaw', - 'zawm', - 'zfilter2', - 'zir', - 'ziw', - 'ziwm', - 'zkcl', - 'zkmod', - 'zkr', - 'zkw', - 'zkwm' -)) +# Opcodes in Csound 6.12.0 at commit 6ca322bd31f1ca907c008616b40a5f237ff449db using +# python -c " +# import re, subprocess +# output = subprocess.Popen(['csound', '--list-opcodes0'], stderr=subprocess.PIPE).communicate()[1] +# opcodes = output[re.search(r'^$', output, re.M).end():re.search(r'^\d+ opcodes$', output, re.M).start()].split() +# output = subprocess.Popen(['csound', '--list-opcodes2'], stderr=subprocess.PIPE).communicate()[1] +# all_opcodes = output[re.search(r'^$', output, re.M).end():re.search(r'^\d+ opcodes$', output, re.M).start()].split() +# deprecated_opcodes = [opcode for opcode in all_opcodes if opcode not in opcodes] +# print '''OPCODES = set(\''' +# {} +# \'''.split()) +# +# 
DEPRECATED_OPCODES = set(\''' +# {} +# \'''.split()) +# '''.format('\n'.join(opcodes), '\n'.join(deprecated_opcodes)) +# " +# except for +# cggoto csound.com/docs/manual/cggoto.html +# cigoto csound.com/docs/manual/cigoto.html +# cingoto (undocumented) +# ckgoto csound.com/docs/manual/ckgoto.html +# cngoto csound.com/docs/manual/cngoto.html +# cnkgoto (undocumented) +# endin csound.com/docs/manual/endin.html +# endop csound.com/docs/manual/endop.html +# goto csound.com/docs/manual/goto.html +# igoto csound.com/docs/manual/igoto.html +# instr csound.com/docs/manual/instr.html +# kgoto csound.com/docs/manual/kgoto.html +# loop_ge csound.com/docs/manual/loop_ge.html +# loop_gt csound.com/docs/manual/loop_gt.html +# loop_le csound.com/docs/manual/loop_le.html +# loop_lt csound.com/docs/manual/loop_lt.html +# opcode csound.com/docs/manual/opcode.html +# reinit csound.com/docs/manual/reinit.html +# return csound.com/docs/manual/return.html +# rireturn csound.com/docs/manual/rireturn.html +# rigoto csound.com/docs/manual/rigoto.html +# tigoto csound.com/docs/manual/tigoto.html +# timout csound.com/docs/manual/timout.html +# which are treated as keywords in csound.py. + +OPCODES = set(''' +ATSadd +ATSaddnz +ATSbufread +ATScross +ATSinfo +ATSinterpread +ATSpartialtap +ATSread +ATSreadnz +ATSsinnoi +FLbox +FLbutBank +FLbutton +FLcloseButton +FLcolor +FLcolor2 +FLcount +FLexecButton +FLgetsnap +FLgroup +FLgroupEnd +FLgroup_end +FLhide +FLhvsBox +FLhvsBoxSetValue +FLjoy +FLkeyIn +FLknob +FLlabel +FLloadsnap +FLmouse +FLpack +FLpackEnd +FLpack_end +FLpanel +FLpanelEnd +FLpanel_end +FLprintk +FLprintk2 +FLroller +FLrun +FLsavesnap +FLscroll +FLscrollEnd +FLscroll_end +FLsetAlign +FLsetBox +FLsetColor +FLsetColor2 +FLsetFont +FLsetPosition +FLsetSize +FLsetSnapGroup +FLsetText +FLsetTextColor +FLsetTextSize +FLsetTextType +FLsetVal +FLsetVal_i +FLsetVali +FLsetsnap +FLshow +FLslidBnk +FLslidBnk2 +FLslidBnk2Set +FLslidBnk2Setk +FLslidBnkGetHandle +FLslidBnkSet +FLslidBnkSetk +FLslider +FLtabs +FLtabsEnd +FLtabs_end +FLtext +FLupdate +FLvalue +FLvkeybd +FLvslidBnk +FLvslidBnk2 +FLxyin +JackoAudioIn +JackoAudioInConnect +JackoAudioOut +JackoAudioOutConnect +JackoFreewheel +JackoInfo +JackoInit +JackoMidiInConnect +JackoMidiOut +JackoMidiOutConnect +JackoNoteOut +JackoOn +JackoTransport +K35_hpf +K35_lpf +MixerClear +MixerGetLevel +MixerReceive +MixerSend +MixerSetLevel +MixerSetLevel_i +OSCbundle +OSCcount +OSCinit +OSCinitM +OSClisten +OSCraw +OSCsend +OSCsend_lo +S +STKBandedWG +STKBeeThree +STKBlowBotl +STKBlowHole +STKBowed +STKBrass +STKClarinet +STKDrummer +STKFlute +STKFMVoices +STKHevyMetl +STKMandolin +STKModalBar +STKMoog +STKPercFlut +STKPlucked +STKResonate +STKRhodey +STKSaxofony +STKShakers +STKSimple +STKSitar +STKStifKarp +STKTubeBell +STKVoicForm +STKWhistle +STKWurley +a +abs +active +adsr +adsyn +adsynt +adsynt2 +aftouch +alpass +alwayson +ampdb +ampdbfs +ampmidi +ampmidid +areson +aresonk +atone +atonek +atonex +babo +balance +balance2 +bamboo +barmodel +bbcutm +bbcuts +beadsynt +beosc +betarand +bexprnd +bformdec1 +bformenc1 +binit +biquad +biquada +birnd +bpf +bpfcos +bqrez +butbp +butbr +buthp +butlp +butterbp +butterbr +butterhp +butterlp +button +buzz +c2r +cabasa +cauchy +cauchyi +cbrt +ceil +cell +cent +centroid +ceps +cepsinv +chanctrl +changed +changed2 +chani +chano +chebyshevpoly +checkbox +chn_S +chn_a +chn_k +chnclear +chnexport +chnget +chngetks +chnmix +chnparams +chnset +chnsetks +chuap +clear +clfilt +clip +clockoff +clockon +cmp +cmplxprod +comb +combinv +compilecsd 
+compileorc +compilestr +compress +compress2 +connect +control +convle +convolve +copya2ftab +copyf2array +cos +cosh +cosinv +cosseg +cossegb +cossegr +cps2pch +cpsmidi +cpsmidib +cpsmidinn +cpsoct +cpspch +cpstmid +cpstun +cpstuni +cpsxpch +cpumeter +cpuprc +cross2 +crossfm +crossfmi +crossfmpm +crossfmpmi +crosspm +crosspmi +crunch +ctlchn +ctrl14 +ctrl21 +ctrl7 +ctrlinit +cuserrnd +dam +date +dates +db +dbamp +dbfsamp +dcblock +dcblock2 +dconv +dct +dctinv +deinterleave +delay +delay1 +delayk +delayr +delayw +deltap +deltap3 +deltapi +deltapn +deltapx +deltapxw +denorm +diff +diode_ladder +directory +diskgrain +diskin +diskin2 +dispfft +display +distort +distort1 +divz +doppler +dot +downsamp +dripwater +dssiactivate +dssiaudio +dssictls +dssiinit +dssilist +dumpk +dumpk2 +dumpk3 +dumpk4 +duserrnd +dust +dust2 +envlpx +envlpxr +ephasor +eqfil +evalstr +event +event_i +exciter +exitnow +exp +expcurve +expon +exprand +exprandi +expseg +expsega +expsegb +expsegba +expsegr +fareylen +fareyleni +faustaudio +faustcompile +faustctl +faustdsp +faustgen +faustplay +fft +fftinv +ficlose +filebit +filelen +filenchnls +filepeak +filescal +filesr +filevalid +fillarray +filter2 +fin +fini +fink +fiopen +flanger +flashtxt +flooper +flooper2 +floor +fmanal +fmax +fmb3 +fmbell +fmin +fmmetal +fmod +fmpercfl +fmrhode +fmvoice +fmwurlie +fof +fof2 +fofilter +fog +fold +follow +follow2 +foscil +foscili +fout +fouti +foutir +foutk +fprintks +fprints +frac +fractalnoise +framebuffer +freeverb +ftaudio +ftchnls +ftconv +ftcps +ftfree +ftgen +ftgenonce +ftgentmp +ftlen +ftload +ftloadk +ftlptim +ftmorf +ftom +ftprint +ftresize +ftresizei +ftsamplebank +ftsave +ftsavek +ftslice +ftsr +gain +gainslider +gauss +gaussi +gausstrig +gbuzz +genarray +genarray_i +gendy +gendyc +gendyx +getcfg +getcol +getftargs +getrow +getrowlin +getseed +gogobel +grain +grain2 +grain3 +granule +guiro +harmon +harmon2 +harmon3 +harmon4 +hdf5read +hdf5write +hilbert +hilbert2 +hrtfearly +hrtfmove +hrtfmove2 +hrtfreverb +hrtfstat +hsboscil +hvs1 +hvs2 +hvs3 +hypot +i +ihold +imagecreate +imagefree +imagegetpixel +imageload +imagesave +imagesetpixel +imagesize +in +in32 +inch +inh +init +initc14 +initc21 +initc7 +inleta +inletf +inletk +inletkid +inletv +ino +inq +inrg +ins +insglobal +insremot +int +integ +interleave +interp +invalue +inx +inz +jacktransport +jitter +jitter2 +joystick +jspline +k +la_i_add_mc +la_i_add_mr +la_i_add_vc +la_i_add_vr +la_i_assign_mc +la_i_assign_mr +la_i_assign_t +la_i_assign_vc +la_i_assign_vr +la_i_conjugate_mc +la_i_conjugate_mr +la_i_conjugate_vc +la_i_conjugate_vr +la_i_distance_vc +la_i_distance_vr +la_i_divide_mc +la_i_divide_mr +la_i_divide_vc +la_i_divide_vr +la_i_dot_mc +la_i_dot_mc_vc +la_i_dot_mr +la_i_dot_mr_vr +la_i_dot_vc +la_i_dot_vr +la_i_get_mc +la_i_get_mr +la_i_get_vc +la_i_get_vr +la_i_invert_mc +la_i_invert_mr +la_i_lower_solve_mc +la_i_lower_solve_mr +la_i_lu_det_mc +la_i_lu_det_mr +la_i_lu_factor_mc +la_i_lu_factor_mr +la_i_lu_solve_mc +la_i_lu_solve_mr +la_i_mc_create +la_i_mc_set +la_i_mr_create +la_i_mr_set +la_i_multiply_mc +la_i_multiply_mr +la_i_multiply_vc +la_i_multiply_vr +la_i_norm_euclid_mc +la_i_norm_euclid_mr +la_i_norm_euclid_vc +la_i_norm_euclid_vr +la_i_norm_inf_mc +la_i_norm_inf_mr +la_i_norm_inf_vc +la_i_norm_inf_vr +la_i_norm_max_mc +la_i_norm_max_mr +la_i_norm1_mc +la_i_norm1_mr +la_i_norm1_vc +la_i_norm1_vr +la_i_print_mc +la_i_print_mr +la_i_print_vc +la_i_print_vr +la_i_qr_eigen_mc +la_i_qr_eigen_mr +la_i_qr_factor_mc +la_i_qr_factor_mr +la_i_qr_sym_eigen_mc 
+la_i_qr_sym_eigen_mr +la_i_random_mc +la_i_random_mr +la_i_random_vc +la_i_random_vr +la_i_size_mc +la_i_size_mr +la_i_size_vc +la_i_size_vr +la_i_subtract_mc +la_i_subtract_mr +la_i_subtract_vc +la_i_subtract_vr +la_i_t_assign +la_i_trace_mc +la_i_trace_mr +la_i_transpose_mc +la_i_transpose_mr +la_i_upper_solve_mc +la_i_upper_solve_mr +la_i_vc_create +la_i_vc_set +la_i_vr_create +la_i_vr_set +la_k_a_assign +la_k_add_mc +la_k_add_mr +la_k_add_vc +la_k_add_vr +la_k_assign_a +la_k_assign_f +la_k_assign_mc +la_k_assign_mr +la_k_assign_t +la_k_assign_vc +la_k_assign_vr +la_k_conjugate_mc +la_k_conjugate_mr +la_k_conjugate_vc +la_k_conjugate_vr +la_k_current_f +la_k_current_vr +la_k_distance_vc +la_k_distance_vr +la_k_divide_mc +la_k_divide_mr +la_k_divide_vc +la_k_divide_vr +la_k_dot_mc +la_k_dot_mc_vc +la_k_dot_mr +la_k_dot_mr_vr +la_k_dot_vc +la_k_dot_vr +la_k_f_assign +la_k_get_mc +la_k_get_mr +la_k_get_vc +la_k_get_vr +la_k_invert_mc +la_k_invert_mr +la_k_lower_solve_mc +la_k_lower_solve_mr +la_k_lu_det_mc +la_k_lu_det_mr +la_k_lu_factor_mc +la_k_lu_factor_mr +la_k_lu_solve_mc +la_k_lu_solve_mr +la_k_mc_set +la_k_mr_set +la_k_multiply_mc +la_k_multiply_mr +la_k_multiply_vc +la_k_multiply_vr +la_k_norm_euclid_mc +la_k_norm_euclid_mr +la_k_norm_euclid_vc +la_k_norm_euclid_vr +la_k_norm_inf_mc +la_k_norm_inf_mr +la_k_norm_inf_vc +la_k_norm_inf_vr +la_k_norm_max_mc +la_k_norm_max_mr +la_k_norm1_mc +la_k_norm1_mr +la_k_norm1_vc +la_k_norm1_vr +la_k_qr_eigen_mc +la_k_qr_eigen_mr +la_k_qr_factor_mc +la_k_qr_factor_mr +la_k_qr_sym_eigen_mc +la_k_qr_sym_eigen_mr +la_k_random_mc +la_k_random_mr +la_k_random_vc +la_k_random_vr +la_k_subtract_mc +la_k_subtract_mr +la_k_subtract_vc +la_k_subtract_vr +la_k_t_assign +la_k_trace_mc +la_k_trace_mr +la_k_upper_solve_mc +la_k_upper_solve_mr +la_k_vc_set +la_k_vr_set +lenarray +lfo +limit +limit1 +lincos +line +linen +linenr +lineto +link_beat_force +link_beat_get +link_beat_request +link_create +link_enable +link_is_enabled +link_metro +link_peers +link_tempo_get +link_tempo_set +linlin +linrand +linseg +linsegb +linsegr +liveconv +locsend +locsig +log +log10 +log2 +logbtwo +logcurve +loopseg +loopsegp +looptseg +loopxseg +lorenz +loscil +loscil3 +loscil3phs +loscilphs +loscilx +lowpass2 +lowres +lowresx +lpf18 +lpform +lpfreson +lphasor +lpinterp +lposcil +lposcil3 +lposcila +lposcilsa +lposcilsa2 +lpread +lpreson +lpshold +lpsholdp +lpslot +lua_exec +lua_iaopcall +lua_iaopcall_off +lua_ikopcall +lua_ikopcall_off +lua_iopcall +lua_iopcall_off +lua_opdef +mac +maca +madsr +mags +mandel +mandol +maparray +maparray_i +marimba +massign +max +max_k +maxabs +maxabsaccum +maxaccum +maxalloc +maxarray +mclock +mdelay +median +mediank +metro +mfb +midglobal +midiarp +midic14 +midic21 +midic7 +midichannelaftertouch +midichn +midicontrolchange +midictrl +mididefault +midifilestatus +midiin +midinoteoff +midinoteoncps +midinoteonkey +midinoteonoct +midinoteonpch +midion +midion2 +midiout +midiout_i +midipgm +midipitchbend +midipolyaftertouch +midiprogramchange +miditempo +midremot +min +minabs +minabsaccum +minaccum +minarray +mincer +mirror +mode +modmatrix +monitor +moog +moogladder +moogladder2 +moogvcf +moogvcf2 +moscil +mp3bitrate +mp3in +mp3len +mp3nchnls +mp3scal +mp3sr +mpulse +mrtmsg +mtof +mton +multitap +mute +mvchpf +mvclpf1 +mvclpf2 +mvclpf3 +mvclpf4 +mxadsr +nchnls_hw +nestedap +nlalp +nlfilt +nlfilt2 +noise +noteoff +noteon +noteondur +noteondur2 +notnum +nreverb +nrpn +nsamp +nstance +nstrnum +ntom +ntrpol +nxtpow2 +octave +octcps +octmidi +octmidib 
+octmidinn +octpch +olabuffer +oscbnk +oscil +oscil1 +oscil1i +oscil3 +oscili +oscilikt +osciliktp +oscilikts +osciln +oscils +oscilx +out +out32 +outc +outch +outh +outiat +outic +outic14 +outipat +outipb +outipc +outkat +outkc +outkc14 +outkpat +outkpb +outkpc +outleta +outletf +outletk +outletkid +outletv +outo +outq +outq1 +outq2 +outq3 +outq4 +outrg +outs +outs1 +outs2 +outvalue +outx +outz +p +p5gconnect +p5gdata +pan +pan2 +pareq +part2txt +partials +partikkel +partikkelget +partikkelset +partikkelsync +passign +paulstretch +pcauchy +pchbend +pchmidi +pchmidib +pchmidinn +pchoct +pchtom +pconvolve +pcount +pdclip +pdhalf +pdhalfy +peak +pgmassign +pgmchn +phaser1 +phaser2 +phasor +phasorbnk +phs +pindex +pinker +pinkish +pitch +pitchac +pitchamdf +planet +platerev +plltrack +pluck +poisson +pol2rect +polyaft +polynomial +port +portk +poscil +poscil3 +pow +powershape +powoftwo +pows +prealloc +prepiano +print +print_type +printarray +printf +printf_i +printk +printk2 +printks +printks2 +prints +product +pset +ptable +ptable3 +ptablei +ptableiw +ptablew +ptrack +puts +pvadd +pvbufread +pvcross +pvinterp +pvoc +pvread +pvs2array +pvs2tab +pvsadsyn +pvsanal +pvsarp +pvsbandp +pvsbandr +pvsbin +pvsblur +pvsbuffer +pvsbufread +pvsbufread2 +pvscale +pvscent +pvsceps +pvscross +pvsdemix +pvsdiskin +pvsdisp +pvsenvftw +pvsfilter +pvsfread +pvsfreeze +pvsfromarray +pvsftr +pvsftw +pvsfwrite +pvsgain +pvshift +pvsifd +pvsin +pvsinfo +pvsinit +pvslock +pvsmaska +pvsmix +pvsmooth +pvsmorph +pvsosc +pvsout +pvspitch +pvstanal +pvstencil +pvstrace +pvsvoc +pvswarp +pvsynth +pwd +pyassign +pyassigni +pyassignt +pycall +pycall1 +pycall1i +pycall1t +pycall2 +pycall2i +pycall2t +pycall3 +pycall3i +pycall3t +pycall4 +pycall4i +pycall4t +pycall5 +pycall5i +pycall5t +pycall6 +pycall6i +pycall6t +pycall7 +pycall7i +pycall7t +pycall8 +pycall8i +pycall8t +pycalli +pycalln +pycallni +pycallt +pyeval +pyevali +pyevalt +pyexec +pyexeci +pyexect +pyinit +pylassign +pylassigni +pylassignt +pylcall +pylcall1 +pylcall1i +pylcall1t +pylcall2 +pylcall2i +pylcall2t +pylcall3 +pylcall3i +pylcall3t +pylcall4 +pylcall4i +pylcall4t +pylcall5 +pylcall5i +pylcall5t +pylcall6 +pylcall6i +pylcall6t +pylcall7 +pylcall7i +pylcall7t +pylcall8 +pylcall8i +pylcall8t +pylcalli +pylcalln +pylcallni +pylcallt +pyleval +pylevali +pylevalt +pylexec +pylexeci +pylexect +pylrun +pylruni +pylrunt +pyrun +pyruni +pyrunt +qinf +qnan +r2c +rand +randh +randi +random +randomh +randomi +rbjeq +readclock +readf +readfi +readk +readk2 +readk3 +readk4 +readks +readscore +readscratch +rect2pol +release +remoteport +remove +repluck +reshapearray +reson +resonk +resonr +resonx +resonxk +resony +resonz +resyn +reverb +reverb2 +reverbsc +rewindscore +rezzy +rfft +rifft +rms +rnd +rnd31 +round +rspline +rtclock +s16b14 +s32b14 +samphold +sandpaper +sc_lag +sc_lagud +sc_phasor +sc_trig +scale +scalearray +scanhammer +scans +scantable +scanu +schedkwhen +schedkwhennamed +schedule +schedwhen +scoreline +scoreline_i +seed +sekere +select +semitone +sense +sensekey +seqtime +seqtime2 +serialBegin +serialEnd +serialFlush +serialPrint +serialRead +serialWrite +serialWrite_i +setcol +setctrl +setksmps +setrow +setscorepos +sfilist +sfinstr +sfinstr3 +sfinstr3m +sfinstrm +sfload +sflooper +sfpassign +sfplay +sfplay3 +sfplay3m +sfplaym +sfplist +sfpreset +shaker +shiftin +shiftout +signum +sin +sinh +sininv +sinsyn +sleighbells +slicearray +slicearray_i +slider16 +slider16f +slider16table +slider16tablef +slider32 +slider32f +slider32table +slider32tablef 
+slider64 +slider64f +slider64table +slider64tablef +slider8 +slider8f +slider8table +slider8tablef +sliderKawai +sndloop +sndwarp +sndwarpst +sockrecv +sockrecvs +socksend +socksends +sorta +sortd +soundin +space +spat3d +spat3di +spat3dt +spdist +splitrig +sprintf +sprintfk +spsend +sqrt +squinewave +statevar +stix +strcat +strcatk +strchar +strchark +strcmp +strcmpk +strcpy +strcpyk +strecv +streson +strfromurl +strget +strindex +strindexk +strlen +strlenk +strlower +strlowerk +strrindex +strrindexk +strset +strsub +strsubk +strtod +strtodk +strtol +strtolk +strupper +strupperk +stsend +subinstr +subinstrinit +sum +sumarray +svfilter +syncgrain +syncloop +syncphasor +system +system_i +tab +tab2array +tab2pvs +tab_i +tabifd +table +table3 +table3kt +tablecopy +tablefilter +tablefilteri +tablegpw +tablei +tableicopy +tableigpw +tableikt +tableimix +tableiw +tablekt +tablemix +tableng +tablera +tableseg +tableshuffle +tableshufflei +tablew +tablewa +tablewkt +tablexkt +tablexseg +tabmorph +tabmorpha +tabmorphak +tabmorphi +tabplay +tabrec +tabrowlin +tabsum +tabw +tabw_i +tambourine +tan +tanh +taninv +taninv2 +tbvcf +tempest +tempo +temposcal +tempoval +timedseq +timeinstk +timeinsts +timek +times +tival +tlineto +tone +tonek +tonex +tradsyn +trandom +transeg +transegb +transegr +trcross +trfilter +trhighest +trigger +trigseq +trim +trim_i +trirand +trlowest +trmix +trscale +trshift +trsplit +turnoff +turnoff2 +turnon +tvconv +unirand +unwrap +upsamp +urandom +urd +vactrol +vadd +vadd_i +vaddv +vaddv_i +vaget +valpass +vaset +vbap +vbapg +vbapgmove +vbaplsinit +vbapmove +vbapz +vbapzmove +vcella +vco +vco2 +vco2ft +vco2ift +vco2init +vcomb +vcopy +vcopy_i +vdel_k +vdelay +vdelay3 +vdelayk +vdelayx +vdelayxq +vdelayxs +vdelayxw +vdelayxwq +vdelayxws +vdivv +vdivv_i +vecdelay +veloc +vexp +vexp_i +vexpseg +vexpv +vexpv_i +vibes +vibr +vibrato +vincr +vlimit +vlinseg +vlowres +vmap +vmirror +vmult +vmult_i +vmultv +vmultv_i +voice +vosim +vphaseseg +vport +vpow +vpow_i +vpowv +vpowv_i +vpvoc +vrandh +vrandi +vsubv +vsubv_i +vtaba +vtabi +vtabk +vtable1k +vtablea +vtablei +vtablek +vtablewa +vtablewi +vtablewk +vtabwa +vtabwi +vtabwk +vwrap +waveset +websocket +weibull +wgbow +wgbowedbar +wgbrass +wgclar +wgflute +wgpluck +wgpluck2 +wguide1 +wguide2 +wiiconnect +wiidata +wiirange +wiisend +window +wrap +writescratch +wterrain +xadsr +xin +xout +xscanmap +xscans +xscansmap +xscanu +xtratim +xyscale +zacl +zakinit +zamod +zar +zarg +zaw +zawm +zdf_1pole +zdf_1pole_mode +zdf_2pole +zdf_2pole_mode +zdf_ladder +zfilter2 +zir +ziw +ziwm +zkcl +zkmod +zkr +zkw +zkwm +'''.split()) + +DEPRECATED_OPCODES = set(''' +array +bformdec +bformenc +copy2ftab +copy2ttab +hrtfer +ktableseg +lentab +maxtab +mintab +pop +pop_f +push +push_f +scalet +sndload +soundout +soundouts +specaddm +specdiff +specdisp +specfilt +spechist +specptrk +specscal +specsum +spectrum +stack +sumtab +tabgen +tabmap +tabmap_i +tabslice +tb0 +tb0_init +tb1 +tb10 +tb10_init +tb11 +tb11_init +tb12 +tb12_init +tb13 +tb13_init +tb14 +tb14_init +tb15 +tb15_init +tb1_init +tb2 +tb2_init +tb3 +tb3_init +tb4 +tb4_init +tb5 +tb5_init +tb6 +tb6_init +tb7 +tb7_init +tb8 +tb8_init +tb9 +tb9_init +vbap16 +vbap4 +vbap4move +vbap8 +vbap8move +xyin +'''.split()) diff --git a/pygments/lexers/_lua_builtins.py b/pygments/lexers/_lua_builtins.py index c60bf5a2..0561725d 100644 --- a/pygments/lexers/_lua_builtins.py +++ b/pygments/lexers/_lua_builtins.py @@ -288,7 +288,7 @@ if __name__ == '__main__': # pragma: no cover print('>> %s' % full_function_name) 
m = get_function_module(full_function_name) modules.setdefault(m, []).append(full_function_name) - modules = {k: tuple(v) for k, v in modules.iteritems()} + modules = dict((k, tuple(v)) for k, v in modules.iteritems()) regenerate(__file__, modules) diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index a176238e..aa4de074 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -44,14 +44,16 @@ LEXERS = { 'ArduinoLexer': ('pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)), 'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)), 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)), + 'AugeasLexer': ('pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()), 'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)), 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)), 'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)), + 'BBCBasicLexer': ('pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()), 'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)), 'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()), 'BSTLexer': ('pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()), 'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()), - 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')), + 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')), 'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')), 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)), 'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)), @@ -81,6 +83,7 @@ LEXERS = { 'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()), 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')), 'ChapelLexer': ('pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()), + 'CharmciLexer': ('pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()), 'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')), 'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 
'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')), 'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')), @@ -106,7 +109,7 @@ LEXERS = { 'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)), 'CrystalLexer': ('pygments.lexers.crystal', 'Crystal', ('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)), 'CsoundDocumentLexer': ('pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()), - 'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc',), ()), + 'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()), 'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()), 'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')), 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)), @@ -121,6 +124,7 @@ LEXERS = { 'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)), 'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()), 'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)), + 'Dasm16Lexer': ('pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)), 'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()), 'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)), 'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), @@ -154,8 +158,10 @@ LEXERS = { 'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), 'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), 'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), + 'FennelLexer': ('pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()), 'FishShellLexer': ('pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)), 'FlatlineLexer': ('pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)), + 'FloScriptLexer': ('pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()), 'ForthLexer': ('pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)), 'FortranFixedLexer': ('pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()), 'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)), @@ -175,6 +181,7 @@ LEXERS = { 'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)), 'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')), 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)), + 
'HLSLShaderLexer': ('pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)), 'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)), 'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')), 'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()), @@ -182,6 +189,7 @@ LEXERS = { 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')), 'HexdumpLexer': ('pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()), 'HsailLexer': ('pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)), + 'HspecLexer': ('pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()), 'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')), 'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)), 'HtmlLexer': ('pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')), @@ -192,6 +200,7 @@ LEXERS = { 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')), 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')), 'IDLLexer': ('pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)), + 'IconLexer': ('pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()), 'IdrisLexer': ('pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)), 'IgorLexer': ('pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)), 'Inform6Lexer': ('pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()), @@ -317,6 +326,7 @@ LEXERS = { 'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), 'PkgConfigLexer': ('pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()), 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), + 'PonyLexer': ('pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()), 'PostScriptLexer': ('pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), 'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), 'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), @@ -368,10 +378,11 @@ LEXERS = { 'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()), 'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')), - 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust',), ('*.rs', '*.rs.in'), ('text/rust',)), + 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust',)), 'SASLexer': ('pygments.lexers.sas', 'SAS', ('sas',), 
('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')), 'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), 'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), + 'SarlLexer': ('pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)), 'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)), 'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)), 'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)), @@ -380,9 +391,12 @@ LEXERS = { 'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)), 'ShenLexer': ('pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')), 'SilverLexer': ('pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()), + 'SlashLexer': ('pygments.lexers.slash', 'Slash', ('slash',), ('*.sl',), ()), 'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)), + 'SlurmBashLexer': ('pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()), 'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)), 'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)), + 'SmartGameFormatLexer': ('pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()), 'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)), 'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)), 'SnowballLexer': ('pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()), @@ -400,6 +414,7 @@ LEXERS = { 'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), 'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), 'TAPLexer': ('pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()), + 'TOMLLexer': ('pygments.lexers.configs', 'TOML', ('toml',), ('*.toml',), ()), 'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()), 'TasmLexer': ('pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)), 'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')), @@ -421,8 +436,11 @@ LEXERS = { 'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('ts', 'typescript'), ('*.ts', '*.tsx'), ('text/x-typescript',)), 'TypoScriptCssDataLexer': ('pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()), 'TypoScriptHtmlDataLexer': ('pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()), - 'TypoScriptLexer': ('pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.ts', '*.txt'), ('text/x-typoscript',)), + 'TypoScriptLexer': ('pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)), + 'UcodeLexer': ('pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()), + 'UniconLexer': ('pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), 
('text/unicon',)), 'UrbiscriptLexer': ('pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), + 'VBScriptLexer': ('pygments.lexers.basic', 'VBScript', (), ('*.vbs', '*.VBS'), ()), 'VCLLexer': ('pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)), 'VCLSnippetLexer': ('pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)), 'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()), diff --git a/pygments/lexers/_php_builtins.py b/pygments/lexers/_php_builtins.py index fec3286a..c6084003 100644 --- a/pygments/lexers/_php_builtins.py +++ b/pygments/lexers/_php_builtins.py @@ -4688,7 +4688,7 @@ if __name__ == '__main__': # pragma: no cover PHP_MANUAL_URL = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz' PHP_MANUAL_DIR = './php-chunked-xhtml/' PHP_REFERENCE_GLOB = 'ref.*' - PHP_FUNCTION_RE = '<a href="function\..*?\.html">(.*?)</a>' + PHP_FUNCTION_RE = r'<a href="function\..*?\.html">(.*?)</a>' PHP_MODULE_RE = '<title>(.*?) Functions</title>' def get_php_functions(): @@ -4698,18 +4698,19 @@ if __name__ == '__main__': # pragma: no cover for file in get_php_references(): module = '' - for line in open(file): - if not module: - search = module_re.search(line) - if search: - module = search.group(1) - modules[module] = [] + with open(file) as f: + for line in f: + if not module: + search = module_re.search(line) + if search: + module = search.group(1) + modules[module] = [] - elif 'href="function.' in line: - for match in function_re.finditer(line): - fn = match.group(1) - if '->' not in fn and '::' not in fn and fn not in modules[module]: - modules[module].append(fn) + elif 'href="function.' in line: + for match in function_re.finditer(line): + fn = match.group(1) + if '->' not in fn and '::' not in fn and fn not in modules[module]: + modules[module].append(fn) if module: # These are dummy manual pages, not actual functions @@ -4726,9 +4727,8 @@ if __name__ == '__main__': # pragma: no cover def get_php_references(): download = urlretrieve(PHP_MANUAL_URL) - tar = tarfile.open(download[0]) - tar.extractall() - tar.close() + with tarfile.open(download[0]) as tar: + tar.extractall() for file in glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB)): yield file os.remove(download[0]) diff --git a/pygments/lexers/_stan_builtins.py b/pygments/lexers/_stan_builtins.py index a189647a..7f1e0ce3 100644 --- a/pygments/lexers/_stan_builtins.py +++ b/pygments/lexers/_stan_builtins.py @@ -4,24 +4,23 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This file contains the names of functions for Stan used by - ``pygments.lexers.math.StanLexer. This is for Stan language version 2.8.0. + ``pygments.lexers.math.StanLexer. This is for Stan language version 2.17.0. - :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2018 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" KEYWORDS = ( + 'break', + 'continue', 'else', 'for', 'if', 'in', - 'increment_log_prob', - 'integrate_ode', - 'lp__', 'print', 'reject', 'return', - 'while' + 'while', ) TYPES = ( @@ -35,18 +34,18 @@ TYPES = ( 'positive_ordered', 'real', 'row_vector', - 'row_vectormatrix', 'simplex', 'unit_vector', 'vector', - 'void') + 'void', +) FUNCTIONS = ( - 'Phi', - 'Phi_approx', 'abs', 'acos', 'acosh', + 'algebra_solver', + 'append_array', 'append_col', 'append_row', 'asin', @@ -54,55 +53,59 @@ FUNCTIONS = ( 'atan', 'atan2', 'atanh', - 'bernoulli_ccdf_log', 'bernoulli_cdf', - 'bernoulli_cdf_log', - 'bernoulli_log', - 'bernoulli_logit_log', + 'bernoulli_lccdf', + 'bernoulli_lcdf', + 'bernoulli_logit_lpmf', + 'bernoulli_logit_rng', + 'bernoulli_lpmf', 'bernoulli_rng', 'bessel_first_kind', 'bessel_second_kind', - 'beta_binomial_ccdf_log', 'beta_binomial_cdf', - 'beta_binomial_cdf_log', - 'beta_binomial_log', + 'beta_binomial_lccdf', + 'beta_binomial_lcdf', + 'beta_binomial_lpmf', 'beta_binomial_rng', - 'beta_ccdf_log', 'beta_cdf', - 'beta_cdf_log', - 'beta_log', + 'beta_lccdf', + 'beta_lcdf', + 'beta_lpdf', 'beta_rng', 'binary_log_loss', - 'binomial_ccdf_log', 'binomial_cdf', - 'binomial_cdf_log', 'binomial_coefficient_log', - 'binomial_log', - 'binomial_logit_log', + 'binomial_lccdf', + 'binomial_lcdf', + 'binomial_logit_lpmf', + 'binomial_lpmf', 'binomial_rng', 'block', - 'categorical_log', - 'categorical_logit_log', + 'categorical_logit_lpmf', + 'categorical_logit_rng', + 'categorical_lpmf', 'categorical_rng', - 'cauchy_ccdf_log', 'cauchy_cdf', - 'cauchy_cdf_log', - 'cauchy_log', + 'cauchy_lccdf', + 'cauchy_lcdf', + 'cauchy_lpdf', 'cauchy_rng', 'cbrt', 'ceil', - 'chi_square_ccdf_log', 'chi_square_cdf', - 'chi_square_cdf_log', - 'chi_square_log', + 'chi_square_lccdf', + 'chi_square_lcdf', + 'chi_square_lpdf', 'chi_square_rng', 'cholesky_decompose', + 'choose', 'col', 'cols', 'columns_dot_product', 'columns_dot_self', 'cos', 'cosh', + 'cov_exp_quad', 'crossprod', 'csr_extract_u', 'csr_extract_v', @@ -117,15 +120,15 @@ FUNCTIONS = ( 'diagonal', 'digamma', 'dims', - 'dirichlet_log', + 'dirichlet_lpdf', 'dirichlet_rng', 'distance', 'dot_product', 'dot_self', - 'double_exponential_ccdf_log', 'double_exponential_cdf', - 'double_exponential_cdf_log', - 'double_exponential_log', + 'double_exponential_lccdf', + 'double_exponential_lcdf', + 'double_exponential_lpdf', 'double_exponential_rng', 'e', 'eigenvalues_sym', @@ -134,16 +137,16 @@ FUNCTIONS = ( 'erfc', 'exp', 'exp2', - 'exp_mod_normal_ccdf_log', 'exp_mod_normal_cdf', - 'exp_mod_normal_cdf_log', - 'exp_mod_normal_log', + 'exp_mod_normal_lccdf', + 'exp_mod_normal_lcdf', + 'exp_mod_normal_lpdf', 'exp_mod_normal_rng', 'expm1', - 'exponential_ccdf_log', 'exponential_cdf', - 'exponential_cdf_log', - 'exponential_log', + 'exponential_lccdf', + 'exponential_lcdf', + 'exponential_lpdf', 'exponential_rng', 'fabs', 'falling_factorial', @@ -153,60 +156,65 @@ FUNCTIONS = ( 'fmax', 'fmin', 'fmod', - 'frechet_ccdf_log', 'frechet_cdf', - 'frechet_cdf_log', - 'frechet_log', + 'frechet_lccdf', + 'frechet_lcdf', + 'frechet_lpdf', 'frechet_rng', - 'gamma_ccdf_log', 'gamma_cdf', - 'gamma_cdf_log', - 'gamma_log', + 'gamma_lccdf', + 'gamma_lcdf', + 'gamma_lpdf', 'gamma_p', 'gamma_q', 'gamma_rng', - 'gaussian_dlm_obs_log', + 'gaussian_dlm_obs_lpdf', 'get_lp', - 'gumbel_ccdf_log', 'gumbel_cdf', - 'gumbel_cdf_log', - 'gumbel_log', + 'gumbel_lccdf', + 'gumbel_lcdf', + 'gumbel_lpdf', 'gumbel_rng', 'head', - 'hypergeometric_log', + 'hypergeometric_lpmf', 'hypergeometric_rng', 
'hypot', - 'if_else', + 'inc_beta', 'int_step', + 'integrate_ode', + 'integrate_ode_bdf', + 'integrate_ode_rk45', 'inv', - 'inv_chi_square_ccdf_log', 'inv_chi_square_cdf', - 'inv_chi_square_cdf_log', - 'inv_chi_square_log', + 'inv_chi_square_lccdf', + 'inv_chi_square_lcdf', + 'inv_chi_square_lpdf', 'inv_chi_square_rng', 'inv_cloglog', - 'inv_gamma_ccdf_log', 'inv_gamma_cdf', - 'inv_gamma_cdf_log', - 'inv_gamma_log', + 'inv_gamma_lccdf', + 'inv_gamma_lcdf', + 'inv_gamma_lpdf', 'inv_gamma_rng', 'inv_logit', - 'inv_phi', + 'inv_Phi', 'inv_sqrt', 'inv_square', - 'inv_wishart_log', + 'inv_wishart_lpdf', 'inv_wishart_rng', 'inverse', 'inverse_spd', 'is_inf', 'is_nan', 'lbeta', + 'lchoose', 'lgamma', - 'lkj_corr_cholesky_log', + 'lkj_corr_cholesky_lpdf', 'lkj_corr_cholesky_rng', - 'lkj_corr_log', + 'lkj_corr_lpdf', 'lkj_corr_rng', 'lmgamma', + 'lmultiply', 'log', 'log10', 'log1m', @@ -223,81 +231,87 @@ FUNCTIONS = ( 'log_rising_factorial', 'log_softmax', 'log_sum_exp', - 'logistic_ccdf_log', 'logistic_cdf', - 'logistic_cdf_log', - 'logistic_log', + 'logistic_lccdf', + 'logistic_lcdf', + 'logistic_lpdf', 'logistic_rng', 'logit', - 'lognormal_ccdf_log', 'lognormal_cdf', - 'lognormal_cdf_log', - 'lognormal_log', + 'lognormal_lccdf', + 'lognormal_lcdf', + 'lognormal_lpdf', 'lognormal_rng', 'machine_precision', + 'matrix_exp', 'max', + 'mdivide_left_spd', 'mdivide_left_tri_low', + 'mdivide_right_spd', 'mdivide_right_tri_low', 'mean', 'min', 'modified_bessel_first_kind', 'modified_bessel_second_kind', - 'multi_gp_cholesky_log', - 'multi_gp_log', - 'multi_normal_cholesky_log', + 'multi_gp_cholesky_lpdf', + 'multi_gp_lpdf', + 'multi_normal_cholesky_lpdf', 'multi_normal_cholesky_rng', - 'multi_normal_log', - 'multi_normal_prec_log', + 'multi_normal_lpdf', + 'multi_normal_prec_lpdf', 'multi_normal_rng', - 'multi_student_t_log', + 'multi_student_t_lpdf', 'multi_student_t_rng', - 'multinomial_log', + 'multinomial_lpmf', 'multinomial_rng', 'multiply_log', 'multiply_lower_tri_self_transpose', - 'neg_binomial_2_ccdf_log', 'neg_binomial_2_cdf', - 'neg_binomial_2_cdf_log', - 'neg_binomial_2_log', - 'neg_binomial_2_log_log', + 'neg_binomial_2_lccdf', + 'neg_binomial_2_lcdf', + 'neg_binomial_2_log_lpmf', 'neg_binomial_2_log_rng', + 'neg_binomial_2_lpmf', 'neg_binomial_2_rng', - 'neg_binomial_ccdf_log', 'neg_binomial_cdf', - 'neg_binomial_cdf_log', - 'neg_binomial_log', + 'neg_binomial_lccdf', + 'neg_binomial_lcdf', + 'neg_binomial_lpmf', 'neg_binomial_rng', 'negative_infinity', - 'normal_ccdf_log', 'normal_cdf', - 'normal_cdf_log', - 'normal_log', + 'normal_lccdf', + 'normal_lcdf', + 'normal_lpdf', 'normal_rng', 'not_a_number', 'num_elements', - 'ordered_logistic_log', + 'ordered_logistic_lpmf', 'ordered_logistic_rng', 'owens_t', - 'pareto_ccdf_log', 'pareto_cdf', - 'pareto_cdf_log', - 'pareto_log', + 'pareto_lccdf', + 'pareto_lcdf', + 'pareto_lpdf', 'pareto_rng', - 'pareto_type_2_ccdf_log', 'pareto_type_2_cdf', - 'pareto_type_2_cdf_log', - 'pareto_type_2_log', + 'pareto_type_2_lccdf', + 'pareto_type_2_lcdf', + 'pareto_type_2_lpdf', 'pareto_type_2_rng', + 'Phi', + 'Phi_approx', 'pi', - 'poisson_ccdf_log', 'poisson_cdf', - 'poisson_cdf_log', - 'poisson_log', - 'poisson_log_log', + 'poisson_lccdf', + 'poisson_lcdf', + 'poisson_log_lpmf', 'poisson_log_rng', + 'poisson_lpmf', 'poisson_rng', 'positive_infinity', 'pow', + 'print', 'prod', 'qr_Q', 'qr_R', @@ -305,11 +319,12 @@ FUNCTIONS = ( 'quad_form_diag', 'quad_form_sym', 'rank', - 'rayleigh_ccdf_log', 'rayleigh_cdf', - 'rayleigh_cdf_log', - 'rayleigh_log', + 
'rayleigh_lccdf', + 'rayleigh_lcdf', + 'rayleigh_lpdf', 'rayleigh_rng', + 'reject', 'rep_array', 'rep_matrix', 'rep_row_vector', @@ -320,10 +335,10 @@ FUNCTIONS = ( 'rows', 'rows_dot_product', 'rows_dot_self', - 'scaled_inv_chi_square_ccdf_log', 'scaled_inv_chi_square_cdf', - 'scaled_inv_chi_square_cdf_log', - 'scaled_inv_chi_square_log', + 'scaled_inv_chi_square_lccdf', + 'scaled_inv_chi_square_lcdf', + 'scaled_inv_chi_square_lpdf', 'scaled_inv_chi_square_rng', 'sd', 'segment', @@ -331,10 +346,10 @@ FUNCTIONS = ( 'singular_values', 'sinh', 'size', - 'skew_normal_ccdf_log', 'skew_normal_cdf', - 'skew_normal_cdf_log', - 'skew_normal_log', + 'skew_normal_lccdf', + 'skew_normal_lcdf', + 'skew_normal_lpdf', 'skew_normal_rng', 'softmax', 'sort_asc', @@ -346,10 +361,10 @@ FUNCTIONS = ( 'square', 'squared_distance', 'step', - 'student_t_ccdf_log', 'student_t_cdf', - 'student_t_cdf_log', - 'student_t_log', + 'student_t_lccdf', + 'student_t_lcdf', + 'student_t_lpdf', 'student_t_rng', 'sub_col', 'sub_row', @@ -357,6 +372,7 @@ FUNCTIONS = ( 'tail', 'tan', 'tanh', + 'target', 'tcrossprod', 'tgamma', 'to_array_1d', @@ -369,22 +385,22 @@ FUNCTIONS = ( 'trace_quad_form', 'trigamma', 'trunc', - 'uniform_ccdf_log', 'uniform_cdf', - 'uniform_cdf_log', - 'uniform_log', + 'uniform_lccdf', + 'uniform_lcdf', + 'uniform_lpdf', 'uniform_rng', 'variance', - 'von_mises_log', + 'von_mises_lpdf', 'von_mises_rng', - 'weibull_ccdf_log', 'weibull_cdf', - 'weibull_cdf_log', - 'weibull_log', + 'weibull_lccdf', + 'weibull_lcdf', + 'weibull_lpdf', 'weibull_rng', - 'wiener_log', - 'wishart_log', - 'wishart_rng' + 'wiener_lpdf', + 'wishart_lpdf', + 'wishart_rng', ) DISTRIBUTIONS = ( @@ -438,7 +454,7 @@ DISTRIBUTIONS = ( 'von_mises', 'weibull', 'wiener', - 'wishart' + 'wishart', ) RESERVED = ( @@ -469,19 +485,23 @@ RESERVED = ( 'do', 'double', 'dynamic_cast', + 'else', 'enum', 'explicit', 'export', 'extern', 'false', - 'false', 'float', + 'for', 'friend', 'fvar', 'goto', + 'if', + 'in', 'inline', 'int', 'long', + 'lp__', 'mutable', 'namespace', 'new', @@ -498,9 +518,16 @@ RESERVED = ( 'register', 'reinterpret_cast', 'repeat', + 'return', 'short', 'signed', 'sizeof', + 'STAN_MAJOR', + 'STAN_MATH_MAJOR', + 'STAN_MATH_MINOR', + 'STAN_MATH_PATCH', + 'STAN_MINOR', + 'STAN_PATCH', 'static', 'static_assert', 'static_cast', @@ -512,7 +539,6 @@ RESERVED = ( 'thread_local', 'throw', 'true', - 'true', 'try', 'typedef', 'typeid', @@ -526,7 +552,7 @@ RESERVED = ( 'void', 'volatile', 'wchar_t', + 'while', 'xor', - 'xor_eq' + 'xor_eq', ) - diff --git a/pygments/lexers/_stata_builtins.py b/pygments/lexers/_stata_builtins.py index 5f5f72a9..3e5e75b2 100644 --- a/pygments/lexers/_stata_builtins.py +++ b/pygments/lexers/_stata_builtins.py @@ -10,6 +10,10 @@ """ +builtins_special = ( + "if", "in", "using", "replace", "by", "gen", "generate" +) + builtins_base = ( "if", "else", "in", "foreach", "for", "forv", "forva", "forval", "forvalu", "forvalue", "forvalues", "by", "bys", @@ -66,7 +70,7 @@ builtins_base = ( "doedit", "dotplot", "dotplot_7", "dprobit", "drawnorm", "drop", "ds", "ds_util", "dstdize", "duplicates", "durbina", "dwstat", "dydx", "e", "ed", "edi", "edit", "egen", - "eivreg", "emdef", "en", "enc", "enco", "encod", "encode", + "eivreg", "emdef", "end", "en", "enc", "enco", "encod", "encode", "eq", "erase", "ereg", "ereg_lf", "ereg_p", "ereg_sw", "ereghet", "ereghet_glf", "ereghet_glf_sh", "ereghet_gp", "ereghet_ilf", "ereghet_ilf_sh", "ereghet_ip", "eret", @@ -415,5 +419,3 @@ builtins_functions = ( "weekly", "wofd", "word", 
"wordcount", "year", "yearly", "yh", "ym", "yofd", "yq", "yw" ) - - diff --git a/pygments/lexers/_vbscript_builtins.py b/pygments/lexers/_vbscript_builtins.py new file mode 100644 index 00000000..7d514790 --- /dev/null +++ b/pygments/lexers/_vbscript_builtins.py @@ -0,0 +1,279 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers._vbscript_builtins + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + These are manually translated lists from + http://www.indusoft.com/pdf/VBScript%20Reference.pdf. + + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" +KEYWORDS = [ + 'ByRef', + 'ByVal', + # dim: special rule + 'call', + 'case', + 'class', + # const: special rule + 'do', + 'each', + 'else', + 'elseif', + 'end', + 'erase', + 'execute', + 'function', + 'exit', + 'for', + 'function', + 'GetRef', + 'global', + 'if', + 'let', + 'loop', + 'next', + 'new', + # option: special rule + 'private', + 'public', + 'redim', + 'select', + 'set', + 'sub', + 'then', + 'wend', + 'while', + 'with', +] + +BUILTIN_FUNCTIONS = [ + 'Abs', + 'Array', + 'Asc', + 'Atn', + 'CBool', + 'CByte', + 'CCur', + 'CDate', + 'CDbl', + 'Chr', + 'CInt', + 'CLng', + 'Cos', + 'CreateObject', + 'CSng', + 'CStr', + 'Date', + 'DateAdd', + 'DateDiff', + 'DatePart', + 'DateSerial', + 'DateValue', + 'Day', + 'Eval', + 'Exp', + 'Filter', + 'Fix', + 'FormatCurrency', + 'FormatDateTime', + 'FormatNumber', + 'FormatPercent', + 'GetObject', + 'GetLocale', + 'Hex', + 'Hour', + 'InStr', + 'inStrRev', + 'Int', + 'IsArray', + 'IsDate', + 'IsEmpty', + 'IsNull', + 'IsNumeric', + 'IsObject', + 'Join', + 'LBound', + 'LCase', + 'Left', + 'Len', + 'LoadPicture', + 'Log', + 'LTrim', + 'Mid', + 'Minute', + 'Month', + 'MonthName', + 'MsgBox', + 'Now', + 'Oct', + 'Randomize', + 'RegExp', + 'Replace', + 'RGB', + 'Right', + 'Rnd', + 'Round', + 'RTrim', + 'ScriptEngine', + 'ScriptEngineBuildVersion', + 'ScriptEngineMajorVersion', + 'ScriptEngineMinorVersion', + 'Second', + 'SetLocale', + 'Sgn', + 'Space', + 'Split', + 'Sqr', + 'StrComp', + 'String', + 'StrReverse', + 'Tan', + 'Time', + 'Timer', + 'TimeSerial', + 'TimeValue', + 'Trim', + 'TypeName', + 'UBound', + 'UCase', + 'VarType', + 'Weekday', + 'WeekdayName', + 'Year', +] + +BUILTIN_VARIABLES = [ + 'Debug', + 'Dictionary', + 'Drive', + 'Drives', + 'Err', + 'File', + 'Files', + 'FileSystemObject', + 'Folder', + 'Folders', + 'Match', + 'Matches', + 'RegExp', + 'Submatches', + 'TextStream', +] + +OPERATORS = [ + '+', + '-', + '*', + '/', + '\\', + '^', + '|', + '<', + '<=', + '>', + '>=', + '=', + '<>', + '&', + '$', +] + +OPERATOR_WORDS = [ + 'mod', + 'and', + 'or', + 'xor', + 'eqv', + 'imp', + 'is', + 'not', +] + +BUILTIN_CONSTANTS = [ + 'False', + 'True', + 'vbAbort', + 'vbAbortRetryIgnore', + 'vbApplicationModal', + 'vbArray', + 'vbBinaryCompare', + 'vbBlack', + 'vbBlue', + 'vbBoole', + 'vbByte', + 'vbCancel', + 'vbCr', + 'vbCritical', + 'vbCrLf', + 'vbCurrency', + 'vbCyan', + 'vbDataObject', + 'vbDate', + 'vbDefaultButton1', + 'vbDefaultButton2', + 'vbDefaultButton3', + 'vbDefaultButton4', + 'vbDouble', + 'vbEmpty', + 'vbError', + 'vbExclamation', + 'vbFalse', + 'vbFirstFullWeek', + 'vbFirstJan1', + 'vbFormFeed', + 'vbFriday', + 'vbGeneralDate', + 'vbGreen', + 'vbIgnore', + 'vbInformation', + 'vbInteger', + 'vbLf', + 'vbLong', + 'vbLongDate', + 'vbLongTime', + 'vbMagenta', + 'vbMonday', + 'vbMsgBoxHelpButton', + 'vbMsgBoxRight', + 'vbMsgBoxRtlReading', + 'vbMsgBoxSetForeground', + 'vbNewLine', + 'vbNo', + 'vbNull', + 'vbNullChar', + 'vbNullString', + 
'vbObject', + 'vbObjectError', + 'vbOK', + 'vbOKCancel', + 'vbOKOnly', + 'vbQuestion', + 'vbRed', + 'vbRetry', + 'vbRetryCancel', + 'vbSaturday', + 'vbShortDate', + 'vbShortTime', + 'vbSingle', + 'vbString', + 'vbSunday', + 'vbSystemModal', + 'vbTab', + 'vbTextCompare', + 'vbThursday', + 'vbTrue', + 'vbTuesday', + 'vbUseDefault', + 'vbUseSystem', + 'vbUseSystem', + 'vbVariant', + 'vbVerticalTab', + 'vbWednesday', + 'vbWhite', + 'vbYellow', + 'vbYes', + 'vbYesNo', + 'vbYesNoCancel', +]
\ No newline at end of file diff --git a/pygments/lexers/actionscript.py b/pygments/lexers/actionscript.py index 84607e68..fc3b90cd 100644 --- a/pygments/lexers/actionscript.py +++ b/pygments/lexers/actionscript.py @@ -125,7 +125,7 @@ class ActionScript3Lexer(RegexLexer): 'text/actionscript3'] identifier = r'[$a-zA-Z_]\w*' - typeidentifier = identifier + '(?:\.<\w+>)?' + typeidentifier = identifier + r'(?:\.<\w+>)?' flags = re.DOTALL | re.MULTILINE tokens = { @@ -232,7 +232,7 @@ class MxmlLexer(RegexLexer): (r'/?\s*>', Name.Tag, '#pop'), ], 'attr': [ - ('\s+', Text), + (r'\s+', Text), ('".*?"', String, '#pop'), ("'.*?'", String, '#pop'), (r'[^\s>]+', String, '#pop'), diff --git a/pygments/lexers/ampl.py b/pygments/lexers/ampl.py index d439cb19..638d025d 100644 --- a/pygments/lexers/ampl.py +++ b/pygments/lexers/ampl.py @@ -3,7 +3,7 @@ pygments.lexers.ampl ~~~~~~~~~~~~~~~~~~~~ - Lexers for the ampl language. <http://ampl.com/> + Lexers for the AMPL language. :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. @@ -18,7 +18,7 @@ __all__ = ['AmplLexer'] class AmplLexer(RegexLexer): """ - For AMPL source code. + For `AMPL <http://ampl.com/>`_ source code. .. versionadded:: 2.2 """ diff --git a/pygments/lexers/apl.py b/pygments/lexers/apl.py index b3414cc0..4bb88ae3 100644 --- a/pygments/lexers/apl.py +++ b/pygments/lexers/apl.py @@ -71,14 +71,14 @@ class APLLexer(RegexLexer): # # Numbers # ======= - (u'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)' - u'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?', + (u'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)' + u'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?', Number), # # Operators # ========== - (u'[\.\\\/⌿⍀¨⍣⍨⍠⍤∘]', Name.Attribute), # closest token type - (u'[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]', + (u'[\\.\\\\\\/⌿⍀¨⍣⍨⍠⍤∘]', Name.Attribute), # closest token type + (u'[+\\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]', Operator), # # Constant diff --git a/pygments/lexers/asm.py b/pygments/lexers/asm.py index 5b8cab80..7100868c 100644 --- a/pygments/lexers/asm.py +++ b/pygments/lexers/asm.py @@ -20,7 +20,7 @@ from pygments.token import Text, Name, Number, String, Comment, Punctuation, \ __all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer', 'CObjdumpLexer', 'HsailLexer', 'LlvmLexer', 'NasmLexer', - 'NasmObjdumpLexer', 'TasmLexer', 'Ca65Lexer'] + 'NasmObjdumpLexer', 'TasmLexer', 'Ca65Lexer', 'Dasm16Lexer'] class GasLexer(RegexLexer): @@ -35,7 +35,7 @@ class GasLexer(RegexLexer): #: optional Comment or Whitespace string = r'"(\\"|[^"])*"' char = r'[\w$.@-]' - identifier = r'(?:[a-zA-Z$_]' + char + '*|\.' + char + '+)' + identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' 
+ char + '+)' number = r'(?:0[xX][a-zA-Z0-9]+|\d+)' tokens = { @@ -53,6 +53,7 @@ class GasLexer(RegexLexer): ('@' + identifier, Name.Attribute), (number, Number.Integer), (r'[\r\n]+', Text, '#pop'), + (r'[;#].*?\n', Comment, '#pop'), include('punctuation'), include('whitespace') @@ -76,6 +77,7 @@ class GasLexer(RegexLexer): ('$'+number, Number.Integer), (r"$'(.|\\')'", String.Char), (r'[\r\n]+', Text, '#pop'), + (r'[;#].*?\n', Comment, '#pop'), include('punctuation'), include('whitespace') @@ -256,7 +258,7 @@ class HsailLexer(RegexLexer): (r'0[xX][a-fA-F0-9]+', Number.Hex), (ieeefloat, Number.Float), (float, Number.Float), - ('\d+', Number.Integer), + (r'\d+', Number.Integer), (r'[=<>{}\[\]()*.,:;!]|x\b', Punctuation) ], @@ -350,7 +352,7 @@ class LlvmLexer(RegexLexer): include('whitespace'), # Before keywords, because keywords are valid label names :(... - (identifier + '\s*:', Name.Label), + (identifier + r'\s*:', Name.Label), include('keyword'), @@ -375,54 +377,63 @@ class LlvmLexer(RegexLexer): 'keyword': [ # Regular keywords (words(( - 'begin', 'end', 'true', 'false', 'declare', 'define', 'global', - 'constant', 'private', 'linker_private', 'internal', - 'available_externally', 'linkonce', 'linkonce_odr', 'weak', - 'weak_odr', 'appending', 'dllimport', 'dllexport', 'common', - 'default', 'hidden', 'protected', 'extern_weak', 'external', - 'thread_local', 'zeroinitializer', 'undef', 'null', 'to', 'tail', - 'target', 'triple', 'datalayout', 'volatile', 'nuw', 'nsw', 'nnan', - 'ninf', 'nsz', 'arcp', 'fast', 'exact', 'inbounds', 'align', - 'addrspace', 'section', 'alias', 'module', 'asm', 'sideeffect', - 'gc', 'dbg', 'linker_private_weak', 'attributes', 'blockaddress', - 'initialexec', 'localdynamic', 'localexec', 'prefix', 'unnamed_addr', - 'ccc', 'fastcc', 'coldcc', 'x86_stdcallcc', 'x86_fastcallcc', - 'arm_apcscc', 'arm_aapcscc', 'arm_aapcs_vfpcc', 'ptx_device', - 'ptx_kernel', 'intel_ocl_bicc', 'msp430_intrcc', 'spir_func', - 'spir_kernel', 'x86_64_sysvcc', 'x86_64_win64cc', 'x86_thiscallcc', - 'cc', 'c', 'signext', 'zeroext', 'inreg', 'sret', 'nounwind', - 'noreturn', 'noalias', 'nocapture', 'byval', 'nest', 'readnone', - 'readonly', 'inlinehint', 'noinline', 'alwaysinline', 'optsize', 'ssp', - 'sspreq', 'noredzone', 'noimplicitfloat', 'naked', 'builtin', 'cold', - 'nobuiltin', 'noduplicate', 'nonlazybind', 'optnone', 'returns_twice', - 'sanitize_address', 'sanitize_memory', 'sanitize_thread', 'sspstrong', - 'uwtable', 'returned', 'type', 'opaque', 'eq', 'ne', 'slt', 'sgt', - 'sle', 'sge', 'ult', 'ugt', 'ule', 'uge', 'oeq', 'one', 'olt', 'ogt', - 'ole', 'oge', 'ord', 'uno', 'ueq', 'une', 'x', 'acq_rel', 'acquire', - 'alignstack', 'atomic', 'catch', 'cleanup', 'filter', 'inteldialect', - 'max', 'min', 'monotonic', 'nand', 'personality', 'release', 'seq_cst', - 'singlethread', 'umax', 'umin', 'unordered', 'xchg', 'add', 'fadd', - 'sub', 'fsub', 'mul', 'fmul', 'udiv', 'sdiv', 'fdiv', 'urem', 'srem', - 'frem', 'shl', 'lshr', 'ashr', 'and', 'or', 'xor', 'icmp', 'fcmp', - 'phi', 'call', 'trunc', 'zext', 'sext', 'fptrunc', 'fpext', 'uitofp', - 'sitofp', 'fptoui', 'fptosi', 'inttoptr', 'ptrtoint', 'bitcast', - 'addrspacecast', 'select', 'va_arg', 'ret', 'br', 'switch', 'invoke', - 'unwind', 'unreachable', 'indirectbr', 'landingpad', 'resume', - 'malloc', 'alloca', 'free', 'load', 'store', 'getelementptr', - 'extractelement', 'insertelement', 'shufflevector', 'getresult', - 'extractvalue', 'insertvalue', 'atomicrmw', 'cmpxchg', 'fence', - 'allocsize', 'amdgpu_cs', 'amdgpu_gs', 'amdgpu_kernel', 
'amdgpu_ps', - 'amdgpu_vs', 'any', 'anyregcc', 'argmemonly', 'avr_intrcc', - 'avr_signalcc', 'caller', 'catchpad', 'catchret', 'catchswitch', - 'cleanuppad', 'cleanupret', 'comdat', 'convergent', 'cxx_fast_tlscc', - 'deplibs', 'dereferenceable', 'dereferenceable_or_null', 'distinct', - 'exactmatch', 'externally_initialized', 'from', 'ghccc', 'hhvm_ccc', - 'hhvmcc', 'ifunc', 'inaccessiblemem_or_argmemonly', 'inaccessiblememonly', - 'inalloca', 'jumptable', 'largest', 'local_unnamed_addr', 'minsize', - 'musttail', 'noduplicates', 'none', 'nonnull', 'norecurse', 'notail', - 'preserve_allcc', 'preserve_mostcc', 'prologue', 'safestack', 'samesize', - 'source_filename', 'swiftcc', 'swifterror', 'swiftself', 'webkit_jscc', - 'within', 'writeonly', 'x86_intrcc', 'x86_vectorcallcc'), + 'acq_rel', 'acquire', 'add', 'addrspace', 'addrspacecast', 'afn', 'alias', + 'aliasee', 'align', 'alignLog2', 'alignstack', 'alloca', 'allocsize', 'allOnes', + 'alwaysinline', 'amdgpu_cs', 'amdgpu_es', 'amdgpu_gs', 'amdgpu_hs', + 'amdgpu_kernel', 'amdgpu_ls', 'amdgpu_ps', 'amdgpu_vs', 'and', 'any', + 'anyregcc', 'appending', 'arcp', 'argmemonly', 'args', 'arm_aapcs_vfpcc', + 'arm_aapcscc', 'arm_apcscc', 'ashr', 'asm', 'atomic', 'atomicrmw', 'attributes', + 'available_externally', 'avr_intrcc', 'avr_signalcc', 'bit', 'bitcast', + 'bitMask', 'blockaddress', 'br', 'branchFunnel', 'builtin', 'byArg', 'byte', + 'byteArray', 'byval', 'c', 'call', 'callee', 'caller', 'calls', 'catch', + 'catchpad', 'catchret', 'catchswitch', 'cc', 'ccc', 'cleanup', 'cleanuppad', + 'cleanupret', 'cmpxchg', 'cold', 'coldcc', 'comdat', 'common', 'constant', + 'contract', 'convergent', 'critical', 'cxx_fast_tlscc', 'datalayout', 'declare', + 'default', 'define', 'deplibs', 'dereferenceable', 'dereferenceable_or_null', + 'distinct', 'dllexport', 'dllimport', 'double', 'dso_local', 'dso_preemptable', + 'dsoLocal', 'eq', 'exact', 'exactmatch', 'extern_weak', 'external', + 'externally_initialized', 'extractelement', 'extractvalue', 'fadd', 'false', + 'fast', 'fastcc', 'fcmp', 'fdiv', 'fence', 'filter', 'flags', 'float', 'fmul', + 'fp128', 'fpext', 'fptosi', 'fptoui', 'fptrunc', 'frem', 'from', 'fsub', + 'funcFlags', 'function', 'gc', 'getelementptr', 'ghccc', 'global', 'guid', 'gv', + 'half', 'hash', 'hhvm_ccc', 'hhvmcc', 'hidden', 'hot', 'hotness', 'icmp', + 'ifunc', 'inaccessiblemem_or_argmemonly', 'inaccessiblememonly', 'inalloca', + 'inbounds', 'indir', 'indirectbr', 'info', 'initialexec', 'inline', + 'inlineBits', 'inlinehint', 'inrange', 'inreg', 'insertelement', 'insertvalue', + 'insts', 'intel_ocl_bicc', 'inteldialect', 'internal', 'inttoptr', 'invoke', + 'jumptable', 'kind', 'label', 'landingpad', 'largest', 'linkage', 'linkonce', + 'linkonce_odr', 'live', 'load', 'local_unnamed_addr', 'localdynamic', + 'localexec', 'lshr', 'max', 'metadata', 'min', 'minsize', 'module', 'monotonic', + 'msp430_intrcc', 'mul', 'musttail', 'naked', 'name', 'nand', 'ne', 'nest', + 'ninf', 'nnan', 'noalias', 'nobuiltin', 'nocapture', 'nocf_check', + 'noduplicate', 'noduplicates', 'noimplicitfloat', 'noinline', 'none', + 'nonlazybind', 'nonnull', 'norecurse', 'noRecurse', 'noredzone', 'noreturn', + 'notail', 'notEligibleToImport', 'nounwind', 'nsw', 'nsz', 'null', 'nuw', 'oeq', + 'offset', 'oge', 'ogt', 'ole', 'olt', 'one', 'opaque', 'optforfuzzing', + 'optnone', 'optsize', 'or', 'ord', 'path', 'personality', 'phi', 'ppc_fp128', + 'prefix', 'preserve_allcc', 'preserve_mostcc', 'private', 'prologue', + 'protected', 'ptrtoint', 'ptx_device', 'ptx_kernel', 
'readnone', 'readNone', + 'readonly', 'readOnly', 'reassoc', 'refs', 'relbf', 'release', 'resByArg', + 'resume', 'ret', 'returnDoesNotAlias', 'returned', 'returns_twice', 'safestack', + 'samesize', 'sanitize_address', 'sanitize_hwaddress', 'sanitize_memory', + 'sanitize_thread', 'sdiv', 'section', 'select', 'seq_cst', 'sext', 'sge', 'sgt', + 'shadowcallstack', 'shl', 'shufflevector', 'sideeffect', 'signext', 'single', + 'singleImpl', 'singleImplName', 'sitofp', 'sizeM1', 'sizeM1BitWidth', 'sle', + 'slt', 'source_filename', 'speculatable', 'spir_func', 'spir_kernel', 'srem', + 'sret', 'ssp', 'sspreq', 'sspstrong', 'store', 'strictfp', 'sub', 'summaries', + 'summary', 'swiftcc', 'swifterror', 'swiftself', 'switch', 'syncscope', 'tail', + 'target', 'thread_local', 'to', 'token', 'triple', 'true', 'trunc', 'type', + 'typeCheckedLoadConstVCalls', 'typeCheckedLoadVCalls', 'typeid', 'typeIdInfo', + 'typeTestAssumeConstVCalls', 'typeTestAssumeVCalls', 'typeTestRes', 'typeTests', + 'udiv', 'ueq', 'uge', 'ugt', 'uitofp', 'ule', 'ult', 'umax', 'umin', 'undef', + 'une', 'uniformRetVal', 'uniqueRetVal', 'unknown', 'unnamed_addr', 'uno', + 'unordered', 'unreachable', 'unsat', 'unwind', 'urem', 'uselistorder', + 'uselistorder_bb', 'uwtable', 'va_arg', 'variable', 'vFuncId', + 'virtualConstProp', 'void', 'volatile', 'weak', 'weak_odr', 'webkit_jscc', + 'win64cc', 'within', 'wpdRes', 'wpdResolutions', 'writeonly', 'x', + 'x86_64_sysvcc', 'x86_fastcallcc', 'x86_fp80', 'x86_intrcc', 'x86_mmx', + 'x86_regcallcc', 'x86_stdcallcc', 'x86_thiscallcc', 'x86_vectorcallcc', 'xchg', + 'xor', 'zeroext', 'zeroinitializer', 'zext'), suffix=r'\b'), Keyword), # Types @@ -639,3 +650,109 @@ class Ca65Lexer(RegexLexer): # comments in GAS start with "#" if re.match(r'^\s*;', text, re.MULTILINE): return 0.9 + + +class Dasm16Lexer(RegexLexer): + """ + Simple lexer for DCPU-16 Assembly + + Check http://0x10c.com/doc/dcpu-16.txt + + .. versionadded:: 2.4 + """ + name = 'DASM16' + aliases = ['dasm16'] + filenames = ['*.dasm16', '*.dasm'] + mimetypes = ['text/x-dasm16'] + + INSTRUCTIONS = [ + 'SET', + 'ADD', 'SUB', + 'MUL', 'MLI', + 'DIV', 'DVI', + 'MOD', 'MDI', + 'AND', 'BOR', 'XOR', + 'SHR', 'ASR', 'SHL', + 'IFB', 'IFC', 'IFE', 'IFN', 'IFG', 'IFA', 'IFL', 'IFU', + 'ADX', 'SBX', + 'STI', 'STD', + 'JSR', + 'INT', 'IAG', 'IAS', 'RFI', 'IAQ', 'HWN', 'HWQ', 'HWI', + ] + + REGISTERS = [ + 'A', 'B', 'C', + 'X', 'Y', 'Z', + 'I', 'J', + 'SP', 'PC', 'EX', + 'POP', 'PEEK', 'PUSH' + ] + + # Regexes yo + char = r'[a-zA-Z$._0-9@]' + identifier = r'(?:[a-zA-Z$_]' + char + '*|\.' + char + '+)' + number = r'[+-]?(?:0[xX][a-zA-Z0-9]+|\d+)' + binary_number = r'0b[01_]+' + instruction = r'(?i)(' + '|'.join(INSTRUCTIONS) + ')' + single_char = r"'\\?" + char + "'" + string = r'"(\\"|[^"])*"' + + def guess_identifier(lexer, match): + ident = match.group(0) + klass = Name.Variable if ident.upper() in lexer.REGISTERS else Name.Label + yield match.start(), klass, ident + + tokens = { + 'root': [ + include('whitespace'), + (':' + identifier, Name.Label), + (identifier + ':', Name.Label), + (instruction, Name.Function, 'instruction-args'), + (r'\.' 
+ identifier, Name.Function, 'data-args'), + (r'[\r\n]+', Text) + ], + + 'numeric' : [ + (binary_number, Number.Integer), + (number, Number.Integer), + (single_char, String), + ], + + 'arg' : [ + (identifier, guess_identifier), + include('numeric') + ], + + 'deref' : [ + (r'\+', Punctuation), + (r'\]', Punctuation, '#pop'), + include('arg'), + include('whitespace') + ], + + 'instruction-line' : [ + (r'[\r\n]+', Text, '#pop'), + (r';.*?$', Comment, '#pop'), + include('whitespace') + ], + + 'instruction-args': [ + (r',', Punctuation), + (r'\[', Punctuation, 'deref'), + include('arg'), + include('instruction-line') + ], + + 'data-args' : [ + (r',', Punctuation), + include('numeric'), + (string, String), + include('instruction-line') + ], + + 'whitespace': [ + (r'\n', Text), + (r'\s+', Text), + (r';.*?\n', Comment) + ], + } diff --git a/pygments/lexers/basic.py b/pygments/lexers/basic.py index e6545ee6..1aa3274c 100644 --- a/pygments/lexers/basic.py +++ b/pygments/lexers/basic.py @@ -12,11 +12,14 @@ import re from pygments.lexer import RegexLexer, bygroups, default, words, include -from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation +from pygments.token import Comment, Error, Keyword, Name, Number, \ + Punctuation, Operator, String, Text, Whitespace +from pygments.lexers import _vbscript_builtins + __all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer', - 'QBasicLexer'] + 'QBasicLexer', 'VBScriptLexer', 'BBCBasicLexer'] + class BlitzMaxLexer(RegexLexer): @@ -498,3 +501,159 @@ class QBasicLexer(RegexLexer): def analyse_text(text): if '$DYNAMIC' in text or '$STATIC' in text: return 0.9 + + +class VBScriptLexer(RegexLexer): + """ + VBScript is scripting language that is modeled on Visual Basic. + + .. 
versionadded:: 2.4 + """ + name = 'VBScript' + aliases = [] + filenames = ['*.vbs', '*.VBS'] + flags = re.IGNORECASE + + tokens = { + 'root': [ + (r"'[^\n]*", Comment.Single), + (r'\s+', Whitespace), + ('"', String.Double, 'string'), + ('&h[0-9a-f]+', Number.Hex), + # Float variant 1, for example: 1., 1.e2, 1.2e3 + (r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float), + (r'\.[0-9]+(e[+-]?[0-9]+)?', Number.Float), # Float variant 2, for example: .1, .1e2 + (r'[0-9]+e[+-]?[0-9]+', Number.Float), # Float variant 3, for example: 123e45 + (r'\d+', Number.Integer), + ('#.+#', String), # date or time value + (r'(dim)(\s+)([a-z_][a-z0-9_]*)', + bygroups(Keyword.Declaration, Whitespace, Name.Variable), 'dim_more'), + (r'(function|sub)(\s+)([a-z_][a-z0-9_]*)', + bygroups(Keyword.Declaration, Whitespace, Name.Function)), + (r'(class)(\s+)([a-z_][a-z0-9_]*)', bygroups(Keyword.Declaration, Whitespace, Name.Class)), + (r'(const)(\s+)([a-z_][a-z0-9_]*)', bygroups(Keyword.Declaration, Whitespace, Name.Constant)), + (r'(end)(\s+)(class|function|if|property|sub|with)', bygroups(Keyword, Whitespace, Keyword)), + (r'(on)(\s+)(error)(\s+)(goto)(\s+)(0)', + bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword, Whitespace, Number.Integer)), + (r'(on)(\s+)(error)(\s+)(resume)(\s+)(next)', + bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword, Whitespace, Keyword)), + (r'(option)(\s+)(explicit)', bygroups(Keyword, Whitespace, Keyword)), + (r'(property)(\s+)(get|let|set)(\s+)([a-z_][a-z0-9_]*)', + bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration, Whitespace, Name.Property)), + (r'rem\s.*[^\n]*', Comment.Single), + (words(_vbscript_builtins.KEYWORDS, suffix=r'\b'), Keyword), + (words(_vbscript_builtins.OPERATORS), Operator), + (words(_vbscript_builtins.OPERATOR_WORDS, suffix=r'\b'), Operator.Word), + (words(_vbscript_builtins.BUILTIN_CONSTANTS, suffix=r'\b'), Name.Constant), + (words(_vbscript_builtins.BUILTIN_FUNCTIONS, suffix=r'\b'), Name.Builtin), + (words(_vbscript_builtins.BUILTIN_VARIABLES, suffix=r'\b'), Name.Builtin), + (r'[a-z_][a-z0-9_]*', Name), + (r'\b_\n', Operator), + (words(r'(),.:'), Punctuation), + (r'.+(\n)?', Error) + ], + 'dim_more': [ + (r'(\s*)(,)(\s*)([a-z_][a-z0-9]*)', bygroups(Whitespace, Punctuation, Whitespace, Name.Variable)), + default('#pop'), + ], + 'string': [ + (r'[^"\n]+', String.Double), + (r'\"\"', String.Double), + (r'"', String.Double, '#pop'), + (r'\n', Error, '#pop'), # Unterminated string + ], + } + + +class BBCBasicLexer(RegexLexer): + """ + BBC Basic was supplied on the BBC Micro, and later Acorn RISC OS. + It is also used by BBC Basic For Windows. + + .. 
versionadded:: 2.4 + """ + base_keywords = ['OTHERWISE', 'AND', 'DIV', 'EOR', 'MOD', 'OR', 'ERROR', + 'LINE', 'OFF', 'STEP', 'SPC', 'TAB', 'ELSE', 'THEN', + 'OPENIN', 'PTR', 'PAGE', 'TIME', 'LOMEM', 'HIMEM', 'ABS', + 'ACS', 'ADVAL', 'ASC', 'ASN', 'ATN', 'BGET', 'COS', 'COUNT', + 'DEG', 'ERL', 'ERR', 'EVAL', 'EXP', 'EXT', 'FALSE', 'FN', + 'GET', 'INKEY', 'INSTR', 'INT', 'LEN', 'LN', 'LOG', 'NOT', + 'OPENUP', 'OPENOUT', 'PI', 'POINT', 'POS', 'RAD', 'RND', + 'SGN', 'SIN', 'SQR', 'TAN', 'TO', 'TRUE', 'USR', 'VAL', + 'VPOS', 'CHR$', 'GET$', 'INKEY$', 'LEFT$', 'MID$', + 'RIGHT$', 'STR$', 'STRING$', 'EOF', 'PTR', 'PAGE', 'TIME', + 'LOMEM', 'HIMEM', 'SOUND', 'BPUT', 'CALL', 'CHAIN', 'CLEAR', + 'CLOSE', 'CLG', 'CLS', 'DATA', 'DEF', 'DIM', 'DRAW', 'END', + 'ENDPROC', 'ENVELOPE', 'FOR', 'GOSUB', 'GOTO', 'GCOL', 'IF', + 'INPUT', 'LET', 'LOCAL', 'MODE', 'MOVE', 'NEXT', 'ON', + 'VDU', 'PLOT', 'PRINT', 'PROC', 'READ', 'REM', 'REPEAT', + 'REPORT', 'RESTORE', 'RETURN', 'RUN', 'STOP', 'COLOUR', + 'TRACE', 'UNTIL', 'WIDTH', 'OSCLI'] + + basic5_keywords = ['WHEN', 'OF', 'ENDCASE', 'ENDIF', 'ENDWHILE', 'CASE', + 'CIRCLE', 'FILL', 'ORIGIN', 'POINT', 'RECTANGLE', 'SWAP', + 'WHILE', 'WAIT', 'MOUSE', 'QUIT', 'SYS', 'INSTALL', + 'LIBRARY', 'TINT', 'ELLIPSE', 'BEATS', 'TEMPO', 'VOICES', + 'VOICE', 'STEREO', 'OVERLAY', 'APPEND', 'AUTO', 'CRUNCH', + 'DELETE', 'EDIT', 'HELP', 'LIST', 'LOAD', 'LVAR', 'NEW', + 'OLD', 'RENUMBER', 'SAVE', 'TEXTLOAD', 'TEXTSAVE', + 'TWIN', 'TWINO', 'INSTALL', 'SUM', 'BEAT'] + + + name = 'BBC Basic' + aliases = ['bbcbasic'] + filenames = ['*.bbc'] + + tokens = { + 'root': [ + (r"[0-9]+", Name.Label), + (r"(\*)([^\n]*)", + bygroups(Keyword.Pseudo, Comment.Special)), + (r"", Whitespace, 'code'), + ], + + 'code': [ + (r"(REM)([^\n]*)", + bygroups(Keyword.Declaration, Comment.Single)), + (r'\n', Whitespace, 'root'), + (r'\s+', Whitespace), + (r':', Comment.Preproc), + + # Some special cases to make functions come out nicer + (r'(DEF)(\s*)(FN|PROC)([A-Za-z_@][\w@]*)', + bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration, Name.Function)), + (r'(FN|PROC)([A-Za-z_@][\w@]*)', + bygroups(Keyword, Name.Function)), + + (r'(GOTO|GOSUB|THEN|RESTORE)(\s*)(\d+)', + bygroups(Keyword, Whitespace, Name.Label)), + + (r'(TRUE|FALSE)', Keyword.Constant), + (r'(PAGE|LOMEM|HIMEM|TIME|WIDTH|ERL|ERR|REPORT\$|POS|VPOS|VOICES)', Keyword.Pseudo), + + (words(base_keywords), Keyword), + (words(basic5_keywords), Keyword), + + ('"', String.Double, 'string'), + + ('%[01]{1,32}', Number.Bin), + ('&[0-9a-f]{1,8}', Number.Hex), + + (r'[+-]?[0-9]+\.[0-9]*(E[+-]?[0-9]+)?', Number.Float), + (r'[+-]?\.[0-9]+(E[+-]?[0-9]+)?', Number.Float), + (r'[+-]?[0-9]+E[+-]?[0-9]+', Number.Float), + (r'[+-]?\d+', Number.Integer), + + (r'([A-Za-z_@][\w@]*[%$]?)', Name.Variable), + (r'([+\-]=|[$!|?+\-*/%^=><();]|>=|<=|<>|<<|>>|>>>|,)', Operator), + ], + 'string': [ + (r'[^"\n]+', String.Double), + (r'"', String.Double, '#pop'), + (r'\n', Error, 'root'), # Unterminated string + ], + } + + def analyse_text(text): + if text.startswith('10REM >') or text.startswith('REM >'): + return 0.9 diff --git a/pygments/lexers/bibtex.py b/pygments/lexers/bibtex.py index a6159f81..7244ef2f 100644 --- a/pygments/lexers/bibtex.py +++ b/pygments/lexers/bibtex.py @@ -101,12 +101,12 @@ class BibTeXLexer(ExtendedRegexLexer): 'quoted-string': [ (r'\{', String, 'braced-string'), ('"', String, '#pop'), - ('[^\{\"]+', String), + (r'[^\{\"]+', String), ], 'braced-string': [ (r'\{', String, '#push'), (r'\}', String, '#pop'), - ('[^\{\}]+', String), + 
(r'[^\{\}]+', String), ], 'whitespace': [ (r'\s+', Text), @@ -154,7 +154,7 @@ class BSTLexer(RegexLexer): default('#pop'), ], 'whitespace': [ - ('\s+', Text), + (r'\s+', Text), ('%.*?$', Comment.SingleLine), ], } diff --git a/pygments/lexers/c_cpp.py b/pygments/lexers/c_cpp.py index 691f5ab4..38f425db 100644 --- a/pygments/lexers/c_cpp.py +++ b/pygments/lexers/c_cpp.py @@ -36,7 +36,7 @@ class CFamilyLexer(RegexLexer): tokens = { 'whitespace': [ # preprocessor directives: without whitespace - ('^#if\s+0', Comment.Preproc, 'if0'), + (r'^#if\s+0', Comment.Preproc, 'if0'), ('^#', Comment.Preproc, 'macro'), # or with whitespace ('^(' + _ws1 + r')(#if\s+0)', @@ -84,7 +84,7 @@ class CFamilyLexer(RegexLexer): prefix=r'__', suffix=r'\b'), Keyword.Reserved), (r'(true|false|NULL)\b', Name.Builtin), (r'([a-zA-Z_]\w*)(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)), - ('[a-zA-Z_]\w*', Name), + (r'[a-zA-Z_]\w*', Name), ], 'root': [ include('whitespace'), @@ -190,9 +190,9 @@ class CLexer(CFamilyLexer): priority = 0.1 def analyse_text(text): - if re.search('^\s*#include [<"]', text, re.MULTILINE): + if re.search(r'^\s*#include [<"]', text, re.MULTILINE): return 0.1 - if re.search('^\s*#ifn?def ', text, re.MULTILINE): + if re.search(r'^\s*#ifn?def ', text, re.MULTILINE): return 0.1 diff --git a/pygments/lexers/c_like.py b/pygments/lexers/c_like.py index 38827219..58372b81 100644 --- a/pygments/lexers/c_like.py +++ b/pygments/lexers/c_like.py @@ -20,7 +20,7 @@ from pygments.lexers.c_cpp import CLexer, CppLexer from pygments.lexers import _mql_builtins __all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer', - 'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer'] + 'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer', 'CharmciLexer'] class PikeLexer(CppLexer): @@ -245,7 +245,7 @@ class ValaLexer(RegexLexer): 'ulong', 'unichar', 'ushort'), suffix=r'\b'), Keyword.Type), (r'(true|false|null)\b', Name.Builtin), - ('[a-zA-Z_]\w*', Name), + (r'[a-zA-Z_]\w*', Name), ], 'root': [ include('whitespace'), @@ -344,7 +344,7 @@ class SwigLexer(CppLexer): # SWIG directives (r'(%[a-z_][a-z0-9_]*)', Name.Function), # Special variables - ('\$\**\&?\w+', Name), + (r'\$\**\&?\w+', Name), # Stringification / additional preprocessor directives (r'##*[a-zA-Z_]\w*', Comment.Preproc), inherit, @@ -539,3 +539,23 @@ class ArduinoLexer(CppLexer): yield index, Name.Function, value else: yield index, token, value + +class CharmciLexer(CppLexer): + """ + For `Charm++ <https://charm.cs.illinois.edu>`_ interface files (.ci). 
+ """ + + name = 'Charmci' + aliases = ['charmci'] + filenames = ['*.ci'] + + mimetypes = [] + + tokens = { + 'statements': [ + (r'(module)(\s+)', bygroups(Keyword, Text), 'classname'), + (words(('mainmodule','mainchare','chare','array','group','nodegroup','message','conditional')), Keyword), + (words(('entry','aggregate','threaded','sync','exclusive','nokeep','notrace','immediate','expedited','inline','local','python','accel','readwrite','writeonly','accelblock','memcritical','packed','varsize','initproc','initnode','initcall','stacksize','createhere','createhome','reductiontarget','iget','nocopy','mutable','migratable','readonly')), Keyword), + inherit, + ], + } diff --git a/pygments/lexers/capnproto.py b/pygments/lexers/capnproto.py index 203523a1..2615dcaf 100644 --- a/pygments/lexers/capnproto.py +++ b/pygments/lexers/capnproto.py @@ -44,34 +44,34 @@ class CapnProtoLexer(RegexLexer): ], 'type': [ (r'[^][=;,(){}$]+', Name.Class), - (r'[[(]', Name.Class, 'parentype'), + (r'[\[(]', Name.Class, 'parentype'), default('#pop'), ], 'parentype': [ (r'[^][;()]+', Name.Class), - (r'[[(]', Name.Class, '#push'), + (r'[\[(]', Name.Class, '#push'), (r'[])]', Name.Class, '#pop'), default('#pop'), ], 'expression': [ (r'[^][;,(){}$]+', Literal), - (r'[[(]', Literal, 'parenexp'), + (r'[\[(]', Literal, 'parenexp'), default('#pop'), ], 'parenexp': [ (r'[^][;()]+', Literal), - (r'[[(]', Literal, '#push'), + (r'[\[(]', Literal, '#push'), (r'[])]', Literal, '#pop'), default('#pop'), ], 'annotation': [ (r'[^][;,(){}=:]+', Name.Attribute), - (r'[[(]', Name.Attribute, 'annexp'), + (r'[\[(]', Name.Attribute, 'annexp'), default('#pop'), ], 'annexp': [ (r'[^][;()]+', Name.Attribute), - (r'[[(]', Name.Attribute, '#push'), + (r'[\[(]', Name.Attribute, '#push'), (r'[])]', Name.Attribute, '#pop'), default('#pop'), ], diff --git a/pygments/lexers/chapel.py b/pygments/lexers/chapel.py index 55bf0e1e..16ce720b 100644 --- a/pygments/lexers/chapel.py +++ b/pygments/lexers/chapel.py @@ -42,16 +42,26 @@ class ChapelLexer(RegexLexer): (r'(bool|complex|imag|int|opaque|range|real|string|uint)\b', Keyword.Type), (words(( - 'align', 'as', 'atomic', 'begin', 'break', 'by', 'cobegin', - 'coforall', 'continue', 'delete', 'dmapped', 'do', 'domain', - 'else', 'enum', 'except', 'export', 'extern', 'for', 'forall', - 'if', 'index', 'inline', 'iter', 'label', 'lambda', 'let', - 'local', 'new', 'noinit', 'on', 'only', 'otherwise', 'pragma', - 'private', 'public', 'reduce', 'require', 'return', 'scan', - 'select', 'serial', 'single', 'sparse', 'subdomain', 'sync', - 'then', 'use', 'when', 'where', 'while', 'with', 'yield', + 'align', 'as', 'atomic', + 'begin', 'borrowed', 'break', 'by', + 'catch', 'cobegin', 'coforall', 'continue', + 'delete', 'dmapped', 'do', 'domain', + 'else', 'enum', 'except', 'export', 'extern', + 'for', 'forall', + 'if', 'index', 'inline', + 'label', 'lambda', 'let', 'local', + 'new', 'noinit', + 'on', 'only', 'otherwise', 'override', 'owned', + 'pragma', 'private', 'prototype', 'public', + 'reduce', 'require', 'return', + 'scan', 'select', 'serial', 'shared', 'single', 'sparse', 'subdomain', 'sync', + 'then', 'throw', 'throws', 'try', + 'unmanaged', 'use', + 'when', 'where', 'while', 'with', + 'yield', 'zip'), suffix=r'\b'), Keyword), + (r'(iter)((?:\s)+)', bygroups(Keyword, Text), 'procname'), (r'(proc)((?:\s)+)', bygroups(Keyword, Text), 'procname'), (r'(class|module|record|union)(\s+)', bygroups(Keyword, Text), 'classname'), @@ -96,7 +106,7 @@ class ChapelLexer(RegexLexer): (r'[a-zA-Z_][\w$]*', Name.Class, 
'#pop'), ], 'procname': [ - (r'([a-zA-Z_][\w$]+|\~[a-zA-Z_][\w$]+|[+*/!~%<>=&^|\-]{1,2})', + (r'([a-zA-Z_][.\w$]*|\~[a-zA-Z_][.\w$]*|[+*/!~%<>=&^|\-]{1,2})', Name.Function, '#pop'), ], } diff --git a/pygments/lexers/clean.py b/pygments/lexers/clean.py index ba2569f6..dc973bea 100644 --- a/pygments/lexers/clean.py +++ b/pygments/lexers/clean.py @@ -5,14 +5,13 @@ Lexer for the Clean language. - :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -from pygments.lexer import ExtendedRegexLexer, LexerContext, \ - bygroups, words, include, default -from pygments.token import Comment, Keyword, Literal, Name, Number, Operator, \ - Punctuation, String, Text, Whitespace +from pygments.lexer import ExtendedRegexLexer, words, include, bygroups +from pygments.token import Comment, Error, Keyword, Literal, Name, Number, \ + Operator, Punctuation, String, Whitespace __all__ = ['CleanLexer'] @@ -28,261 +27,152 @@ class CleanLexer(ExtendedRegexLexer): aliases = ['clean'] filenames = ['*.icl', '*.dcl'] - def get_tokens_unprocessed(self, text=None, context=None): - ctx = LexerContext(text, 0) - ctx.indent = 0 - return ExtendedRegexLexer.get_tokens_unprocessed(self, text, context=ctx) + keywords = ( + 'case', 'ccall', 'class', 'code', 'code inline', 'derive', 'export', + 'foreign', 'generic', 'if', 'in', 'infix', 'infixl', 'infixr', + 'instance', 'let', 'of', 'otherwise', 'special', 'stdcall', 'where', + 'with') - def check_class_not_import(lexer, match, ctx): - if match.group(0) == 'import': - yield match.start(), Keyword.Namespace, match.group(0) - ctx.stack = ctx.stack[:-1] + ['fromimportfunc'] - else: - yield match.start(), Name.Class, match.group(0) - ctx.pos = match.end() + modulewords = ('implementation', 'definition', 'system') - def check_instance_class(lexer, match, ctx): - if match.group(0) == 'instance' or match.group(0) == 'class': - yield match.start(), Keyword, match.group(0) - else: - yield match.start(), Name.Function, match.group(0) - ctx.stack = ctx.stack + ['fromimportfunctype'] - ctx.pos = match.end() - - @staticmethod - def indent_len(text): - # Tabs are four spaces: - # https://svn.cs.ru.nl/repos/clean-platform/trunk/doc/STANDARDS.txt - text = text.replace('\n', '') - return len(text.replace('\t', ' ')), len(text) - - def store_indent(lexer, match, ctx): - ctx.indent, _ = CleanLexer.indent_len(match.group(0)) - ctx.pos = match.end() - yield match.start(), Text, match.group(0) - - def check_indent1(lexer, match, ctx): - indent, reallen = CleanLexer.indent_len(match.group(0)) - if indent > ctx.indent: - yield match.start(), Whitespace, match.group(0) - ctx.pos = match.start() + reallen + 1 - else: - ctx.indent = 0 - ctx.pos = match.start() - ctx.stack = ctx.stack[:-1] - yield match.start(), Whitespace, match.group(0)[1:] - - def check_indent2(lexer, match, ctx): - indent, reallen = CleanLexer.indent_len(match.group(0)) - if indent > ctx.indent: - yield match.start(), Whitespace, match.group(0) - ctx.pos = match.start() + reallen + 1 - else: - ctx.indent = 0 - ctx.pos = match.start() - ctx.stack = ctx.stack[:-2] - - def check_indent3(lexer, match, ctx): - indent, reallen = CleanLexer.indent_len(match.group(0)) - if indent > ctx.indent: - yield match.start(), Whitespace, match.group(0) - ctx.pos = match.start() + reallen + 1 - else: - ctx.indent = 0 - ctx.pos = match.start() - ctx.stack = ctx.stack[:-3] - yield match.start(), Whitespace, match.group(0)[1:] - if 
match.group(0) == '\n\n': - ctx.pos = ctx.pos + 1 - - def skip(lexer, match, ctx): - ctx.stack = ctx.stack[:-1] - ctx.pos = match.end() - yield match.start(), Comment, match.group(0) - - keywords = ('class', 'instance', 'where', 'with', 'let', 'let!', - 'in', 'case', 'of', 'infix', 'infixr', 'infixl', 'generic', - 'derive', 'otherwise', 'code', 'inline') + lowerId = r'[a-z`][\w\d`]*' + upperId = r'[A-Z`][\w\d`]*' + funnyId = r'[~@#\$%\^?!+\-*<>\\/|&=:]+' + scoreUpperId = r'_' + upperId + scoreLowerId = r'_' + lowerId + moduleId = r'[a-zA-Z_][a-zA-Z0-9_.`]+' + classId = '|'.join([lowerId, upperId, funnyId]) tokens = { - 'common': [ - (r';', Punctuation, '#pop'), - (r'//', Comment, 'singlecomment'), - ], 'root': [ - # Comments + include('comments'), + include('keywords'), + include('module'), + include('import'), + include('whitespace'), + include('literals'), + include('operators'), + include('delimiters'), + include('names'), + ], + 'whitespace': [ + (r'\s+', Whitespace), + ], + 'comments': [ (r'//.*\n', Comment.Single), - (r'(?s)/\*\*.*?\*/', Comment.Special), - (r'(?s)/\*.*?\*/', Comment.Multi), - - # Modules, imports, etc. - (r'\b((?:implementation|definition|system)\s+)?(module)(\s+)([\w`.]+)', - bygroups(Keyword.Namespace, Keyword.Namespace, Text, Name.Class)), - (r'(?<=\n)import(?=\s)', Keyword.Namespace, 'import'), - (r'(?<=\n)from(?=\s)', Keyword.Namespace, 'fromimport'), - - # Keywords - # We cannot use (?s)^|(?<=\s) as prefix, so need to repeat this - (words(keywords, prefix=r'(?<=\s)', suffix=r'(?=\s)'), Keyword), - (words(keywords, prefix=r'^', suffix=r'(?=\s)'), Keyword), - - # Function definitions - (r'(?=\{\|)', Whitespace, 'genericfunction'), - (r'(?<=\n)([ \t]*)([\w`$()=\-<>~*\^|+&%]+)((?:\s+\w)*)(\s*)(::)', - bygroups(store_indent, Name.Function, Keyword.Type, Whitespace, - Punctuation), - 'functiondefargs'), - - # Type definitions - (r'(?<=\n)([ \t]*)(::)', bygroups(store_indent, Punctuation), 'typedef'), - (r'^([ \t]*)(::)', bygroups(store_indent, Punctuation), 'typedef'), - - # Literals - (r'\'\\?.(?<!\\)\'', String.Char), - (r'\'\\\d+\'', String.Char), - (r'\'\\\\\'', String.Char), # (special case for '\\') - (r'[+\-~]?\s*\d+\.\d+(E[+\-~]?\d+)?\b', Number.Float), - (r'[+\-~]?\s*0[0-7]\b', Number.Oct), - (r'[+\-~]?\s*0x[0-9a-fA-F]\b', Number.Hex), - (r'[+\-~]?\s*\d+\b', Number.Integer), - (r'"', String.Double, 'doubleqstring'), - (words(('True', 'False'), prefix=r'(?<=\s)', suffix=r'(?=\s)'), - Literal), - - # Qualified names - (r'(\')([\w.]+)(\'\.)', - bygroups(Punctuation, Name.Namespace, Punctuation)), - - # Everything else is some name - (r'([\w`$%/?@]+\.?)*[\w`$%/?@]+', Name), - - # Punctuation - (r'[{}()\[\],:;.#]', Punctuation), - (r'[+\-=!<>|&~*\^/]', Operator), - (r'\\\\', Operator), - - # Lambda expressions - (r'\\.*?(->|\.|=)', Name.Function), - - # Whitespace - (r'\s', Whitespace), - - include('common'), + (r'/\*', Comment.Multi, 'comments.in'), + (r'/\*\*', Comment.Special, 'comments.in'), ], - 'fromimport': [ - include('common'), - (r'([\w`.]+)', check_class_not_import), - (r'\n', Whitespace, '#pop'), - (r'\s', Whitespace), + 'comments.in': [ + (r'\*\/', Comment.Multi, '#pop'), + (r'/\*', Comment.Multi, '#push'), + (r'[^*/]+', Comment.Multi), + (r'\*(?!/)', Comment.Multi), + (r'/', Comment.Multi), ], - 'fromimportfunc': [ - include('common'), - (r'(::)(\s+)([^,\s]+)', bygroups(Punctuation, Text, Keyword.Type)), - (r'([\w`$()=\-<>~*\^|+&%/]+)', check_instance_class), - (r',', Punctuation), - (r'\n', Whitespace, '#pop'), - (r'\s', Whitespace), + 'keywords': 
[ + (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword), ], - 'fromimportfunctype': [ - include('common'), - (r'[{(\[]', Punctuation, 'combtype'), - (r',', Punctuation, '#pop'), - (r'[:;.#]', Punctuation), - (r'\n', Whitespace, '#pop:2'), - (r'[^\S\n]+', Whitespace), - (r'\S+', Keyword.Type), + 'module': [ + (words(modulewords, prefix=r'\b', suffix=r'\b'), Keyword.Namespace), + (r'\bmodule\b', Keyword.Namespace, 'module.name'), ], - 'combtype': [ - include('common'), - (r'[})\]]', Punctuation, '#pop'), - (r'[{(\[]', Punctuation, '#pop'), - (r'[,:;.#]', Punctuation), - (r'\s+', Whitespace), - (r'\S+', Keyword.Type), + 'module.name': [ + include('whitespace'), + (moduleId, Name.Class, '#pop'), ], 'import': [ - include('common'), - (words(('from', 'import', 'as', 'qualified'), - prefix='(?<=\s)', suffix='(?=\s)'), Keyword.Namespace), - (r'[\w`.]+', Name.Class), - (r'\n', Whitespace, '#pop'), - (r',', Punctuation), - (r'[^\S\n]+', Whitespace), - ], - 'singlecomment': [ - (r'(.)(?=\n)', skip), - (r'.+(?!\n)', Comment), - ], - 'doubleqstring': [ - (r'[^\\"]+', String.Double), - (r'"', String.Double, '#pop'), - (r'\\.', String.Double), - ], - 'typedef': [ - include('common'), - (r'[\w`]+', Keyword.Type), - (r'[:=|(),\[\]{}!*]', Punctuation), - (r'->', Punctuation), - (r'\n(?=[^\s|])', Whitespace, '#pop'), - (r'\s', Whitespace), - (r'.', Keyword.Type), + (r'\b(import)\b(\s*)', bygroups(Keyword, Whitespace), 'import.module'), + (r'\b(from)\b(\s*)\b(' + moduleId + r')\b(\s*)\b(import)\b', + bygroups(Keyword, Whitespace, Name.Class, Whitespace, Keyword), + 'import.what'), + ], + 'import.module': [ + (r'\b(qualified)\b(\s*)', bygroups(Keyword, Whitespace)), + (r'(\s*)\b(as)\b', bygroups(Whitespace, Keyword), ('#pop', 'import.module.as')), + (moduleId, Name.Class), + (r'(\s*)(,)(\s*)', bygroups(Whitespace, Punctuation, Whitespace)), + (r'\s*', Whitespace, '#pop'), + ], + 'import.module.as': [ + include('whitespace'), + (lowerId, Name.Class, '#pop'), + (upperId, Name.Class, '#pop'), + ], + 'import.what': [ + (r'\b(class)\b(\s+)(' + classId + r')', + bygroups(Keyword, Whitespace, Name.Class), 'import.what.class'), + (r'\b(instance)(\s+)(' + classId + r')(\s+)', + bygroups(Keyword, Whitespace, Name.Class, Whitespace), 'import.what.instance'), + (r'(::)(\s*)\b(' + upperId + r')\b', + bygroups(Punctuation, Whitespace, Name.Class), 'import.what.type'), + (r'\b(generic)\b(\s+)\b(' + lowerId + '|' + upperId + r')\b', + bygroups(Keyword, Whitespace, Name)), + include('names'), + (r'(,)(\s+)', bygroups(Punctuation, Whitespace)), + (r'$', Whitespace, '#pop'), + include('whitespace'), + ], + 'import.what.class': [ + (r',', Punctuation, '#pop'), + (r'\(', Punctuation, 'import.what.class.members'), + (r'$', Whitespace, '#pop:2'), + include('whitespace'), ], - 'genericfunction': [ - include('common'), - (r'\{\|', Punctuation), - (r'\|\}', Punctuation, '#pop'), + 'import.what.class.members': [ (r',', Punctuation), - (r'->', Punctuation), - (r'(\s+of\s+)(\{)', bygroups(Keyword, Punctuation), 'genericftypes'), - (r'\s', Whitespace), - (r'[\w`\[\]{}!]+', Keyword.Type), - (r'[*()]', Punctuation), + (r'\.\.', Punctuation), + (r'\)', Punctuation, '#pop'), + include('names'), + ], + 'import.what.instance': [ + (r'[,)]', Punctuation, '#pop'), + (r'\(', Punctuation, 'import.what.instance'), + (r'$', Whitespace, '#pop:2'), + include('whitespace'), + include('names'), + ], + 'import.what.type': [ + (r',', Punctuation, '#pop'), + (r'[({]', Punctuation, 'import.what.type.consesandfields'), + (r'$', Whitespace, '#pop:2'), + 
include('whitespace'), ], - 'genericftypes': [ - include('common'), - (r'[\w`]+', Keyword.Type), + 'import.what.type.consesandfields': [ (r',', Punctuation), - (r'\s', Whitespace), - (r'\}', Punctuation, '#pop'), - ], - 'functiondefargs': [ - include('common'), - (r'\n(\s*)', check_indent1), - (r'[!{}()\[\],:;.#]', Punctuation), - (r'->', Punctuation, 'functiondefres'), - (r'^(?=\S)', Whitespace, '#pop'), - (r'\S', Keyword.Type), - (r'\s', Whitespace), - ], - 'functiondefres': [ - include('common'), - (r'\n(\s*)', check_indent2), - (r'^(?=\S)', Whitespace, '#pop:2'), - (r'[!{}()\[\],:;.#]', Punctuation), - (r'\|', Punctuation, 'functiondefclasses'), - (r'\S', Keyword.Type), - (r'\s', Whitespace), - ], - 'functiondefclasses': [ - include('common'), - (r'\n(\s*)', check_indent3), - (r'^(?=\S)', Whitespace, '#pop:3'), - (r'[,&]', Punctuation), - (r'\[', Punctuation, 'functiondefuniquneq'), - (r'[\w`$()=\-<>~*\^|+&%/{}\[\]@]', Name.Function, 'functionname'), - (r'\s+', Whitespace), - ], - 'functiondefuniquneq': [ - include('common'), - (r'[a-z]+', Keyword.Type), - (r'\s+', Whitespace), - (r'<=|,', Punctuation), - (r'\]', Punctuation, '#pop') - ], - 'functionname': [ - include('common'), - (r'[\w`$()=\-<>~*\^|+&%/]+', Name.Function), - (r'(?=\{\|)', Punctuation, 'genericfunction'), - default('#pop'), + (r'\.\.', Punctuation), + (r'[)}]', Punctuation, '#pop'), + include('names'), + ], + 'literals': [ + (r'\'([^\'\\]|\\(x[\da-fA-F]+|\d+|.))\'', Literal.Char), + (r'[+~-]?0[0-7]+\b', Number.Oct), + (r'[+~-]?\d+\.\d+(E[+-]?\d+)?', Number.Float), + (r'[+~-]?\d+\b', Number.Integer), + (r'[+~-]?0x[\da-fA-F]+\b', Number.Hex), + (r'True|False', Literal), + (r'"', String.Double, 'literals.stringd'), + ], + 'literals.stringd': [ + (r'[^\\"\n]+', String.Double), + (r'"', String.Double, '#pop'), + (r'\\.', String.Double), + (r'[$\n]', Error, '#pop'), + ], + 'operators': [ + (r'[-~@#\$%\^?!+*<>\\/|&=:\.]+', Operator), + (r'\b_+\b', Operator), + ], + 'delimiters': [ + (r'[,;(){}\[\]]', Punctuation), + (r'(\')([\w`.]+)(\')', + bygroups(Punctuation, Name.Class, Punctuation)), + ], + 'names': [ + (lowerId, Name), + (scoreLowerId, Name), + (funnyId, Name.Function), + (upperId, Name.Class), + (scoreUpperId, Name.Class), ] } diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py index c39b1a52..6f2c7c76 100644 --- a/pygments/lexers/configs.py +++ b/pygments/lexers/configs.py @@ -15,12 +15,13 @@ from pygments.lexer import RegexLexer, default, words, bygroups, include, using from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Whitespace, Literal from pygments.lexers.shell import BashLexer +from pygments.lexers.data import JsonLexer __all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer', 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer', 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer', 'TerraformLexer', 'TermcapLexer', 'TerminfoLexer', - 'PkgConfigLexer', 'PacmanConfLexer'] + 'PkgConfigLexer', 'PacmanConfLexer', 'AugeasLexer', 'TOMLLexer'] class IniLexer(RegexLexer): @@ -539,20 +540,25 @@ class DockerLexer(RegexLexer): filenames = ['Dockerfile', '*.docker'] mimetypes = ['text/x-dockerfile-config'] - _keywords = (r'(?:FROM|MAINTAINER|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|' - r'VOLUME|WORKDIR)') - + _keywords = (r'(?:FROM|MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)') + _bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)') + _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex flags = re.IGNORECASE | re.MULTILINE tokens = { 'root': 
[ - (r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,), - bygroups(Name.Keyword, Whitespace, Keyword)), - (r'^(%s)\b(.*)' % (_keywords,), bygroups(Keyword, String)), (r'#.*', Comment), - (r'RUN', Keyword), # Rest of line falls through + (r'(ONBUILD)(%s)' % (_lb,), bygroups(Keyword, using(BashLexer))), + (r'(HEALTHCHECK)((%s--\w+=\w+%s)*)' % (_lb, _lb), + bygroups(Keyword, using(BashLexer))), + (r'(VOLUME|ENTRYPOINT|CMD|SHELL)(%s)(\[.*?\])' % (_lb,), + bygroups(Keyword, using(BashLexer), using(JsonLexer))), + (r'(LABEL|ENV|ARG)((%s\w+=\w+%s)*)' % (_lb, _lb), + bygroups(Keyword, using(BashLexer))), + (r'(%s|VOLUME)\b(.*)' % (_keywords), bygroups(Keyword, String)), + (r'(%s)' % (_bash_keywords,), Keyword), (r'(.*\\\n)*.+', using(BashLexer)), - ], + ] } @@ -568,6 +574,8 @@ class TerraformLexer(RegexLexer): filenames = ['*.tf'] mimetypes = ['application/x-tf', 'application/x-terraform'] + embedded_keywords = ('ingress', 'egress', 'listener', 'default', 'connection', 'alias', 'tags', 'lifecycle', 'timeouts') + tokens = { 'root': [ include('string'), @@ -584,9 +592,8 @@ class TerraformLexer(RegexLexer): (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)), (words(('variable', 'resource', 'provider', 'provisioner', 'module'), prefix=r'\b', suffix=r'\b'), Keyword.Reserved, 'function'), - (words(('ingress', 'egress', 'listener', 'default', 'connection', 'alias'), - prefix=r'\b', suffix=r'\b'), Keyword.Declaration), - ('\$\{', String.Interpol, 'var_builtin'), + (words(embedded_keywords, prefix=r'\b', suffix=r'\b'), Keyword.Declaration), + (r'\$\{', String.Interpol, 'var_builtin'), ], 'function': [ (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)), @@ -831,3 +838,97 @@ class PacmanConfLexer(RegexLexer): (r'.', Text), ], } + + +class AugeasLexer(RegexLexer): + """ + Lexer for `Augeas <http://augeas.net>`_. + + .. versionadded:: 2.4 + """ + name = 'Augeas' + aliases = ['augeas'] + filenames = ['*.aug'] + + tokens = { + 'root': [ + (r'(module)(\s*)([^\s=]+)', bygroups(Keyword.Namespace, Text, Name.Namespace)), + (r'(let)(\s*)([^\s=]+)', bygroups(Keyword.Declaration, Text, Name.Variable)), + (r'(del|store|value|counter|seq|key|label|autoload|incl|excl|transform|test|get|put)(\s+)', bygroups(Name.Builtin, Text)), + (r'(\()([^:]+)(\:)(unit|string|regexp|lens|tree|filter)(\))', bygroups(Punctuation, Name.Variable, Punctuation, Keyword.Type, Punctuation)), + (r'\(\*', Comment.Multiline, 'comment'), + (r'[*+\-.;=?|]', Operator), + (r'[()\[\]{}]', Operator), + (r'"', String.Double, 'string'), + (r'\/', String.Regex, 'regex'), + (r'([A-Z]\w*)(\.)(\w+)', bygroups(Name.Namespace, Punctuation, Name.Variable)), + (r'.', Name.Variable), + (r'\s', Text), + ], + 'string': [ + (r'\\.', String.Escape), + (r'[^"]', String.Double), + (r'"', String.Double, '#pop'), + ], + 'regex': [ + (r'\\.', String.Escape), + (r'[^/]', String.Regex), + (r'\/', String.Regex, '#pop'), + ], + 'comment': [ + (r'[^*)]', Comment.Multiline), + (r'\(\*', Comment.Multiline, '#push'), + (r'\*\)', Comment.Multiline, '#pop'), + (r'[)*]', Comment.Multiline) + ], + } + + +class TOMLLexer(RegexLexer): + """ + Lexer for `TOML <https://github.com/toml-lang/toml>`_, a simple language + for config files. + + .. 
versionadded:: 2.4 + """ + + name = 'TOML' + aliases = ['toml'] + filenames = ['*.toml'] + + tokens = { + 'root': [ + + # Basics, comments, strings + (r'\s+', Text), + (r'#.*?$', Comment.Single), + # Basic string + (r'"(\\\\|\\"|[^"])*"', String), + # Literal string + (r'\'\'\'(.*)\'\'\'', String), + (r'\'[^\']*\'', String), + (r'(true|false)$', Keyword.Constant), + (r'[a-zA-Z_][\w\-]*', Name), + + (r'\[.*?\]$', Keyword), + # Datetime + # TODO this needs to be expanded, as TOML is rather flexible: + # https://github.com/toml-lang/toml#offset-date-time + (r'\d{4}-\d{2}-\d{2}(?:T| )\d{2}:\d{2}:\d{2}(?:Z|[-+]\d{2}:\d{2})', Number.Integer), + + # Numbers + (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float), + (r'\d+[eE][+-]?[0-9]+j?', Number.Float), + # Handle +-inf, +-infinity, +-nan + (r'[+-]?(?:(inf(?:inity)?)|nan)', Number.Float), + (r'[+-]?\d+', Number.Integer), + + # Punctuation + (r'[]{}:(),;[]', Punctuation), + (r'\.', Punctuation), + + # Operators + (r'=', Operator) + + ] + } diff --git a/pygments/lexers/csound.py b/pygments/lexers/csound.py index 858aa348..161e1576 100644 --- a/pygments/lexers/csound.py +++ b/pygments/lexers/csound.py @@ -3,7 +3,7 @@ pygments.lexers.csound ~~~~~~~~~~~~~~~~~~~~~~ - Lexers for CSound languages. + Lexers for Csound languages. :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. @@ -12,9 +12,9 @@ import re from pygments.lexer import RegexLexer, bygroups, default, include, using, words -from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, \ - String, Text -from pygments.lexers._csound_builtins import OPCODES +from pygments.token import Comment, Error, Keyword, Name, Number, Operator, Punctuation, \ + String, Text, Whitespace +from pygments.lexers._csound_builtins import OPCODES, DEPRECATED_OPCODES from pygments.lexers.html import HtmlLexer from pygments.lexers.python import PythonLexer from pygments.lexers.scripting import LuaLexer @@ -25,74 +25,104 @@ newline = (r'((?:(?:;|//).*)*)(\n)', bygroups(Comment.Single, Text)) class CsoundLexer(RegexLexer): - # Subclasses must define a 'single-line string' state. 
tokens = { 'whitespace': [ (r'[ \t]+', Text), - (r'\\\n', Text), - (r'/[*](.|\n)*?[*]/', Comment.Multiline) + (r'/[*](?:.|\n)*?[*]/', Comment.Multiline), + (r'(?:;|//).*$', Comment.Single), + (r'(\\)(\n)', bygroups(Whitespace, Text)) ], - 'macro call': [ - (r'(\$\w+\.?)(\()', bygroups(Comment.Preproc, Punctuation), - 'function macro call'), - (r'\$\w+(\.|\b)', Comment.Preproc) - ], - 'function macro call': [ - (r"((?:\\['\)]|[^'\)])+)(')", bygroups(Comment.Preproc, Punctuation)), - (r"([^'\)]+)(\))", bygroups(Comment.Preproc, Punctuation), '#pop') + 'preprocessor directives': [ + (r'#(?:e(?:nd(?:if)?|lse)\b|##)|@@?[ \t]*\d+', Comment.Preproc), + (r'#include', Comment.Preproc, 'include directive'), + (r'#[ \t]*define', Comment.Preproc, 'define directive'), + (r'#(?:ifn?def|undef)\b', Comment.Preproc, 'macro directive') ], - 'whitespace or macro call': [ + 'include directive': [ include('whitespace'), - include('macro call') + (r'([^ \t]).*?\1', String, '#pop') ], - 'preprocessor directives': [ - (r'#(e(nd(if)?|lse)|ifn?def|undef)\b|##', Comment.Preproc), - (r'#include\b', Comment.Preproc, 'include'), - (r'#[ \t]*define\b', Comment.Preproc, 'macro name'), - (r'@+[ \t]*\d*', Comment.Preproc) + 'define directive': [ + (r'\n', Text), + include('whitespace'), + (r'([A-Z_a-z]\w*)(\()', bygroups(Comment.Preproc, Punctuation), + ('#pop', 'macro parameter name list')), + (r'[A-Z_a-z]\w*', Comment.Preproc, ('#pop', 'before macro body')) ], - - 'include': [ + 'macro parameter name list': [ include('whitespace'), - (r'"', String, 'single-line string') + (r'[A-Z_a-z]\w*', Comment.Preproc), + (r"['#]", Punctuation), + (r'\)', Punctuation, ('#pop', 'before macro body')) ], - - 'macro name': [ + 'before macro body': [ + (r'\n', Text), include('whitespace'), - (r'(\w+)(\()', bygroups(Comment.Preproc, Text), - 'function macro argument list'), - (r'\w+', Comment.Preproc, 'object macro definition after name') + (r'#', Punctuation, ('#pop', 'macro body')) + ], + 'macro body': [ + (r'(?:\\(?!#)|[^#\\]|\n)+', Comment.Preproc), + (r'\\#', Comment.Preproc), + (r'(?<!\\)#', Punctuation, '#pop') ], - 'object macro definition after name': [ + + 'macro directive': [ include('whitespace'), - (r'#', Punctuation, 'object macro replacement text') + (r'[A-Z_a-z]\w*', Comment.Preproc, '#pop') + ], + + 'macro uses': [ + (r'(\$[A-Z_a-z]\w*\.?)(\()', bygroups(Comment.Preproc, Punctuation), + 'macro parameter value list'), + (r'\$[A-Z_a-z]\w*(?:\.|\b)', Comment.Preproc) ], - 'object macro replacement text': [ - (r'(\\#|[^#])+', Comment.Preproc), - (r'#', Punctuation, '#pop:3') + 'macro parameter value list': [ + (r'(?:[^\'#"{()]|\{(?!\{))+', Comment.Preproc), + (r"['#]", Punctuation), + (r'"', String, 'macro parameter value quoted string'), + (r'\{\{', String, 'macro parameter value braced string'), + (r'\(', Comment.Preproc, 'macro parameter value parenthetical'), + (r'\)', Punctuation, '#pop') ], - 'function macro argument list': [ - (r"(\w+)(['#])", bygroups(Comment.Preproc, Punctuation)), - (r'(\w+)(\))', bygroups(Comment.Preproc, Punctuation), - 'function macro definition after name') + 'macro parameter value quoted string': [ + (r"\\[#'()]", Comment.Preproc), + (r"[#'()]", Error), + include('quoted string') ], - 'function macro definition after name': [ - (r'[ \t]+', Text), - (r'#', Punctuation, 'function macro replacement text') + 'macro parameter value braced string': [ + (r"\\[#'()]", Comment.Preproc), + (r"[#'()]", Error), + include('braced string') + ], + 'macro parameter value parenthetical': [ + 
(r'(?:[^\\()]|\\\))+', Comment.Preproc), + (r'\(', Comment.Preproc, '#push'), + (r'\)', Comment.Preproc, '#pop') ], - 'function macro replacement text': [ - (r'(\\#|[^#])+', Comment.Preproc), - (r'#', Punctuation, '#pop:4') + + 'whitespace and macro uses': [ + include('whitespace'), + include('macro uses') + ], + + 'numbers': [ + (r'\d+[Ee][+-]?\d+|(\d+\.\d*|\d*\.\d+)([Ee][+-]?\d+)?', Number.Float), + (r'(0[Xx])([0-9A-Fa-f]+)', bygroups(Keyword.Type, Number.Hex)), + (r'\d+', Number.Integer) + ], + + 'braced string': [ + # Do nothing. This must be defined in subclasses. ] } class CsoundScoreLexer(CsoundLexer): """ - For `Csound <http://csound.github.io>`_ scores. + For `Csound <https://csound.github.io>`_ scores. .. versionadded:: 2.1 """ @@ -102,47 +132,77 @@ class CsoundScoreLexer(CsoundLexer): filenames = ['*.sco'] tokens = { - 'partial statement': [ + 'root': [ + (r'\n', Text), + include('whitespace and macro uses'), include('preprocessor directives'), - (r'\d+e[+-]?\d+|(\d+\.\d*|\d*\.\d+)(e[+-]?\d+)?', Number.Float), - (r'0[xX][a-fA-F0-9]+', Number.Hex), - (r'\d+', Number.Integer), - (r'"', String, 'single-line string'), - (r'[+\-*/%^!=<>|&#~.]', Operator), - (r'[]()[]', Punctuation), - (r'\w+', Comment.Preproc) - ], - 'statement': [ - include('whitespace or macro call'), - newline + ('#pop',), - include('partial statement') + (r'[abCdefiqstvxy]', Keyword), + # There is also a w statement that is generated internally and should not be + # used; see https://github.com/csound/csound/issues/750. + + (r'z', Keyword.Constant), + # z is a constant equal to 800,000,000,000. 800 billion seconds is about + # 25,367.8 years. See also + # https://csound.github.io/docs/manual/ScoreTop.html and + # https://github.com/csound/csound/search?q=stof+path%3AEngine+filename%3Asread.c. + + (r'([nNpP][pP])(\d+)', bygroups(Keyword, Number.Integer)), + + (r'[mn]', Keyword, 'mark statement'), + + include('numbers'), + (r'[!+\-*/^%&|<>#~.]', Operator), + (r'[()\[\]]', Punctuation), + (r'"', String, 'quoted string'), + (r'\{', Comment.Preproc, 'loop after left brace'), ], - 'root': [ - newline, - include('whitespace or macro call'), - (r'[{}]', Punctuation, 'statement'), - (r'[abefimq-tv-z]|[nN][pP]?', Keyword, 'statement') + 'mark statement': [ + include('whitespace and macro uses'), + (r'[A-Z_a-z]\w*', Name.Label), + (r'\n', Text, '#pop') ], - 'single-line string': [ + 'quoted string': [ (r'"', String, '#pop'), - (r'[^\\"]+', String) + (r'[^"$]+', String), + include('macro uses'), + (r'[$]', String) + ], + + 'loop after left brace': [ + include('whitespace and macro uses'), + (r'\d+', Number.Integer, ('#pop', 'loop after repeat count')), + ], + 'loop after repeat count': [ + include('whitespace and macro uses'), + (r'[A-Z_a-z]\w*', Comment.Preproc, ('#pop', 'loop')) + ], + 'loop': [ + (r'\}', Comment.Preproc, '#pop'), + include('root') + ], + + # Braced strings are not allowed in Csound scores, but this is needed + # because the superclass includes it. + 'braced string': [ + (r'\}\}', String, '#pop'), + (r'[^}]|\}(?!\})', String) ] } class CsoundOrchestraLexer(CsoundLexer): """ - For `Csound <http://csound.github.io>`_ orchestras. + For `Csound <https://csound.github.io>`_ orchestras. .. 
versionadded:: 2.1 """ name = 'Csound Orchestra' aliases = ['csound', 'csound-orc'] - filenames = ['*.orc'] + filenames = ['*.orc', '*.udo'] user_defined_opcodes = set() @@ -152,159 +212,189 @@ class CsoundOrchestraLexer(CsoundLexer): yield match.start(), Name.Function, opcode def name_callback(lexer, match): - name = match.group(0) - if re.match('p\d+$', name) or name in OPCODES: + name = match.group(1) + if name in OPCODES or name in DEPRECATED_OPCODES: yield match.start(), Name.Builtin, name + if match.group(2): + yield match.start(2), Punctuation, match.group(2) + yield match.start(3), Keyword.Type, match.group(3) elif name in lexer.user_defined_opcodes: yield match.start(), Name.Function, name else: - nameMatch = re.search(r'^(g?[aikSw])(\w+)', name) + nameMatch = re.search(r'^(g?[afikSw])(\w+)', name) if nameMatch: yield nameMatch.start(1), Keyword.Type, nameMatch.group(1) yield nameMatch.start(2), Name, nameMatch.group(2) else: yield match.start(), Name, name + # If there's a trailing :V, for example, we want to keep this around + # and emit it as well, otherwise this lexer will not pass round-trip + # testing + if match.group(2): + yield match.start(2), Punctuation, match.group(2) + yield match.start(3), Name, match.group(3) + tokens = { - 'label': [ - (r'\b(\w+)(:)', bygroups(Name.Label, Punctuation)) - ], + 'root': [ + (r'\n', Text), - 'partial expression': [ + (r'^([ \t]*)(\w+)(:)(?:[ \t]+|$)', bygroups(Text, Name.Label, Punctuation)), + + include('whitespace and macro uses'), include('preprocessor directives'), - (r'\b(0dbfs|k(r|smps)|nchnls(_i)?|sr)\b', Name.Variable.Global), - (r'\d+e[+-]?\d+|(\d+\.\d*|\d*\.\d+)(e[+-]?\d+)?', Number.Float), - (r'0[xX][a-fA-F0-9]+', Number.Hex), - (r'\d+', Number.Integer), - (r'"', String, 'single-line string'), - (r'\{\{', String, 'multi-line string'), - (r'[+\-*/%^!=&|<>#~¬]', Operator), - (r'[](),?:[]', Punctuation), + + (r'\binstr\b', Keyword.Declaration, 'instrument numbers and identifiers'), + (r'\bopcode\b', Keyword.Declaration, 'after opcode keyword'), + (r'\b(?:end(?:in|op))\b', Keyword.Declaration), + + include('partial statements') + ], + + 'partial statements': [ + (r'\b(?:0dbfs|A4|k(?:r|smps)|nchnls(?:_i)?|sr)\b', Name.Variable.Global), + + include('numbers'), + + (r'\+=|-=|\*=|/=|<<|>>|<=|>=|==|!=|&&|\|\||[~¬]|[=!+\-*/^%&|<>#?:]', Operator), + (r'[(),\[\]]', Punctuation), + + (r'"', String, 'quoted string'), + (r'\{\{', String, 'braced string'), + (words(( - # Keywords 'do', 'else', 'elseif', 'endif', 'enduntil', 'fi', 'if', 'ithen', 'kthen', 'od', 'then', 'until', 'while', - # Opcodes that act as control structures - 'return', 'timout' ), prefix=r'\b', suffix=r'\b'), Keyword), - (words(('goto', 'igoto', 'kgoto', 'rigoto', 'tigoto'), - prefix=r'\b', suffix=r'\b'), Keyword, 'goto label'), - (words(('cggoto', 'cigoto', 'cingoto', 'ckgoto', 'cngoto'), - prefix=r'\b', suffix=r'\b'), Keyword, - ('goto label', 'goto expression')), - (words(('loop_ge', 'loop_gt', 'loop_le', 'loop_lt'), - prefix=r'\b', suffix=r'\b'), Keyword, - ('goto label', 'goto expression', 'goto expression', 'goto expression')), - (r'\bscoreline(_i)?\b', Name.Builtin, 'scoreline opcode'), - (r'\bpyl?run[it]?\b', Name.Builtin, 'python opcode'), - (r'\blua_(exec|opdef)\b', Name.Builtin, 'lua opcode'), - (r'\b[a-zA-Z_]\w*\b', name_callback) - ], - - 'expression': [ - include('whitespace or macro call'), - newline + ('#pop',), - include('partial expression') - ], + (words(('return', 'rireturn'), prefix=r'\b', suffix=r'\b'), Keyword.Pseudo), - 'root': [ - newline, - 
include('whitespace or macro call'), - (r'\binstr\b', Keyword, ('instrument block', 'instrument name list')), - (r'\bopcode\b', Keyword, ('opcode block', 'opcode parameter list', - 'opcode types', 'opcode types', 'opcode name')), - include('label'), - default('expression') - ], - - 'instrument name list': [ - include('whitespace or macro call'), - (r'\d+|\+?[a-zA-Z_]\w*', Name.Function), - (r',', Punctuation), - newline + ('#pop',) + (r'\b[ik]?goto\b', Keyword, 'goto label'), + (r'\b(r(?:einit|igoto)|tigoto)(\(|\b)', bygroups(Keyword.Pseudo, Punctuation), + 'goto label'), + (r'\b(c(?:g|in?|k|nk?)goto)(\(|\b)', bygroups(Keyword.Pseudo, Punctuation), + ('goto label', 'goto argument')), + (r'\b(timout)(\(|\b)', bygroups(Keyword.Pseudo, Punctuation), + ('goto label', 'goto argument', 'goto argument')), + (r'\b(loop_[gl][et])(\(|\b)', bygroups(Keyword.Pseudo, Punctuation), + ('goto label', 'goto argument', 'goto argument', 'goto argument')), + + (r'\bprintk?s\b', Name.Builtin, 'prints opcode'), + (r'\b(?:readscore|scoreline(?:_i)?)\b', Name.Builtin, 'Csound score opcode'), + (r'\bpyl?run[it]?\b', Name.Builtin, 'Python opcode'), + (r'\blua_(?:exec|opdef)\b', Name.Builtin, 'Lua opcode'), + (r'\bp\d+\b', Name.Variable.Instance), + (r'\b([A-Z_a-z]\w*)(?:(:)([A-Za-z]))?\b', name_callback) ], - 'instrument block': [ - newline, - include('whitespace or macro call'), - (r'\bendin\b', Keyword, '#pop'), - include('label'), - default('expression') + + 'instrument numbers and identifiers': [ + include('whitespace and macro uses'), + (r'\d+|[A-Z_a-z]\w*', Name.Function), + (r'[+,]', Punctuation), + (r'\n', Text, '#pop') ], - 'opcode name': [ - include('whitespace or macro call'), - (r'[a-zA-Z_]\w*', opcode_name_callback, '#pop') + 'after opcode keyword': [ + include('whitespace and macro uses'), + (r'[A-Z_a-z]\w*', opcode_name_callback, ('#pop', 'opcode type signatures')), + (r'\n', Text, '#pop') ], - 'opcode types': [ - include('whitespace or macro call'), - (r'0|[]afijkKoOpPStV[]+', Keyword.Type, '#pop'), - (r',', Punctuation) + 'opcode type signatures': [ + include('whitespace and macro uses'), + + # https://github.com/csound/csound/search?q=XIDENT+path%3AEngine+filename%3Acsound_orc.lex + (r'0|[afijkKoOpPStV\[\]]+', Keyword.Type), + + (r',', Punctuation), + (r'\n', Text, '#pop') ], - 'opcode parameter list': [ - include('whitespace or macro call'), - newline + ('#pop',) + + 'quoted string': [ + (r'"', String, '#pop'), + (r'[^\\"$%)]+', String), + include('macro uses'), + include('escape sequences'), + include('format specifiers'), + (r'[\\$%)]', String) ], - 'opcode block': [ - newline, - include('whitespace or macro call'), - (r'\bendop\b', Keyword, '#pop'), - include('label'), - default('expression') + 'braced string': [ + (r'\}\}', String, '#pop'), + (r'(?:[^\\%)}]|\}(?!\}))+', String), + include('escape sequences'), + include('format specifiers'), + (r'[\\%)]', String) + ], + 'escape sequences': [ + # https://github.com/csound/csound/search?q=unquote_string+path%3AEngine+filename%3Acsound_orc_compile.c + (r'\\(?:[\\abnrt"]|[0-7]{1,3})', String.Escape) + ], + # Format specifiers are highlighted in all strings, even though only + # fprintks https://csound.github.io/docs/manual/fprintks.html + # fprints https://csound.github.io/docs/manual/fprints.html + # printf/printf_i https://csound.github.io/docs/manual/printf.html + # printks https://csound.github.io/docs/manual/printks.html + # prints https://csound.github.io/docs/manual/prints.html + # sprintf https://csound.github.io/docs/manual/sprintf.html + 
# sprintfk https://csound.github.io/docs/manual/sprintfk.html + # work with strings that contain format specifiers. In addition, these + # opcodes’ handling of format specifiers is inconsistent: + # - fprintks, fprints, printks, and prints do accept %a and %A + # specifiers, but can’t accept %s specifiers. + # - printf, printf_i, sprintf, and sprintfk don’t accept %a and %A + # specifiers, but can accept %s specifiers. + # See https://github.com/csound/csound/issues/747 for more information. + 'format specifiers': [ + (r'%[#0\- +]*\d*(?:\.\d+)?[diuoxXfFeEgGaAcs]', String.Interpol), + (r'%%', String.Escape) + ], + + 'goto argument': [ + include('whitespace and macro uses'), + (r',', Punctuation, '#pop'), + include('partial statements') ], - 'goto label': [ - include('whitespace or macro call'), + include('whitespace and macro uses'), (r'\w+', Name.Label, '#pop'), default('#pop') ], - 'goto expression': [ - include('whitespace or macro call'), - (r',', Punctuation, '#pop'), - include('partial expression') - ], - 'single-line string': [ - include('macro call'), - (r'"', String, '#pop'), - # From https://github.com/csound/csound/blob/develop/Opcodes/fout.c#L1405 - (r'%\d*(\.\d+)?[cdhilouxX]', String.Interpol), - (r'%[!%nNrRtT]|[~^]|\\([\\aAbBnNrRtT"]|[0-7]{1,3})', String.Escape), - (r'[^\\"~$%\^\n]+', String), - (r'[\\"~$%\^\n]', String) + 'prints opcode': [ + include('whitespace and macro uses'), + (r'"', String, 'prints quoted string'), + default('#pop') ], - 'multi-line string': [ - (r'\}\}', String, '#pop'), - (r'[^}]+|\}(?!\})', String) + 'prints quoted string': [ + (r'\\\\[aAbBnNrRtT]', String.Escape), + (r'%[!nNrRtT]|[~^]{1,2}', String.Escape), + include('quoted string') ], - 'scoreline opcode': [ - include('whitespace or macro call'), - (r'\{\{', String, 'scoreline'), - default('#pop') + 'Csound score opcode': [ + include('whitespace and macro uses'), + (r'\{\{', String, 'Csound score'), + (r'\n', Text, '#pop') ], - 'scoreline': [ + 'Csound score': [ (r'\}\}', String, '#pop'), (r'([^}]+)|\}(?!\})', using(CsoundScoreLexer)) ], - 'python opcode': [ - include('whitespace or macro call'), - (r'\{\{', String, 'python'), - default('#pop') + 'Python opcode': [ + include('whitespace and macro uses'), + (r'\{\{', String, 'Python'), + (r'\n', Text, '#pop') ], - 'python': [ + 'Python': [ (r'\}\}', String, '#pop'), (r'([^}]+)|\}(?!\})', using(PythonLexer)) ], - 'lua opcode': [ - include('whitespace or macro call'), - (r'"', String, 'single-line string'), - (r'\{\{', String, 'lua'), - (r',', Punctuation), - default('#pop') + 'Lua opcode': [ + include('whitespace and macro uses'), + (r'\{\{', String, 'Lua'), + (r'\n', Text, '#pop') ], - 'lua': [ + 'Lua': [ (r'\}\}', String, '#pop'), (r'([^}]+)|\}(?!\})', using(LuaLexer)) ] @@ -313,7 +403,7 @@ class CsoundOrchestraLexer(CsoundLexer): class CsoundDocumentLexer(RegexLexer): """ - For `Csound <http://csound.github.io>`_ documents. + For `Csound <https://csound.github.io>`_ documents. .. versionadded:: 2.1 """ @@ -331,15 +421,18 @@ class CsoundDocumentLexer(RegexLexer): # be XML files. 
tokens = { 'root': [ - newline, (r'/[*](.|\n)*?[*]/', Comment.Multiline), - (r'[^<&;/]+', Text), + (r'(?:;|//).*$', Comment.Single), + (r'[^/;<]+|/(?!/)', Text), + (r'<\s*CsInstruments', Name.Tag, ('orchestra', 'tag')), (r'<\s*CsScore', Name.Tag, ('score', 'tag')), - (r'<\s*[hH][tT][mM][lL]', Name.Tag, ('HTML', 'tag')), + (r'<\s*[Hh][Tt][Mm][Ll]', Name.Tag, ('HTML', 'tag')), + (r'<\s*[\w:.-]+', Name.Tag, 'tag'), (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag) ], + 'orchestra': [ (r'<\s*/\s*CsInstruments\s*>', Name.Tag, '#pop'), (r'(.|\n)+?(?=<\s*/\s*CsInstruments\s*>)', using(CsoundOrchestraLexer)) @@ -349,9 +442,10 @@ class CsoundDocumentLexer(RegexLexer): (r'(.|\n)+?(?=<\s*/\s*CsScore\s*>)', using(CsoundScoreLexer)) ], 'HTML': [ - (r'<\s*/\s*[hH][tT][mM][lL]\s*>', Name.Tag, '#pop'), - (r'(.|\n)+?(?=<\s*/\s*[hH][tT][mM][lL]\s*>)', using(HtmlLexer)) + (r'<\s*/\s*[Hh][Tt][Mm][Ll]\s*>', Name.Tag, '#pop'), + (r'(.|\n)+?(?=<\s*/\s*[Hh][Tt][Mm][Ll]\s*>)', using(HtmlLexer)) ], + 'tag': [ (r'\s+', Text), (r'[\w.:-]+\s*=', Name.Attribute, 'attr'), diff --git a/pygments/lexers/css.py b/pygments/lexers/css.py index 29d83707..ce97730e 100644 --- a/pygments/lexers/css.py +++ b/pygments/lexers/css.py @@ -125,7 +125,7 @@ _css_properties = ( 'wrap-flow', 'wrap-inside', 'wrap-through', 'writing-mode', 'z-index', ) -# List of keyword values obtained from: +# List of keyword values obtained from: # http://cssvalues.com/ _keyword_values = ( 'absolute', 'alias', 'all', 'all-petite-caps', 'all-scroll', @@ -263,7 +263,7 @@ _time_units = ( 's', 'ms', ) _all_units = _angle_units + _frequency_units + _length_units + \ - _resolution_units + _time_units + _resolution_units + _time_units class CssLexer(RegexLexer): @@ -322,16 +322,18 @@ class CssLexer(RegexLexer): include('urls'), (r'('+r'|'.join(_functional_notation_keyword_values)+r')(\()', bygroups(Name.Builtin, Punctuation), 'function-start'), - (r'([a-zA-Z_][\w-]+)(\()', bygroups(Name.Function, Punctuation), 'function-start'), + (r'([a-zA-Z_][\w-]+)(\()', + bygroups(Name.Function, Punctuation), 'function-start'), (words(_keyword_values, suffix=r'\b'), Keyword.Constant), (words(_other_keyword_values, suffix=r'\b'), Keyword.Constant), (words(_color_keywords, suffix=r'\b'), Keyword.Constant), - (words(_css_properties, suffix=r'\b'), Keyword), # for transition-property etc. + # for transition-property etc. 
+ (words(_css_properties, suffix=r'\b'), Keyword), (r'\!important', Comment.Preproc), (r'/\*(?:.|\n)*?\*/', Comment), include('numeric-values'), - + (r'[~^*!%&<>|+=@:./?-]+', Operator), (r'[\[\](),]+', Punctuation), (r'"(\\\\|\\"|[^"])*"', String.Double), @@ -351,7 +353,8 @@ class CssLexer(RegexLexer): # function-start may be entered recursively (r'(' + r'|'.join(_functional_notation_keyword_values) + r')(\()', bygroups(Name.Builtin, Punctuation), 'function-start'), - (r'([a-zA-Z_][\w-]+)(\()', bygroups(Name.Function, Punctuation), 'function-start'), + (r'([a-zA-Z_][\w-]+)(\()', + bygroups(Name.Function, Punctuation), 'function-start'), (r'/\*(?:.|\n)*?\*/', Comment), include('numeric-values'), @@ -373,8 +376,8 @@ class CssLexer(RegexLexer): 'numeric-values': [ (r'\#[a-zA-Z0-9]{1,6}', Number.Hex), (r'[+\-]?[0-9]*[.][0-9]+', Number.Float, 'numeric-end'), - (r'[+\-]?[0-9]+', Number.Integer, 'numeric-end'), - ], + (r'[+\-]?[0-9]+', Number.Integer, 'numeric-end'), + ], 'numeric-end': [ (words(_all_units, suffix=r'\b'), Keyword.Type), (r'%', Keyword.Type), @@ -466,9 +469,9 @@ common_sass_tokens = { ], 'string-single': [ - (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double), + (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Single), (r'#\{', String.Interpol, 'interpolation'), - (r"'", String.Double, '#pop'), + (r"'", String.Single, '#pop'), ], 'string-url': [ diff --git a/pygments/lexers/data.py b/pygments/lexers/data.py index 296366c2..7593b487 100644 --- a/pygments/lexers/data.py +++ b/pygments/lexers/data.py @@ -205,7 +205,7 @@ class YamlLexer(ExtendedRegexLexer): bygroups(Text, Number), 'ignored-line'), ], - # the %YAG directive + # the %TAG directive 'tag-directive': [ # a tag handle and the corresponding prefix (r'([ ]+)(!|![\w-]*!)' @@ -218,7 +218,7 @@ class YamlLexer(ExtendedRegexLexer): 'indentation': [ # trailing whitespaces are ignored (r'[ ]*$', something(Text), '#pop:2'), - # whitespaces preceeding block collection indicators + # whitespaces preceding block collection indicators (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text)), # block collection indicators (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)), @@ -232,6 +232,9 @@ class YamlLexer(ExtendedRegexLexer): (r'[ ]*(?=#|$)', something(Text), '#pop'), # whitespaces separating tokens (r'[ ]+', Text), + # key with colon + (r'''([^,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''', + bygroups(Name.Tag, set_indent(Punctuation, implicit=True))), # tags, anchors and aliases, include('descriptors'), # block collections and scalars @@ -250,7 +253,7 @@ class YamlLexer(ExtendedRegexLexer): (r'!<[\w#;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type), # a tag in the form '!', '!suffix' or '!handle!suffix' (r'!(?:[\w-]+!)?' 
- r'[\w#;/?:@&=+$,.!~*\'()\[\]%-]+', Keyword.Type), + r'[\w#;/?:@&=+$,.!~*\'()\[\]%-]*', Keyword.Type), # an anchor (r'&[\w-]+', Name.Label), # an alias @@ -308,6 +311,9 @@ class YamlLexer(ExtendedRegexLexer): # a flow mapping indicated by '{' and '}' 'flow-mapping': [ + # key with colon + (r'''([^,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''', + bygroups(Name.Tag, Punctuation)), # include flow collection rules include('flow-collection'), # the closing indicator diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py index 4e2bc8ab..27ae77c5 100644 --- a/pygments/lexers/dotnet.py +++ b/pygments/lexers/dotnet.py @@ -58,7 +58,7 @@ class CSharpLexer(RegexLexer): # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf levels = { - 'none': '@?[_a-zA-Z]\w*', + 'none': r'@?[_a-zA-Z]\w*', 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' + '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'), @@ -171,7 +171,7 @@ class NemerleLexer(RegexLexer): # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf levels = { - 'none': '@?[_a-zA-Z]\w*', + 'none': r'@?[_a-zA-Z]\w*', 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' + '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'), @@ -352,13 +352,13 @@ class BooLexer(RegexLexer): ('[*/]', Comment.Multiline) ], 'funcname': [ - ('[a-zA-Z_]\w*', Name.Function, '#pop') + (r'[a-zA-Z_]\w*', Name.Function, '#pop') ], 'classname': [ - ('[a-zA-Z_]\w*', Name.Class, '#pop') + (r'[a-zA-Z_]\w*', Name.Class, '#pop') ], 'namespace': [ - ('[a-zA-Z_][\w.]*', Name.Namespace, '#pop') + (r'[a-zA-Z_][\w.]*', Name.Namespace, '#pop') ] } @@ -413,7 +413,7 @@ class VbNetLexer(RegexLexer): 'Static', 'Step', 'Stop', 'SyncLock', 'Then', 'Throw', 'To', 'True', 'Try', 'TryCast', 'Wend', 'Using', 'When', 'While', 'Widening', 'With', 'WithEvents', 'WriteOnly'), - prefix='(?<!\.)', suffix=r'\b'), Keyword), + prefix=r'(?<!\.)', suffix=r'\b'), Keyword), (r'(?<!\.)End\b', Keyword, 'end'), (r'(?<!\.)(Dim|Const)\b', Keyword, 'dim'), (r'(?<!\.)(Function|Sub|Property)(\s+)', @@ -574,10 +574,10 @@ class FSharpLexer(RegexLexer): 'virtual', 'volatile', ] keyopts = [ - '!=', '#', '&&', '&', '\(', '\)', '\*', '\+', ',', '-\.', - '->', '-', '\.\.', '\.', '::', ':=', ':>', ':', ';;', ';', '<-', - '<\]', '<', '>\]', '>', '\?\?', '\?', '\[<', '\[\|', '\[', '\]', - '_', '`', '\{', '\|\]', '\|', '\}', '~', '<@@', '<@', '=', '@>', '@@>', + '!=', '#', '&&', '&', r'\(', r'\)', r'\*', r'\+', ',', r'-\.', + '->', '-', r'\.\.', r'\.', '::', ':=', ':>', ':', ';;', ';', '<-', + r'<\]', '<', r'>\]', '>', r'\?\?', r'\?', r'\[<', r'\[\|', r'\[', r'\]', + '_', '`', r'\{', r'\|\]', r'\|', r'\}', '~', '<@@', '<@', '=', '@>', '@@>', ] operators = r'[!$%&*+\./:<=>?@^|~-]' diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py index a1426bd6..4451b480 100644 --- a/pygments/lexers/dsls.py +++ b/pygments/lexers/dsls.py @@ -66,7 +66,7 @@ class ProtoBufLexer(RegexLexer): (r'[+-=]', Operator), (r'([a-zA-Z_][\w.]*)([ \t]*)(=)', bygroups(Name.Attribute, Text, Operator)), - ('[a-zA-Z_][\w.]*', Name), + (r'[a-zA-Z_][\w.]*', Name), ], 'package': [ (r'[a-zA-Z_]\w*', Name.Namespace, '#pop'), @@ -300,7 +300,7 @@ class PuppetLexer(RegexLexer): ], 'names': [ - ('[a-zA-Z_]\w*', Name.Attribute), + (r'[a-zA-Z_]\w*', Name.Attribute), (r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation, String, Punctuation)), (r'\$\S+', Name.Variable), @@ -688,7 +688,7 @@ class CrmshLexer(RegexLexer): 
(r'([\w#$-]+)(?:(:)(%s))?(?![\w#$-])' % rsc_role_action, bygroups(Name, Punctuation, Operator.Word)), # punctuation - (r'(\\(?=\n)|[[\](){}/:@])', Punctuation), + (r'(\\(?=\n)|[\[\](){}/:@])', Punctuation), (r'\s+|\n', Whitespace), ], } diff --git a/pygments/lexers/dylan.py b/pygments/lexers/dylan.py index f61bb60d..30318f38 100644 --- a/pygments/lexers/dylan.py +++ b/pygments/lexers/dylan.py @@ -179,10 +179,10 @@ class DylanLexer(RegexLexer): (valid_name + ':', Keyword), # class names - (r'<' + valid_name + '>', Name.Class), + ('<' + valid_name + '>', Name.Class), # define variable forms. - (r'\*' + valid_name + '\*', Name.Variable.Global), + (r'\*' + valid_name + r'\*', Name.Variable.Global), # define constant forms. (r'\$' + valid_name, Name.Constant), @@ -260,7 +260,7 @@ class DylanConsoleLexer(Lexer): mimetypes = ['text/x-dylan-console'] _line_re = re.compile('.*?\n') - _prompt_re = re.compile('\?| ') + _prompt_re = re.compile(r'\?| ') def get_tokens_unprocessed(self, text): dylexer = DylanLexer(**self.options) diff --git a/pygments/lexers/elm.py b/pygments/lexers/elm.py index 0fa36367..22a10bd9 100644 --- a/pygments/lexers/elm.py +++ b/pygments/lexers/elm.py @@ -27,7 +27,7 @@ class ElmLexer(RegexLexer): filenames = ['*.elm'] mimetypes = ['text/x-elm'] - validName = r'[a-z_][a-zA-Z_\']*' + validName = r'[a-z_][a-zA-Z0-9_\']*' specialName = r'^main ' diff --git a/pygments/lexers/erlang.py b/pygments/lexers/erlang.py index 9e7f85c1..0d0d0798 100644 --- a/pygments/lexers/erlang.py +++ b/pygments/lexers/erlang.py @@ -344,7 +344,7 @@ class ElixirLexer(RegexLexer): op1_re = "|".join(re.escape(s) for s in OPERATORS1) ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re) punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION) - alnum = '\w' + alnum = r'\w' name_re = r'(?:\.\.\.|[a-z_]%s*[!?]?)' % alnum modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum} complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re) @@ -495,7 +495,7 @@ class ElixirConsoleLexer(Lexer): aliases = ['iex'] mimetypes = ['text/x-elixir-shellsession'] - _prompt_re = re.compile('(iex|\.{3})(\(\d+\))?> ') + _prompt_re = re.compile(r'(iex|\.{3})(\(\d+\))?> ') def get_tokens_unprocessed(self, text): exlexer = ElixirLexer(**self.options) diff --git a/pygments/lexers/esoteric.py b/pygments/lexers/esoteric.py index 793c28be..26222c9f 100644 --- a/pygments/lexers/esoteric.py +++ b/pygments/lexers/esoteric.py @@ -245,7 +245,7 @@ class AheuiLexer(RegexLexer): Aheui_ is esoteric language based on Korean alphabets. - .. _Aheui:: http://aheui.github.io/ + .. _Aheui: http://aheui.github.io/ """ diff --git a/pygments/lexers/floscript.py b/pygments/lexers/floscript.py new file mode 100644 index 00000000..4f200809 --- /dev/null +++ b/pygments/lexers/floscript.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.floscript + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexer for FloScript + + :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \ + default, words, combined, do_insertions +from pygments.util import get_bool_opt, shebang_matches +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Generic, Other, Error +from pygments import unistring as uni + +__all__ = ['FloScriptLexer',] + +class FloScriptLexer(RegexLexer): + """ + For `FloScript <https://github.com/ioflo/ioflo>`_ configuration language source code. 
+ + .. versionadded:: 2.4 + """ + + name = 'FloScript' + aliases = ['floscript', 'flo'] + filenames = ['*.flo'] + + def innerstring_rules(ttype): + return [ + # the old style '%s' % (...) string formatting + (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?' + '[hlL]?[E-GXc-giorsux%]', String.Interpol), + # backslashes, quotes and formatting signs must be parsed one at a time + (r'[^\\\'"%\n]+', ttype), + (r'[\'"\\]', ttype), + # unhandled string formatting sign + (r'%', ttype), + # newlines are an error (use "nl" state) + ] + + + tokens = { + 'root': [ + (r'\n', Text), + (r'[^\S\n]+', Text), + + (r'[]{}:(),;[]', Punctuation), + (r'\\\n', Text), + (r'\\', Text), + (r'(to|by|with|from|per|for|cum|qua|via|as|at|in|of|on|re|is|if|be|into|and|not)\b', Operator.Word), + (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator), + (r'(load|init|server|logger|log|loggee|first|over|under|next|done|timeout|repeat|native|benter|enter|recur|exit|precur|renter|rexit|print|put|inc|copy|set|aux|rear|raze|go|let|do|bid|ready|start|stop|run|abort|use|flo|give|take)\b', Name.Builtin), + (r'(frame|framer|house)\b', Keyword), + ('"', String, 'string'), + + include('name'), + include('numbers'), + (r'#.+$', Comment.Singleline), + ], + 'string': [ + ('[^"]+', String), + ('"', String, '#pop'), + ], + 'numbers': [ + (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float), + (r'\d+[eE][+-]?[0-9]+j?', Number.Float), + (r'0[0-7]+j?', Number.Oct), + (r'0[bB][01]+', Number.Bin), + (r'0[xX][a-fA-F0-9]+', Number.Hex), + (r'\d+L', Number.Integer.Long), + (r'\d+j?', Number.Integer) + ], + + 'name': [ + (r'@[\w.]+', Name.Decorator), + (r'[a-zA-Z_]\w*', Name), + ], + + + + } diff --git a/pygments/lexers/forth.py b/pygments/lexers/forth.py index a51f1b57..7fecdd52 100644 --- a/pygments/lexers/forth.py +++ b/pygments/lexers/forth.py @@ -3,6 +3,8 @@ pygments.lexers.forth ~~~~~~~~~~~~~~~~~~~~~ + Lexer for the Forth language. + :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/pygments/lexers/fortran.py b/pygments/lexers/fortran.py index 1a611c9d..5165bac0 100644 --- a/pygments/lexers/fortran.py +++ b/pygments/lexers/fortran.py @@ -158,6 +158,7 @@ class FortranLexer(RegexLexer): (r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer), (r'[+-]?\d*\.\d+([ed][-+]?\d+)?(_[a-z]\w+)?', Number.Float), (r'[+-]?\d+\.\d*([ed][-+]?\d+)?(_[a-z]\w+)?', Number.Float), + (r'[+-]?\d+(\.\d*)?[ed][-+]?\d+(_[a-z]\w+)?', Number.Float), ], } diff --git a/pygments/lexers/grammar_notation.py b/pygments/lexers/grammar_notation.py index 076249d3..bc715ffa 100644 --- a/pygments/lexers/grammar_notation.py +++ b/pygments/lexers/grammar_notation.py @@ -158,7 +158,7 @@ class JsgfLexer(RegexLexer): (r'//.*', Comment.Single), ], 'non-comments': [ - ('\A#JSGF[^;]*', Comment.Preproc), + (r'\A#JSGF[^;]*', Comment.Preproc), (r'\s+', Text), (r';', Punctuation), (r'[=|()\[\]*+]', Operator), diff --git a/pygments/lexers/graph.py b/pygments/lexers/graph.py index 1a338246..6e836bdd 100644 --- a/pygments/lexers/graph.py +++ b/pygments/lexers/graph.py @@ -22,9 +22,9 @@ __all__ = ['CypherLexer'] class CypherLexer(RegexLexer): """ For `Cypher Query Language - <http://docs.neo4j.org/chunked/milestone/cypher-query-lang.html>`_ + <https://neo4j.com/docs/developer-manual/3.3/cypher/>`_ - For the Cypher version in Neo4J 2.0 + For the Cypher version in Neo4j 3.3 .. 
versionadded:: 2.0 """ @@ -49,14 +49,19 @@ class CypherLexer(RegexLexer): ], 'keywords': [ (r'(create|order|match|limit|set|skip|start|return|with|where|' - r'delete|foreach|not|by)\b', Keyword), + r'delete|foreach|not|by|true|false)\b', Keyword), ], 'clauses': [ - # TODO: many missing ones, see http://docs.neo4j.org/refcard/2.0/ - (r'(all|any|as|asc|create|create\s+unique|delete|' - r'desc|distinct|foreach|in|is\s+null|limit|match|none|' - r'order\s+by|return|set|skip|single|start|union|where|with)\b', - Keyword), + # based on https://neo4j.com/docs/cypher-refcard/3.3/ + (r'(all|any|as|asc|ascending|assert|call|case|create|' + r'create\s+index|create\s+unique|delete|desc|descending|' + r'distinct|drop\s+constraint\s+on|drop\s+index\s+on|end|' + r'ends\s+with|fieldterminator|foreach|in|is\s+node\s+key|' + r'is\s+null|is\s+unique|limit|load\s+csv\s+from|match|merge|none|' + r'not|null|on\s+match|on\s+create|optional\s+match|order\s+by|' + r'remove|return|set|skip|single|start|starts\s+with|then|union|' + r'union\s+all|unwind|using\s+periodic\s+commit|yield|where|when|' + r'with)\b', Keyword), ], 'relations': [ (r'(-\[)(.*?)(\]->)', bygroups(Operator, using(this), Operator)), diff --git a/pygments/lexers/graphics.py b/pygments/lexers/graphics.py index c8af9f99..30ab2cbc 100644 --- a/pygments/lexers/graphics.py +++ b/pygments/lexers/graphics.py @@ -15,7 +15,7 @@ from pygments.token import Text, Comment, Operator, Keyword, Name, \ Number, Punctuation, String __all__ = ['GLShaderLexer', 'PostScriptLexer', 'AsymptoteLexer', 'GnuplotLexer', - 'PovrayLexer'] + 'PovrayLexer', 'HLSLShaderLexer'] class GLShaderLexer(RegexLexer): @@ -46,28 +46,102 @@ class GLShaderLexer(RegexLexer): (r'0[0-7]*', Number.Oct), (r'[1-9][0-9]*', Number.Integer), (words(( - 'attribute', 'const', 'uniform', 'varying', 'centroid', 'break', - 'continue', 'do', 'for', 'while', 'if', 'else', 'in', 'out', - 'inout', 'float', 'int', 'void', 'bool', 'true', 'false', - 'invariant', 'discard', 'return', 'mat2', 'mat3' 'mat4', - 'mat2x2', 'mat3x2', 'mat4x2', 'mat2x3', 'mat3x3', 'mat4x3', - 'mat2x4', 'mat3x4', 'mat4x4', 'vec2', 'vec3', 'vec4', - 'ivec2', 'ivec3', 'ivec4', 'bvec2', 'bvec3', 'bvec4', - 'sampler1D', 'sampler2D', 'sampler3D' 'samplerCube', - 'sampler1DShadow', 'sampler2DShadow', 'struct'), + # Storage qualifiers + 'attribute', 'const', 'uniform', 'varying', + 'buffer', 'shared', 'in', 'out', + # Layout qualifiers + 'layout', + # Interpolation qualifiers + 'flat', 'smooth', 'noperspective', + # Auxiliary qualifiers + 'centroid', 'sample', 'patch', + # Parameter qualifiers. 
Some double as Storage qualifiers + 'inout', + # Precision qualifiers + 'lowp', 'mediump', 'highp', 'precision', + # Invariance qualifiers + 'invariant', + # Precise qualifiers + 'precise', + # Memory qualifiers + 'coherent', 'volatile', 'restrict', 'readonly', 'writeonly', + # Statements + 'break', 'continue', 'do', 'for', 'while', 'switch', + 'case', 'default', 'if', 'else', 'subroutine', + 'discard', 'return', 'struct'), prefix=r'\b', suffix=r'\b'), Keyword), (words(( - 'asm', 'class', 'union', 'enum', 'typedef', 'template', 'this', - 'packed', 'goto', 'switch', 'default', 'inline', 'noinline', - 'volatile', 'public', 'static', 'extern', 'external', 'interface', - 'long', 'short', 'double', 'half', 'fixed', 'unsigned', 'lowp', - 'mediump', 'highp', 'precision', 'input', 'output', - 'hvec2', 'hvec3', 'hvec4', 'dvec2', 'dvec3', 'dvec4', - 'fvec2', 'fvec3', 'fvec4', 'sampler2DRect', 'sampler3DRect', - 'sampler2DRectShadow', 'sizeof', 'cast', 'namespace', 'using'), + # Boolean values + 'true', 'false'), prefix=r'\b', suffix=r'\b'), - Keyword), # future use + Keyword.Constant), + (words(( + # Miscellaneous types + 'void', 'atomic_uint', + # Floating-point scalars and vectors + 'float', 'vec2', 'vec3', 'vec4', + 'double', 'dvec2', 'dvec3', 'dvec4', + # Integer scalars and vectors + 'int', 'ivec2', 'ivec3', 'ivec4', + 'uint', 'uvec2', 'uvec3', 'uvec4', + # Boolean scalars and vectors + 'bool', 'bvec2', 'bvec3', 'bvec4', + # Matrices + 'mat2', 'mat3', 'mat4', 'dmat2', 'dmat3', 'dmat4', + 'mat2x2', 'mat2x3', 'mat2x4', 'dmat2x2', 'dmat2x3', 'dmat2x4', + 'mat3x2', 'mat3x3', 'mat3x4', 'dmat3x2', 'dmat3x3', + 'dmat3x4', 'mat4x2', 'mat4x3', 'mat4x4', 'dmat4x2', 'dmat4x3', 'dmat4x4', + # Floating-point samplers + 'sampler1D', 'sampler2D', 'sampler3D', 'samplerCube', + 'sampler1DArray', 'sampler2DArray', 'samplerCubeArray', + 'sampler2DRect', 'samplerBuffer', + 'sampler2DMS', 'sampler2DMSArray', + # Shadow samplers + 'sampler1DShadow', 'sampler2DShadow', 'samplerCubeShadow', + 'sampler1DArrayShadow', 'sampler2DArrayShadow', + 'samplerCubeArrayShadow', 'sampler2DRectShadow', + # Signed integer samplers + 'isampler1D', 'isampler2D', 'isampler3D', 'isamplerCube', + 'isampler1DArray', 'isampler2DArray', 'isamplerCubeArray', + 'isampler2DRect', 'isamplerBuffer', + 'isampler2DMS', 'isampler2DMSArray', + # Unsigned integer samplers + 'usampler1D', 'usampler2D', 'usampler3D', 'usamplerCube', + 'usampler1DArray', 'usampler2DArray', 'usamplerCubeArray', + 'usampler2DRect', 'usamplerBuffer', + 'usampler2DMS', 'usampler2DMSArray', + # Floating-point image types + 'image1D', 'image2D', 'image3D', 'imageCube', + 'image1DArray', 'image2DArray', 'imageCubeArray', + 'image2DRect', 'imageBuffer', + 'image2DMS', 'image2DMSArray', + # Signed integer image types + 'iimage1D', 'iimage2D', 'iimage3D', 'iimageCube', + 'iimage1DArray', 'iimage2DArray', 'iimageCubeArray', + 'iimage2DRect', 'iimageBuffer', + 'iimage2DMS', 'iimage2DMSArray', + # Unsigned integer image types + 'uimage1D', 'uimage2D', 'uimage3D', 'uimageCube', + 'uimage1DArray', 'uimage2DArray', 'uimageCubeArray', + 'uimage2DRect', 'uimageBuffer', + 'uimage2DMS', 'uimage2DMSArray'), + prefix=r'\b', suffix=r'\b'), + Keyword.Type), + (words(( + # Reserved for future use. 
+ 'common', 'partition', 'active', 'asm', 'class', + 'union', 'enum', 'typedef', 'template', 'this', + 'resource', 'goto', 'inline', 'noinline', 'public', + 'static', 'extern', 'external', 'interface', 'long', + 'short', 'half', 'fixed', 'unsigned', 'superp', 'input', + 'output', 'hvec2', 'hvec3', 'hvec4', 'fvec2', 'fvec3', + 'fvec4', 'sampler3DRect', 'filter', 'sizeof', 'cast', + 'namespace', 'using'), + prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), + # All names beginning with "gl_" are reserved. + (r'gl_\w*', Name.Builtin), (r'[a-zA-Z_]\w*', Name), (r'\.', Punctuation), (r'\s+', Text), @@ -75,6 +149,160 @@ class GLShaderLexer(RegexLexer): } +class HLSLShaderLexer(RegexLexer): + """ + HLSL (Microsoft Direct3D Shader) lexer. + + .. versionadded:: 2.3 + """ + name = 'HLSL' + aliases = ['hlsl'] + filenames = ['*.hlsl', '*.hlsli'] + mimetypes = ['text/x-hlsl'] + + tokens = { + 'root': [ + (r'^#.*', Comment.Preproc), + (r'//.*', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?', + Operator), + (r'[?:]', Operator), # quick hack for ternary + (r'\bdefined\b', Operator), + (r'[;{}(),.\[\]]', Punctuation), + # FIXME when e is present, no decimal point needed + (r'[+-]?\d*\.\d+([eE][-+]?\d+)?f?', Number.Float), + (r'[+-]?\d+\.\d*([eE][-+]?\d+)?f?', Number.Float), + (r'0[xX][0-9a-fA-F]*', Number.Hex), + (r'0[0-7]*', Number.Oct), + (r'[1-9][0-9]*', Number.Integer), + (r'"', String, 'string'), + (words(( + 'asm','asm_fragment','break','case','cbuffer','centroid','class', + 'column_major','compile','compile_fragment','const','continue', + 'default','discard','do','else','export','extern','for','fxgroup', + 'globallycoherent','groupshared','if','in','inline','inout', + 'interface','line','lineadj','linear','namespace','nointerpolation', + 'noperspective','NULL','out','packoffset','pass','pixelfragment', + 'point','precise','return','register','row_major','sample', + 'sampler','shared','stateblock','stateblock_state','static', + 'struct','switch','tbuffer','technique','technique10', + 'technique11','texture','typedef','triangle','triangleadj', + 'uniform','vertexfragment','volatile','while'), + prefix=r'\b', suffix=r'\b'), + Keyword), + (words(('true','false'), prefix=r'\b', suffix=r'\b'), + Keyword.Constant), + (words(( + 'auto','catch','char','const_cast','delete','dynamic_cast','enum', + 'explicit','friend','goto','long','mutable','new','operator', + 'private','protected','public','reinterpret_cast','short','signed', + 'sizeof','static_cast','template','this','throw','try','typename', + 'union','unsigned','using','virtual'), + prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), + (words(( + 'dword','matrix','snorm','string','unorm','unsigned','void','vector', + 'BlendState','Buffer','ByteAddressBuffer','ComputeShader', + 'DepthStencilState','DepthStencilView','DomainShader', + 'GeometryShader','HullShader','InputPatch','LineStream', + 'OutputPatch','PixelShader','PointStream','RasterizerState', + 'RenderTargetView','RasterizerOrderedBuffer', + 'RasterizerOrderedByteAddressBuffer', + 'RasterizerOrderedStructuredBuffer','RasterizerOrderedTexture1D', + 'RasterizerOrderedTexture1DArray','RasterizerOrderedTexture2D', + 'RasterizerOrderedTexture2DArray','RasterizerOrderedTexture3D', + 'RWBuffer','RWByteAddressBuffer','RWStructuredBuffer', + 'RWTexture1D','RWTexture1DArray','RWTexture2D','RWTexture2DArray', + 'RWTexture3D','SamplerState','SamplerComparisonState', + 'StructuredBuffer','Texture1D','Texture1DArray','Texture2D', + 
'Texture2DArray','Texture2DMS','Texture2DMSArray','Texture3D', + 'TextureCube','TextureCubeArray','TriangleStream','VertexShader'), + prefix=r'\b', suffix=r'\b'), + Keyword.Type), + (words(( + 'bool','double','float','int','half','min16float','min10float', + 'min16int','min12int','min16uint','uint'), + prefix=r'\b', suffix=r'([1-4](x[1-4])?)?\b'), + Keyword.Type), # vector and matrix types + (words(( + 'abort','abs','acos','all','AllMemoryBarrier', + 'AllMemoryBarrierWithGroupSync','any','AppendStructuredBuffer', + 'asdouble','asfloat','asin','asint','asuint','asuint','atan', + 'atan2','ceil','CheckAccessFullyMapped','clamp','clip', + 'CompileShader','ConsumeStructuredBuffer','cos','cosh','countbits', + 'cross','D3DCOLORtoUBYTE4','ddx','ddx_coarse','ddx_fine','ddy', + 'ddy_coarse','ddy_fine','degrees','determinant', + 'DeviceMemoryBarrier','DeviceMemoryBarrierWithGroupSync','distance', + 'dot','dst','errorf','EvaluateAttributeAtCentroid', + 'EvaluateAttributeAtSample','EvaluateAttributeSnapped','exp', + 'exp2','f16tof32','f32tof16','faceforward','firstbithigh', + 'firstbitlow','floor','fma','fmod','frac','frexp','fwidth', + 'GetRenderTargetSampleCount','GetRenderTargetSamplePosition', + 'GlobalOrderedCountIncrement','GroupMemoryBarrier', + 'GroupMemoryBarrierWithGroupSync','InterlockedAdd','InterlockedAnd', + 'InterlockedCompareExchange','InterlockedCompareStore', + 'InterlockedExchange','InterlockedMax','InterlockedMin', + 'InterlockedOr','InterlockedXor','isfinite','isinf','isnan', + 'ldexp','length','lerp','lit','log','log10','log2','mad','max', + 'min','modf','msad4','mul','noise','normalize','pow','printf', + 'Process2DQuadTessFactorsAvg','Process2DQuadTessFactorsMax', + 'Process2DQuadTessFactorsMin','ProcessIsolineTessFactors', + 'ProcessQuadTessFactorsAvg','ProcessQuadTessFactorsMax', + 'ProcessQuadTessFactorsMin','ProcessTriTessFactorsAvg', + 'ProcessTriTessFactorsMax','ProcessTriTessFactorsMin', + 'QuadReadLaneAt','QuadSwapX','QuadSwapY','radians','rcp', + 'reflect','refract','reversebits','round','rsqrt','saturate', + 'sign','sin','sincos','sinh','smoothstep','sqrt','step','tan', + 'tanh','tex1D','tex1D','tex1Dbias','tex1Dgrad','tex1Dlod', + 'tex1Dproj','tex2D','tex2D','tex2Dbias','tex2Dgrad','tex2Dlod', + 'tex2Dproj','tex3D','tex3D','tex3Dbias','tex3Dgrad','tex3Dlod', + 'tex3Dproj','texCUBE','texCUBE','texCUBEbias','texCUBEgrad', + 'texCUBElod','texCUBEproj','transpose','trunc','WaveAllBitAnd', + 'WaveAllMax','WaveAllMin','WaveAllBitOr','WaveAllBitXor', + 'WaveAllEqual','WaveAllProduct','WaveAllSum','WaveAllTrue', + 'WaveAnyTrue','WaveBallot','WaveGetLaneCount','WaveGetLaneIndex', + 'WaveGetOrderedIndex','WaveIsHelperLane','WaveOnce', + 'WavePrefixProduct','WavePrefixSum','WaveReadFirstLane', + 'WaveReadLaneAt'), + prefix=r'\b', suffix=r'\b'), + Name.Builtin), # built-in functions + (words(( + 'SV_ClipDistance','SV_ClipDistance0','SV_ClipDistance1', + 'SV_Culldistance','SV_CullDistance0','SV_CullDistance1', + 'SV_Coverage','SV_Depth','SV_DepthGreaterEqual', + 'SV_DepthLessEqual','SV_DispatchThreadID','SV_DomainLocation', + 'SV_GroupID','SV_GroupIndex','SV_GroupThreadID','SV_GSInstanceID', + 'SV_InnerCoverage','SV_InsideTessFactor','SV_InstanceID', + 'SV_IsFrontFace','SV_OutputControlPointID','SV_Position', + 'SV_PrimitiveID','SV_RenderTargetArrayIndex','SV_SampleIndex', + 'SV_StencilRef','SV_TessFactor','SV_VertexID', + 'SV_ViewportArrayIndex'), + prefix=r'\b', suffix=r'\b'), + Name.Decorator), # system-value semantics + (r'\bSV_Target[0-7]?\b', Name.Decorator), + (words(( + 
'allow_uav_condition','branch','call','domain','earlydepthstencil', + 'fastopt','flatten','forcecase','instance','loop','maxtessfactor', + 'numthreads','outputcontrolpoints','outputtopology','partitioning', + 'patchconstantfunc','unroll'), + prefix=r'\b', suffix=r'\b'), + Name.Decorator), # attributes + (r'[a-zA-Z_]\w*', Name), + (r'\\$', Comment.Preproc), # backslash at end of line -- usually macro continuation + (r'\s+', Text), + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|' + r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # stray backslash + ], + } + + class PostScriptLexer(RegexLexer): """ Lexer for PostScript files. @@ -233,8 +461,8 @@ class AsymptoteLexer(RegexLexer): r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|' r'picture|position|real|revolution|slice|splitface|ticksgridT|' r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type), - ('[a-zA-Z_]\w*:(?!:)', Name.Label), - ('[a-zA-Z_]\w*', Name), + (r'[a-zA-Z_]\w*:(?!:)', Name.Label), + (r'[a-zA-Z_]\w*', Name), ], 'root': [ include('whitespace'), @@ -334,9 +562,9 @@ class GnuplotLexer(RegexLexer): (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump', 'she$ll', 'test$'), Keyword, 'noargs'), - ('([a-zA-Z_]\w*)(\s*)(=)', + (r'([a-zA-Z_]\w*)(\s*)(=)', bygroups(Name.Variable, Text, Operator), 'genericargs'), - ('([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)', + (r'([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)', bygroups(Name.Function, Text, Operator), 'genericargs'), (r'@[a-zA-Z_]\w*', Name.Constant), # macros (r';', Keyword), @@ -382,7 +610,7 @@ class GnuplotLexer(RegexLexer): (r'(\d+\.\d*|\.\d+)', Number.Float), (r'-?\d+', Number.Integer), ('[,.~!%^&*+=|?:<>/-]', Operator), - ('[{}()\[\]]', Punctuation), + (r'[{}()\[\]]', Punctuation), (r'(eq|ne)\b', Operator.Word), (r'([a-zA-Z_]\w*)(\s*)(\()', bygroups(Name.Function, Text, Punctuation)), diff --git a/pygments/lexers/haskell.py b/pygments/lexers/haskell.py index 1a2f2217..b3884f5c 100644 --- a/pygments/lexers/haskell.py +++ b/pygments/lexers/haskell.py @@ -12,12 +12,12 @@ import re from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \ - default, include + default, include, inherit from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Generic from pygments import unistring as uni -__all__ = ['HaskellLexer', 'IdrisLexer', 'AgdaLexer', 'CryptolLexer', +__all__ = ['HaskellLexer', 'HspecLexer', 'IdrisLexer', 'AgdaLexer', 'CryptolLexer', 'LiterateHaskellLexer', 'LiterateIdrisLexer', 'LiterateAgdaLexer', 'LiterateCryptolLexer', 'KokaLexer'] @@ -72,11 +72,14 @@ class HaskellLexer(RegexLexer): (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators # Numbers - (r'\d+[eE][+-]?\d+', Number.Float), - (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float), - (r'0[oO][0-7]+', Number.Oct), - (r'0[xX][\da-fA-F]+', Number.Hex), - (r'\d+', Number.Integer), + (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*_*[pP][+-]?\d(_*\d)*', Number.Float), + (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*\.[\da-fA-F](_*[\da-fA-F])*(_*[pP][+-]?\d(_*\d)*)?', Number.Float), + (r'\d(_*\d)*_*[eE][+-]?\d(_*\d)*', Number.Float), + (r'\d(_*\d)*\.\d(_*\d)*(_*[eE][+-]?\d(_*\d)*)?', Number.Float), + (r'0[bB]_*[01](_*[01])*', Number.Bin), + (r'0[oO]_*[0-7](_*[0-7])*', Number.Oct), + (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*', Number.Hex), + (r'\d(_*\d)*', Number.Integer), # 
Character/String Literals (r"'", String.Char, 'character'), (r'"', String, 'string'), @@ -154,6 +157,28 @@ class HaskellLexer(RegexLexer): } +class HspecLexer(HaskellLexer): + """ + A Haskell lexer with support for Hspec constructs. + + .. versionadded:: 2.4.0 + """ + + name = 'Hspec' + aliases = ['hspec'] + filenames = [] + mimetypes = [] + + tokens = { + 'root': [ + (r'(it\s*)("[^"]*")', bygroups(Text, String.Doc)), + (r'(describe\s*)("[^"]*")', bygroups(Text, String.Doc)), + (r'(context\s*)("[^"]*")', bygroups(Text, String.Doc)), + inherit, + ], + } + + class IdrisLexer(RegexLexer): """ A lexer for the dependently typed programming language Idris. @@ -677,10 +702,10 @@ class KokaLexer(RegexLexer): symbols = r'[$%&*+@!/\\^~=.:\-?|<>]+' # symbol boundary: an operator keyword should not be followed by any of these - sboundary = '(?!'+symbols+')' + sboundary = '(?!' + symbols + ')' # name boundary: a keyword should not be followed by any of these - boundary = '(?![\w/])' + boundary = r'(?![\w/])' # koka token abstractions tokenType = Name.Attribute diff --git a/pygments/lexers/haxe.py b/pygments/lexers/haxe.py index 6f5c3599..364ad344 100644 --- a/pygments/lexers/haxe.py +++ b/pygments/lexers/haxe.py @@ -43,7 +43,7 @@ class HaxeLexer(ExtendedRegexLexer): typeid = r'_*[A-Z]\w*' # combined ident and dollar and idtype - ident = r'(?:_*[a-z]\w*|_+[0-9]\w*|' + typeid + '|_+|\$\w+)' + ident = r'(?:_*[a-z]\w*|_+[0-9]\w*|' + typeid + r'|_+|\$\w+)' binop = (r'(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|' r'!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|' @@ -182,7 +182,7 @@ class HaxeLexer(ExtendedRegexLexer): (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float), (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float), (r'[0-9]+\.[0-9]+', Number.Float), - (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float), + (r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float), # Int (r'0x[0-9a-fA-F]+', Number.Hex), @@ -219,7 +219,7 @@ class HaxeLexer(ExtendedRegexLexer): (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')), (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')), (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')), - (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, ('#pop', 'preproc-expr-chain')), + (r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, ('#pop', 'preproc-expr-chain')), # Int (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'preproc-expr-chain')), @@ -456,7 +456,7 @@ class HaxeLexer(ExtendedRegexLexer): (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')), (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')), (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'expr-chain')), - (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, ('#pop', 'expr-chain')), + (r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, ('#pop', 'expr-chain')), # Int (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'expr-chain')), @@ -711,7 +711,7 @@ class HaxeLexer(ExtendedRegexLexer): (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, '#pop'), (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, '#pop'), (r'[0-9]+\.[0-9]+', Number.Float, '#pop'), - (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, '#pop'), + (r'[0-9]+\.(?!' 
+ ident + r'|\.\.)', Number.Float, '#pop'), # Int (r'0x[0-9a-fA-F]+', Number.Hex, '#pop'), diff --git a/pygments/lexers/html.py b/pygments/lexers/html.py index 73f020fa..091379ce 100644 --- a/pygments/lexers/html.py +++ b/pygments/lexers/html.py @@ -220,7 +220,7 @@ class XmlLexer(RegexLexer): (r'/?\s*>', Name.Tag, '#pop'), ], 'attr': [ - ('\s+', Text), + (r'\s+', Text), ('".*?"', String, '#pop'), ("'.*?'", String, '#pop'), (r'[^\s>]+', String, '#pop'), @@ -313,7 +313,7 @@ class HamlLexer(ExtendedRegexLexer): include('css'), (r'%[\w:-]+', Name.Tag, 'tag'), (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'), - (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)', + (r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)', bygroups(Comment, Comment.Special, Comment), '#pop'), (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'), @@ -330,8 +330,8 @@ class HamlLexer(ExtendedRegexLexer): 'tag': [ include('css'), - (r'\{(,\n|' + _dot + ')*?\}', using(RubyLexer)), - (r'\[' + _dot + '*?\]', using(RubyLexer)), + (r'\{(,\n|' + _dot + r')*?\}', using(RubyLexer)), + (r'\[' + _dot + r'*?\]', using(RubyLexer)), (r'\(', Text, 'html-attributes'), (r'/[ \t]*\n', Punctuation, '#pop:2'), (r'[<>]{1,2}(?=[ \t=])', Punctuation), @@ -340,7 +340,7 @@ class HamlLexer(ExtendedRegexLexer): 'plain': [ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text), - (r'(#\{)(' + _dot + '*?)(\})', + (r'(#\{)(' + _dot + r'*?)(\})', bygroups(String.Interpol, using(RubyLexer), String.Interpol)), (r'\n', Text, 'root'), ], @@ -373,7 +373,7 @@ class HamlLexer(ExtendedRegexLexer): 'filter-block': [ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator), - (r'(#\{)(' + _dot + '*?)(\})', + (r'(#\{)(' + _dot + r'*?)(\})', bygroups(String.Interpol, using(RubyLexer), String.Interpol)), (r'\n', Text, 'root'), ], @@ -422,7 +422,7 @@ class ScamlLexer(ExtendedRegexLexer): include('css'), (r'%[\w:-]+', Name.Tag, 'tag'), (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'), - (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)', + (r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)', bygroups(Comment, Comment.Special, Comment), '#pop'), (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'), @@ -442,8 +442,8 @@ class ScamlLexer(ExtendedRegexLexer): 'tag': [ include('css'), - (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)), - (r'\[' + _dot + '*?\]', using(ScalaLexer)), + (r'\{(,\n|' + _dot + r')*?\}', using(ScalaLexer)), + (r'\[' + _dot + r'*?\]', using(ScalaLexer)), (r'\(', Text, 'html-attributes'), (r'/[ \t]*\n', Punctuation, '#pop:2'), (r'[<>]{1,2}(?=[ \t=])', Punctuation), @@ -452,7 +452,7 @@ class ScamlLexer(ExtendedRegexLexer): 'plain': [ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text), - (r'(#\{)(' + _dot + '*?)(\})', + (r'(#\{)(' + _dot + r'*?)(\})', bygroups(String.Interpol, using(ScalaLexer), String.Interpol)), (r'\n', Text, 'root'), ], @@ -485,7 +485,7 @@ class ScamlLexer(ExtendedRegexLexer): 'filter-block': [ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator), - (r'(#\{)(' + _dot + '*?)(\})', + (r'(#\{)(' + _dot + r'*?)(\})', bygroups(String.Interpol, using(ScalaLexer), String.Interpol)), (r'\n', Text, 'root'), ], @@ -530,7 +530,7 @@ class PugLexer(ExtendedRegexLexer): 'content': [ include('css'), (r'!!!' 
+ _dot + r'*\n', Name.Namespace, '#pop'), - (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)', + (r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)', bygroups(Comment, Comment.Special, Comment), '#pop'), (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'), @@ -551,8 +551,8 @@ class PugLexer(ExtendedRegexLexer): 'tag': [ include('css'), - (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)), - (r'\[' + _dot + '*?\]', using(ScalaLexer)), + (r'\{(,\n|' + _dot + r')*?\}', using(ScalaLexer)), + (r'\[' + _dot + r'*?\]', using(ScalaLexer)), (r'\(', Text, 'html-attributes'), (r'/[ \t]*\n', Punctuation, '#pop:2'), (r'[<>]{1,2}(?=[ \t=])', Punctuation), @@ -561,7 +561,7 @@ class PugLexer(ExtendedRegexLexer): 'plain': [ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text), - (r'(#\{)(' + _dot + '*?)(\})', + (r'(#\{)(' + _dot + r'*?)(\})', bygroups(String.Interpol, using(ScalaLexer), String.Interpol)), (r'\n', Text, 'root'), ], @@ -594,7 +594,7 @@ class PugLexer(ExtendedRegexLexer): 'filter-block': [ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator), - (r'(#\{)(' + _dot + '*?)(\})', + (r'(#\{)(' + _dot + r'*?)(\})', bygroups(String.Interpol, using(ScalaLexer), String.Interpol)), (r'\n', Text, 'root'), ], diff --git a/pygments/lexers/idl.py b/pygments/lexers/idl.py index 2fc39318..87cafe6a 100644 --- a/pygments/lexers/idl.py +++ b/pygments/lexers/idl.py @@ -53,7 +53,7 @@ class IDLLexer(RegexLexer): 'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder', 'bytscl', 'caldat', 'calendar', 'call_external', 'call_function', 'call_method', 'call_procedure', 'canny', - 'catch', 'cd', 'cdf_\w*', 'ceil', 'chebyshev', + 'catch', 'cd', r'cdf_\w*', 'ceil', 'chebyshev', 'check_math', 'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen', 'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts', @@ -87,7 +87,7 @@ class IDLLexer(RegexLexer): 'dlm_load', 'dlm_register', 'doc_library', 'double', 'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec', 'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn', - 'eof', 'eos_\w*', 'erase', 'erf', 'erfc', 'erfcx', + 'eof', r'eos_\w*', 'erase', 'erf', 'erfc', 'erfcx', 'erode', 'errorplot', 'errplot', 'estimator_filter', 'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint', 'extrac', 'extract_slice', 'factorial', 'fft', 'filepath', @@ -104,11 +104,11 @@ class IDLLexer(RegexLexer): 'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv', 'getwindows', 'get_drive_list', 'get_dxf_objects', 'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size', - 'greg2jul', 'grib_\w*', 'grid3', 'griddata', + 'greg2jul', r'grib_\w*', 'grid3', 'griddata', 'grid_input', 'grid_tps', 'gs_iter', - 'h5[adfgirst]_\w*', 'h5_browser', 'h5_close', + r'h5[adfgirst]_\w*', 'h5_browser', 'h5_close', 'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse', - 'hanning', 'hash', 'hdf_\w*', 'heap_free', + 'hanning', 'hash', r'hdf_\w*', 'heap_free', 'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save', 'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal', 'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int', @@ -156,7 +156,7 @@ class IDLLexer(RegexLexer): 'modifyct', 'moment', 'morph_close', 'morph_distance', 'morph_gradient', 'morph_hitormiss', 'morph_open', 'morph_thin', 'morph_tophat', 'multi', 'm_correlate', - 'ncdf_\w*', 'newton', 'noise_hurl', 'noise_pick', + r'ncdf_\w*', 'newton', 'noise_hurl', 'noise_pick', 'noise_scatter', 'noise_slur', 'norm', 'n_elements', 'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy', 'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid', diff --git 
a/pygments/lexers/igor.py b/pygments/lexers/igor.py index 1a21fe87..e2e2cdfa 100644 --- a/pygments/lexers/igor.py +++ b/pygments/lexers/igor.py @@ -5,7 +5,7 @@ Lexers for Igor Pro. - :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2018 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -49,51 +49,71 @@ class IgorLexer(RegexLexer): ) operations = ( 'Abort', 'AddFIFOData', 'AddFIFOVectData', 'AddMovieAudio', 'AddMovieFrame', - 'AdoptFiles', 'APMath', 'Append', 'AppendImage', 'AppendLayoutObject', - 'AppendMatrixContour', 'AppendText', 'AppendToGizmo', 'AppendToGraph', - 'AppendToLayout', 'AppendToTable', 'AppendXYZContour', 'AutoPositionWindow', - 'BackgroundInfo', 'Beep', 'BoundingBall', 'BoxSmooth', 'BrowseURL', 'BuildMenu', - 'Button', 'cd', 'Chart', 'CheckBox', 'CheckDisplayed', 'ChooseColor', 'Close', - 'CloseHelp', 'CloseMovie', 'CloseProc', 'ColorScale', 'ColorTab2Wave', - 'Concatenate', 'ControlBar', 'ControlInfo', 'ControlUpdate', - 'ConvertGlobalStringTextEncoding', 'ConvexHull', 'Convolve', 'CopyFile', - 'CopyFolder', 'CopyScales', 'Correlate', 'CreateAliasShortcut', 'CreateBrowser', - 'Cross', 'CtrlBackground', 'CtrlFIFO', 'CtrlNamedBackground', 'Cursor', - 'CurveFit', 'CustomControl', 'CWT', 'Debugger', 'DebuggerOptions', 'DefaultFont', - 'DefaultGuiControls', 'DefaultGuiFont', 'DefaultTextEncoding', 'DefineGuide', - 'DelayUpdate', 'DeleteAnnotations', 'DeleteFile', 'DeleteFolder', 'DeletePoints', - 'Differentiate', 'dir', 'Display', 'DisplayHelpTopic', 'DisplayProcedure', - 'DoAlert', 'DoIgorMenu', 'DoUpdate', 'DoWindow', 'DoXOPIdle', 'DPSS', - 'DrawAction', 'DrawArc', 'DrawBezier', 'DrawLine', 'DrawOval', 'DrawPICT', - 'DrawPoly', 'DrawRect', 'DrawRRect', 'DrawText', 'DrawUserShape', 'DSPDetrend', - 'DSPPeriodogram', 'Duplicate', 'DuplicateDataFolder', 'DWT', 'EdgeStats', 'Edit', - 'ErrorBars', 'EstimatePeakSizes', 'Execute', 'ExecuteScriptText', + 'AddWavesToBoxPlot', 'AddWavesToViolinPlot', 'AdoptFiles', 'APMath', 'Append', + 'AppendBoxPlot', 'AppendImage', 'AppendLayoutObject', 'AppendMatrixContour', + 'AppendText', 'AppendToGizmo', 'AppendToGraph', 'AppendToLayout', 'AppendToTable', + 'AppendViolinPlot', 'AppendXYZContour', 'AutoPositionWindow', + 'AxonTelegraphFindServers', 'BackgroundInfo', 'Beep', 'BoundingBall', 'BoxSmooth', + 'BrowseURL', 'BuildMenu', 'Button', 'cd', 'Chart', 'CheckBox', 'CheckDisplayed', + 'ChooseColor', 'Close', 'CloseHelp', 'CloseMovie', 'CloseProc', 'ColorScale', + 'ColorTab2Wave', 'Concatenate', 'ControlBar', 'ControlInfo', 'ControlUpdate', + 'ConvertGlobalStringTextEncoding', 'ConvexHull', 'Convolve', 'CopyDimLabels', + 'CopyFile', 'CopyFolder', 'CopyScales', 'Correlate', 'CreateAliasShortcut', + 'CreateBrowser', 'Cross', 'CtrlBackground', 'CtrlFIFO', 'CtrlNamedBackground', + 'Cursor', 'CurveFit', 'CustomControl', 'CWT', 'DAQmx_AI_SetupReader', + 'DAQmx_AO_SetOutputs', 'DAQmx_CTR_CountEdges', 'DAQmx_CTR_OutputPulse', + 'DAQmx_CTR_Period', 'DAQmx_CTR_PulseWidth', 'DAQmx_DIO_Config', + 'DAQmx_DIO_WriteNewData', 'DAQmx_Scan', 'DAQmx_WaveformGen', 'Debugger', + 'DebuggerOptions', 'DefaultFont', 'DefaultGuiControls', 'DefaultGuiFont', + 'DefaultTextEncoding', 'DefineGuide', 'DelayUpdate', 'DeleteAnnotations', + 'DeleteFile', 'DeleteFolder', 'DeletePoints', 'Differentiate', 'dir', 'Display', + 'DisplayHelpTopic', 'DisplayProcedure', 'DoAlert', 'DoIgorMenu', 'DoUpdate', + 'DoWindow', 'DoXOPIdle', 'DPSS', 'DrawAction', 'DrawArc', 'DrawBezier', + 'DrawLine', 'DrawOval', 
'DrawPICT', 'DrawPoly', 'DrawRect', 'DrawRRect', + 'DrawText', 'DrawUserShape', 'DSPDetrend', 'DSPPeriodogram', 'Duplicate', + 'DuplicateDataFolder', 'DWT', 'EdgeStats', 'Edit', 'ErrorBars', + 'EstimatePeakSizes', 'Execute', 'ExecuteScriptText', 'ExperimentInfo', 'ExperimentModified', 'ExportGizmo', 'Extract', 'FastGaussTransform', 'FastOp', - 'FBinRead', 'FBinWrite', 'FFT', 'FIFOStatus', 'FIFO2Wave', 'FilterFIR', + 'FBinRead', 'FBinWrite', 'FFT', 'FGetPos', 'FIFOStatus', 'FIFO2Wave', 'FilterFIR', 'FilterIIR', 'FindAPeak', 'FindContour', 'FindDuplicates', 'FindLevel', 'FindLevels', 'FindPeak', 'FindPointsInPoly', 'FindRoots', 'FindSequence', - 'FindValue', 'FPClustering', 'fprintf', 'FReadLine', 'FSetPos', 'FStatus', - 'FTPCreateDirectory', 'FTPDelete', 'FTPDownload', 'FTPUpload', 'FuncFit', - 'FuncFitMD', 'GBLoadWave', 'GetAxis', 'GetCamera', 'GetFileFolderInfo', + 'FindValue', 'FMaxFlat', 'FPClustering', 'fprintf', 'FReadLine', 'FSetPos', + 'FStatus', 'FTPCreateDirectory', 'FTPDelete', 'FTPDownload', 'FTPUpload', + 'FuncFit', 'FuncFitMD', 'GBLoadWave', 'GetAxis', 'GetCamera', 'GetFileFolderInfo', 'GetGizmo', 'GetLastUserMenuInfo', 'GetMarquee', 'GetMouse', 'GetSelection', - 'GetWindow', 'GPIBReadBinaryWave2', 'GPIBReadBinary2', 'GPIBReadWave2', - 'GPIBRead2', 'GPIBWriteBinaryWave2', 'GPIBWriteBinary2', 'GPIBWriteWave2', - 'GPIBWrite2', 'GPIB2', 'GraphNormal', 'GraphWaveDraw', 'GraphWaveEdit', 'Grep', - 'GroupBox', 'Hanning', 'HDF5CloseFile', 'HDF5CloseGroup', 'HDF5ConvertColors', - 'HDF5CreateFile', 'HDF5CreateGroup', 'HDF5CreateLink', 'HDF5Dump', - 'HDF5DumpErrors', 'HDF5DumpState', 'HDF5ListAttributes', 'HDF5ListGroup', - 'HDF5LoadData', 'HDF5LoadGroup', 'HDF5LoadImage', 'HDF5OpenFile', 'HDF5OpenGroup', - 'HDF5SaveData', 'HDF5SaveGroup', 'HDF5SaveImage', 'HDF5TestOperation', - 'HDF5UnlinkObject', 'HideIgorMenus', 'HideInfo', 'HideProcedures', 'HideTools', - 'HilbertTransform', 'Histogram', 'ICA', 'IFFT', 'ImageAnalyzeParticles', - 'ImageBlend', 'ImageBoundaryToMask', 'ImageEdgeDetection', 'ImageFileInfo', - 'ImageFilter', 'ImageFocus', 'ImageFromXYZ', 'ImageGenerateROIMask', 'ImageGLCM', + 'GetWindow', 'GISCreateVectorLayer', 'GISGetRasterInfo', + 'GISGetRegisteredFileInfo', 'GISGetVectorLayerInfo', 'GISLoadRasterData', + 'GISLoadVectorData', 'GISRasterizeVectorData', 'GISRegisterFile', + 'GISTransformCoords', 'GISUnRegisterFile', 'GISWriteFieldData', + 'GISWriteGeometryData', 'GISWriteRaster', 'GPIBReadBinaryWave2', + 'GPIBReadBinary2', 'GPIBReadWave2', 'GPIBRead2', 'GPIBWriteBinaryWave2', + 'GPIBWriteBinary2', 'GPIBWriteWave2', 'GPIBWrite2', 'GPIB2', 'GraphNormal', + 'GraphWaveDraw', 'GraphWaveEdit', 'Grep', 'GroupBox', 'Hanning', 'HDFInfo', + 'HDFReadImage', 'HDFReadSDS', 'HDFReadVset', 'HDF5CloseFile', 'HDF5CloseGroup', + 'HDF5ConvertColors', 'HDF5CreateFile', 'HDF5CreateGroup', 'HDF5CreateLink', + 'HDF5Dump', 'HDF5DumpErrors', 'HDF5DumpState', 'HDF5FlushFile', + 'HDF5ListAttributes', 'HDF5ListGroup', 'HDF5LoadData', 'HDF5LoadGroup', + 'HDF5LoadImage', 'HDF5OpenFile', 'HDF5OpenGroup', 'HDF5SaveData', 'HDF5SaveGroup', + 'HDF5SaveImage', 'HDF5TestOperation', 'HDF5UnlinkObject', 'HideIgorMenus', + 'HideInfo', 'HideProcedures', 'HideTools', 'HilbertTransform', 'Histogram', 'ICA', + 'IFFT', 'ImageAnalyzeParticles', 'ImageBlend', 'ImageBoundaryToMask', + 'ImageComposite', 'ImageEdgeDetection', 'ImageFileInfo', 'ImageFilter', + 'ImageFocus', 'ImageFromXYZ', 'ImageGenerateROIMask', 'ImageGLCM', 'ImageHistModification', 'ImageHistogram', 'ImageInterpolate', 'ImageLineProfile', 'ImageLoad', 
'ImageMorphology', 'ImageRegistration', 'ImageRemoveBackground', 'ImageRestore', 'ImageRotate', 'ImageSave', 'ImageSeedFill', 'ImageSkeleton3d', 'ImageSnake', 'ImageStats', 'ImageThreshold', 'ImageTransform', 'ImageUnwrapPhase', 'ImageWindow', 'IndexSort', 'InsertPoints', 'Integrate', 'IntegrateODE', 'Integrate2D', 'Interpolate2', 'Interpolate3D', 'Interp3DPath', + 'ITCCloseAll2', 'ITCCloseDevice2', 'ITCConfigAllChannels2', + 'ITCConfigChannelReset2', 'ITCConfigChannelUpload2', 'ITCConfigChannel2', + 'ITCFIFOAvailableAll2', 'ITCFIFOAvailable2', 'ITCGetAllChannelsConfig2', + 'ITCGetChannelConfig2', 'ITCGetCurrentDevice2', 'ITCGetDeviceInfo2', + 'ITCGetDevices2', 'ITCGetErrorString2', 'ITCGetSerialNumber2', 'ITCGetState2', + 'ITCGetVersions2', 'ITCInitialize2', 'ITCOpenDevice2', 'ITCReadADC2', + 'ITCReadDigital2', 'ITCReadTimer2', 'ITCSelectDevice2', 'ITCSetDAC2', + 'ITCSetGlobals2', 'ITCSetModes2', 'ITCSetState2', 'ITCStartAcq2', 'ITCStopAcq2', + 'ITCUpdateFIFOPositionAll2', 'ITCUpdateFIFOPosition2', 'ITCWriteDigital2', 'JCAMPLoadWave', 'JointHistogram', 'KillBackground', 'KillControl', 'KillDataFolder', 'KillFIFO', 'KillFreeAxis', 'KillPath', 'KillPICTs', 'KillStrings', 'KillVariables', 'KillWaves', 'KillWindow', 'KMeans', 'Label', @@ -104,39 +124,48 @@ class IgorLexer(RegexLexer): 'MatrixFilter', 'MatrixGaussJ', 'MatrixGLM', 'MatrixInverse', 'MatrixLinearSolve', 'MatrixLinearSolveTD', 'MatrixLLS', 'MatrixLUBkSub', 'MatrixLUD', 'MatrixLUDTD', 'MatrixMultiply', 'MatrixOP', 'MatrixSchur', 'MatrixSolve', 'MatrixSVBkSub', - 'MatrixSVD', 'MatrixTranspose', 'MeasureStyledText', 'MLLoadWave', 'Modify', - 'ModifyBrowser', 'ModifyCamera', 'ModifyContour', 'ModifyControl', - 'ModifyControlList', 'ModifyFreeAxis', 'ModifyGizmo', 'ModifyGraph', - 'ModifyImage', 'ModifyLayout', 'ModifyPanel', 'ModifyTable', 'ModifyWaterfall', - 'MoveDataFolder', 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow', - 'MoveVariable', 'MoveWave', 'MoveWindow', 'MultiTaperPSD', - 'MultiThreadingControl', 'NeuralNetworkRun', 'NeuralNetworkTrain', 'NewCamera', - 'NewDataFolder', 'NewFIFO', 'NewFIFOChan', 'NewFreeAxis', 'NewGizmo', 'NewImage', - 'NewLayout', 'NewMovie', 'NewNotebook', 'NewPanel', 'NewPath', 'NewWaterfall', - 'NI4882', 'Note', 'Notebook', 'NotebookAction', 'Open', 'OpenHelp', - 'OpenNotebook', 'Optimize', 'ParseOperationTemplate', 'PathInfo', 'PauseForUser', - 'PauseUpdate', 'PCA', 'PlayMovie', 'PlayMovieAction', 'PlaySound', - 'PopupContextualMenu', 'PopupMenu', 'Preferences', 'PrimeFactors', 'Print', - 'printf', 'PrintGraphs', 'PrintLayout', 'PrintNotebook', 'PrintSettings', - 'PrintTable', 'Project', 'PulseStats', 'PutScrapText', 'pwd', 'Quit', - 'RatioFromNumber', 'Redimension', 'Remove', 'RemoveContour', 'RemoveFromGizmo', - 'RemoveFromGraph', 'RemoveFromLayout', 'RemoveFromTable', 'RemoveImage', - 'RemoveLayoutObjects', 'RemovePath', 'Rename', 'RenameDataFolder', 'RenamePath', - 'RenamePICT', 'RenameWindow', 'ReorderImages', 'ReorderTraces', 'ReplaceText', - 'ReplaceWave', 'Resample', 'ResumeUpdate', 'Reverse', 'Rotate', 'Save', - 'SaveData', 'SaveExperiment', 'SaveGraphCopy', 'SaveNotebook', + 'MatrixSVD', 'MatrixTranspose', 'MCC_FindServers', 'MeasureStyledText', + 'MFR_CheckForNewBricklets', + 'MFR_CloseResultFile', 'MFR_CreateOverviewTable', 'MFR_GetBrickletCount', + 'MFR_GetBrickletData', 'MFR_GetBrickletDeployData', 'MFR_GetBrickletMetaData', + 'MFR_GetBrickletRawData', 'MFR_GetReportTemplate', 'MFR_GetResultFileMetaData', + 'MFR_GetResultFileName', 'MFR_GetVernissageVersion', 'MFR_GetVersion', 
+ 'MFR_GetXOPErrorMessage', 'MFR_OpenResultFile', + 'MLLoadWave', 'Modify', 'ModifyBoxPlot', 'ModifyBrowser', 'ModifyCamera', + 'ModifyContour', 'ModifyControl', 'ModifyControlList', 'ModifyFreeAxis', + 'ModifyGizmo', 'ModifyGraph', 'ModifyImage', 'ModifyLayout', 'ModifyPanel', + 'ModifyTable', 'ModifyViolinPlot', 'ModifyWaterfall', 'MoveDataFolder', + 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow', 'MoveVariable', + 'MoveWave', 'MoveWindow', 'MultiTaperPSD', 'MultiThreadingControl', + 'NC_CloseFile', 'NC_DumpErrors', 'NC_Inquire', 'NC_ListAttributes', + 'NC_ListObjects', 'NC_LoadData', 'NC_OpenFile', 'NeuralNetworkRun', + 'NeuralNetworkTrain', 'NewCamera', 'NewDataFolder', 'NewFIFO', 'NewFIFOChan', + 'NewFreeAxis', 'NewGizmo', 'NewImage', 'NewLayout', 'NewMovie', 'NewNotebook', + 'NewPanel', 'NewPath', 'NewWaterfall', 'NILoadWave', 'NI4882', 'Note', 'Notebook', + 'NotebookAction', 'Open', 'OpenHelp', 'OpenNotebook', 'Optimize', + 'ParseOperationTemplate', 'PathInfo', 'PauseForUser', 'PauseUpdate', 'PCA', + 'PlayMovie', 'PlayMovieAction', 'PlaySound', 'PopupContextualMenu', 'PopupMenu', + 'Preferences', 'PrimeFactors', 'Print', 'printf', 'PrintGraphs', 'PrintLayout', + 'PrintNotebook', 'PrintSettings', 'PrintTable', 'Project', 'PulseStats', + 'PutScrapText', 'pwd', 'Quit', 'RatioFromNumber', 'Redimension', 'Remez', + 'Remove', 'RemoveContour', 'RemoveFromGizmo', 'RemoveFromGraph', + 'RemoveFromLayout', 'RemoveFromTable', 'RemoveImage', 'RemoveLayoutObjects', + 'RemovePath', 'Rename', 'RenameDataFolder', 'RenamePath', 'RenamePICT', + 'RenameWindow', 'ReorderImages', 'ReorderTraces', 'ReplaceText', 'ReplaceWave', + 'Resample', 'ResumeUpdate', 'Reverse', 'Rotate', 'Save', 'SaveData', + 'SaveExperiment', 'SaveGizmoCopy', 'SaveGraphCopy', 'SaveNotebook', 'SavePackagePreferences', 'SavePICT', 'SaveTableCopy', 'SetActiveSubwindow', 'SetAxis', 'SetBackground', 'SetDashPattern', 'SetDataFolder', 'SetDimLabel', - 'SetDrawEnv', 'SetDrawLayer', 'SetFileFolderInfo', 'SetFormula', 'SetIgorHook', - 'SetIgorMenuMode', 'SetIgorOption', 'SetMarquee', 'SetProcessSleep', - 'SetRandomSeed', 'SetScale', 'SetVariable', 'SetWaveLock', 'SetWaveTextEncoding', - 'SetWindow', 'ShowIgorMenus', 'ShowInfo', 'ShowTools', 'Silent', 'Sleep', - 'Slider', 'Smooth', 'SmoothCustom', 'Sort', 'SortColumns', 'SoundInRecord', - 'SoundInSet', 'SoundInStartChart', 'SoundInStatus', 'SoundInStopChart', - 'SoundLoadWave', 'SoundSaveWave', 'SphericalInterpolate', 'SphericalTriangulate', - 'SplitString', 'SplitWave', 'sprintf', 'sscanf', 'Stack', 'StackWindows', - 'StatsAngularDistanceTest', 'StatsANOVA1Test', 'StatsANOVA2NRTest', - 'StatsANOVA2RMTest', 'StatsANOVA2Test', 'StatsChiTest', + 'SetDrawEnv', 'SetDrawLayer', 'SetFileFolderInfo', 'SetFormula', 'SetIdlePeriod', + 'SetIgorHook', 'SetIgorMenuMode', 'SetIgorOption', 'SetMarquee', + 'SetProcessSleep', 'SetRandomSeed', 'SetScale', 'SetVariable', 'SetWaveLock', + 'SetWaveTextEncoding', 'SetWindow', 'ShowIgorMenus', 'ShowInfo', 'ShowTools', + 'Silent', 'Sleep', 'Slider', 'Smooth', 'SmoothCustom', 'Sort', 'SortColumns', + 'SoundInRecord', 'SoundInSet', 'SoundInStartChart', 'SoundInStatus', + 'SoundInStopChart', 'SoundLoadWave', 'SoundSaveWave', 'SphericalInterpolate', + 'SphericalTriangulate', 'SplitString', 'SplitWave', 'sprintf', 'SQLHighLevelOp', + 'sscanf', 'Stack', 'StackWindows', 'StatsAngularDistanceTest', 'StatsANOVA1Test', + 'StatsANOVA2NRTest', 'StatsANOVA2RMTest', 'StatsANOVA2Test', 'StatsChiTest', 'StatsCircularCorrelationTest', 'StatsCircularMeans', 
'StatsCircularMoments', 'StatsCircularTwoSampleTest', 'StatsCochranTest', 'StatsContingencyTable', 'StatsDIPTest', 'StatsDunnettTest', 'StatsFriedmanTest', 'StatsFTest', @@ -148,23 +177,30 @@ class IgorLexer(RegexLexer): 'StatsSignTest', 'StatsSRTest', 'StatsTTest', 'StatsTukeyTest', 'StatsVariancesTest', 'StatsWatsonUSquaredTest', 'StatsWatsonWilliamsTest', 'StatsWheelerWatsonTest', 'StatsWilcoxonRankTest', 'StatsWRCorrelationTest', - 'String', 'StructGet', 'StructPut', 'SumDimension', 'SumSeries', 'TabControl', - 'Tag', 'TextBox', 'ThreadGroupPutDF', 'ThreadStart', 'Tile', 'TileWindows', + 'STFT', 'String', 'StructFill', 'StructGet', 'StructPut', 'SumDimension', + 'SumSeries', 'TabControl', 'Tag', 'TDMLoadData', 'TDMSaveData', 'TextBox', + 'ThreadGroupPutDF', 'ThreadStart', 'TickWavesFromAxis', 'Tile', 'TileWindows', 'TitleBox', 'ToCommandLine', 'ToolsGrid', 'Triangulate3d', 'Unwrap', 'URLRequest', 'ValDisplay', 'Variable', 'VDTClosePort2', 'VDTGetPortList2', 'VDTGetStatus2', 'VDTOpenPort2', 'VDTOperationsPort2', 'VDTReadBinaryWave2', 'VDTReadBinary2', 'VDTReadHexWave2', 'VDTReadHex2', 'VDTReadWave2', 'VDTRead2', 'VDTTerminalPort2', 'VDTWriteBinaryWave2', 'VDTWriteBinary2', 'VDTWriteHexWave2', 'VDTWriteHex2', - 'VDTWriteWave2', 'VDTWrite2', 'VDT2', 'WaveMeanStdv', 'WaveStats', + 'VDTWriteWave2', 'VDTWrite2', 'VDT2', 'VISAControl', 'VISARead', 'VISAReadBinary', + 'VISAReadBinaryWave', 'VISAReadWave', 'VISAWrite', 'VISAWriteBinary', + 'VISAWriteBinaryWave', 'VISAWriteWave', 'WaveMeanStdv', 'WaveStats', 'WaveTransform', 'wfprintf', 'WignerTransform', 'WindowFunction', 'XLLoadWave' ) functions = ( 'abs', 'acos', 'acosh', 'AddListItem', 'AiryA', 'AiryAD', 'AiryB', 'AiryBD', 'alog', 'AnnotationInfo', 'AnnotationList', 'area', 'areaXY', 'asin', 'asinh', - 'atan', 'atanh', 'atan2', 'AxisInfo', 'AxisList', 'AxisValFromPixel', 'Besseli', - 'Besselj', 'Besselk', 'Bessely', 'beta', 'betai', 'BinarySearch', - 'BinarySearchInterp', 'binomial', 'binomialln', 'binomialNoise', 'cabs', - 'CaptureHistory', 'CaptureHistoryStart', 'ceil', 'cequal', 'char2num', + 'atan', 'atanh', 'atan2', 'AxisInfo', 'AxisList', 'AxisValFromPixel', + 'AxonTelegraphAGetDataNum', 'AxonTelegraphAGetDataString', + 'AxonTelegraphAGetDataStruct', 'AxonTelegraphGetDataNum', + 'AxonTelegraphGetDataString', 'AxonTelegraphGetDataStruct', + 'AxonTelegraphGetTimeoutMs', 'AxonTelegraphSetTimeoutMs', 'Base64Decode', + 'Base64Encode', 'Besseli', 'Besselj', 'Besselk', 'Bessely', 'beta', 'betai', + 'BinarySearch', 'BinarySearchInterp', 'binomial', 'binomialln', 'binomialNoise', + 'cabs', 'CaptureHistory', 'CaptureHistoryStart', 'ceil', 'cequal', 'char2num', 'chebyshev', 'chebyshevU', 'CheckName', 'ChildWindowList', 'CleanupName', 'cmplx', 'cmpstr', 'conj', 'ContourInfo', 'ContourNameList', 'ContourNameToWaveRef', 'ContourZ', 'ControlNameList', 'ConvertTextEncoding', 'cos', 'cosh', @@ -172,37 +208,70 @@ class IgorLexer(RegexLexer): 'CreationDate', 'csc', 'csch', 'CsrInfo', 'CsrWave', 'CsrWaveRef', 'CsrXWave', 'CsrXWaveRef', 'CTabList', 'DataFolderDir', 'DataFolderExists', 'DataFolderRefsEqual', 'DataFolderRefStatus', 'date', 'datetime', 'DateToJulian', - 'date2secs', 'Dawson', 'DDERequestString', 'defined', 'deltax', 'digamma', - 'dilogarithm', 'DimDelta', 'DimOffset', 'DimSize', 'ei', 'enoise', 'equalWaves', - 'erf', 'erfc', 'erfcw', 'exists', 'exp', 'ExpConvExp', 'ExpConvExpFit', - 'ExpConvExpFitBL', 'ExpConvExpFit1Shape', 'ExpConvExpFit1ShapeBL', 'ExpGauss', - 'ExpGaussFit', 'ExpGaussFitBL', 'ExpGaussFit1Shape', 'ExpGaussFit1ShapeBL', - 
'expInt', 'expIntegralE1', 'expNoise', 'factorial', 'fakedata', 'faverage', - 'faverageXY', 'FetchURL', 'FindDimLabel', 'FindListItem', 'floor', 'FontList', - 'FontSizeHeight', 'FontSizeStringWidth', 'FresnelCos', 'FresnelSin', + 'date2secs', 'Dawson', 'defined', 'deltax', 'digamma', 'dilogarithm', 'DimDelta', + 'DimOffset', 'DimSize', 'ei', 'enoise', 'equalWaves', 'erf', 'erfc', 'erfcw', + 'exists', 'exp', 'expInt', 'expIntegralE1', 'expNoise', 'factorial', 'Faddeeva', + 'fakedata', 'faverage', 'faverageXY', 'fDAQmx_AI_GetReader', + 'fDAQmx_AO_UpdateOutputs', 'fDAQmx_ConnectTerminals', 'fDAQmx_CTR_Finished', + 'fDAQmx_CTR_IsFinished', 'fDAQmx_CTR_IsPulseFinished', 'fDAQmx_CTR_ReadCounter', + 'fDAQmx_CTR_ReadWithOptions', 'fDAQmx_CTR_SetPulseFrequency', 'fDAQmx_CTR_Start', + 'fDAQmx_DeviceNames', 'fDAQmx_DIO_Finished', 'fDAQmx_DIO_PortWidth', + 'fDAQmx_DIO_Read', 'fDAQmx_DIO_Write', 'fDAQmx_DisconnectTerminals', + 'fDAQmx_ErrorString', 'fDAQmx_ExternalCalDate', 'fDAQmx_NumAnalogInputs', + 'fDAQmx_NumAnalogOutputs', 'fDAQmx_NumCounters', 'fDAQmx_NumDIOPorts', + 'fDAQmx_ReadChan', 'fDAQmx_ReadNamedChan', 'fDAQmx_ResetDevice', + 'fDAQmx_ScanGetAvailable', 'fDAQmx_ScanGetNextIndex', 'fDAQmx_ScanStart', + 'fDAQmx_ScanStop', 'fDAQmx_ScanWait', 'fDAQmx_ScanWaitWithTimeout', + 'fDAQmx_SelfCalDate', 'fDAQmx_SelfCalibration', 'fDAQmx_WaveformStart', + 'fDAQmx_WaveformStop', 'fDAQmx_WF_IsFinished', 'fDAQmx_WF_WaitUntilFinished', + 'fDAQmx_WriteChan', 'FetchURL', 'FindDimLabel', 'FindListItem', 'floor', + 'FontList', 'FontSizeHeight', 'FontSizeStringWidth', 'FresnelCos', 'FresnelSin', 'FuncRefInfo', 'FunctionInfo', 'FunctionList', 'FunctionPath', 'gamma', 'gammaEuler', 'gammaInc', 'gammaNoise', 'gammln', 'gammp', 'gammq', 'Gauss', - 'GaussFit', 'GaussFitBL', 'GaussFit1Width', 'GaussFit1WidthBL', 'Gauss1D', - 'Gauss2D', 'gcd', 'GetBrowserLine', 'GetBrowserSelection', 'GetDataFolder', - 'GetDataFolderDFR', 'GetDefaultFont', 'GetDefaultFontSize', 'GetDefaultFontStyle', - 'GetDimLabel', 'GetEnvironmentVariable', 'GetErrMessage', 'GetFormula', - 'GetIndependentModuleName', 'GetIndexedObjName', 'GetIndexedObjNameDFR', - 'GetKeyState', 'GetRTErrMessage', 'GetRTError', 'GetRTLocation', 'GetRTLocInfo', - 'GetRTStackInfo', 'GetScrapText', 'GetUserData', 'GetWavesDataFolder', - 'GetWavesDataFolderDFR', 'GizmoInfo', 'GizmoScale', 'gnoise', 'GrepList', - 'GrepString', 'GuideInfo', 'GuideNameList', 'Hash', 'hcsr', 'HDF5AttributeInfo', + 'Gauss1D', 'Gauss2D', 'gcd', 'GetBrowserLine', 'GetBrowserSelection', + 'GetDataFolder', 'GetDataFolderDFR', 'GetDefaultFont', 'GetDefaultFontSize', + 'GetDefaultFontStyle', 'GetDimLabel', 'GetEnvironmentVariable', 'GetErrMessage', + 'GetFormula', 'GetIndependentModuleName', 'GetIndexedObjName', + 'GetIndexedObjNameDFR', 'GetKeyState', 'GetRTErrMessage', 'GetRTError', + 'GetRTLocation', 'GetRTLocInfo', 'GetRTStackInfo', 'GetScrapText', 'GetUserData', + 'GetWavesDataFolder', 'GetWavesDataFolderDFR', 'GISGetAllFileFormats', + 'GISSRefsAreEqual', 'GizmoInfo', 'GizmoScale', 'gnoise', 'GrepList', 'GrepString', + 'GuideInfo', 'GuideNameList', 'Hash', 'hcsr', 'HDF5AttributeInfo', 'HDF5DatasetInfo', 'HDF5LibraryInfo', 'HDF5TypeInfo', 'hermite', 'hermiteGauss', 'HyperGNoise', 'HyperGPFQ', 'HyperG0F1', 'HyperG1F1', 'HyperG2F1', 'IgorInfo', 'IgorVersion', 'imag', 'ImageInfo', 'ImageNameList', 'ImageNameToWaveRef', - 'IndependentModuleList', 'IndexedDir', 'IndexedFile', 'Inf', 'Integrate1D', - 'interp', 'Interp2D', 'Interp3D', 'inverseERF', 'inverseERFC', 'ItemsInList', - 'JacobiCn', 
'JacobiSn', 'JulianToDate', 'Laguerre', 'LaguerreA', 'LaguerreGauss', - 'LambertW', 'LayoutInfo', 'leftx', 'LegendreA', 'limit', 'ListMatch', - 'ListToTextWave', 'ListToWaveRefWave', 'ln', 'log', 'logNormalNoise', - 'LorentzianFit', 'LorentzianFitBL', 'LorentzianFit1Width', - 'LorentzianFit1WidthBL', 'lorentzianNoise', 'LowerStr', 'MacroList', 'magsqr', - 'MandelbrotPoint', 'MarcumQ', 'MatrixCondition', 'MatrixDet', 'MatrixDot', - 'MatrixRank', 'MatrixTrace', 'max', 'mean', 'median', 'min', 'mod', 'ModDate', + 'IndependentModuleList', 'IndexedDir', 'IndexedFile', 'IndexToScale', 'Inf', + 'Integrate1D', 'interp', 'Interp2D', 'Interp3D', 'inverseERF', 'inverseERFC', + 'ItemsInList', 'JacobiCn', 'JacobiSn', 'JulianToDate', 'Laguerre', 'LaguerreA', + 'LaguerreGauss', 'LambertW', 'LayoutInfo', 'leftx', 'LegendreA', 'limit', + 'ListMatch', 'ListToTextWave', 'ListToWaveRefWave', 'ln', 'log', 'logNormalNoise', + 'lorentzianNoise', 'LowerStr', 'MacroList', 'magsqr', 'MandelbrotPoint', + 'MarcumQ', 'MatrixCondition', 'MatrixDet', 'MatrixDot', 'MatrixRank', + 'MatrixTrace', 'max', 'MCC_AutoBridgeBal', 'MCC_AutoFastComp', + 'MCC_AutoPipetteOffset', 'MCC_AutoSlowComp', 'MCC_AutoWholeCellComp', + 'MCC_GetBridgeBalEnable', 'MCC_GetBridgeBalResist', 'MCC_GetFastCompCap', + 'MCC_GetFastCompTau', 'MCC_GetHolding', 'MCC_GetHoldingEnable', 'MCC_GetMode', + 'MCC_GetNeutralizationCap', 'MCC_GetNeutralizationEnable', + 'MCC_GetOscKillerEnable', 'MCC_GetPipetteOffset', 'MCC_GetPrimarySignalGain', + 'MCC_GetPrimarySignalHPF', 'MCC_GetPrimarySignalLPF', 'MCC_GetRsCompBandwidth', + 'MCC_GetRsCompCorrection', 'MCC_GetRsCompEnable', 'MCC_GetRsCompPrediction', + 'MCC_GetSecondarySignalGain', 'MCC_GetSecondarySignalLPF', 'MCC_GetSlowCompCap', + 'MCC_GetSlowCompTau', 'MCC_GetSlowCompTauX20Enable', + 'MCC_GetSlowCurrentInjEnable', 'MCC_GetSlowCurrentInjLevel', + 'MCC_GetSlowCurrentInjSetlTime', 'MCC_GetWholeCellCompCap', + 'MCC_GetWholeCellCompEnable', 'MCC_GetWholeCellCompResist', + 'MCC_SelectMultiClamp700B', 'MCC_SetBridgeBalEnable', 'MCC_SetBridgeBalResist', + 'MCC_SetFastCompCap', 'MCC_SetFastCompTau', 'MCC_SetHolding', + 'MCC_SetHoldingEnable', 'MCC_SetMode', 'MCC_SetNeutralizationCap', + 'MCC_SetNeutralizationEnable', 'MCC_SetOscKillerEnable', 'MCC_SetPipetteOffset', + 'MCC_SetPrimarySignalGain', 'MCC_SetPrimarySignalHPF', 'MCC_SetPrimarySignalLPF', + 'MCC_SetRsCompBandwidth', 'MCC_SetRsCompCorrection', 'MCC_SetRsCompEnable', + 'MCC_SetRsCompPrediction', 'MCC_SetSecondarySignalGain', + 'MCC_SetSecondarySignalLPF', 'MCC_SetSlowCompCap', 'MCC_SetSlowCompTau', + 'MCC_SetSlowCompTauX20Enable', 'MCC_SetSlowCurrentInjEnable', + 'MCC_SetSlowCurrentInjLevel', 'MCC_SetSlowCurrentInjSetlTime', 'MCC_SetTimeoutMs', + 'MCC_SetWholeCellCompCap', 'MCC_SetWholeCellCompEnable', + 'MCC_SetWholeCellCompResist', 'mean', 'median', 'min', 'mod', 'ModDate', 'MPFXEMGPeak', 'MPFXExpConvExpPeak', 'MPFXGaussPeak', 'MPFXLorenzianPeak', 'MPFXVoigtPeak', 'NameOfWave', 'NaN', 'NewFreeDataFolder', 'NewFreeWave', 'norm', 'NormalizeUnicode', 'note', 'NumberByKey', 'numpnts', 'numtype', @@ -217,9 +286,30 @@ class IgorLexer(RegexLexer): 'SelectNumber', 'SelectString', 'SetEnvironmentVariable', 'sign', 'sin', 'sinc', 'sinh', 'sinIntegral', 'SortList', 'SpecialCharacterInfo', 'SpecialCharacterList', 'SpecialDirPath', 'SphericalBessJ', 'SphericalBessJD', 'SphericalBessY', - 'SphericalBessYD', 'SphericalHarmonics', 'sqrt', 'StartMSTimer', 'StatsBetaCDF', - 'StatsBetaPDF', 'StatsBinomialCDF', 'StatsBinomialPDF', 'StatsCauchyCDF', - 'StatsCauchyPDF', 
'StatsChiCDF', 'StatsChiPDF', 'StatsCMSSDCDF', + 'SphericalBessYD', 'SphericalHarmonics', 'SQLAllocHandle', 'SQLAllocStmt', + 'SQLBinaryWavesToTextWave', 'SQLBindCol', 'SQLBindParameter', 'SQLBrowseConnect', + 'SQLBulkOperations', 'SQLCancel', 'SQLCloseCursor', 'SQLColAttributeNum', + 'SQLColAttributeStr', 'SQLColumnPrivileges', 'SQLColumns', 'SQLConnect', + 'SQLDataSources', 'SQLDescribeCol', 'SQLDescribeParam', 'SQLDisconnect', + 'SQLDriverConnect', 'SQLDrivers', 'SQLEndTran', 'SQLError', 'SQLExecDirect', + 'SQLExecute', 'SQLFetch', 'SQLFetchScroll', 'SQLForeignKeys', 'SQLFreeConnect', + 'SQLFreeEnv', 'SQLFreeHandle', 'SQLFreeStmt', 'SQLGetConnectAttrNum', + 'SQLGetConnectAttrStr', 'SQLGetCursorName', 'SQLGetDataNum', 'SQLGetDataStr', + 'SQLGetDescFieldNum', 'SQLGetDescFieldStr', 'SQLGetDescRec', 'SQLGetDiagFieldNum', + 'SQLGetDiagFieldStr', 'SQLGetDiagRec', 'SQLGetEnvAttrNum', 'SQLGetEnvAttrStr', + 'SQLGetFunctions', 'SQLGetInfoNum', 'SQLGetInfoStr', 'SQLGetStmtAttrNum', + 'SQLGetStmtAttrStr', 'SQLGetTypeInfo', 'SQLMoreResults', 'SQLNativeSql', + 'SQLNumParams', 'SQLNumResultCols', 'SQLNumResultRowsIfKnown', + 'SQLNumRowsFetched', 'SQLParamData', 'SQLPrepare', 'SQLPrimaryKeys', + 'SQLProcedureColumns', 'SQLProcedures', 'SQLPutData', 'SQLReinitialize', + 'SQLRowCount', 'SQLSetConnectAttrNum', 'SQLSetConnectAttrStr', 'SQLSetCursorName', + 'SQLSetDescFieldNum', 'SQLSetDescFieldStr', 'SQLSetDescRec', 'SQLSetEnvAttrNum', + 'SQLSetEnvAttrStr', 'SQLSetPos', 'SQLSetStmtAttrNum', 'SQLSetStmtAttrStr', + 'SQLSpecialColumns', 'SQLStatistics', 'SQLTablePrivileges', 'SQLTables', + 'SQLTextWaveToBinaryWaves', 'SQLTextWaveTo2DBinaryWave', 'SQLUpdateBoundValues', + 'SQLXOPCheckState', 'SQL2DBinaryWaveToTextWave', 'sqrt', 'StartMSTimer', + 'StatsBetaCDF', 'StatsBetaPDF', 'StatsBinomialCDF', 'StatsBinomialPDF', + 'StatsCauchyCDF', 'StatsCauchyPDF', 'StatsChiCDF', 'StatsChiPDF', 'StatsCMSSDCDF', 'StatsCorrelation', 'StatsDExpCDF', 'StatsDExpPDF', 'StatsErlangCDF', 'StatsErlangPDF', 'StatsErrorPDF', 'StatsEValueCDF', 'StatsEValuePDF', 'StatsExpCDF', 'StatsExpPDF', 'StatsFCDF', 'StatsFPDF', 'StatsFriedmanCDF', @@ -250,19 +340,66 @@ class IgorLexer(RegexLexer): 'StopMSTimer', 'StringByKey', 'stringCRC', 'StringFromList', 'StringList', 'stringmatch', 'strlen', 'strsearch', 'StrVarOrDefault', 'str2num', 'StudentA', 'StudentT', 'sum', 'SVAR_Exists', 'TableInfo', 'TagVal', 'TagWaveRef', 'tan', - 'tanh', 'TextEncodingCode', 'TextEncodingName', 'TextFile', 'ThreadGroupCreate', + 'tango_close_device', 'tango_command_inout', 'tango_compute_image_proj', + 'tango_get_dev_attr_list', 'tango_get_dev_black_box', 'tango_get_dev_cmd_list', + 'tango_get_dev_status', 'tango_get_dev_timeout', 'tango_get_error_stack', + 'tango_open_device', 'tango_ping_device', 'tango_read_attribute', + 'tango_read_attributes', 'tango_reload_dev_interface', + 'tango_resume_attr_monitor', 'tango_set_attr_monitor_period', + 'tango_set_dev_timeout', 'tango_start_attr_monitor', 'tango_stop_attr_monitor', + 'tango_suspend_attr_monitor', 'tango_write_attribute', 'tango_write_attributes', + 'tanh', 'TDMAddChannel', 'TDMAddGroup', 'TDMAppendDataValues', + 'TDMAppendDataValuesTime', 'TDMChannelPropertyExists', 'TDMCloseChannel', + 'TDMCloseFile', 'TDMCloseGroup', 'TDMCreateChannelProperty', 'TDMCreateFile', + 'TDMCreateFileProperty', 'TDMCreateGroupProperty', 'TDMFilePropertyExists', + 'TDMGetChannelPropertyNames', 'TDMGetChannelPropertyNum', + 'TDMGetChannelPropertyStr', 'TDMGetChannelPropertyTime', + 'TDMGetChannelPropertyType', 'TDMGetChannels', 
'TDMGetChannelStringPropertyLen', + 'TDMGetDataType', 'TDMGetDataValues', 'TDMGetDataValuesTime', + 'TDMGetFilePropertyNames', 'TDMGetFilePropertyNum', 'TDMGetFilePropertyStr', + 'TDMGetFilePropertyTime', 'TDMGetFilePropertyType', 'TDMGetFileStringPropertyLen', + 'TDMGetGroupPropertyNames', 'TDMGetGroupPropertyNum', 'TDMGetGroupPropertyStr', + 'TDMGetGroupPropertyTime', 'TDMGetGroupPropertyType', 'TDMGetGroups', + 'TDMGetGroupStringPropertyLen', 'TDMGetLibraryErrorDescription', + 'TDMGetNumChannelProperties', 'TDMGetNumChannels', 'TDMGetNumDataValues', + 'TDMGetNumFileProperties', 'TDMGetNumGroupProperties', 'TDMGetNumGroups', + 'TDMGroupPropertyExists', 'TDMOpenFile', 'TDMOpenFileEx', 'TDMRemoveChannel', + 'TDMRemoveGroup', 'TDMReplaceDataValues', 'TDMReplaceDataValuesTime', + 'TDMSaveFile', 'TDMSetChannelPropertyNum', 'TDMSetChannelPropertyStr', + 'TDMSetChannelPropertyTime', 'TDMSetDataValues', 'TDMSetDataValuesTime', + 'TDMSetFilePropertyNum', 'TDMSetFilePropertyStr', 'TDMSetFilePropertyTime', + 'TDMSetGroupPropertyNum', 'TDMSetGroupPropertyStr', 'TDMSetGroupPropertyTime', + 'TextEncodingCode', 'TextEncodingName', 'TextFile', 'ThreadGroupCreate', 'ThreadGroupGetDF', 'ThreadGroupGetDFR', 'ThreadGroupRelease', 'ThreadGroupWait', 'ThreadProcessorCount', 'ThreadReturnValue', 'ticks', 'time', 'TraceFromPixel', - 'TraceInfo', 'TraceNameList', 'TraceNameToWaveRef', 'trunc', 'UniqueName', - 'UnPadString', 'UnsetEnvironmentVariable', 'UpperStr', 'URLDecode', 'URLEncode', - 'VariableList', 'Variance', 'vcsr', 'Voigt', 'VoigtFit', 'VoigtFitBL', - 'VoigtFit1Shape', 'VoigtFit1ShapeBL', 'VoigtFit1Shape1Width', - 'VoigtFit1Shape1WidthBL', 'VoigtFunc', 'WaveCRC', 'WaveDims', 'WaveExists', - 'WaveInfo', 'WaveList', 'WaveMax', 'WaveMin', 'WaveName', 'WaveRefIndexed', + 'TraceInfo', 'TraceNameList', 'TraceNameToWaveRef', 'TrimString', 'trunc', + 'UniqueName', 'UnPadString', 'UnsetEnvironmentVariable', 'UpperStr', 'URLDecode', + 'URLEncode', 'VariableList', 'Variance', 'vcsr', 'viAssertIntrSignal', + 'viAssertTrigger', 'viAssertUtilSignal', 'viClear', 'viClose', 'viDisableEvent', + 'viDiscardEvents', 'viEnableEvent', 'viFindNext', 'viFindRsrc', 'viGetAttribute', + 'viGetAttributeString', 'viGpibCommand', 'viGpibControlATN', 'viGpibControlREN', + 'viGpibPassControl', 'viGpibSendIFC', 'viIn8', 'viIn16', 'viIn32', 'viLock', + 'viMapAddress', 'viMapTrigger', 'viMemAlloc', 'viMemFree', 'viMoveIn8', + 'viMoveIn16', 'viMoveIn32', 'viMoveOut8', 'viMoveOut16', 'viMoveOut32', 'viOpen', + 'viOpenDefaultRM', 'viOut8', 'viOut16', 'viOut32', 'viPeek8', 'viPeek16', + 'viPeek32', 'viPoke8', 'viPoke16', 'viPoke32', 'viRead', 'viReadSTB', + 'viSetAttribute', 'viSetAttributeString', 'viStatusDesc', 'viTerminate', + 'viUnlock', 'viUnmapAddress', 'viUnmapTrigger', 'viUsbControlIn', + 'viUsbControlOut', 'viVxiCommandQuery', 'viWaitOnEvent', 'viWrite', 'VoigtFunc', + 'VoigtPeak', 'WaveCRC', 'WaveDims', 'WaveExists', 'WaveHash', 'WaveInfo', + 'WaveList', 'WaveMax', 'WaveMin', 'WaveName', 'WaveRefIndexed', 'WaveRefIndexedDFR', 'WaveRefsEqual', 'WaveRefWaveToList', 'WaveTextEncoding', 'WaveType', 'WaveUnits', 'WhichListItem', 'WinList', 'WinName', 'WinRecreation', - 'WinType', 'WMFindWholeWord', 'WNoise', 'xcsr', 'XWaveName', 'XWaveRefFromTrace', - 'x2pnt', 'zcsr', 'ZernikeR', 'zeta' + 'WinType', 'wnoise', 'xcsr', 'XWaveName', 'XWaveRefFromTrace', 'x2pnt', 'zcsr', + 'ZernikeR', 'zeromq_client_connect', 'zeromq_client_connect', + 'zeromq_client_recv', 'zeromq_client_recv', 'zeromq_client_send', + 'zeromq_client_send', 
'zeromq_handler_start', 'zeromq_handler_start', + 'zeromq_handler_stop', 'zeromq_handler_stop', 'zeromq_server_bind', + 'zeromq_server_bind', 'zeromq_server_recv', 'zeromq_server_recv', + 'zeromq_server_send', 'zeromq_server_send', 'zeromq_set', 'zeromq_set', + 'zeromq_stop', 'zeromq_stop', 'zeromq_test_callfunction', + 'zeromq_test_callfunction', 'zeromq_test_serializeWave', + 'zeromq_test_serializeWave', 'zeta' ) tokens = { diff --git a/pygments/lexers/inferno.py b/pygments/lexers/inferno.py index 5fc5a0ba..0d68856d 100644 --- a/pygments/lexers/inferno.py +++ b/pygments/lexers/inferno.py @@ -64,7 +64,7 @@ class LimboLexer(RegexLexer): (r'(byte|int|big|real|string|array|chan|list|adt' r'|fn|ref|of|module|self|type)\b', Keyword.Type), (r'(con|iota|nil)\b', Keyword.Constant), - ('[a-zA-Z_]\w*', Name), + (r'[a-zA-Z_]\w*', Name), ], 'statement' : [ include('whitespace'), diff --git a/pygments/lexers/int_fiction.py b/pygments/lexers/int_fiction.py index f280a56d..57ace259 100644 --- a/pygments/lexers/int_fiction.py +++ b/pygments/lexers/int_fiction.py @@ -911,7 +911,7 @@ class Tads3Lexer(RegexLexer): 'block?/root': [ (r'\{', Punctuation, ('#pop', 'block')), include('whitespace'), - (r'(?=[[\'"<(:])', Text, # It might be a VerbRule macro. + (r'(?=[\[\'"<(:])', Text, # It might be a VerbRule macro. ('#pop', 'object-body/no-braces', 'grammar', 'grammar-rules')), # It might be a macro like DefineAction. default(('#pop', 'object-body/no-braces')) diff --git a/pygments/lexers/iolang.py b/pygments/lexers/iolang.py index bbc17faf..26f44e27 100644 --- a/pygments/lexers/iolang.py +++ b/pygments/lexers/iolang.py @@ -49,7 +49,7 @@ class IoLexer(RegexLexer): # names (r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b', Name.Builtin), - ('[a-zA-Z_]\w*', Name), + (r'[a-zA-Z_]\w*', Name), # numbers (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), (r'\d+', Number.Integer) diff --git a/pygments/lexers/j.py b/pygments/lexers/j.py index 434964fe..46037820 100644 --- a/pygments/lexers/j.py +++ b/pygments/lexers/j.py @@ -52,13 +52,13 @@ class JLexer(RegexLexer): Name.Function, 'explicitDefinition'), # Flow Control - (words(('for_', 'goto_', 'label_'), suffix=validName+'\.'), Name.Label), + (words(('for_', 'goto_', 'label_'), suffix=validName+r'\.'), Name.Label), (words(( 'assert', 'break', 'case', 'catch', 'catchd', 'catcht', 'continue', 'do', 'else', 'elseif', 'end', 'fcase', 'for', 'if', 'return', 'select', 'throw', 'try', 'while', 'whilst', - ), suffix='\.'), Name.Label), + ), suffix=r'\.'), Name.Label), # Variable Names (validName, Name.Variable), diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py index 862535c9..0507375f 100644 --- a/pygments/lexers/javascript.py +++ b/pygments/lexers/javascript.py @@ -453,6 +453,10 @@ class TypeScriptLexer(RegexLexer): flags = re.DOTALL | re.MULTILINE + # Higher priority than the TypoScriptLexer, as TypeScript is far more + # common these days + priority = 0.5 + tokens = { 'commentsandwhitespace': [ (r'\s+', Text), @@ -534,12 +538,6 @@ class TypeScriptLexer(RegexLexer): ], } - def analyse_text(text): - if re.search('^(import.+(from\s+)?["\']|' - '(export\s*)?(interface|class|function)\s+)', - text, re.MULTILINE): - return 1.0 - class LassoLexer(RegexLexer): """ @@ -1015,7 +1013,7 @@ class ObjectiveJLexer(RegexLexer): } def analyse_text(text): - if re.search('^\s*@import\s+[<"]', text, re.MULTILINE): + if re.search(r'^\s*@import\s+[<"]', text, re.MULTILINE): # special directive found in most Objective-J files return True return False @@ 
-1500,8 +1498,10 @@ class JuttleLexer(RegexLexer): (r'^(?=\s|/)', Text, 'slashstartsregex'), include('commentsandwhitespace'), (r':\d{2}:\d{2}:\d{2}(\.\d*)?:', String.Moment), - (r':(now|beginning|end|forever|yesterday|today|tomorrow|(\d+(\.\d*)?|\.\d+)(ms|[smhdwMy])?):', String.Moment), - (r':\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(\.\d*)?)?(Z|[+-]\d{2}:\d{2}|[+-]\d{4})?:', String.Moment), + (r':(now|beginning|end|forever|yesterday|today|tomorrow|' + r'(\d+(\.\d*)?|\.\d+)(ms|[smhdwMy])?):', String.Moment), + (r':\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(\.\d*)?)?' + r'(Z|[+-]\d{2}:\d{2}|[+-]\d{4})?:', String.Moment), (r':((\d+(\.\d*)?|\.\d+)[ ]+)?(millisecond|second|minute|hour|day|week|month|year)[s]?' r'(([ ]+and[ ]+(\d+[ ]+)?(millisecond|second|minute|hour|day|week|month|year)[s]?)' r'|[ ]+(ago|from[ ]+now))*:', String.Moment), diff --git a/pygments/lexers/julia.py b/pygments/lexers/julia.py index 67453aba..84ae1ae7 100644 --- a/pygments/lexers/julia.py +++ b/pygments/lexers/julia.py @@ -48,6 +48,7 @@ class JuliaLexer(RegexLexer): # keywords (r'in\b', Keyword.Pseudo), + (r'isa\b', Keyword.Pseudo), (r'(true|false)\b', Keyword.Constant), (r'(local|global|const)\b', Keyword.Declaration), (words([ @@ -55,7 +56,8 @@ class JuliaLexer(RegexLexer): 'baremodule', 'begin', 'bitstype', 'break', 'catch', 'ccall', 'continue', 'do', 'else', 'elseif', 'end', 'export', 'finally', 'for', 'if', 'import', 'importall', 'let', 'macro', 'module', - 'quote', 'return', 'try', 'using', 'while'], + 'mutable', 'primitive', 'quote', 'return', 'struct', 'try', + 'using', 'while'], suffix=r'\b'), Keyword), # NOTE @@ -146,7 +148,7 @@ class JuliaLexer(RegexLexer): (words([ # prec-assignment u'=', u':=', u'+=', u'-=', u'*=', u'/=', u'//=', u'.//=', u'.*=', u'./=', - u'\=', u'.\=', u'^=', u'.^=', u'÷=', u'.÷=', u'%=', u'.%=', u'|=', u'&=', + u'\\=', u'.\\=', u'^=', u'.^=', u'÷=', u'.÷=', u'%=', u'.%=', u'|=', u'&=', u'$=', u'=>', u'<<=', u'>>=', u'>>>=', u'~', u'.+=', u'.-=', # prec-conditional u'?', @@ -181,7 +183,7 @@ class JuliaLexer(RegexLexer): # prec-dot u'.', # unary op - u'+', u'-', u'!', u'~', u'√', u'∛', u'∜' + u'+', u'-', u'!', u'√', u'∛', u'∜' ]), Operator), # chars diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py index f4392839..8de6e9f2 100644 --- a/pygments/lexers/jvm.py +++ b/pygments/lexers/jvm.py @@ -21,7 +21,7 @@ from pygments import unistring as uni __all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer', 'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'ClojureScriptLexer', 'KotlinLexer', 'XtendLexer', 'AspectJLexer', 'CeylonLexer', - 'PigLexer', 'GoloLexer', 'JasminLexer'] + 'PigLexer', 'GoloLexer', 'JasminLexer', 'SarlLexer'] class JavaLexer(RegexLexer): @@ -257,7 +257,7 @@ class ScalaLexer(RegexLexer): u'\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]') idrest = u'%s(?:%s|[0-9])*(?:(?<=_)%s)?' 
% (letter, letter, op) - letter_letter_digit = u'%s(?:%s|\d)*' % (letter, letter) + letter_letter_digit = u'%s(?:%s|\\d)*' % (letter, letter) tokens = { 'root': [ @@ -689,7 +689,7 @@ class IokeLexer(RegexLexer): # functions (u'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|' u'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)' - u'(?![\w!:?])', Name.Function), + u'(?![\\w!:?])', Name.Function), # Numbers (r'-?0[xX][0-9a-fA-F]+', Number.Hex), @@ -801,7 +801,7 @@ class ClojureLexer(RegexLexer): # TODO / should divide keywords/symbols into namespace/rest # but that's hard, so just pretend / is part of the name - valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+' + valid_name = r'(?!#)[\w!$%*+<=>?/.#|-]+' tokens = { 'root': [ @@ -1006,7 +1006,7 @@ class KotlinLexer(RegexLexer): .. versionadded:: 1.5 """ - + name = 'Kotlin' aliases = ['kotlin'] filenames = ['*.kt'] @@ -1017,15 +1017,22 @@ class KotlinLexer(RegexLexer): kt_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' + '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*') - kt_id = '(' + kt_name + '|`' + kt_name + '`)' + + kt_space_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' + + '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf', + 'Mn', 'Mc', 'Zs') + ',-]*') + + kt_id = '(' + kt_name + '|`' + kt_space_name + '`)' tokens = { 'root': [ (r'^\s*\[.*?\]', Name.Attribute), (r'[^\S\n]+', Text), + (r'\s+', Text), (r'\\\n', Text), # line continuation (r'//.*?\n', Comment.Single), (r'/[*].*?[*]/', Comment.Multiline), + (r'""".*?"""', String), (r'\n', Text), (r'::|!!|\?[:.]', Operator), (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation), @@ -1035,11 +1042,14 @@ class KotlinLexer(RegexLexer): (r"'\\.'|'[^\\]'", String.Char), (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|" r"0[xX][0-9a-fA-F]+[Ll]?", Number), - (r'(class)(\s+)(object)', bygroups(Keyword, Text, Keyword)), + (r'(object)(\s+)(:)(\s+)', bygroups(Keyword, Text, Punctuation, Text), 'class'), + (r'(companion)(\s+)(object)', bygroups(Keyword, Text, Keyword)), (r'(class|interface|object)(\s+)', bygroups(Keyword, Text), 'class'), (r'(package|import)(\s+)', bygroups(Keyword, Text), 'package'), + (r'(val|var)(\s+)([(])', bygroups(Keyword, Text, Punctuation), 'property_dec'), (r'(val|var)(\s+)', bygroups(Keyword, Text), 'property'), (r'(fun)(\s+)', bygroups(Keyword, Text), 'function'), + (r'(inline fun)(\s+)', bygroups(Keyword, Text), 'function'), (r'(abstract|annotation|as|break|by|catch|class|companion|const|' r'constructor|continue|crossinline|data|do|dynamic|else|enum|' r'external|false|final|finally|for|fun|get|if|import|in|infix|' @@ -1058,9 +1068,26 @@ class KotlinLexer(RegexLexer): 'property': [ (kt_id, Name.Property, '#pop') ], + 'property_dec': [ + (r'(,)(\s*)', bygroups(Punctuation, Text)), + (r'(:)(\s*)', bygroups(Punctuation, Text)), + (r'<', Punctuation, 'generic'), + (r'([)])', Punctuation, '#pop'), + (kt_id, Name.Property) + ], 'function': [ + (r'<', Punctuation, 'generic'), + (r''+kt_id+'([.])'+kt_id, bygroups(Name.Class, Punctuation, Name.Function), '#pop'), (kt_id, Name.Function, '#pop') ], + 'generic': [ + (r'(>)(\s*)', bygroups(Punctuation, Text), '#pop'), + (r':',Punctuation), + (r'(reified|out|in)\b', Keyword), + (r',',Text), + (r'\s+',Text), + (kt_id,Name) + ] } @@ -1258,7 +1285,7 @@ class GoloLexer(RegexLexer): (r'-?\d[\d_]*L', Number.Integer.Long), (r'-?\d[\d_]*', Number.Integer), - ('`?[a-zA-Z_][\w$]*', Name), + (r'`?[a-zA-Z_][\w$]*', Name), (r'@[a-zA-Z_][\w$.]*', Name.Decorator), (r'"""', 
String, combined('stringescape', 'triplestring')), @@ -1571,3 +1598,57 @@ class JasminLexer(RegexLexer): re.MULTILINE): score += 0.6 return score + + +class SarlLexer(RegexLexer): + """ + For `SARL <http://www.sarl.io>`_ source code. + + .. versionadded:: 2.4 + """ + + name = 'SARL' + aliases = ['sarl'] + filenames = ['*.sarl'] + mimetypes = ['text/x-sarl'] + + flags = re.MULTILINE | re.DOTALL + + tokens = { + 'root': [ + # method names + (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments + r'([a-zA-Z_$][\w$]*)' # method name + r'(\s*)(\()', # signature start + bygroups(using(this), Name.Function, Text, Operator)), + (r'[^\S\n]+', Text), + (r'//.*?\n', Comment.Single), + (r'/\*.*?\*/', Comment.Multiline), + (r'@[a-zA-Z_][\w.]*', Name.Decorator), + (r'(as|break|case|catch|default|do|else|extends|extension|finally|fires|for|if|implements|instanceof|new|on|requires|return|super|switch|throw|throws|try|typeof|uses|while|with)\b', + Keyword), + (r'(abstract|def|dispatch|final|native|override|private|protected|public|static|strictfp|synchronized|transient|val|var|volatile)\b', Keyword.Declaration), + (r'(boolean|byte|char|double|float|int|long|short|void)\b', + Keyword.Type), + (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)), + (r'(false|it|null|occurrence|this|true|void)\b', Keyword.Constant), + (r'(agent|annotation|artifact|behavior|capacity|class|enum|event|interface|skill|space)(\s+)', bygroups(Keyword.Declaration, Text), + 'class'), + (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'), + (r'"(\\\\|\\"|[^"])*"', String), + (r"'(\\\\|\\'|[^'])*'", String), + (r'[a-zA-Z_]\w*:', Name.Label), + (r'[a-zA-Z_$]\w*', Name), + (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator), + (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), + (r'0x[0-9a-fA-F]+', Number.Hex), + (r'[0-9]+L?', Number.Integer), + (r'\n', Text) + ], + 'class': [ + (r'[a-zA-Z_]\w*', Name.Class, '#pop') + ], + 'import': [ + (r'[\w.]+\*?', Name.Namespace, '#pop') + ], + } diff --git a/pygments/lexers/lisp.py b/pygments/lexers/lisp.py index e258c347..3bfc83a6 100644 --- a/pygments/lexers/lisp.py +++ b/pygments/lexers/lisp.py @@ -19,7 +19,7 @@ from pygments.lexers.python import PythonLexer __all__ = ['SchemeLexer', 'CommonLispLexer', 'HyLexer', 'RacketLexer', 'NewLispLexer', 'EmacsLispLexer', 'ShenLexer', 'CPSALexer', - 'XtlangLexer'] + 'XtlangLexer', 'FennelLexer'] class SchemeLexer(RegexLexer): @@ -139,7 +139,7 @@ class SchemeLexer(RegexLexer): (r"(?<=#\()" + valid_name, Name.Variable), # highlight the builtins - ("(?<=\()(%s)" % '|'.join(re.escape(entry) + ' ' for entry in builtins), + (r"(?<=\()(%s)" % '|'.join(re.escape(entry) + ' ' for entry in builtins), Name.Builtin), # the remaining functions @@ -321,7 +321,7 @@ class CommonLispLexer(RegexLexer): (r'#\d+#', Operator), # read-time comment - (r'#+nil' + terminated + '\s*\(', Comment.Preproc, 'commented-form'), + (r'#+nil' + terminated + r'\s*\(', Comment.Preproc, 'commented-form'), # read-time conditional (r'#[+-]', Operator), @@ -333,7 +333,7 @@ class CommonLispLexer(RegexLexer): (r'(t|nil)' + terminated, Name.Constant), # functions and variables - (r'\*' + symbol + '\*', Name.Variable.Global), + (r'\*' + symbol + r'\*', Name.Variable.Global), (symbol, Name.Variable), # parentheses @@ -382,7 +382,7 @@ class HyLexer(RegexLexer): # valid names for identifiers # well, names can only not consist fully of numbers # but this should be good enough for now - valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+' + valid_name = r'(?!#)[\w!$%*+<=>?/.#-:]+' def 
_multi_escape(entries): return words(entries, suffix=' ') @@ -1249,7 +1249,7 @@ class RacketLexer(RegexLexer): _opening_parenthesis = r'[([{]' _closing_parenthesis = r'[)\]}]' _delimiters = r'()[\]{}",\'`;\s' - _symbol = r'(?u)(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters + _symbol = r'(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters _exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?' _exponent = r'(?:[defls][-+]?\d+)' _inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)' @@ -1301,16 +1301,16 @@ class RacketLexer(RegexLexer): (_inexact_simple, _delimiters), Number.Float, '#pop'), # #b - (r'(?i)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'), + (r'(?iu)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'), # #o - (r'(?i)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'), + (r'(?iu)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'), # #x - (r'(?i)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'), + (r'(?iu)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'), # #i is always inexact, i.e. float - (r'(?i)(#d)?#i%s' % _symbol, Number.Float, '#pop'), + (r'(?iu)(#d)?#i%s' % _symbol, Number.Float, '#pop'), # Strings and characters (r'#?"', String.Double, ('#pop', 'string')), @@ -1323,7 +1323,7 @@ class RacketLexer(RegexLexer): (r'#(true|false|[tTfF])', Name.Constant, '#pop'), # Keyword argument names (e.g. #:keyword) - (r'#:%s' % _symbol, Keyword.Declaration, '#pop'), + (r'(?u)#:%s' % _symbol, Keyword.Declaration, '#pop'), # Reader extensions (r'(#lang |#!)(\S+)', @@ -1400,7 +1400,7 @@ class RacketLexer(RegexLexer): class NewLispLexer(RegexLexer): """ - For `newLISP. <www.newlisp.org>`_ source code (version 10.3.0). + For `newLISP. <http://www.newlisp.org/>`_ source code (version 10.3.0). .. versionadded:: 1.5 """ @@ -2154,7 +2154,7 @@ class EmacsLispLexer(RegexLexer): (r'(t|nil)' + terminated, Name.Constant), # functions and variables - (r'\*' + symbol + '\*', Name.Variable.Global), + (r'\*' + symbol + r'\*', Name.Variable.Global), (symbol, Name.Variable), # parentheses @@ -2327,13 +2327,13 @@ class ShenLexer(RegexLexer): token = Name.Function if token == Literal else token yield index, token, value - raise StopIteration + return def _process_signature(self, tokens): for index, token, value in tokens: if token == Literal and value == '}': yield index, Punctuation, value - raise StopIteration + return elif token in (Literal, Name.Function): token = Name.Variable if value.istitle() else Keyword.Type yield index, token, value @@ -2619,3 +2619,75 @@ class XtlangLexer(RegexLexer): include('scheme') ], } + + +class FennelLexer(RegexLexer): + """A lexer for the `Fennel programming language <https://fennel-lang.org>`_. + + Fennel compiles to Lua, so all the Lua builtins are recognized as well + as the special forms that are particular to the Fennel compiler. + + .. versionadded:: 2.3 + """ + name = 'Fennel' + aliases = ['fennel', 'fnl'] + filenames = ['*.fnl'] + + # these two lists are taken from fennel-mode.el: + # https://gitlab.com/technomancy/fennel-mode + # this list is current as of Fennel version 0.1.0. + special_forms = ( + u'require-macros', u'eval-compiler', + u'do', u'values', u'if', u'when', u'each', u'for', u'fn', u'lambda', + u'λ', u'set', u'global', u'var', u'local', u'let', u'tset', u'doto', + u'set-forcibly!', u'defn', u'partial', u'while', u'or', u'and', u'true', + u'false', u'nil', u'.', u'+', u'..', u'^', u'-', u'*', u'%', u'/', u'>', + u'<', u'>=', u'<=', u'=', u'~=', u'#', u'...', u':', u'->', u'->>', + ) + + # Might be nicer to use the list from _lua_builtins.py but it's unclear how? 
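A note on the Shen lexer change a few hunks above, where the token-processing generators now end with a plain return instead of raise StopIteration: under PEP 479 (opt-in via the generator_stop future import, and the default behaviour since Python 3.7) a StopIteration that escapes a generator body is turned into a RuntimeError, so the old idiom breaks. A minimal sketch of the difference, independent of Pygments:

    from __future__ import generator_stop

    def old_style():
        yield 1
        raise StopIteration   # surfaces as RuntimeError under PEP 479

    def new_style():
        yield 1
        return                # the portable way to finish a generator early

    print(list(new_style()))      # [1]
    try:
        list(old_style())
    except RuntimeError as exc:
        print("PEP 479:", exc)
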
+ builtins = ( + u'_G', u'_VERSION', u'arg', u'assert', u'bit32', u'collectgarbage', + u'coroutine', u'debug', u'dofile', u'error', u'getfenv', + u'getmetatable', u'io', u'ipairs', u'load', u'loadfile', u'loadstring', + u'math', u'next', u'os', u'package', u'pairs', u'pcall', u'print', + u'rawequal', u'rawget', u'rawlen', u'rawset', u'require', u'select', + u'setfenv', u'setmetatable', u'string', u'table', u'tonumber', + u'tostring', u'type', u'unpack', u'xpcall' + ) + + # based on the scheme definition, but disallowing leading digits and commas + valid_name = r'[a-zA-Z_!$%&*+/:<=>?@^~|-][\w!$%&*+/:<=>?@^~|\.-]*' + + tokens = { + 'root': [ + # the only comment form is a semicolon; goes to the end of the line + (r';.*$', Comment.Single), + + (r'[,\s]+', Text), + (r'-?\d+\.\d+', Number.Float), + (r'-?\d+', Number.Integer), + + (r'"(\\\\|\\"|[^"])*"', String), + (r"'(\\\\|\\'|[^'])*'", String), + + # these are technically strings, but it's worth visually + # distinguishing them because their intent is different + # from regular strings. + (r':' + valid_name, String.Symbol), + + # special forms are keywords + (words(special_forms, suffix=' '), Keyword), + # lua standard library are builtins + (words(builtins, suffix=' '), Name.Builtin), + # special-case the vararg symbol + (r'\.\.\.', Name.Variable), + # regular identifiers + (valid_name, Name.Variable), + + # all your normal paired delimiters for your programming enjoyment + (r'(\(|\))', Punctuation), + (r'(\[|\])', Punctuation), + (r'(\{|\})', Punctuation), + ] + } diff --git a/pygments/lexers/markup.py b/pygments/lexers/markup.py index 92dc9e7a..6eb55fc4 100644 --- a/pygments/lexers/markup.py +++ b/pygments/lexers/markup.py @@ -536,10 +536,9 @@ class MarkdownLexer(RegexLexer): # no lexer for this language. handle it like it was a code block if lexer is None: yield match.start(4), String, code - return - - for item in do_insertions([], lexer.get_tokens_unprocessed(code)): - yield item + else: + for item in do_insertions([], lexer.get_tokens_unprocessed(code)): + yield item yield match.start(5), String , match.group(5) @@ -583,6 +582,11 @@ class MarkdownLexer(RegexLexer): (r'[@#][\w/:]+', Name.Entity), # (image?) links eg:  (r'(!?\[)([^]]+)(\])(\()([^)]+)(\))', bygroups(Text, Name.Tag, Text, Text, Name.Attribute, Text)), + # reference-style links, e.g.: + # [an example][id] + # [id]: http://example.com/ + (r'(\[)([^]]+)(\])(\[)([^]]*)(\])', bygroups(Text, Name.Tag, Text, Text, Name.Label, Text)), + (r'^(\s*\[)([^]]*)(\]:\s*)(.+)', bygroups(Text, Name.Label, Text, Name.Attribute)), # general text, must come last! 
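A quick check of the reference-style link rules just added to MarkdownLexer (a sketch that assumes a Pygments checkout containing this patch): per the two new bygroups rules, the link text should come out as Name.Tag, the reference id as Name.Label, and the URL on the definition line as Name.Attribute.

    from pygments.lexers.markup import MarkdownLexer
    from pygments.token import Name

    sample = "[an example][id]\n\n[id]: http://example.com/\n"
    for _, token, value in MarkdownLexer().get_tokens_unprocessed(sample):
        if token in (Name.Tag, Name.Label, Name.Attribute):
            print(token, repr(value))
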
(r'[^\\\s]+', Text), diff --git a/pygments/lexers/matlab.py b/pygments/lexers/matlab.py index 56a0f6d6..1c77b60c 100644 --- a/pygments/lexers/matlab.py +++ b/pygments/lexers/matlab.py @@ -134,9 +134,9 @@ class MatlabLexer(RegexLexer): } def analyse_text(text): - if re.match('^\s*%', text, re.M): # comment + if re.match(r'^\s*%', text, re.M): # comment return 0.2 - elif re.match('^!\w+', text, re.M): # system cmd + elif re.match(r'^!\w+', text, re.M): # system cmd return 0.2 diff --git a/pygments/lexers/ml.py b/pygments/lexers/ml.py index f80d5bfa..0bff9816 100644 --- a/pygments/lexers/ml.py +++ b/pygments/lexers/ml.py @@ -43,7 +43,7 @@ class SMLLexer(RegexLexer): symbolicid_reserved = set(( # Core - ':', '\|', '=', '=>', '->', '#', + ':', r'\|', '=', '=>', '->', '#', # Modules ':>', )) diff --git a/pygments/lexers/modeling.py b/pygments/lexers/modeling.py index b354f1cf..481cce38 100644 --- a/pygments/lexers/modeling.py +++ b/pygments/lexers/modeling.py @@ -13,7 +13,7 @@ import re from pygments.lexer import RegexLexer, include, bygroups, using, default from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation + Number, Punctuation, Whitespace from pygments.lexers.html import HtmlLexer from pygments.lexers import _stan_builtins @@ -284,8 +284,8 @@ class StanLexer(RegexLexer): """Pygments Lexer for Stan models. The Stan modeling language is specified in the *Stan Modeling Language - User's Guide and Reference Manual, v2.8.0*, - `pdf <https://github.com/stan-dev/stan/releases/download/v2.8.8/stan-reference-2.8.0.pdf>`__. + User's Guide and Reference Manual, v2.17.0*, + `pdf <https://github.com/stan-dev/stan/releases/download/v2.17.0/stan-reference-2.17.0.pdf>`__. .. versionadded:: 1.6 """ @@ -316,19 +316,24 @@ class StanLexer(RegexLexer): 'parameters', r'transformed\s+parameters', 'model', r'generated\s+quantities')), bygroups(Keyword.Namespace, Text, Punctuation)), + # target keyword + (r'target\s*\+=', Keyword), # Reserved Words (r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword), # Truncation (r'T(?=\s*\[)', Keyword), # Data types (r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type), + # < should be punctuation, but elsewhere I can't tell if it is in + # a range constraint + (r'(<)(\s*)(upper|lower)(\s*)(=)', bygroups(Operator, Whitespace, Keyword, Whitespace, Punctuation)), + (r'(,)(\s*)(upper)(\s*)(=)', bygroups(Punctuation, Whitespace, Keyword, Whitespace, Punctuation)), # Punctuation - (r"[;:,\[\]()]", Punctuation), + (r"[;,\[\]()]", Punctuation), # Builtin - (r'(%s)(?=\s*\()' - % r'|'.join(_stan_builtins.FUNCTIONS - + _stan_builtins.DISTRIBUTIONS), - Name.Builtin), + (r'(%s)(?=\s*\()' % '|'.join(_stan_builtins.FUNCTIONS), Name.Builtin), + (r'(~)(\s*)(%s)(?=\s*\()' % '|'.join(_stan_builtins.DISTRIBUTIONS), + bygroups(Operator, Whitespace, Name.Builtin)), # Special names ending in __, like lp__ (r'[A-Za-z]\w*__\b', Name.Builtin.Pseudo), (r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved), @@ -337,17 +342,18 @@ class StanLexer(RegexLexer): # Regular variable names (r'[A-Za-z]\w*\b', Name), # Real Literals - (r'-?[0-9]+(\.[0-9]+)?[eE]-?[0-9]+', Number.Float), - (r'-?[0-9]*\.[0-9]*', Number.Float), + (r'[0-9]+(\.[0-9]*)?([eE][+-]?[0-9]+)?', Number.Float), + (r'\.[0-9]+([eE][+-]?[0-9]+)?', Number.Float), # Integer Literals - (r'-?[0-9]+', Number.Integer), + (r'[0-9]+', Number.Integer), # Assignment operators - # SLexer makes these tokens Operators. 
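The Stan hunk in progress here tightens how sampling statements are highlighted: target += becomes a keyword, and a distribution name is only treated as a builtin when it follows ~ (the bygroups(Operator, Whitespace, Name.Builtin) rule above). A small, hedged check against a patched checkout; it assumes normal and normal_lpdf appear in the regenerated _stan_builtins lists:

    from pygments.lexers.modeling import StanLexer
    from pygments.token import Keyword, Name

    src = "model { y ~ normal(mu, sigma); target += normal_lpdf(y | mu, sigma); }"
    for _, token, value in StanLexer().get_tokens_unprocessed(src):
        if token in (Keyword, Name.Builtin):
            print(token, repr(value))   # expect 'target +=', 'normal', 'normal_lpdf'
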
- (r'<-|~', Operator), + (r'<-|(?:\+|-|\.?/|\.?\*|=)?=|~', Operator), # Infix, prefix and postfix operators (and = ) - (r"\+|-|\.?\*|\.?/|\\|'|\^|==?|!=?|<=?|>=?|\|\||&&", Operator), + (r"\+|-|\.?\*|\.?/|\\|'|\^|!=?|<=?|>=?|\|\||&&|%|\?|:", Operator), # Block delimiters (r'[{}]', Punctuation), + # Distribution | + (r'\|', Punctuation) ] } diff --git a/pygments/lexers/objective.py b/pygments/lexers/objective.py index 7807255e..179928e9 100644 --- a/pygments/lexers/objective.py +++ b/pygments/lexers/objective.py @@ -87,26 +87,26 @@ def objective(baselexer): ], 'oc_classname': [ # interface definition that inherits - ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)', + (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)', bygroups(Name.Class, Text, Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')), - ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?', + (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?', bygroups(Name.Class, Text, Name.Class), '#pop'), # interface definition for a category - ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)', + (r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)', bygroups(Name.Class, Text, Name.Label, Text, Punctuation), ('#pop', 'oc_ivars')), - ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))', + (r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))', bygroups(Name.Class, Text, Name.Label), '#pop'), # simple interface / implementation - ('([a-zA-Z$_][\w$]*)(\s*)(\{)', + (r'([a-zA-Z$_][\w$]*)(\s*)(\{)', bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')), - ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop') + (r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop') ], 'oc_forward_classname': [ - ('([a-zA-Z$_][\w$]*)(\s*,\s*)', + (r'([a-zA-Z$_][\w$]*)(\s*,\s*)', bygroups(Name.Class, Text), 'oc_forward_classname'), - ('([a-zA-Z$_][\w$]*)(\s*;?)', + (r'([a-zA-Z$_][\w$]*)(\s*;?)', bygroups(Name.Class, Text), '#pop') ], 'oc_ivars': [ @@ -244,17 +244,17 @@ class LogosLexer(ObjectiveCppLexer): inherit, ], 'logos_init_directive': [ - ('\s+', Text), + (r'\s+', Text), (',', Punctuation, ('logos_init_directive', '#pop')), - ('([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)', + (r'([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)', bygroups(Name.Class, Text, Punctuation, Text, Text)), - ('([a-zA-Z$_][\w$]*)', Name.Class), - ('\)', Punctuation, '#pop'), + (r'([a-zA-Z$_][\w$]*)', Name.Class), + (r'\)', Punctuation, '#pop'), ], 'logos_classname': [ - ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?', + (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?', bygroups(Name.Class, Text, Name.Class), '#pop'), - ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop') + (r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop') ], 'root': [ (r'(%subclass)(\s+)', bygroups(Keyword, Text), diff --git a/pygments/lexers/parsers.py b/pygments/lexers/parsers.py index 1f3c9b4d..43eb6c1f 100644 --- a/pygments/lexers/parsers.py +++ b/pygments/lexers/parsers.py @@ -364,13 +364,13 @@ class AntlrLexer(RegexLexer): # tokensSpec (r'tokens\b', Keyword, 'tokens'), # attrScope - (r'(scope)(\s*)(' + _id + ')(\s*)(\{)', + (r'(scope)(\s*)(' + _id + r')(\s*)(\{)', bygroups(Keyword, Whitespace, Name.Variable, Whitespace, Punctuation), 'action'), # exception (r'(catch|finally)\b', Keyword, 'exception'), # action - (r'(@' + _id + ')(\s*)(::)?(\s*)(' + _id + ')(\s*)(\{)', + (r'(@' + _id + r')(\s*)(::)?(\s*)(' + _id + r')(\s*)(\{)', bygroups(Name.Label, Whitespace, Punctuation, Whitespace, Name.Label, Whitespace, Punctuation), 'action'), # rule @@ -405,10 +405,10 @@ class AntlrLexer(RegexLexer): # L173 ANTLRv3.g from ANTLR book 
(r'(scope)(\s+)(\{)', bygroups(Keyword, Whitespace, Punctuation), 'action'), - (r'(scope)(\s+)(' + _id + ')(\s*)(;)', + (r'(scope)(\s+)(' + _id + r')(\s*)(;)', bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)), # ruleAction - (r'(@' + _id + ')(\s*)(\{)', + (r'(@' + _id + r')(\s*)(\{)', bygroups(Name.Label, Whitespace, Punctuation), 'action'), # finished prelims, go to rule alts! (r':', Punctuation, '#pop') @@ -442,7 +442,7 @@ class AntlrLexer(RegexLexer): include('comments'), (r'\{', Punctuation), (r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL - + ')?(\s*)(;)', + + r')?(\s*)(;)', bygroups(Name.Label, Whitespace, Punctuation, Whitespace, String, Whitespace, Punctuation)), (r'\}', Punctuation, '#pop'), @@ -452,7 +452,7 @@ class AntlrLexer(RegexLexer): include('comments'), (r'\{', Punctuation), (r'(' + _id + r')(\s*)(=)(\s*)(' + - '|'.join((_id, _STRING_LITERAL, _INT, '\*')) + ')(\s*)(;)', + '|'.join((_id, _STRING_LITERAL, _INT, r'\*')) + r')(\s*)(;)', bygroups(Name.Variable, Whitespace, Punctuation, Whitespace, Text, Whitespace, Punctuation)), (r'\}', Punctuation, '#pop'), diff --git a/pygments/lexers/pascal.py b/pygments/lexers/pascal.py index 9aa1ac8f..467a0b2c 100644 --- a/pygments/lexers/pascal.py +++ b/pygments/lexers/pascal.py @@ -593,8 +593,8 @@ class AdaLexer(RegexLexer): ], 'end': [ ('(if|case|record|loop|select)', Keyword.Reserved), - ('"[^"]+"|[\w.]+', Name.Function), - ('\s+', Text), + (r'"[^"]+"|[\w.]+', Name.Function), + (r'\s+', Text), (';', Punctuation, '#pop'), ], 'type_def': [ @@ -628,11 +628,11 @@ class AdaLexer(RegexLexer): ], 'package': [ ('body', Keyword.Declaration), - ('is\s+new|renames', Keyword.Reserved), + (r'is\s+new|renames', Keyword.Reserved), ('is', Keyword.Reserved, '#pop'), (';', Punctuation, '#pop'), - ('\(', Punctuation, 'package_instantiation'), - ('([\w.]+)', Name.Class), + (r'\(', Punctuation, 'package_instantiation'), + (r'([\w.]+)', Name.Class), include('root'), ], 'package_instantiation': [ diff --git a/pygments/lexers/pawn.py b/pygments/lexers/pawn.py index f462a883..0ef28175 100644 --- a/pygments/lexers/pawn.py +++ b/pygments/lexers/pawn.py @@ -36,7 +36,7 @@ class SourcePawnLexer(RegexLexer): tokens = { 'root': [ # preprocessor directives: without whitespace - ('^#if\s+0', Comment.Preproc, 'if0'), + (r'^#if\s+0', Comment.Preproc, 'if0'), ('^#', Comment.Preproc, 'macro'), # or with whitespace ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'), @@ -62,7 +62,7 @@ class SourcePawnLexer(RegexLexer): r'public|return|sizeof|static|decl|struct|switch)\b', Keyword), (r'(bool|Float)\b', Keyword.Type), (r'(true|false)\b', Keyword.Constant), - ('[a-zA-Z_]\w*', Name), + (r'[a-zA-Z_]\w*', Name), ], 'string': [ (r'"', String, '#pop'), @@ -148,7 +148,7 @@ class PawnLexer(RegexLexer): tokens = { 'root': [ # preprocessor directives: without whitespace - ('^#if\s+0', Comment.Preproc, 'if0'), + (r'^#if\s+0', Comment.Preproc, 'if0'), ('^#', Comment.Preproc, 'macro'), # or with whitespace ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'), @@ -174,7 +174,7 @@ class PawnLexer(RegexLexer): r'public|return|sizeof|tagof|state|goto)\b', Keyword), (r'(bool|Float)\b', Keyword.Type), (r'(true|false)\b', Keyword.Constant), - ('[a-zA-Z_]\w*', Name), + (r'[a-zA-Z_]\w*', Name), ], 'string': [ (r'"', String, '#pop'), diff --git a/pygments/lexers/perl.py b/pygments/lexers/perl.py index db5a9361..27e3cc79 100644 --- a/pygments/lexers/perl.py +++ b/pygments/lexers/perl.py @@ -208,7 +208,7 @@ class PerlLexer(RegexLexer): def analyse_text(text): if 
shebang_matches(text, r'perl'): return True - if re.search('(?:my|our)\s+[$@%(]', text): + if re.search(r'(?:my|our)\s+[$@%(]', text): return 0.9 @@ -226,7 +226,7 @@ class Perl6Lexer(ExtendedRegexLexer): mimetypes = ['text/x-perl6', 'application/x-perl6'] flags = re.MULTILINE | re.DOTALL | re.UNICODE - PERL6_IDENTIFIER_RANGE = "['\w:-]" + PERL6_IDENTIFIER_RANGE = r"['\w:-]" PERL6_KEYWORDS = ( 'BEGIN', 'CATCH', 'CHECK', 'CONTROL', 'END', 'ENTER', 'FIRST', 'INIT', @@ -495,7 +495,7 @@ class Perl6Lexer(ExtendedRegexLexer): (r'^=.*?\n\s*?\n', Comment.Multiline), (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)', bygroups(Keyword, Name), 'token-sym-brackets'), - (r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + ')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?', + (r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + r')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?', bygroups(Keyword, Name), 'pre-token'), # deal with a special case in the Perl 6 grammar (role q { ... }) (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Text, Name, Text)), @@ -591,21 +591,21 @@ class Perl6Lexer(ExtendedRegexLexer): rating = False # check for my/our/has declarations - if re.search("(?:my|our|has)\s+(?:" + Perl6Lexer.PERL6_IDENTIFIER_RANGE + - "+\s+)?[$@%&(]", text): + if re.search(r"(?:my|our|has)\s+(?:" + Perl6Lexer.PERL6_IDENTIFIER_RANGE + + r"+\s+)?[$@%&(]", text): rating = 0.8 saw_perl_decl = True for line in lines: line = re.sub('#.*', '', line) - if re.match('^\s*$', line): + if re.match(r'^\s*$', line): continue # match v6; use v6; use v6.0; use v6.0.0; - if re.match('^\s*(?:use\s+)?v6(?:\.\d(?:\.\d)?)?;', line): + if re.match(r'^\s*(?:use\s+)?v6(?:\.\d(?:\.\d)?)?;', line): return True # match class, module, role, enum, grammar declarations - class_decl = re.match('^\s*(?:(?P<scope>my|our)\s+)?(?:module|class|role|enum|grammar)', line) + class_decl = re.match(r'^\s*(?:(?P<scope>my|our)\s+)?(?:module|class|role|enum|grammar)', line) if class_decl: if saw_perl_decl or class_decl.group('scope') is not None: return True diff --git a/pygments/lexers/php.py b/pygments/lexers/php.py index f618b5fd..440d9d81 100644 --- a/pygments/lexers/php.py +++ b/pygments/lexers/php.py @@ -15,7 +15,8 @@ from pygments.lexer import RegexLexer, include, bygroups, default, using, \ this, words from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Other -from pygments.util import get_bool_opt, get_list_opt, iteritems +from pygments.util import get_bool_opt, get_list_opt, iteritems, \ + shebang_matches __all__ = ['ZephirLexer', 'PhpLexer'] @@ -173,7 +174,7 @@ class PhpLexer(RegexLexer): r'finally)\b', Keyword), (r'(true|false|null)\b', Keyword.Constant), include('magicconstants'), - (r'\$\{\$+' + _ident_inner + '\}', Name.Variable), + (r'\$\{\$+' + _ident_inner + r'\}', Name.Variable), (r'\$+' + _ident_inner, Name.Variable), (_ident_inner, Name.Other), (r'(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?', Number.Float), @@ -214,7 +215,7 @@ class PhpLexer(RegexLexer): (r'"', String.Double, '#pop'), (r'[^{$"\\]+', String.Double), (r'\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})', String.Escape), - (r'\$' + _ident_inner + '(\[\S+?\]|->' + _ident_inner + ')?', + (r'\$' + _ident_inner + r'(\[\S+?\]|->' + _ident_inner + ')?', String.Interpol), (r'(\{\$\{)(.*?)(\}\})', bygroups(String.Interpol, using(this, _startinline=True), @@ -261,6 +262,8 @@ class PhpLexer(RegexLexer): yield index, token, value def analyse_text(text): + if shebang_matches(text, r'php'): + return True rv = 0.0 if re.search(r'<\?(?!xml)', text): rv += 0.3 
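The PhpLexer hunk above wires shebang_matches into analyse_text so that standalone PHP scripts (no surrounding HTML) are picked up by lexer guessing. A minimal sketch of how that detection can be exercised follows; it is illustrative only and not part of this commit, and it assumes a Pygments installation that already contains the patched lexer.

    # Illustrative sketch, not part of this commit: guess_lexer calls each
    # lexer's analyse_text, so a php shebang should now win the guess outright.
    from pygments.lexers import guess_lexer

    sample = "#!/usr/bin/env php\n<?php\necho 'hello';\n"
    lexer = guess_lexer(sample)
    print(lexer.name)  # expected to print 'PHP' once the shebang check is in place
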
diff --git a/pygments/lexers/pony.py b/pygments/lexers/pony.py new file mode 100644 index 00000000..13239047 --- /dev/null +++ b/pygments/lexers/pony.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.pony + ~~~~~~~~~~~~~~~~~~~~ + + Lexers for Pony and related languages. + + :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, bygroups, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['PonyLexer'] + + +class PonyLexer(RegexLexer): + """ + For Pony source code. + + .. versionadded:: 2.4 + """ + + name = 'Pony' + aliases = ['pony'] + filenames = ['*.pony'] + + _caps = r'(iso|trn|ref|val|box|tag)' + + tokens = { + 'root': [ + (r'\n', Text), + (r'[^\S\n]+', Text), + (r'//.*\n', Comment.Single), + (r'/\*', Comment.Multiline, 'nested_comment'), + (r'"""(?:.|\n)*?"""', String.Doc), + (r'"', String, 'string'), + (r'\'.*\'', String.Char), + (r'=>|[]{}:().~;,|&!^?[]', Punctuation), + (words(( + 'addressof', 'and', 'as', 'consume', 'digestof', 'is', 'isnt', + 'not', 'or'), + suffix=r'\b'), + Operator.Word), + (r'!=|==|<<|>>|[-+/*%=<>]', Operator), + (words(( + 'box', 'break', 'compile_error', 'compile_intrinsic', + 'continue', 'do', 'else', 'elseif', 'embed', 'end', 'error', + 'for', 'if', 'ifdef', 'in', 'iso', 'lambda', 'let', 'match', + 'object', 'recover', 'ref', 'repeat', 'return', 'tag', 'then', + 'this', 'trn', 'try', 'until', 'use', 'var', 'val', 'where', + 'while', 'with', '#any', '#read', '#send', '#share'), + suffix=r'\b'), + Keyword), + (r'(actor|class|struct|primitive|interface|trait|type)((?:\s)+)', + bygroups(Keyword, Text), 'typename'), + (r'(new|fun|be)((?:\s)+)', bygroups(Keyword, Text), 'methodname'), + (words(( + 'I8', 'U8', 'I16', 'U16', 'I32', 'U32', 'I64', 'U64', 'I128', + 'U128', 'ILong', 'ULong', 'ISize', 'USize', 'F32', 'F64', + 'Bool', 'Pointer', 'None', 'Any', 'Array', 'String', + 'Iterator'), + suffix=r'\b'), + Name.Builtin.Type), + (r'_?[A-Z]\w*', Name.Type), + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float), + (r'0x[0-9a-fA-F]+', Number.Hex), + (r'\d+', Number.Integer), + (r'(true|false)\b', Name.Builtin), + (r'_\d*', Name), + (r'_?[a-z][\w\'_]*', Name) + ], + 'typename': [ + (_caps + r'?((?:\s)*)(_?[A-Z]\w*)', + bygroups(Keyword, Text, Name.Class), '#pop') + ], + 'methodname': [ + (_caps + r'?((?:\s)*)(_?[a-z]\w*)', + bygroups(Keyword, Text, Name.Function), '#pop') + ], + 'nested_comment': [ + (r'[^*/]+', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[*/]', Comment.Multiline) + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\"', String), + (r'[^\\"]+', String) + ] + } diff --git a/pygments/lexers/prolog.py b/pygments/lexers/prolog.py index 90f9529c..58e762b0 100644 --- a/pygments/lexers/prolog.py +++ b/pygments/lexers/prolog.py @@ -57,15 +57,15 @@ class PrologLexer(RegexLexer): (r'_', Keyword), # The don't-care variable (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)), (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' - u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)' + u'[\\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)' u'(\\s*)(:-|-->)', bygroups(Name.Function, Text, Operator)), # function defn (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' - u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)' + u'[\\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)' u'(\\s*)(\\()', bygroups(Name.Function, Text, Punctuation)), 
(u'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' - u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*', + u'[\\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*', String.Atom), # atom, characters # This one includes ! (u'[#&*+\\-./:<=>?@\\\\^~\u00a1-\u00bf\u2010-\u303f]+', @@ -300,7 +300,7 @@ class LogtalkLexer(RegexLexer): return 1.0 elif ':- category(' in text: return 1.0 - elif re.search('^:-\s[a-z]', text, re.M): + elif re.search(r'^:-\s[a-z]', text, re.M): return 0.9 else: return 0.0 diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py index 390eafe8..c87282ca 100644 --- a/pygments/lexers/python.py +++ b/pygments/lexers/python.py @@ -124,10 +124,10 @@ class PythonLexer(RegexLexer): 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError', 'ImportWarning', 'IndentationError', 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError', - 'MemoryError', 'NameError', 'NotImplemented', 'NotImplementedError', + 'MemoryError', 'ModuleNotFoundError', 'NameError', 'NotImplemented', 'NotImplementedError', 'OSError', 'OverflowError', 'OverflowWarning', 'PendingDeprecationWarning', - 'ReferenceError', 'RuntimeError', 'RuntimeWarning', 'StandardError', - 'StopIteration', 'SyntaxError', 'SyntaxWarning', 'SystemError', + 'RecursionError', 'ReferenceError', 'RuntimeError', 'RuntimeWarning', 'StandardError', + 'StopIteration', 'StopAsyncIteration', 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError', 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError', 'UnicodeWarning', 'UserWarning', @@ -180,15 +180,15 @@ class PythonLexer(RegexLexer): ], 'name': [ (r'@[\w.]+', Name.Decorator), - ('[a-zA-Z_]\w*', Name), + (r'[a-zA-Z_]\w*', Name), ], 'funcname': [ include('magicfuncs'), - ('[a-zA-Z_]\w*', Name.Function, '#pop'), + (r'[a-zA-Z_]\w*', Name.Function, '#pop'), default('#pop'), ], 'classname': [ - ('[a-zA-Z_]\w*', Name.Class, '#pop') + (r'[a-zA-Z_]\w*', Name.Class, '#pop') ], 'import': [ (r'(?:[ \t]|\\\n)+', Text), @@ -262,13 +262,13 @@ class Python3Lexer(RegexLexer): return [ # the old style '%s' % (...) string formatting (still valid in Py3) (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?' - '[hlL]?[E-GXc-giorsux%]', String.Interpol), + '[hlL]?[E-GXc-giorsaux%]', String.Interpol), # the new style '{}'.format(...) string formatting (r'\{' - '((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name - '(\![sra])?' # conversion - '(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?' - '\}', String.Interpol), + r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name + r'(\![sra])?' # conversion + r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?' 
+ r'\}', String.Interpol), # backslashes, quotes and formatting signs must be parsed one at a time (r'[^\\\'"%{\n]+', ttype), @@ -361,12 +361,12 @@ class Python3Lexer(RegexLexer): Name.Variable.Magic), ] tokens['numbers'] = [ - (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), - (r'\d+[eE][+-]?[0-9]+j?', Number.Float), - (r'0[oO][0-7]+', Number.Oct), - (r'0[bB][01]+', Number.Bin), - (r'0[xX][a-fA-F0-9]+', Number.Hex), - (r'\d+', Number.Integer) + (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)([eE][+-]?\d(?:_?\d)*)?', Number.Float), + (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float), + (r'0[oO](?:_?[0-7])+', Number.Oct), + (r'0[bB](?:_?[01])+', Number.Bin), + (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex), + (r'\d(?:_?\d)*', Number.Integer) ] tokens['backtick'] = [] tokens['name'] = [ @@ -396,6 +396,7 @@ class Python3Lexer(RegexLexer): tokens['strings-single'] = innerstring_rules(String.Single) tokens['strings-double'] = innerstring_rules(String.Double) + def analyse_text(text): return shebang_matches(text, r'pythonw?3(\.\d)?') @@ -671,10 +672,10 @@ class CythonLexer(RegexLexer): ], 'name': [ (r'@\w+', Name.Decorator), - ('[a-zA-Z_]\w*', Name), + (r'[a-zA-Z_]\w*', Name), ], 'funcname': [ - ('[a-zA-Z_]\w*', Name.Function, '#pop') + (r'[a-zA-Z_]\w*', Name.Function, '#pop') ], 'cdef': [ (r'(public|readonly|extern|api|inline)\b', Keyword.Reserved), @@ -691,7 +692,7 @@ class CythonLexer(RegexLexer): (r'.', Text), ], 'classname': [ - ('[a-zA-Z_]\w*', Name.Class, '#pop') + (r'[a-zA-Z_]\w*', Name.Class, '#pop') ], 'import': [ (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)), diff --git a/pygments/lexers/qvt.py b/pygments/lexers/qvt.py index f496d600..9b2559b1 100644 --- a/pygments/lexers/qvt.py +++ b/pygments/lexers/qvt.py @@ -18,7 +18,7 @@ __all__ = ['QVToLexer'] class QVToLexer(RegexLexer): - """ + u""" For the `QVT Operational Mapping language <http://www.omg.org/spec/QVT/1.1/>`_. 
Reference for implementing this: «Meta Object Facility (MOF) 2.0 @@ -126,7 +126,7 @@ class QVToLexer(RegexLexer): (r'[^\\\'"\n]+', String), # quotes, percents and backslashes must be parsed one at a time (r'[\'"\\]', String), - ], + ], 'stringescape': [ (r'\\([\\btnfr"\']|u[0-3][0-7]{2}|u[0-7]{1,2})', String.Escape) ], @@ -134,15 +134,15 @@ class QVToLexer(RegexLexer): (r'"', String, '#pop'), (r'\\\\|\\"', String.Escape), include('strings') - ], + ], 'sqs': [ # single-quoted string (r"'", String, '#pop'), (r"\\\\|\\'", String.Escape), include('strings') - ], + ], 'name': [ - ('[a-zA-Z_]\w*', Name), - ], + (r'[a-zA-Z_]\w*', Name), + ], # numbers: excerpt taken from the python lexer 'numbers': [ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), diff --git a/pygments/lexers/r.py b/pygments/lexers/r.py index dce61969..66d6402c 100644 --- a/pygments/lexers/r.py +++ b/pygments/lexers/r.py @@ -11,7 +11,7 @@ import re -from pygments.lexer import Lexer, RegexLexer, include, words, do_insertions +from pygments.lexer import Lexer, RegexLexer, include, do_insertions, bygroups from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Generic @@ -80,286 +80,25 @@ class SLexer(RegexLexer): mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile'] - builtins_base = ( - 'Arg', 'Conj', 'Cstack_info', 'Encoding', 'FALSE', - 'Filter', 'Find', 'I', 'ISOdate', 'ISOdatetime', 'Im', 'Inf', - 'La.svd', 'Map', 'Math.Date', 'Math.POSIXt', 'Math.data.frame', - 'Math.difftime', 'Math.factor', 'Mod', 'NA_character_', - 'NA_complex_', 'NA_real_', 'NCOL', 'NROW', 'NULLNA_integer_', 'NaN', - 'Negate', 'NextMethod', 'Ops.Date', 'Ops.POSIXt', 'Ops.data.frame', - 'Ops.difftime', 'Ops.factor', 'Ops.numeric_version', 'Ops.ordered', - 'Position', 'R.Version', 'R.home', 'R.version', 'R.version.string', - 'RNGkind', 'RNGversion', 'R_system_version', 'Re', 'Recall', - 'Reduce', 'Summary.Date', 'Summary.POSIXct', 'Summary.POSIXlt', - 'Summary.data.frame', 'Summary.difftime', 'Summary.factor', - 'Summary.numeric_version', 'Summary.ordered', 'Sys.Date', - 'Sys.chmod', 'Sys.getenv', 'Sys.getlocale', 'Sys.getpid', - 'Sys.glob', 'Sys.info', 'Sys.localeconv', 'Sys.readlink', - 'Sys.setFileTime', 'Sys.setenv', 'Sys.setlocale', 'Sys.sleep', - 'Sys.time', 'Sys.timezone', 'Sys.umask', 'Sys.unsetenv', - 'Sys.which', 'TRUE', 'UseMethod', 'Vectorize', 'abbreviate', 'abs', - 'acos', 'acosh', 'addNA', 'addTaskCallback', 'agrep', 'alist', - 'all', 'all.equal', 'all.equal.POSIXct', 'all.equal.character', - 'all.equal.default', 'all.equal.factor', 'all.equal.formula', - 'all.equal.language', 'all.equal.list', 'all.equal.numeric', - 'all.equal.raw', 'all.names', 'all.vars', 'any', 'anyDuplicated', - 'anyDuplicated.array', 'anyDuplicated.data.frame', - 'anyDuplicated.default', 'anyDuplicated.matrix', 'aperm', - 'aperm.default', 'aperm.table', 'append', 'apply', 'args', - 'arrayInd', 'as.Date', 'as.Date.POSIXct', 'as.Date.POSIXlt', - 'as.Date.character', 'as.Date.date', 'as.Date.dates', - 'as.Date.default', 'as.Date.factor', 'as.Date.numeric', - 'as.POSIXct', 'as.POSIXct.Date', 'as.POSIXct.POSIXlt', - 'as.POSIXct.date', 'as.POSIXct.dates', 'as.POSIXct.default', - 'as.POSIXct.numeric', 'as.POSIXlt', 'as.POSIXlt.Date', - 'as.POSIXlt.POSIXct', 'as.POSIXlt.character', 'as.POSIXlt.date', - 'as.POSIXlt.dates', 'as.POSIXlt.default', 'as.POSIXlt.factor', - 'as.POSIXlt.numeric', 'as.array', 'as.array.default', 'as.call', - 'as.character', 
'as.character.Date', 'as.character.POSIXt', - 'as.character.condition', 'as.character.default', - 'as.character.error', 'as.character.factor', 'as.character.hexmode', - 'as.character.numeric_version', 'as.character.octmode', - 'as.character.srcref', 'as.complex', 'as.data.frame', - 'as.data.frame.AsIs', 'as.data.frame.Date', 'as.data.frame.POSIXct', - 'as.data.frame.POSIXlt', 'as.data.frame.array', - 'as.data.frame.character', 'as.data.frame.complex', - 'as.data.frame.data.frame', 'as.data.frame.default', - 'as.data.frame.difftime', 'as.data.frame.factor', - 'as.data.frame.integer', 'as.data.frame.list', - 'as.data.frame.logical', 'as.data.frame.matrix', - 'as.data.frame.model.matrix', 'as.data.frame.numeric', - 'as.data.frame.numeric_version', 'as.data.frame.ordered', - 'as.data.frame.raw', 'as.data.frame.table', 'as.data.frame.ts', - 'as.data.frame.vector', 'as.difftime', 'as.double', - 'as.double.POSIXlt', 'as.double.difftime', 'as.environment', - 'as.expression', 'as.expression.default', 'as.factor', - 'as.function', 'as.function.default', 'as.hexmode', 'as.integer', - 'as.list', 'as.list.Date', 'as.list.POSIXct', 'as.list.data.frame', - 'as.list.default', 'as.list.environment', 'as.list.factor', - 'as.list.function', 'as.list.numeric_version', 'as.logical', - 'as.logical.factor', 'as.matrix', 'as.matrix.POSIXlt', - 'as.matrix.data.frame', 'as.matrix.default', 'as.matrix.noquote', - 'as.name', 'as.null', 'as.null.default', 'as.numeric', - 'as.numeric_version', 'as.octmode', 'as.ordered', - 'as.package_version', 'as.pairlist', 'as.qr', 'as.raw', 'as.single', - 'as.single.default', 'as.symbol', 'as.table', 'as.table.default', - 'as.vector', 'as.vector.factor', 'asNamespace', 'asS3', 'asS4', - 'asin', 'asinh', 'assign', 'atan', 'atan2', 'atanh', - 'attachNamespace', 'attr', 'attr.all.equal', 'attributes', - 'autoload', 'autoloader', 'backsolve', 'baseenv', 'basename', - 'besselI', 'besselJ', 'besselK', 'besselY', 'beta', - 'bindingIsActive', 'bindingIsLocked', 'bindtextdomain', 'bitwAnd', - 'bitwNot', 'bitwOr', 'bitwShiftL', 'bitwShiftR', 'bitwXor', 'body', - 'bquote', 'browser', 'browserCondition', 'browserSetDebug', - 'browserText', 'builtins', 'by', 'by.data.frame', 'by.default', - 'bzfile', 'c.Date', 'c.POSIXct', 'c.POSIXlt', 'c.noquote', - 'c.numeric_version', 'call', 'callCC', 'capabilities', 'casefold', - 'cat', 'category', 'cbind', 'cbind.data.frame', 'ceiling', - 'char.expand', 'charToRaw', 'charmatch', 'chartr', 'check_tzones', - 'chol', 'chol.default', 'chol2inv', 'choose', 'class', - 'clearPushBack', 'close', 'close.connection', 'close.srcfile', - 'close.srcfilealias', 'closeAllConnections', 'col', 'colMeans', - 'colSums', 'colnames', 'commandArgs', 'comment', 'computeRestarts', - 'conditionCall', 'conditionCall.condition', 'conditionMessage', - 'conditionMessage.condition', 'conflicts', 'contributors', 'cos', - 'cosh', 'crossprod', 'cummax', 'cummin', 'cumprod', 'cumsum', 'cut', - 'cut.Date', 'cut.POSIXt', 'cut.default', 'dQuote', 'data.class', - 'data.matrix', 'date', 'debug', 'debugonce', - 'default.stringsAsFactors', 'delayedAssign', 'deparse', 'det', - 'determinant', 'determinant.matrix', 'dget', 'diag', 'diff', - 'diff.Date', 'diff.POSIXt', 'diff.default', 'difftime', 'digamma', - 'dim', 'dim.data.frame', 'dimnames', 'dimnames.data.frame', 'dir', - 'dir.create', 'dirname', 'do.call', 'dput', 'drop', 'droplevels', - 'droplevels.data.frame', 'droplevels.factor', 'dump', 'duplicated', - 'duplicated.POSIXlt', 'duplicated.array', 'duplicated.data.frame', - 
'duplicated.default', 'duplicated.matrix', - 'duplicated.numeric_version', 'dyn.load', 'dyn.unload', 'eapply', - 'eigen', 'else', 'emptyenv', 'enc2native', 'enc2utf8', - 'encodeString', 'enquote', 'env.profile', 'environment', - 'environmentIsLocked', 'environmentName', 'eval', 'eval.parent', - 'evalq', 'exists', 'exp', 'expand.grid', 'expm1', 'expression', - 'factor', 'factorial', 'fifo', 'file', 'file.access', 'file.append', - 'file.choose', 'file.copy', 'file.create', 'file.exists', - 'file.info', 'file.link', 'file.path', 'file.remove', 'file.rename', - 'file.show', 'file.symlink', 'find.package', 'findInterval', - 'findPackageEnv', 'findRestart', 'floor', 'flush', - 'flush.connection', 'force', 'formals', 'format', - 'format.AsIs', 'format.Date', 'format.POSIXct', 'format.POSIXlt', - 'format.data.frame', 'format.default', 'format.difftime', - 'format.factor', 'format.hexmode', 'format.info', - 'format.libraryIQR', 'format.numeric_version', 'format.octmode', - 'format.packageInfo', 'format.pval', 'format.summaryDefault', - 'formatC', 'formatDL', 'forwardsolve', 'gamma', 'gc', 'gc.time', - 'gcinfo', 'gctorture', 'gctorture2', 'get', 'getAllConnections', - 'getCallingDLL', 'getCallingDLLe', 'getConnection', - 'getDLLRegisteredRoutines', 'getDLLRegisteredRoutines.DLLInfo', - 'getDLLRegisteredRoutines.character', 'getElement', - 'getExportedValue', 'getHook', 'getLoadedDLLs', 'getNamespace', - 'getNamespaceExports', 'getNamespaceImports', 'getNamespaceInfo', - 'getNamespaceName', 'getNamespaceUsers', 'getNamespaceVersion', - 'getNativeSymbolInfo', 'getOption', 'getRversion', 'getSrcLines', - 'getTaskCallbackNames', 'geterrmessage', 'gettext', 'gettextf', - 'getwd', 'gl', 'globalenv', 'gregexpr', 'grep', 'grepRaw', 'grepl', - 'gsub', 'gzcon', 'gzfile', 'head', 'iconv', 'iconvlist', - 'icuSetCollate', 'identical', 'identity', 'ifelse', 'importIntoEnv', - 'in', 'inherits', 'intToBits', 'intToUtf8', 'interaction', 'interactive', - 'intersect', 'inverse.rle', 'invisible', 'invokeRestart', - 'invokeRestartInteractively', 'is.R', 'is.array', 'is.atomic', - 'is.call', 'is.character', 'is.complex', 'is.data.frame', - 'is.double', 'is.element', 'is.environment', 'is.expression', - 'is.factor', 'is.finite', 'is.function', 'is.infinite', - 'is.integer', 'is.language', 'is.list', 'is.loaded', 'is.logical', - 'is.matrix', 'is.na', 'is.na.POSIXlt', 'is.na.data.frame', - 'is.na.numeric_version', 'is.name', 'is.nan', 'is.null', - 'is.numeric', 'is.numeric.Date', 'is.numeric.POSIXt', - 'is.numeric.difftime', 'is.numeric_version', 'is.object', - 'is.ordered', 'is.package_version', 'is.pairlist', 'is.primitive', - 'is.qr', 'is.raw', 'is.recursive', 'is.single', 'is.symbol', - 'is.table', 'is.unsorted', 'is.vector', 'isBaseNamespace', - 'isIncomplete', 'isNamespace', 'isOpen', 'isRestart', 'isS4', - 'isSeekable', 'isSymmetric', 'isSymmetric.matrix', 'isTRUE', - 'isatty', 'isdebugged', 'jitter', 'julian', 'julian.Date', - 'julian.POSIXt', 'kappa', 'kappa.default', 'kappa.lm', 'kappa.qr', - 'kronecker', 'l10n_info', 'labels', 'labels.default', 'lapply', - 'lazyLoad', 'lazyLoadDBexec', 'lazyLoadDBfetch', 'lbeta', 'lchoose', - 'length', 'length.POSIXlt', 'letters', 'levels', 'levels.default', - 'lfactorial', 'lgamma', 'library.dynam', 'library.dynam.unload', - 'licence', 'license', 'list.dirs', 'list.files', 'list2env', 'load', - 'loadNamespace', 'loadedNamespaces', 'loadingNamespaceInfo', - 'local', 'lockBinding', 'lockEnvironment', 'log', 'log10', 'log1p', - 'log2', 'logb', 'lower.tri', 'ls', 'make.names', 
'make.unique', - 'makeActiveBinding', 'mapply', 'margin.table', 'mat.or.vec', - 'match', 'match.arg', 'match.call', 'match.fun', 'max', 'max.col', - 'mean', 'mean.Date', 'mean.POSIXct', 'mean.POSIXlt', 'mean.default', - 'mean.difftime', 'mem.limits', 'memCompress', 'memDecompress', - 'memory.profile', 'merge', 'merge.data.frame', 'merge.default', - 'message', 'mget', 'min', 'missing', 'mode', 'month.abb', - 'month.name', 'months', 'months.Date', 'months.POSIXt', - 'months.abb', 'months.nameletters', 'names', 'names.POSIXlt', - 'namespaceExport', 'namespaceImport', 'namespaceImportClasses', - 'namespaceImportFrom', 'namespaceImportMethods', 'nargs', 'nchar', - 'ncol', 'new.env', 'ngettext', 'nlevels', 'noquote', 'norm', - 'normalizePath', 'nrow', 'numeric_version', 'nzchar', 'objects', - 'oldClass', 'on.exit', 'open', 'open.connection', 'open.srcfile', - 'open.srcfilealias', 'open.srcfilecopy', 'options', 'order', - 'ordered', 'outer', 'packBits', 'packageEvent', - 'packageHasNamespace', 'packageStartupMessage', 'package_version', - 'pairlist', 'parent.env', 'parent.frame', 'parse', - 'parseNamespaceFile', 'paste', 'paste0', 'path.expand', - 'path.package', 'pipe', 'pmatch', 'pmax', 'pmax.int', 'pmin', - 'pmin.int', 'polyroot', 'pos.to.env', 'pretty', 'pretty.default', - 'prettyNum', 'print', 'print.AsIs', 'print.DLLInfo', - 'print.DLLInfoList', 'print.DLLRegisteredRoutines', 'print.Date', - 'print.NativeRoutineList', 'print.POSIXct', 'print.POSIXlt', - 'print.by', 'print.condition', 'print.connection', - 'print.data.frame', 'print.default', 'print.difftime', - 'print.factor', 'print.function', 'print.hexmode', - 'print.libraryIQR', 'print.listof', 'print.noquote', - 'print.numeric_version', 'print.octmode', 'print.packageInfo', - 'print.proc_time', 'print.restart', 'print.rle', - 'print.simple.list', 'print.srcfile', 'print.srcref', - 'print.summary.table', 'print.summaryDefault', 'print.table', - 'print.warnings', 'prmatrix', 'proc.time', 'prod', 'prop.table', - 'provideDimnames', 'psigamma', 'pushBack', 'pushBackLength', 'q', - 'qr', 'qr.Q', 'qr.R', 'qr.X', 'qr.coef', 'qr.default', 'qr.fitted', - 'qr.qty', 'qr.qy', 'qr.resid', 'qr.solve', 'quarters', - 'quarters.Date', 'quarters.POSIXt', 'quit', 'quote', 'range', - 'range.default', 'rank', 'rapply', 'raw', 'rawConnection', - 'rawConnectionValue', 'rawShift', 'rawToBits', 'rawToChar', 'rbind', - 'rbind.data.frame', 'rcond', 'read.dcf', 'readBin', 'readChar', - 'readLines', 'readRDS', 'readRenviron', 'readline', 'reg.finalizer', - 'regexec', 'regexpr', 'registerS3method', 'registerS3methods', - 'regmatches', 'remove', 'removeTaskCallback', 'rep', 'rep.Date', - 'rep.POSIXct', 'rep.POSIXlt', 'rep.factor', 'rep.int', - 'rep.numeric_version', 'rep_len', 'replace', 'replicate', - 'requireNamespace', 'restartDescription', 'restartFormals', - 'retracemem', 'rev', 'rev.default', 'rle', 'rm', 'round', - 'round.Date', 'round.POSIXt', 'row', 'row.names', - 'row.names.data.frame', 'row.names.default', 'rowMeans', 'rowSums', - 'rownames', 'rowsum', 'rowsum.data.frame', 'rowsum.default', - 'sQuote', 'sample', 'sample.int', 'sapply', 'save', 'save.image', - 'saveRDS', 'scale', 'scale.default', 'scan', 'search', - 'searchpaths', 'seek', 'seek.connection', 'seq', 'seq.Date', - 'seq.POSIXt', 'seq.default', 'seq.int', 'seq_along', 'seq_len', - 'sequence', 'serialize', 'set.seed', 'setHook', 'setNamespaceInfo', - 'setSessionTimeLimit', 'setTimeLimit', 'setdiff', 'setequal', - 'setwd', 'shQuote', 'showConnections', 'sign', 'signalCondition', - 'signif', 
'simpleCondition', 'simpleError', 'simpleMessage', - 'simpleWarning', 'simplify2array', 'sin', 'single', - 'sinh', 'sink', 'sink.number', 'slice.index', 'socketConnection', - 'socketSelect', 'solve', 'solve.default', 'solve.qr', 'sort', - 'sort.POSIXlt', 'sort.default', 'sort.int', 'sort.list', 'split', - 'split.Date', 'split.POSIXct', 'split.data.frame', 'split.default', - 'sprintf', 'sqrt', 'srcfile', 'srcfilealias', 'srcfilecopy', - 'srcref', 'standardGeneric', 'stderr', 'stdin', 'stdout', 'stop', - 'stopifnot', 'storage.mode', 'strftime', 'strptime', 'strsplit', - 'strtoi', 'strtrim', 'structure', 'strwrap', 'sub', 'subset', - 'subset.data.frame', 'subset.default', 'subset.matrix', - 'substitute', 'substr', 'substring', 'sum', 'summary', - 'summary.Date', 'summary.POSIXct', 'summary.POSIXlt', - 'summary.connection', 'summary.data.frame', 'summary.default', - 'summary.factor', 'summary.matrix', 'summary.proc_time', - 'summary.srcfile', 'summary.srcref', 'summary.table', - 'suppressMessages', 'suppressPackageStartupMessages', - 'suppressWarnings', 'svd', 'sweep', 'sys.call', 'sys.calls', - 'sys.frame', 'sys.frames', 'sys.function', 'sys.load.image', - 'sys.nframe', 'sys.on.exit', 'sys.parent', 'sys.parents', - 'sys.save.image', 'sys.source', 'sys.status', 'system', - 'system.file', 'system.time', 'system2', 't', 't.data.frame', - 't.default', 'table', 'tabulate', 'tail', 'tan', 'tanh', 'tapply', - 'taskCallbackManager', 'tcrossprod', 'tempdir', 'tempfile', - 'testPlatformEquivalence', 'textConnection', 'textConnectionValue', - 'toString', 'toString.default', 'tolower', 'topenv', 'toupper', - 'trace', 'traceback', 'tracemem', 'tracingState', 'transform', - 'transform.data.frame', 'transform.default', 'trigamma', 'trunc', - 'trunc.Date', 'trunc.POSIXt', 'truncate', 'truncate.connection', - 'try', 'tryCatch', 'typeof', 'unclass', 'undebug', 'union', - 'unique', 'unique.POSIXlt', 'unique.array', 'unique.data.frame', - 'unique.default', 'unique.matrix', 'unique.numeric_version', - 'units', 'units.difftime', 'unix.time', 'unlink', 'unlist', - 'unloadNamespace', 'unlockBinding', 'unname', 'unserialize', - 'unsplit', 'untrace', 'untracemem', 'unz', 'upper.tri', 'url', - 'utf8ToInt', 'vapply', 'version', 'warning', 'warnings', 'weekdays', - 'weekdays.Date', 'weekdays.POSIXt', 'which', 'which.max', - 'which.min', 'with', 'with.default', 'withCallingHandlers', - 'withRestarts', 'withVisible', 'within', 'within.data.frame', - 'within.list', 'write', 'write.dcf', 'writeBin', 'writeChar', - 'writeLines', 'xor', 'xor.hexmode', 'xor.octmode', - 'xpdrows.data.frame', 'xtfrm', 'xtfrm.AsIs', 'xtfrm.Date', - 'xtfrm.POSIXct', 'xtfrm.POSIXlt', 'xtfrm.Surv', 'xtfrm.default', - 'xtfrm.difftime', 'xtfrm.factor', 'xtfrm.numeric_version', 'xzfile', - 'zapsmall' - ) - + valid_name = r'(?:`[^`\\]*(?:\\.[^`\\]*)*`)|(?:(?:[a-zA-z]|[_.][^0-9])[\w_.]*)' tokens = { 'comments': [ (r'#.*$', Comment.Single), ], 'valid_name': [ - (r'[a-zA-Z][\w.]*', Text), - # can begin with ., but not if that is followed by a digit - (r'\.[a-zA-Z_][\w.]*', Text), + (valid_name, Name), ], 'punctuation': [ (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation), ], 'keywords': [ - (words(builtins_base, suffix=r'(?![\w. 
=])'), - Keyword.Pseudo), (r'(if|else|for|while|repeat|in|next|break|return|switch|function)' r'(?![\w.])', Keyword.Reserved), - (r'(array|category|character|complex|double|function|integer|list|' - r'logical|matrix|numeric|vector|data.frame|c)' - r'(?![\w.])', - Keyword.Type), - (r'(library|require|attach|detach|source)' - r'(?![\w.])', - Keyword.Namespace) ], 'operators': [ (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator), - (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator) + (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator), ], 'builtin_symbols': [ (r'(NULL|NA(_(integer|real|complex|character)_)?|' @@ -379,17 +118,18 @@ class SLexer(RegexLexer): include('comments'), # whitespaces (r'\s+', Text), - (r'`.*?`', String.Backtick), (r'\'', String, 'string_squote'), (r'\"', String, 'string_dquote'), include('builtin_symbols'), + include('valid_name'), include('numbers'), include('keywords'), include('punctuation'), include('operators'), - include('valid_name'), ], 'root': [ + # calls: + (r'(%s)\s*(?=\()' % valid_name, Name.Function), include('statements'), # blocks: (r'\{|\}', Punctuation), @@ -421,7 +161,7 @@ class RdLexer(RegexLexer): This is a very minimal implementation, highlighting little more than the macros. A description of Rd syntax is found in `Writing R Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_ - and `Parsing Rd files <developer.r-project.org/parseRd.pdf>`_. + and `Parsing Rd files <http://developer.r-project.org/parseRd.pdf>`_. .. versionadded:: 1.6 """ diff --git a/pygments/lexers/rdf.py b/pygments/lexers/rdf.py index d0f8778a..27bbe154 100644 --- a/pygments/lexers/rdf.py +++ b/pygments/lexers/rdf.py @@ -97,7 +97,7 @@ class SparqlLexer(RegexLexer): 'root': [ (r'\s+', Text), # keywords :: - (r'((?i)select|construct|describe|ask|where|filter|group\s+by|minus|' + (r'(?i)(select|construct|describe|ask|where|filter|group\s+by|minus|' r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|' r'offset|bindings|load|clear|drop|create|add|move|copy|' r'insert\s+data|delete\s+data|delete\s+where|delete|insert|' @@ -111,10 +111,10 @@ class SparqlLexer(RegexLexer): # # variables :: ('[?$]' + VARNAME, Name.Variable), # prefixed names :: - (r'(' + PN_PREFIX + ')?(\:)(' + PN_LOCAL + ')?', + (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?', bygroups(Name.Namespace, Punctuation, Name.Tag)), # function names :: - (r'((?i)str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|' + (r'(?i)(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|' r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|' r'contains|strstarts|strends|strbefore|strafter|year|month|day|' r'hours|minutes|seconds|timezone|tz|now|md5|sha1|sha256|sha384|' @@ -125,7 +125,7 @@ class SparqlLexer(RegexLexer): # boolean literals :: (r'(true|false)', Keyword.Constant), # double literals :: - (r'[+\-]?(\d+\.\d*' + EXPONENT + '|\.?\d+' + EXPONENT + ')', Number.Float), + (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float), # decimal literals :: (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float), # integer literals :: diff --git a/pygments/lexers/rebol.py b/pygments/lexers/rebol.py index f3d00200..e58e01fa 100644 --- a/pygments/lexers/rebol.py +++ b/pygments/lexers/rebol.py @@ -102,12 +102,12 @@ class RebolLexer(RegexLexer): yield match.start(), Generic.Heading, word elif re.match("to-.*", word): yield match.start(), Keyword, word - elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$', + elif 
re.match(r'(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$', word): yield match.start(), Operator, word - elif re.match(".*\?$", word): + elif re.match(r".*\?$", word): yield match.start(), Keyword, word - elif re.match(".*\!$", word): + elif re.match(r".*\!$", word): yield match.start(), Keyword.Type, word elif re.match("'.*", word): yield match.start(), Name.Variable.Instance, word # lit-word @@ -239,7 +239,7 @@ class RebolLexer(RegexLexer): if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE): # The code starts with REBOL header return 1.0 - elif re.search(r'\s*REBOL\s*[', text, re.IGNORECASE): + elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE): # The code contains REBOL header but also some text before it return 0.5 @@ -297,10 +297,10 @@ class RedLexer(RegexLexer): yield match.start(), Keyword.Namespace, word elif re.match("to-.*", word): yield match.start(), Keyword, word - elif re.match('(\+|-\*\*|-|\*\*|//|/|\*|and|or|xor|=\?|===|==|=|<>|<=|>=|' - '<<<|>>>|<<|>>|<|>%)$', word): + elif re.match(r'(\+|-\*\*|-|\*\*|//|/|\*|and|or|xor|=\?|===|==|=|<>|<=|>=|' + r'<<<|>>>|<<|>>|<|>%)$', word): yield match.start(), Operator, word - elif re.match(".*\!$", word): + elif re.match(r".*\!$", word): yield match.start(), Keyword.Type, word elif re.match("'.*", word): yield match.start(), Name.Variable.Instance, word # lit-word diff --git a/pygments/lexers/robotframework.py b/pygments/lexers/robotframework.py index e868127b..5bacffa3 100644 --- a/pygments/lexers/robotframework.py +++ b/pygments/lexers/robotframework.py @@ -161,7 +161,7 @@ class RowTokenizer(object): class RowSplitter(object): _space_splitter = re.compile('( {2,})') - _pipe_splitter = re.compile('((?:^| +)\|(?: +|$))') + _pipe_splitter = re.compile(r'((?:^| +)\|(?: +|$))') def split(self, row): splitter = (row.startswith('| ') and self._split_from_pipes diff --git a/pygments/lexers/ruby.py b/pygments/lexers/ruby.py index fe750f1a..ce2fc7a7 100644 --- a/pygments/lexers/ruby.py +++ b/pygments/lexers/ruby.py @@ -403,8 +403,8 @@ class RubyConsoleLexer(Lexer): aliases = ['rbcon', 'irb'] mimetypes = ['text/x-ruby-shellsession'] - _prompt_re = re.compile('irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] ' - '|>> |\?> ') + _prompt_re = re.compile(r'irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] ' + r'|>> |\?> ') def get_tokens_unprocessed(self, text): rblexer = RubyLexer(**self.options) @@ -498,11 +498,11 @@ class FancyLexer(RegexLexer): (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function), # operators, must be below functions (r'[-+*/~,<>=&!?%^\[\].$]+', Operator), - ('[A-Z]\w*', Name.Constant), - ('@[a-zA-Z_]\w*', Name.Variable.Instance), - ('@@[a-zA-Z_]\w*', Name.Variable.Class), + (r'[A-Z]\w*', Name.Constant), + (r'@[a-zA-Z_]\w*', Name.Variable.Instance), + (r'@@[a-zA-Z_]\w*', Name.Variable.Class), ('@@?', Operator), - ('[a-zA-Z_]\w*', Name), + (r'[a-zA-Z_]\w*', Name), # numbers - / checks are necessary to avoid mismarking regexes, # see comment in RubyLexer (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?', diff --git a/pygments/lexers/rust.py b/pygments/lexers/rust.py index 6914f54d..10097fba 100644 --- a/pygments/lexers/rust.py +++ b/pygments/lexers/rust.py @@ -24,7 +24,7 @@ class RustLexer(RegexLexer): """ name = 'Rust' filenames = ['*.rs', '*.rs.in'] - aliases = ['rust'] + aliases = ['rust', 'rs'] mimetypes = ['text/rust'] keyword_types = ( diff --git a/pygments/lexers/scripting.py b/pygments/lexers/scripting.py index b3af606e..28c37db8 100644 --- a/pygments/lexers/scripting.py +++ b/pygments/lexers/scripting.py @@ -104,7 +104,7 @@ class 
LuaLexer(RegexLexer): (r'%s(?=%s*[.:])' % (_name, _s), Name.Class), (_name, Name.Function, '#pop'), # inline function - ('\(', Punctuation, '#pop'), + (r'\(', Punctuation, '#pop'), ], 'goto': [ @@ -696,8 +696,8 @@ class AppleScriptLexer(RegexLexer): (r'[-+]?\d+', Number.Integer), ], 'comment': [ - ('\(\*', Comment.Multiline, '#push'), - ('\*\)', Comment.Multiline, '#pop'), + (r'\(\*', Comment.Multiline, '#push'), + (r'\*\)', Comment.Multiline, '#pop'), ('[^*(]+', Comment.Multiline), ('[*(]', Comment.Multiline), ], diff --git a/pygments/lexers/sgf.py b/pygments/lexers/sgf.py new file mode 100644 index 00000000..aa934b49 --- /dev/null +++ b/pygments/lexers/sgf.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.sgf + ~~~~~~~~~~~~~~~~~~~~ + + Lexer for Smart Game Format (sgf) file format. + + The format is used to store game records of board games for two players + (mainly Go game). + For more information about the definition of the format, see: + https://www.red-bean.com/sgf/ + + :copyright: Copyright 2006-2018 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. + + .. versionadded:: 2.4 +""" + +from pygments.lexer import RegexLexer, bygroups +from pygments.token import * + +__all__ = ["SmartGameFormatLexer"] + + +class SmartGameFormatLexer(RegexLexer): + name = 'SmartGameFormat' + aliases = ['sgf'] + filenames = ['*.sgf'] + + tokens = { + 'root': [ + (r'[\s():;]', Punctuation), + # tokens: + (r'(A[BW]|AE|AN|AP|AR|AS|[BW]L|BM|[BW]R|[BW]S|[BW]T|CA|CH|CP|CR|DD|DM|DO|DT|EL|EV|EX|FF|FG|G[BW]|GC|GM|GN|HA|HO|ID|IP|IT|IY|KM|KO|L|LB|LN|LT|M|MA|MN|N|OB|OM|ON|OP|OT|OV|P[BW]|PC|PL|PM|RE|RG|RO|RU|SO|SC|SE|SI|SL|SO|SQ|ST|SU|SZ|T[BW]|TC|TE|TM|TR|UC|US|V|VW|[BW]|C)', + Name.Builtin), + # number: + (r'(\[)([0-9.]+)(\])', + bygroups(Punctuation, Literal.Number, Punctuation)), + # date: + (r'(\[)([0-9]{4}-[0-9]{2}-[0-9]{2})(\])', + bygroups(Punctuation, Literal.Date, Punctuation)), + # point: + (r'(\[)([a-z]{2})(\])', + bygroups(Punctuation, String, Punctuation)), + # double points: + (r'(\[)([a-z]{2})(:)([a-z]{2})(\])', + bygroups(Punctuation, String, Punctuation, String, Punctuation)), + + (r'(\[)([\w\s#()+,\-.:?]+)(\])', + bygroups(Punctuation, String, Punctuation)), + (r'(\[)(\s.*)(\])', + bygroups(Punctuation, Text, Punctuation)), + ], + } diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py index ceb6f14d..31bc7e94 100644 --- a/pygments/lexers/shell.py +++ b/pygments/lexers/shell.py @@ -19,7 +19,7 @@ from pygments.util import shebang_matches __all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer', - 'MSDOSSessionLexer', 'PowerShellLexer', + 'SlurmBashLexer', 'MSDOSSessionLexer', 'PowerShellLexer', 'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer'] line_re = re.compile('.*?\n') @@ -38,7 +38,7 @@ class BashLexer(RegexLexer): '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'] - mimetypes = ['application/x-sh', 'application/x-shellscript'] + mimetypes = ['application/x-sh', 'application/x-shellscript', 'text/x-shellscript'] tokens = { 'root': [ @@ -76,7 +76,7 @@ class BashLexer(RegexLexer): (r'&&|\|\|', Operator), ], 'data': [ - (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double), + (r'(?s)\$?"(\\.|[^"\\$])*"', String.Double), (r'"', String.Double, 'string'), (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single), (r"(?s)'.*?'", String.Single), @@ -126,6 +126,28 @@ class BashLexer(RegexLexer): return 0.2 +class SlurmBashLexer(BashLexer): + """ + Lexer for 
(ba|k|z|)sh Slurm scripts. + + .. versionadded:: 2.4 + """ + + name = 'Slurm' + aliases = ['slurm', 'sbatch'] + filenames = ['*.sl'] + mimetypes = [] + EXTRA_KEYWORDS = {'srun'} + + def get_tokens_unprocessed(self, text): + for index, token, value in BashLexer.get_tokens_unprocessed(self, text): + if token is Text and value in self.EXTRA_KEYWORDS: + yield index, Name.Builtin, value + elif token is Comment.Single and 'SBATCH' in value: + yield index, Keyword.Pseudo, value + else: + yield index, token, value + class ShellSessionBaseLexer(Lexer): """ Base lexer for simplistic shell sessions. @@ -638,13 +660,29 @@ class PowerShellLexer(RegexLexer): 'wildcard').split() verbs = ( - 'write where wait use update unregister undo trace test tee take ' - 'suspend stop start split sort skip show set send select scroll resume ' - 'restore restart resolve resize reset rename remove register receive ' - 'read push pop ping out new move measure limit join invoke import ' - 'group get format foreach export expand exit enter enable disconnect ' - 'disable debug cxnew copy convertto convertfrom convert connect ' - 'complete compare clear checkpoint aggregate add').split() + 'write where watch wait use update unregister unpublish unprotect ' + 'unlock uninstall undo unblock trace test tee take sync switch ' + 'suspend submit stop step start split sort skip show set send select ' + 'search scroll save revoke resume restore restart resolve resize ' + 'reset request repair rename remove register redo receive read push ' + 'publish protect pop ping out optimize open new move mount merge ' + 'measure lock limit join invoke install initialize import hide group ' + 'grant get format foreach find export expand exit enter enable edit ' + 'dismount disconnect disable deny debug cxnew copy convertto ' + 'convertfrom convert connect confirm compress complete compare close ' + 'clear checkpoint block backup assert approve aggregate add').split() + + aliases_ = ( + 'ac asnp cat cd cfs chdir clc clear clhy cli clp cls clv cnsn ' + 'compare copy cp cpi cpp curl cvpa dbp del diff dir dnsn ebp echo epal ' + 'epcsv epsn erase etsn exsn fc fhx fl foreach ft fw gal gbp gc gci gcm ' + 'gcs gdr ghy gi gjb gl gm gmo gp gps gpv group gsn gsnp gsv gu gv gwmi ' + 'h history icm iex ihy ii ipal ipcsv ipmo ipsn irm ise iwmi iwr kill lp ' + 'ls man md measure mi mount move mp mv nal ndr ni nmo npssc nsn nv ogv ' + 'oh popd ps pushd pwd r rbp rcjb rcsn rd rdr ren ri rjb rm rmdir rmo ' + 'rni rnp rp rsn rsnp rujb rv rvpa rwmi sajb sal saps sasv sbp sc select ' + 'set shcm si sl sleep sls sort sp spjb spps spsv start sujb sv swmi tee ' + 'trcm type wget where wjb write').split() commenthelp = ( 'component description example externalhelp forwardhelpcategory ' @@ -672,6 +710,7 @@ class PowerShellLexer(RegexLexer): (r'(%s)\b' % '|'.join(keywords), Keyword), (r'-(%s)\b' % '|'.join(operators), Operator), (r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin), + (r'(%s)\s' % '|'.join(aliases_), Name.Builtin), (r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s (r'-[a-z_]\w*', Name), (r'\w+', Name), diff --git a/pygments/lexers/slash.py b/pygments/lexers/slash.py new file mode 100644 index 00000000..bd73d463 --- /dev/null +++ b/pygments/lexers/slash.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.slash + ~~~~~~~~~~~~~~~~~~~~~ + + Lexer for the `Slash <https://github.com/arturadib/Slash-A>`_ programming + language. + + :copyright: Copyright 2012 by GitHub, Inc + :license: BSD, see LICENSE for details. 
+""" + +import re + +from pygments.lexer import ExtendedRegexLexer, bygroups, DelegatingLexer +from pygments.token import Name, Number, String, Comment, Punctuation, \ + Other, Keyword, Operator, Whitespace + +__all__ = ['SlashLexer'] + + +class SlashLanguageLexer(ExtendedRegexLexer): + _nkw = r'(?=[^a-zA-Z_0-9])' + + def move_state(new_state): + return ("#pop", new_state) + + def right_angle_bracket(lexer, match, ctx): + if len(ctx.stack) > 1 and ctx.stack[-2] == "string": + ctx.stack.pop() + yield match.start(), String.Interpol, "}" + ctx.pos = match.end() + pass + + tokens = { + "root": [ + (r"<%=", Comment.Preproc, move_state("slash")), + (r"<%!!", Comment.Preproc, move_state("slash")), + (r"<%#.*?%>", Comment.Multiline), + (r"<%", Comment.Preproc, move_state("slash")), + (r".|\n", Other), + ], + "string": [ + (r"\\", String.Escape, move_state("string_e")), + (r"\"", String, move_state("slash")), + (r"#\{", String.Interpol, "slash"), + (r'.|\n', String), + ], + "string_e": [ + (r'n', String.Escape, move_state("string")), + (r't', String.Escape, move_state("string")), + (r'r', String.Escape, move_state("string")), + (r'e', String.Escape, move_state("string")), + (r'x[a-fA-F0-9]{2}', String.Escape, move_state("string")), + (r'.', String.Escape, move_state("string")), + ], + "regexp": [ + (r'}[a-z]*', String.Regex, move_state("slash")), + (r'\\(.|\n)', String.Regex), + (r'{', String.Regex, "regexp_r"), + (r'.|\n', String.Regex), + ], + "regexp_r": [ + (r'}[a-z]*', String.Regex, "#pop"), + (r'\\(.|\n)', String.Regex), + (r'{', String.Regex, "regexp_r"), + ], + "slash": [ + (r"%>", Comment.Preproc, move_state("root")), + (r"\"", String, move_state("string")), + (r"'[a-zA-Z0-9_]+", String), + (r'%r{', String.Regex, move_state("regexp")), + (r'/\*.*?\*/', Comment.Multiline), + (r"(#|//).*?\n", Comment.Single), + (r'-?[0-9]+e[+-]?[0-9]+', Number.Float), + (r'-?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?', Number.Float), + (r'-?[0-9]+', Number.Integer), + (r'nil'+_nkw, Name.Builtin), + (r'true'+_nkw, Name.Builtin), + (r'false'+_nkw, Name.Builtin), + (r'self'+_nkw, Name.Builtin), + (r'(class)(\s+)([A-Z][a-zA-Z0-9_\']*)', + bygroups(Keyword, Whitespace, Name.Class)), + (r'class'+_nkw, Keyword), + (r'extends'+_nkw, Keyword), + (r'(def)(\s+)(self)(\s*)(\.)(\s*)([a-z_][a-zA-Z0-9_\']*=?|<<|>>|==|<=>|<=|<|>=|>|\+|-(self)?|~(self)?|\*|/|%|^|&&|&|\||\[\]=?)', + bygroups(Keyword, Whitespace, Name.Builtin, Whitespace, Punctuation, Whitespace, Name.Function)), + (r'(def)(\s+)([a-z_][a-zA-Z0-9_\']*=?|<<|>>|==|<=>|<=|<|>=|>|\+|-(self)?|~(self)?|\*|/|%|^|&&|&|\||\[\]=?)', + bygroups(Keyword, Whitespace, Name.Function)), + (r'def'+_nkw, Keyword), + (r'if'+_nkw, Keyword), + (r'elsif'+_nkw, Keyword), + (r'else'+_nkw, Keyword), + (r'unless'+_nkw, Keyword), + (r'for'+_nkw, Keyword), + (r'in'+_nkw, Keyword), + (r'while'+_nkw, Keyword), + (r'until'+_nkw, Keyword), + (r'and'+_nkw, Keyword), + (r'or'+_nkw, Keyword), + (r'not'+_nkw, Keyword), + (r'lambda'+_nkw, Keyword), + (r'try'+_nkw, Keyword), + (r'catch'+_nkw, Keyword), + (r'return'+_nkw, Keyword), + (r'next'+_nkw, Keyword), + (r'last'+_nkw, Keyword), + (r'throw'+_nkw, Keyword), + (r'use'+_nkw, Keyword), + (r'switch'+_nkw, Keyword), + (r'\\', Keyword), + (r'λ', Keyword), + (r'__FILE__'+_nkw, Name.Builtin.Pseudo), + (r'__LINE__'+_nkw, Name.Builtin.Pseudo), + (r'[A-Z][a-zA-Z0-9_\']*'+_nkw, Name.Constant), + (r'[a-z_][a-zA-Z0-9_\']*'+_nkw, Name), + (r'@[a-z_][a-zA-Z0-9_\']*'+_nkw, Name.Variable.Instance), + (r'@@[a-z_][a-zA-Z0-9_\']*'+_nkw, Name.Variable.Class), + (r'\(', 
Punctuation), + (r'\)', Punctuation), + (r'\[', Punctuation), + (r'\]', Punctuation), + (r'\{', Punctuation), + (r'\}', right_angle_bracket), + (r';', Punctuation), + (r',', Punctuation), + (r'<<=', Operator), + (r'>>=', Operator), + (r'<<', Operator), + (r'>>', Operator), + (r'==', Operator), + (r'!=', Operator), + (r'=>', Operator), + (r'=', Operator), + (r'<=>', Operator), + (r'<=', Operator), + (r'>=', Operator), + (r'<', Operator), + (r'>', Operator), + (r'\+\+', Operator), + (r'\+=', Operator), + (r'-=', Operator), + (r'\*\*=', Operator), + (r'\*=', Operator), + (r'\*\*', Operator), + (r'\*', Operator), + (r'/=', Operator), + (r'\+', Operator), + (r'-', Operator), + (r'/', Operator), + (r'%=', Operator), + (r'%', Operator), + (r'^=', Operator), + (r'&&=', Operator), + (r'&=', Operator), + (r'&&', Operator), + (r'&', Operator), + (r'\|\|=', Operator), + (r'\|=', Operator), + (r'\|\|', Operator), + (r'\|', Operator), + (r'!', Operator), + (r'\.\.\.', Operator), + (r'\.\.', Operator), + (r'\.', Operator), + (r'::', Operator), + (r':', Operator), + (r'(\s|\n)+', Whitespace), + (r'[a-z_][a-zA-Z0-9_\']*', Name.Variable), + ], + } + + +class SlashLexer(DelegatingLexer): + """ + Lexer for the Slash programming language. + + .. versionadded:: 2.4 + """ + + name = 'Slash' + aliases = ['slash'] + filenames = ['*.sl'] + + def __init__(self, **options): + from pygments.lexers.web import HtmlLexer + super(SlashLexer, self).__init__(HtmlLexer, SlashLanguageLexer, **options) diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py index 7507c0fc..8884db22 100644 --- a/pygments/lexers/sql.py +++ b/pygments/lexers/sql.py @@ -59,7 +59,14 @@ line_re = re.compile('.*?\n') language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE) -do_re = re.compile(r'\bDO\b', re.IGNORECASE) +do_re = re.compile(r'\bDO\b', re.IGNORECASE) + +# Regular expressions for analyse_text() +name_between_bracket_re = re.compile(r'\[[a-zA-Z_]\w*\]') +name_between_backtick_re = re.compile(r'`[a-zA-Z_]\w*`') +tsql_go_re = re.compile(r'\bgo\b', re.IGNORECASE) +tsql_declare_re = re.compile(r'\bdeclare\s+@', re.IGNORECASE) +tsql_variable_re = re.compile(r'@[a-zA-Z_]\w*\b') def language_callback(lexer, match): @@ -82,7 +89,7 @@ def language_callback(lexer, match): lexer.text[max(0, match.start()-25):match.start()])) if m: l = lexer._get_lexer('plpgsql') - + # 1 = $, 2 = delimiter, 3 = $ yield (match.start(1), String, match.group(1)) yield (match.start(2), String.Delimiter, match.group(2)) @@ -155,7 +162,7 @@ class PostgresLexer(PostgresBase, RegexLexer): (r'\s+', Text), (r'--.*\n?', Comment.Single), (r'/\*', Comment.Multiline, 'multiline-comments'), - (r'(' + '|'.join(s.replace(" ", "\s+") + (r'(' + '|'.join(s.replace(" ", r"\s+") for s in DATATYPES + PSEUDO_TYPES) + r')\b', Name.Builtin), (words(KEYWORDS, suffix=r'\b'), Keyword), @@ -308,14 +315,7 @@ class PostgresConsoleLexer(Lexer): # and continue until the end of command is detected curcode = '' insertions = [] - while 1: - try: - line = next(lines) - except StopIteration: - # allow the emission of partially collected items - # the repl loop will be broken below - break - + for line in lines: # Identify a shell prompt in case of psql commandline example if line.startswith('$') and not curcode: lexer = get_lexer_by_name('console', **self.options) @@ -346,8 +346,7 @@ class PostgresConsoleLexer(Lexer): # Emit the output lines out_token = Generic.Output - while 1: - line = next(lines) + for line in lines: mprompt = re_prompt.match(line) if mprompt is not None: # push the line 
back to have it processed by the prompt @@ -363,6 +362,8 @@ class PostgresConsoleLexer(Lexer): yield (mmsg.start(2), out_token, mmsg.group(2)) else: yield (0, out_token, line) + else: + return class SqlLexer(RegexLexer): @@ -480,6 +481,9 @@ class SqlLexer(RegexLexer): ] } + def analyse_text(text): + return 0.01 + class TransactSqlLexer(RegexLexer): """ @@ -499,7 +503,7 @@ class TransactSqlLexer(RegexLexer): tokens = { 'root': [ (r'\s+', Whitespace), - (r'--(?m).*?$\n?', Comment.Single), + (r'(?m)--.*?$\n?', Comment.Single), (r'/\*', Comment.Multiline, 'multiline-comments'), (words(_tsql_builtins.OPERATORS), Operator), (words(_tsql_builtins.OPERATOR_WORDS, suffix=r'\b'), Operator.Word), @@ -536,6 +540,33 @@ class TransactSqlLexer(RegexLexer): ] } + def analyse_text(text): + rating = 0 + if tsql_declare_re.search(text): + # Found T-SQL variable declaration. + rating = 1.0 + else: + name_between_backtick_count = len( + name_between_backtick_re.findall((text))) + name_between_bracket_count = len( + name_between_bracket_re.findall(text)) + # We need to check if there are any names using + # backticks or brackets, as otherwise both are 0 + # and 0 >= 2 * 0, so we would always assume it's true + dialect_name_count = name_between_backtick_count + name_between_bracket_count + if dialect_name_count >= 1 and name_between_bracket_count >= 2 * name_between_backtick_count: + # Found at least twice as many [name] as `name`. + rating += 0.5 + elif name_between_bracket_count > name_between_backtick_count: + rating += 0.2 + elif name_between_bracket_count > 0: + rating += 0.1 + if tsql_variable_re.search(text) is not None: + rating += 0.1 + if tsql_go_re.search(text) is not None: + rating += 0.1 + return rating + class MySqlLexer(RegexLexer): """ @@ -609,6 +640,23 @@ class MySqlLexer(RegexLexer): ] } + def analyse_text(text): + rating = 0 + name_between_backtick_count = len( + name_between_backtick_re.findall((text))) + name_between_bracket_count = len( + name_between_bracket_re.findall(text)) + # Same logic as above in the TSQL analysis + dialect_name_count = name_between_backtick_count + name_between_bracket_count + if dialect_name_count >= 1 and name_between_backtick_count >= 2 * name_between_bracket_count: + # Found at least twice as many `name` as [name]. + rating += 0.5 + elif name_between_backtick_count > name_between_bracket_count: + rating += 0.2 + elif name_between_backtick_count > 0: + rating += 0.1 + return rating + class SqliteConsoleLexer(Lexer): """ diff --git a/pygments/lexers/stata.py b/pygments/lexers/stata.py index a015a23e..9566d12a 100644 --- a/pygments/lexers/stata.py +++ b/pygments/lexers/stata.py @@ -9,6 +9,7 @@ :license: BSD, see LICENSE for details. 
""" +import re from pygments.lexer import RegexLexer, include, words from pygments.token import Comment, Keyword, Name, Number, \ String, Text, Operator @@ -33,56 +34,118 @@ class StataLexer(RegexLexer): aliases = ['stata', 'do'] filenames = ['*.do', '*.ado'] mimetypes = ['text/x-stata', 'text/stata', 'application/x-stata'] + flags = re.MULTILINE | re.DOTALL tokens = { 'root': [ include('comments'), - include('vars-strings'), + include('strings'), + include('macros'), include('numbers'), include('keywords'), + include('operators'), + include('format'), (r'.', Text), ], - # Global and local macros; regular and special strings - 'vars-strings': [ - (r'\$[\w{]', Name.Variable.Global, 'var_validglobal'), - (r'`\w{0,31}\'', Name.Variable), - (r'"', String, 'string_dquote'), - (r'`"', String, 'string_mquote'), - ], - # For either string type, highlight macros as macros - 'string_dquote': [ - (r'"', String, '#pop'), - (r'\\\\|\\"|\\\n', String.Escape), - (r'\$', Name.Variable.Global, 'var_validglobal'), - (r'`', Name.Variable, 'var_validlocal'), - (r'[^$`"\\]+', String), - (r'[$"\\]', String), - ], - 'string_mquote': [ + # Comments are a complicated beast in Stata because they can be + # nested and there are a few corner cases with that. See: + # - github.com/kylebarron/language-stata/issues/90 + # - statalist.org/forums/forum/general-stata-discussion/general/1448244 + 'comments': [ + (r'(^//|(?<=\s)//)(?!/)', Comment.Single, 'comments-double-slash'), + (r'^\s*\*', Comment.Single, 'comments-star'), + (r'/\*', Comment.Multiline, 'comments-block'), + (r'(^///|(?<=\s)///)', Comment.Special, 'comments-triple-slash') + ], + 'comments-block': [ + (r'/\*', Comment.Multiline, '#push'), + # this ends and restarts a comment block. but need to catch this so + # that it doesn\'t start _another_ level of comment blocks + (r'\*/\*', Comment.Multiline), + (r'(\*/\s+\*(?!/)[^\n]*)|(\*/)', Comment.Multiline, '#pop'), + # Match anything else as a character inside the comment + (r'.', Comment.Multiline), + ], + 'comments-star': [ + (r'///.*?\n', Comment.Single, + ('#pop', 'comments-triple-slash')), + (r'(^//|(?<=\s)//)(?!/)', Comment.Single, + ('#pop', 'comments-double-slash')), + (r'/\*', Comment.Multiline, 'comments-block'), + (r'.(?=\n)', Comment.Single, '#pop'), + (r'.', Comment.Single), + ], + 'comments-triple-slash': [ + (r'\n', Comment.Special, '#pop'), + # A // breaks out of a comment for the rest of the line + (r'//.*?(?=\n)', Comment.Single, '#pop'), + (r'.', Comment.Special), + ], + 'comments-double-slash': [ + (r'\n', Text, '#pop'), + (r'.', Comment.Single), + ], + # `"compound string"' and regular "string"; note the former are + # nested. 
+ 'strings': [ + (r'`"', String, 'string-compound'), + (r'(?<!`)"', String, 'string-regular'), + ], + 'string-compound': [ + (r'`"', String, '#push'), (r'"\'', String, '#pop'), - (r'\\\\|\\"|\\\n', String.Escape), - (r'\$', Name.Variable.Global, 'var_validglobal'), - (r'`', Name.Variable, 'var_validlocal'), - (r'[^$`"\\]+', String), - (r'[$"\\]', String), - ], - 'var_validglobal': [ - (r'\{\w{0,32}\}', Name.Variable.Global, '#pop'), - (r'\w{1,32}', Name.Variable.Global, '#pop'), + (r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape), + include('macros'), + (r'.', String) ], - 'var_validlocal': [ - (r'\w{0,31}\'', Name.Variable, '#pop'), + 'string-regular': [ + (r'(")(?!\')|(?=\n)', String, '#pop'), + (r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape), + include('macros'), + (r'.', String) ], - # * only OK at line start, // OK anywhere - 'comments': [ - (r'^\s*\*.*$', Comment), - (r'//.*', Comment.Single), - (r'/\*.*?\*/', Comment.Multiline), - (r'/[*](.|\n)*?[*]/', Comment.Multiline), + # A local is usually + # `\w{0,31}' + # `:extended macro' + # `=expression' + # `[rsen](results)' + # `(++--)scalar(++--)' + # + # However, there are all sorts of weird rules wrt edge + # cases. Instead of writing 27 exceptions, anything inside + # `' is a local. + # + # A global is more restricted, so we do follow rules. Note only + # locals explicitly enclosed ${} can be nested. + 'macros': [ + (r'\$(\{|(?=[\$`]))', Name.Variable.Global, 'macro-global-nested'), + (r'\$', Name.Variable.Global, 'macro-global-name'), + (r'`', Name.Variable, 'macro-local'), + ], + 'macro-local': [ + (r'`', Name.Variable, '#push'), + (r"'", Name.Variable, '#pop'), + (r'\$(\{|(?=[\$`]))', Name.Variable.Global, 'macro-global-nested'), + (r'\$', Name.Variable.Global, 'macro-global-name'), + (r'.', Name.Variable), # fallback + ], + 'macro-global-nested': [ + (r'\$(\{|(?=[\$`]))', Name.Variable.Global, '#push'), + (r'\}', Name.Variable.Global, '#pop'), + (r'\$', Name.Variable.Global, 'macro-global-name'), + (r'`', Name.Variable, 'macro-local'), + (r'\w', Name.Variable.Global), # fallback + (r'(?!\w)', Name.Variable.Global, '#pop'), + ], + 'macro-global-name': [ + (r'\$(\{|(?=[\$`]))', Name.Variable.Global, 'macro-global-nested', '#pop'), + (r'\$', Name.Variable.Global, 'macro-global-name', '#pop'), + (r'`', Name.Variable, 'macro-local', '#pop'), + (r'\w{1,32}', Name.Variable.Global, '#pop'), ], # Built in functions and statements 'keywords': [ - (words(builtins_functions, prefix = r'\b', suffix = r'\('), + (words(builtins_functions, prefix = r'\b', suffix = r'(?=\()'), Name.Function), (words(builtins_base, prefix = r'(^\s*|\s)', suffix = r'\b'), Keyword), @@ -100,9 +163,9 @@ class StataLexer(RegexLexer): ], # Stata formats 'format': [ - (r'%-?\d{1,2}(\.\d{1,2})?[gfe]c?', Name.Variable), - (r'%(21x|16H|16L|8H|8L)', Name.Variable), - (r'%-?(tc|tC|td|tw|tm|tq|th|ty|tg).{0,32}', Name.Variable), - (r'%[-~]?\d{1,4}s', Name.Variable), + (r'%-?\d{1,2}(\.\d{1,2})?[gfe]c?', Name.Other), + (r'%(21x|16H|16L|8H|8L)', Name.Other), + (r'%-?(tc|tC|td|tw|tm|tq|th|ty|tg)\S{0,32}', Name.Other), + (r'%[-~]?\d{1,4}s', Name.Other), ] } diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py index 83c57db8..8000deba 100644 --- a/pygments/lexers/templates.py +++ b/pygments/lexers/templates.py @@ -187,13 +187,13 @@ class SmartyLexer(RegexLexer): def analyse_text(text): rv = 0.0 - if re.search('\{if\s+.*?\}.*?\{/if\}', text): + if re.search(r'\{if\s+.*?\}.*?\{/if\}', text): rv += 0.15 - if re.search('\{include\s+file=.*?\}', text): + if 
re.search(r'\{include\s+file=.*?\}', text): rv += 0.15 - if re.search('\{foreach\s+.*?\}.*?\{/foreach\}', text): + if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text): rv += 0.15 - if re.search('\{\$.*?\}', text): + if re.search(r'\{\$.*?\}', text): rv += 0.01 return rv @@ -375,7 +375,7 @@ class DjangoLexer(RegexLexer): (r'\.\w+', Name.Variable), (r':?"(\\\\|\\"|[^"])*"', String.Double), (r":?'(\\\\|\\'|[^'])*'", String.Single), - (r'([{}()\[\]+\-*/,:~]|[><=]=?)', Operator), + (r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator), (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|" r"0[xX][0-9a-fA-F]+[Ll]?", Number), ], @@ -421,18 +421,18 @@ class MyghtyLexer(RegexLexer): tokens = { 'root': [ (r'\s+', Text), - (r'(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)', + (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)', bygroups(Name.Tag, Text, Name.Function, Name.Tag, using(this), Name.Tag)), - (r'(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)', + (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)', bygroups(Name.Tag, Name.Function, Name.Tag, using(PythonLexer), Name.Tag)), (r'(<&[^|])(.*?)(,.*?)?(&>)', bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)), - (r'(<&\|)(.*?)(,.*?)?(&>)(?s)', + (r'(?s)(<&\|)(.*?)(,.*?)?(&>)', bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)), (r'</&>', Name.Tag), - (r'(<%!?)(.*?)(%>)(?s)', + (r'(?s)(<%!?)(.*?)(%>)', bygroups(Name.Tag, using(PythonLexer), Name.Tag)), (r'(?<=^)#[^\n]*(\n|\Z)', Comment), (r'(?<=^)(%)([^\n]*)(\n|\Z)', @@ -538,20 +538,20 @@ class MasonLexer(RegexLexer): tokens = { 'root': [ (r'\s+', Text), - (r'(<%doc>)(.*?)(</%doc>)(?s)', + (r'(?s)(<%doc>)(.*?)(</%doc>)', bygroups(Name.Tag, Comment.Multiline, Name.Tag)), - (r'(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)', + (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)', bygroups(Name.Tag, Text, Name.Function, Name.Tag, using(this), Name.Tag)), - (r'(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)', + (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)', bygroups(Name.Tag, Name.Function, Name.Tag, using(PerlLexer), Name.Tag)), - (r'(<&[^|])(.*?)(,.*?)?(&>)(?s)', + (r'(?s)(<&[^|])(.*?)(,.*?)?(&>)', bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)), - (r'(<&\|)(.*?)(,.*?)?(&>)(?s)', + (r'(?s)(<&\|)(.*?)(,.*?)?(&>)', bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)), (r'</&>', Name.Tag), - (r'(<%!?)(.*?)(%>)(?s)', + (r'(?s)(<%!?)(.*?)(%>)', bygroups(Name.Tag, using(PerlLexer), Name.Tag)), (r'(?<=^)#[^\n]*(\n|\Z)', Comment), (r'(?<=^)(%)([^\n]*)(\n|\Z)', @@ -607,7 +607,7 @@ class MakoLexer(RegexLexer): (r'(</%)([\w.:]+)(>)', bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)), (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'), - (r'(<%(?:!?))(.*?)(%>)(?s)', + (r'(?s)(<%(?:!?))(.*?)(%>)', bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)), (r'(\$\{)(.*?)(\})', bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)), @@ -759,7 +759,7 @@ class CheetahLexer(RegexLexer): # TODO support other Python syntax like $foo['bar'] (r'(\$)([a-zA-Z_][\w.]*\w)', bygroups(Comment.Preproc, using(CheetahPythonLexer))), - (r'(\$\{!?)(.*?)(\})(?s)', + (r'(?s)(\$\{!?)(.*?)(\})', bygroups(Comment.Preproc, using(CheetahPythonLexer), Comment.Preproc)), (r'''(?sx) @@ -942,9 +942,9 @@ class HtmlGenshiLexer(DelegatingLexer): def analyse_text(text): rv = 0.0 - if re.search('\$\{.*?\}', text) is not None: + if re.search(r'\$\{.*?\}', text) is not None: rv += 0.2 - if re.search('py:(.*?)=["\']', text) is not None: + if re.search(r'py:(.*?)=["\']', text) is not None: rv += 0.2 return rv + 
HtmlLexer.analyse_text(text) - 0.01 @@ -967,9 +967,9 @@ class GenshiLexer(DelegatingLexer): def analyse_text(text): rv = 0.0 - if re.search('\$\{.*?\}', text) is not None: + if re.search(r'\$\{.*?\}', text) is not None: rv += 0.2 - if re.search('py:(.*?)=["\']', text) is not None: + if re.search(r'py:(.*?)=["\']', text) is not None: rv += 0.2 return rv + XmlLexer.analyse_text(text) - 0.01 @@ -1627,7 +1627,7 @@ class SspLexer(DelegatingLexer): def analyse_text(text): rv = 0.0 - if re.search('val \w+\s*:', text): + if re.search(r'val \w+\s*:', text): rv += 0.6 if looks_like_xml(text): rv += 0.2 @@ -1955,7 +1955,7 @@ class LiquidLexer(RegexLexer): 'output': [ include('whitespace'), - ('\}\}', Punctuation, '#pop'), # end of output + (r'\}\}', Punctuation, '#pop'), # end of output (r'\|', Punctuation, 'filters') ], diff --git a/pygments/lexers/testing.py b/pygments/lexers/testing.py index 1e0795b1..86e60f25 100644 --- a/pygments/lexers/testing.py +++ b/pygments/lexers/testing.py @@ -29,7 +29,7 @@ class GherkinLexer(RegexLexer): feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$' feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$' examples_keywords = u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$' - step_keywords = 
u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )' + step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\\* )' tokens = { 'comments': [ diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py index 9b3b5fea..bb1dccf2 100644 --- a/pygments/lexers/text.py +++ b/pygments/lexers/text.py @@ -18,6 +18,7 @@ from pygments.lexers.markup import BBCodeLexer, MoinWikiLexer, RstLexer, \ from pygments.lexers.installers import DebianControlLexer, SourcesListLexer from pygments.lexers.make import MakefileLexer, BaseMakefileLexer, CMakeLexer from pygments.lexers.haxe import HxmlLexer +from pygments.lexers.sgf import SmartGameFormatLexer from pygments.lexers.diff import DiffLexer, DarcsPatchLexer from pygments.lexers.data import YamlLexer from pygments.lexers.textfmts import IrcLogsLexer, GettextLexer, 
HttpLexer diff --git a/pygments/lexers/textfmts.py b/pygments/lexers/textfmts.py index bb8124ef..b70c2ad6 100644 --- a/pygments/lexers/textfmts.py +++ b/pygments/lexers/textfmts.py @@ -266,7 +266,7 @@ class TodotxtLexer(RegexLexer): # 5. Leading project (project_regex, Project, 'incomplete'), # 6. Non-whitespace catch-all - ('\S+', IncompleteTaskText, 'incomplete'), + (r'\S+', IncompleteTaskText, 'incomplete'), ], # Parse a complete task @@ -277,9 +277,9 @@ class TodotxtLexer(RegexLexer): (context_regex, Context), (project_regex, Project), # Tokenize non-whitespace text - ('\S+', CompleteTaskText), + (r'\S+', CompleteTaskText), # Tokenize whitespace not containing a newline - ('\s+', CompleteTaskText), + (r'\s+', CompleteTaskText), ], # Parse an incomplete task @@ -290,8 +290,8 @@ class TodotxtLexer(RegexLexer): (context_regex, Context), (project_regex, Project), # Tokenize non-whitespace text - ('\S+', IncompleteTaskText), + (r'\S+', IncompleteTaskText), # Tokenize whitespace not containing a newline - ('\s+', IncompleteTaskText), + (r'\s+', IncompleteTaskText), ], } diff --git a/pygments/lexers/typoscript.py b/pygments/lexers/typoscript.py index e358af07..f75a6f02 100644 --- a/pygments/lexers/typoscript.py +++ b/pygments/lexers/typoscript.py @@ -108,13 +108,13 @@ class TypoScriptLexer(RegexLexer): name = 'TypoScript' aliases = ['typoscript'] - filenames = ['*.ts', '*.txt'] + filenames = ['*.typoscript'] mimetypes = ['text/x-typoscript'] flags = re.DOTALL | re.MULTILINE # Slightly higher than TypeScript (which is 0). - priority = 0.1 + priority = 0.0 tokens = { 'root': [ @@ -132,7 +132,7 @@ class TypoScriptLexer(RegexLexer): ], 'keywords': [ # Conditions - (r'(\[)(?i)(browser|compatVersion|dayofmonth|dayofweek|dayofyear|' + (r'(?i)(\[)(browser|compatVersion|dayofmonth|dayofweek|dayofyear|' r'device|ELSE|END|GLOBAL|globalString|globalVar|hostname|hour|IP|' r'language|loginUser|loginuser|minute|month|page|PIDinRootline|' r'PIDupinRootline|system|treeLevel|useragent|userFunc|usergroup|' @@ -172,7 +172,7 @@ class TypoScriptLexer(RegexLexer): 'html': [ (r'<\S[^\n>]*>', using(TypoScriptHtmlDataLexer)), (r'&[^;\n]*;', String), - (r'(_CSS_DEFAULT_STYLE)(\s*)(\()(?s)(.*(?=\n\)))', + (r'(?s)(_CSS_DEFAULT_STYLE)(\s*)(\()(.*(?=\n\)))', bygroups(Name.Class, Text, String.Symbol, using(TypoScriptCssDataLexer))), ], 'literal': [ @@ -220,7 +220,3 @@ class TypoScriptLexer(RegexLexer): (r'[\w"\-!/&;]+', Text), ], } - - def analyse_text(text): - if '<INCLUDE_TYPOSCRIPT:' in text: - return 1.0 diff --git a/pygments/lexers/unicon.py b/pygments/lexers/unicon.py new file mode 100644 index 00000000..6301a88b --- /dev/null +++ b/pygments/lexers/unicon.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.icon + ~~~~~~~~~~~~~~~~~~~~ + + Lexers for the Icon and Unicon languages, including ucode VM. + + :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import Lexer, RegexLexer, include, bygroups, words, \ + using, this, default +from pygments.util import get_bool_opt, get_list_opt +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error +from pygments.scanner import Scanner + +__all__ = ['IconLexer', 'UcodeLexer', 'UniconLexer'] + +class UniconLexer(RegexLexer): + """ + For Unicon source code. + + .. 
versionadded:: 2.4 + """ + + name = 'Unicon' + aliases = ['unicon'] + filenames = ['*.icn'] + mimetypes = ['text/unicon'] + + flags = re.MULTILINE + + tokens = { + 'root': [ + (r'[^\S\n]+', Text), + (r'#.*?\n', Comment.Single), + (r'[^\S\n]+', Text), + (r'class|method|procedure', Keyword.Declaration, 'subprogram'), + (r'(record)(\s+)(\w+)', + bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'), + (r'(#line|\$C|\$Cend|\$define|\$else|\$endif|\$error|\$ifdef|' + r'\$ifndef|\$include|\$line|\$undef)\b', Keyword.PreProc), + (r'(&null|&fail)\b', Keyword.Constant), + (r'&allocated|&ascii|&clock|&collections|&column|&col|&control|' + r'&cset|&current|&dateline|&date|&digits|&dump|' + r'&errno|&errornumber|&errortext|&errorvalue|&error|&errout|' + r'&eventcode|&eventvalue|&eventsource|&e|' + r'&features|&file|&host|&input|&interval|&lcase|&letters|' + r'&level|&line|&ldrag|&lpress|&lrelease|' + r'&main|&mdrag|&meta|&mpress|&mrelease|&now|&output|' + r'&phi|&pick|&pi|&pos|&progname|' + r'&random|&rdrag|&regions|&resize|&row|&rpress|&rrelease|' + r'&shift|&source|&storage|&subject|' + r'&time|&trace|&ucase|&version|' + r'&window|&x|&y', Keyword.Reserved), + (r'(by|of|not|to)\b', Keyword.Reserved), + (r'(global|local|static|abstract)\b', Keyword.Reserved), + (r'package|link|import', Keyword.Declaration), + (words(( + 'break', 'case', 'create', 'critical', 'default', 'end', 'all', + 'do', 'else', 'every', 'fail', 'if', 'import', 'initial', + 'initially', 'invocable', 'next', + 'repeat', 'return', 'suspend', + 'then', 'thread', 'until', 'while'), prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), + (words(( + 'Abort', 'abs', 'acos', 'Active', 'Alert', 'any', 'Any', 'Arb', + 'Arbno', 'args', 'array', 'asin', 'atan', 'atanh', 'Attrib', + 'Bal', 'bal', 'Bg', 'Break', 'Breakx', + 'callout', 'center', 'char', 'chdir', 'chmod', 'chown', 'chroot', + 'classname', 'Clip', 'Clone', 'close', 'cofail', 'collect', + 'Color', 'ColorValue', 'condvar', 'constructor', 'copy', + 'CopyArea', 'cos', 'Couple', 'crypt', 'cset', 'ctime', + 'dbcolumns', 'dbdriver', 'dbkeys', 'dblimits', 'dbproduct', + 'dbtables', 'delay', 'delete', 'detab', 'display', 'DrawArc', + 'DrawCircle', 'DrawCube', 'DrawCurve', 'DrawCylinder', + 'DrawDisk', 'DrawImage', 'DrawLine', 'DrawPoint', 'DrawPolygon', + 'DrawRectangle', 'DrawSegment', 'DrawSphere', 'DrawString', + 'DrawTorus', 'dtor', + 'entab', 'EraseArea', 'errorclear', 'Event', 'eventmask', + 'EvGet', 'EvSend', 'exec', 'exit', 'exp', 'Eye', + 'Fail', 'fcntl', 'fdup', 'Fence', 'fetch', 'Fg', 'fieldnames', + 'filepair', 'FillArc', 'FillCircle', 'FillPolygon', + 'FillRectangle', 'find', 'flock', 'flush', 'Font', 'fork', + 'FreeColor', 'FreeSpace', 'function', + 'get', 'getch', 'getche', 'getegid', 'getenv', 'geteuid', + 'getgid', 'getgr', 'gethost', 'getpgrp', 'getpid', 'getppid', + 'getpw', 'getrusage', 'getserv', 'GetSpace', 'gettimeofday', + 'getuid', 'globalnames', 'GotoRC', 'GotoXY', 'gtime', 'hardlink', + 'iand', 'icom', 'IdentityMatrix', 'image', 'InPort', 'insert', + 'Int86', 'integer', 'ioctl', 'ior', 'ishift', 'istate', 'ixor', + 'kbhit', 'key', 'keyword', 'kill', + 'left', 'Len', 'list', 'load', 'loadfunc', 'localnames', + 'lock', 'log', 'Lower', 'lstat', + 'many', 'map', 'match', 'MatrixMode', 'max', 'member', + 'membernames', 'methodnames', 'methods', 'min', 'mkdir', 'move', + 'MultMatrix', 'mutex', + 'name', 'NewColor', 'Normals', 'NotAny', 'numeric', + 'open', 'opencl', 'oprec', 'ord', 'OutPort', + 'PaletteChars', 'PaletteColor', 'PaletteKey', 'paramnames', + 'parent',
'Pattern', 'Peek', 'Pending', 'pipe', 'Pixel', + 'PlayAudio', 'Poke', 'pop', 'PopMatrix', 'Pos', 'pos', + 'proc', 'pull', 'push', 'PushMatrix', 'PushRotate', 'PushScale', + 'PushTranslate', 'put', + 'QueryPointer', + 'Raise', 'read', 'ReadImage', 'readlink', 'reads', 'ready', + 'real', 'receive', 'Refresh', 'Rem', 'remove', 'rename', + 'repl', 'reverse', 'right', 'rmdir', 'Rotate', 'Rpos', + 'Rtab', 'rtod', 'runerr', + 'save', 'Scale', 'seek', 'select', 'send', 'seq', + 'serial', 'set', 'setenv', 'setgid', 'setgrent', + 'sethostent', 'setpgrp', 'setpwent', 'setservent', + 'setuid', 'signal', 'sin', 'sort', 'sortf', 'Span', + 'spawn', 'sql', 'sqrt', 'stat', 'staticnames', 'stop', + 'StopAudio', 'string', 'structure', 'Succeed', 'Swi', + 'symlink', 'sys_errstr', 'system', 'syswrite', + 'Tab', 'tab', 'table', 'tan', + 'Texcoord', 'Texture', 'TextWidth', 'Translate', + 'trap', 'trim', 'truncate', 'trylock', 'type', + 'umask', 'Uncouple', 'unlock', 'upto', 'utime', + 'variable', 'VAttrib', + 'wait', 'WAttrib', 'WDefault', 'WFlush', 'where', + 'WinAssociate', 'WinButton', 'WinColorDialog', 'WindowContents', + 'WinEditRegion', 'WinFontDialog', 'WinMenuBar', 'WinOpenDialog', + 'WinPlayMedia', 'WinSaveDialog', 'WinScrollBar', 'WinSelectDialog', + 'write', 'WriteImage', 'writes', 'WSection', + 'WSync'), prefix=r'\b', suffix=r'\b'), + Name.Function), + include('numbers'), + (r'<@|<<@|>@|>>@|\.>|\->', Operator), + (r'\*\*|\+\+|\-\-|\.|\=|\~\=|<\=|>\=|\=\=|\~\=\=|<<|<<\=|>>|>>\=', Operator), + (r':\=|:\=:|\->|<\->|\+:\=|\|', Operator), + (r'\=\=\=|\~\=\=\=', Operator), + (r'"(?:[^\\"]|\\.)*"', String), + (r"'(?:[^\\']|\\.)*'", String.Character), + (r'[*<>+=/&!?@~\\-]', Operator), + (r'\^', Operator), + (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))), + (r"([\[\]])", Punctuation), + (r"(<>|=>|[()|:;,.'`]|[{}]|[%]|[&?])", Punctuation), + (r'\n+', Text), + ], + 'numbers': [ + (r'\b([+-]?([2-9]|[12][0-9]|3[0-6])[rR][0-9a-zA-Z]+)\b', Number.Hex), + (r'[+-]?[0-9]*\.([0-9]*)([Ee][+-]?[0-9]*)?', Number.Float), + (r'\b([+-]?[0-9]+[KMGTPkmgtp]?)\b', Number.Integer), + ], + 'subprogram': [ + (r'\(', Punctuation, ('#pop', 'formal_part')), + (r';', Punctuation, '#pop'), + (r'"[^"]+"|\w+', Name.Function), + include('root'), + ], + 'type_def': [ + (r'\(', Punctuation, 'formal_part'), + ], + 'formal_part': [ + (r'\)', Punctuation, '#pop'), + (r'\w+', Name.Variable), + (r',', Punctuation), + (r'(:string|:integer|:real)\b', Keyword.Reserved), + include('root'), + ], + } + +class IconLexer(RegexLexer): + """ + Lexer for Icon + + .. 
versionadded:: 1.6 + """ + name = 'Icon' + aliases = ['icon'] + filenames = ['*.icon', '*.ICON'] + mimetypes = [] + flags = re.MULTILINE + + tokens = { + 'root': [ + (r'[^\S\n]+', Text), + (r'#.*?\n', Comment.Single), + (r'[^\S\n]+', Text), + (r'class|method|procedure', Keyword.Declaration, 'subprogram'), + (r'(record)(\s+)(\w+)', + bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'), + (r'(#line|\$C|\$Cend|\$define|\$else|\$endif|\$error|\$ifdef|' + r'\$ifndef|\$include|\$line|\$undef)\b', Keyword.PreProc), + (r'(&null|&fail)\b', Keyword.Constant), + (r'&allocated|&ascii|&clock|&collections|&column|&col|&control|' + r'&cset|&current|&dateline|&date|&digits|&dump|' + r'&errno|&errornumber|&errortext|&errorvalue|&error|&errout|' + r'&eventcode|&eventvalue|&eventsource|&e|' + r'&features|&file|&host|&input|&interval|&lcase|&letters|' + r'&level|&line|&ldrag|&lpress|&lrelease|' + r'&main|&mdrag|&meta|&mpress|&mrelease|&now|&output|' + r'&phi|&pick|&pi|&pos|&progname|' + r'&random|&rdrag|&regions|&resize|&row|&rpress|&rrelease|' + r'&shift|&source|&storage|&subject|' + r'&time|&trace|&ucase|&version|' + r'&window|&x|&y', Keyword.Reserved), + (r'(by|of|not|to)\b', Keyword.Reserved), + (r'(global|local|static)\b', Keyword.Reserved), + (r'link', Keyword.Declaration), + (words(( + 'break', 'case', 'create', 'default', 'end', 'all', + 'do', 'else', 'every', 'fail', 'if', 'initial', + 'invocable', 'next', + 'repeat', 'return', 'suspend', + 'then', 'until', 'while'), prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), + (words(( + 'abs', 'acos', 'Active', 'Alert', 'any', + 'args', 'array', 'asin', 'atan', 'atanh', 'Attrib', + 'bal', 'Bg', + 'callout', 'center', 'char', 'chdir', 'chmod', 'chown', 'chroot', + 'Clip', 'Clone', 'close', 'cofail', 'collect', + 'Color', 'ColorValue', 'condvar', 'copy', + 'CopyArea', 'cos', 'Couple', 'crypt', 'cset', 'ctime', + 'delay', 'delete', 'detab', 'display', 'DrawArc', + 'DrawCircle', 'DrawCube', 'DrawCurve', 'DrawCylinder', + 'DrawDisk', 'DrawImage', 'DrawLine', 'DrawPoint', 'DrawPolygon', + 'DrawRectangle', 'DrawSegment', 'DrawSphere', 'DrawString', + 'DrawTorus', 'dtor', + 'entab', 'EraseArea', 'errorclear', 'Event', 'eventmask', + 'EvGet', 'EvSend', 'exec', 'exit', 'exp', 'Eye', + 'fcntl', 'fdup', 'fetch', 'Fg', 'fieldnames', + 'FillArc', 'FillCircle', 'FillPolygon', + 'FillRectangle', 'find', 'flock', 'flush', 'Font', + 'FreeColor', 'FreeSpace', 'function', + 'get', 'getch', 'getche', 'getenv', + 'GetSpace', 'gettimeofday', + 'getuid', 'globalnames', 'GotoRC', 'GotoXY', 'gtime', 'hardlink', + 'iand', 'icom', 'IdentityMatrix', 'image', 'InPort', 'insert', + 'Int86', 'integer', 'ioctl', 'ior', 'ishift', 'istate', 'ixor', + 'kbhit', 'key', 'keyword', 'kill', + 'left', 'Len', 'list', 'load', 'loadfunc', 'localnames', + 'lock', 'log', 'Lower', 'lstat', + 'many', 'map', 'match', 'MatrixMode', 'max', 'member', + 'membernames', 'methodnames', 'methods', 'min', 'mkdir', 'move', + 'MultMatrix', 'mutex', + 'name', 'NewColor', 'Normals', 'numeric', + 'open', 'opencl', 'oprec', 'ord', 'OutPort', + 'PaletteChars', 'PaletteColor', 'PaletteKey', 'paramnames', + 'parent', 'Pattern', 'Peek', 'Pending', 'pipe', 'Pixel', + 'Poke', 'pop', 'PopMatrix', 'Pos', 'pos', + 'proc', 'pull', 'push', 'PushMatrix', 'PushRotate', 'PushScale', + 'PushTranslate', 'put', + 'QueryPointer', + 'Raise', 'read', 'ReadImage', 'readlink', 'reads', 'ready', + 'real', 'receive', 'Refresh', 'Rem', 'remove', 'rename', + 'repl', 'reverse', 'right', 'rmdir', 'Rotate', 'Rpos', + 'rtod', 'runerr', + 'save',
'Scale', 'seek', 'select', 'send', 'seq', + 'serial', 'set', 'setenv', + 'setuid', 'signal', 'sin', 'sort', 'sortf', + 'spawn', 'sql', 'sqrt', 'stat', 'staticnames', 'stop', + 'string', 'structure', 'Swi', + 'symlink', 'sys_errstr', 'system', 'syswrite', + 'tab', 'table', 'tan', + 'Texcoord', 'Texture', 'TextWidth', 'Translate', + 'trap', 'trim', 'truncate', 'trylock', 'type', + 'umask', 'Uncouple', 'unlock', 'upto', 'utime', + 'variable', + 'wait', 'WAttrib', 'WDefault', 'WFlush', 'where', + 'WinAssociate', 'WinButton', 'WinColorDialog', 'WindowContents', + 'WinEditRegion', 'WinFontDialog', 'WinMenuBar', 'WinOpenDialog', + 'WinPlayMedia', 'WinSaveDialog', 'WinScrollBar', 'WinSelectDialog', + 'write', 'WriteImage', 'writes', 'WSection', + 'WSync'), prefix=r'\b', suffix=r'\b'), + Name.Function), + include('numbers'), + (r'\*\*|\+\+|\-\-|\.|\=|\~\=|<\=|>\=|\=\=|\~\=\=|<<|<<\=|>>|>>\=', Operator), + (r':\=|:\=:|<\-|<\->|\+:\=|\||\|\|', Operator), + (r'\=\=\=|\~\=\=\=', Operator), + (r'"(?:[^\\"]|\\.)*"', String), + (r"'(?:[^\\']|\\.)*'", String.Character), + (r'[*<>+=/&!?@~\\-]', Operator), + (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))), + (r"([\[\]])", Punctuation), + (r"(<>|=>|[()|:;,.'`]|[{}]|[%^]|[&?])", Punctuation), + (r'\n+', Text), + ], + 'numbers': [ + (r'\b([+-]?([2-9]|[12][0-9]|3[0-6])[rR][0-9a-zA-Z]+)\b', Number.Hex), + (r'[+-]?[0-9]*\.([0-9]*)([Ee][+-]?[0-9]*)?', Number.Float), + (r'\b([+-]?[0-9]+[KMGTPkmgtp]?)\b', Number.Integer), + ], + 'subprogram': [ + (r'\(', Punctuation, ('#pop', 'formal_part')), + (r';', Punctuation, '#pop'), + (r'"[^"]+"|\w+', Name.Function), + include('root'), + ], + 'type_def': [ + (r'\(', Punctuation, 'formal_part'), + ], + 'formal_part': [ + (r'\)', Punctuation, '#pop'), + (r'\w+', Name.Variable), + (r',', Punctuation), + (r'(:string|:integer|:real)\b', Keyword.Reserved), + include('root'), + ], + } + +class UcodeLexer(RegexLexer): + """ + Lexer for Icon ucode files + + .. 
versionadded:: 2.4 + """ + name = 'ucode' + aliases = ['ucode'] + filenames = ['*.u', '*.u1', '*.u2'] + mimetypes = [] + flags = re.MULTILINE + + tokens = { + 'root': [ + (r'(#.*\n)', Comment), + (words(( + 'con', 'declend', 'end', + 'global', + 'impl', 'invocable', + 'lab', 'link', 'local', + 'record', + 'uid', 'unions', + 'version'), + prefix=r'\b', suffix=r'\b'), + Name.Function), + (words(( + 'colm', 'filen', 'line', 'synt'), + prefix=r'\b', suffix=r'\b'), + Comment), + (words(( + 'asgn', + 'bang', 'bscan', + 'cat', 'ccase', 'chfail', + 'coact', 'cofail', 'compl', + 'coret', 'create', 'cset', + 'diff', 'div', 'dup', + 'efail', 'einit', 'end', 'eqv', 'eret', + 'error', 'escan', 'esusp', + 'field', + 'goto', + 'init', 'int', 'inter', + 'invoke', + 'keywd', + 'lconcat', 'lexeq', 'lexge', + 'lexgt', 'lexle', 'lexlt', 'lexne', + 'limit', 'llist', 'lsusp', + 'mark', 'mark0', 'minus', 'mod', 'mult', + 'neg', 'neqv', 'nonnull', 'noop', 'null', + 'number', 'numeq', 'numge', 'numgt', + 'numle', 'numlt', 'numne', + 'pfail', 'plus', 'pnull', 'pop', 'power', + 'pret', 'proc', 'psusp', 'push1', 'pushn1', + 'random', 'rasgn', 'rcv', 'rcvbk', 'real', + 'refresh', 'rswap', + 'sdup', 'sect', 'size', 'snd', 'sndbk', + 'str', 'subsc', 'swap', + 'tabmat', 'tally', 'toby', 'trace', + 'unmark', + 'value', 'var'), prefix=r'\b', suffix=r'\b'), + Keyword.Declaration), + (words(( + 'any', + 'case', + 'endcase', 'endevery', 'endif', + 'endifelse', 'endrepeat', 'endsuspend', + 'enduntil', 'endwhile', 'every', + 'if', 'ifelse', + 'repeat', + 'suspend', + 'until', + 'while'), + prefix=r'\b', suffix=r'\b'), + Name.Constant), + (r'\d+(\s*|\.$|$)', Number.Integer), + (r'[+-]?\d*\.\d+(E[-+]?\d+)?', Number.Float), + (r'[+-]?\d+\.\d*(E[-+]?\d+)?', Number.Float), + (r"(<>|=>|[()|:;,.'`]|[{}]|[%^]|[&?])", Punctuation), + (r'\s+\b', Text), + (r'[\w-]+', Text), + ], +} diff --git a/pygments/lexers/varnish.py b/pygments/lexers/varnish.py index 44521422..f3b37d60 100644 --- a/pygments/lexers/varnish.py +++ b/pygments/lexers/varnish.py @@ -36,7 +36,7 @@ class VCLLexer(RegexLexer): # Skip over comments and blank lines # This is accurate enough that returning 0.9 is reasonable. # Almost no VCL files start without some comments. 
- elif '\nvcl 4\.0;' in text[:1000]: + elif '\nvcl 4.0;' in text[:1000]: return 0.9 tokens = { @@ -120,7 +120,7 @@ class VCLLexer(RegexLexer): r'([a-zA-Z_]\w*)' r'(\s*\(.*\))', bygroups(Name.Function, Punctuation, Name.Function, using(this))), - ('[a-zA-Z_]\w*', Name), + (r'[a-zA-Z_]\w*', Name), ], 'comment': [ (r'[^*/]+', Comment.Multiline), diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py index 712c8246..67aefe23 100644 --- a/pygments/lexers/webmisc.py +++ b/pygments/lexers/webmisc.py @@ -438,7 +438,7 @@ class XQueryLexer(ExtendedRegexLexer): ], 'varname': [ (r'\(:', Comment, 'comment'), - (r'(' + qname + ')(\()?', bygroups(Name, Punctuation), 'operator'), + (r'(' + qname + r')(\()?', bygroups(Name, Punctuation), 'operator'), ], 'singletype': [ include('whitespace'), @@ -643,9 +643,9 @@ class XQueryLexer(ExtendedRegexLexer): bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'), (r'(declare)(\s+)(context)(\s+)(item)', bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'), - (ncname + ':\*', Name, 'operator'), - ('\*:'+ncname, Name.Tag, 'operator'), - ('\*', Name.Tag, 'operator'), + (ncname + r':\*', Name, 'operator'), + (r'\*:'+ncname, Name.Tag, 'operator'), + (r'\*', Name.Tag, 'operator'), (stringdouble, String.Double, 'operator'), (stringsingle, String.Single, 'operator'), @@ -661,7 +661,8 @@ class XQueryLexer(ExtendedRegexLexer): # NAMESPACE KEYWORD (r'(declare)(\s+)(default)(\s+)(element|function)', - bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'namespacekeyword'), + bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), + 'namespacekeyword'), (r'(import)(\s+)(schema|module)', bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'), (r'(declare)(\s+)(copy-namespaces)', @@ -861,7 +862,7 @@ class QmlLexer(RegexLexer): class CirruLexer(RegexLexer): - """ + r""" Syntax rules of Cirru can be found at: http://cirru.org/ diff --git a/pygments/lexers/xorg.py b/pygments/lexers/xorg.py index 89475a80..3bba930f 100644 --- a/pygments/lexers/xorg.py +++ b/pygments/lexers/xorg.py @@ -16,6 +16,7 @@ __all__ = ['XorgLexer'] class XorgLexer(RegexLexer): + """Lexer for xorg.conf file.""" name = 'Xorg' aliases = ['xorg.conf'] filenames = ['xorg.conf'] @@ -26,8 +27,8 @@ class XorgLexer(RegexLexer): (r'\s+', Text), (r'#.*$', Comment), - (r'((|Sub)Section)(\s+)("\w+")', - bygroups(String.Escape, String.Escape, Text, String.Escape)), + (r'((?:Sub)?Section)(\s+)("\w+")', + bygroups(String.Escape, Text, String.Escape)), (r'(End(|Sub)Section)', String.Escape), (r'(\w+)(\s+)([^\n#]+)', |
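Several hunks in this diff add or tune analyse_text() heuristics (SqlLexer, TransactSqlLexer, MySqlLexer, and a few template lexers). Those scores are consumed by Pygments' lexer guessing; the sketch below shows how the new SQL-dialect heuristics surface through the public guess_lexer API. The sample snippets are illustrative and not taken from this change, and the exact winner can still depend on other lexers' scores.

    from pygments.lexers import guess_lexer

    # A DECLARE @var statement drives TransactSqlLexer.analyse_text() to 1.0,
    # so guessing should now settle on Transact-SQL for input like this.
    print(guess_lexer("DECLARE @count INT\nSET @count = 42\nGO\n").name)

    # Backtick-quoted identifiers weight MySqlLexer.analyse_text() instead,
    # nudging the guess toward the MySQL lexer.
    print(guess_lexer("SELECT `id`, `name` FROM `users` WHERE `id` = 1;").name)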