summary refs log tree commit diff
diff options
context:
space:
mode:
author    Anteru <bitbucket@ca.sh13.net>  2018-11-24 16:41:03 +0000
committer Anteru <bitbucket@ca.sh13.net>  2018-11-24 16:41:03 +0000
commit    fd0d9128ffcbb362ea08118c1b6fbb74590e8a60 (patch)
tree      a143d37d3892dca36f046338ed8eea083b957c05
parent    0342044ec7dcd26d6ab911c87977aa5055bb3336 (diff)
parent    cff140e7dfb1e1fbc076fb172995d899755e54b7 (diff)
download  pygments-fd0d9128ffcbb362ea08118c1b6fbb74590e8a60.tar.gz
Merged in Edward/pygments-main/Edward/use-print-as-a-function-so-the-example-w-1541317855025 (pull request #782)
Use print as a function so the example works in Python 3.
-rw-r--r--  AUTHORS                             1
-rw-r--r--  pygments/lexers/_lua_builtins.py    2
-rw-r--r--  pygments/lexers/_mapping.py         4
-rw-r--r--  pygments/lexers/ampl.py             4
-rw-r--r--  pygments/lexers/configs.py         22
-rw-r--r--  pygments/lexers/css.py             23
-rw-r--r--  pygments/lexers/graphics.py       156
-rw-r--r--  pygments/lexers/jvm.py              2
-rw-r--r--  pygments/lexers/lisp.py            75
-rw-r--r--  pygments/lexers/rust.py             2
-rw-r--r--  pygments/lexers/sql.py             14
-rw-r--r--  pygments/plugin.py                  4
-rw-r--r--  scripts/release-checklist          24
-rw-r--r--  tests/examplefiles/docker.docker   33
-rw-r--r--  tests/examplefiles/example.hlsl   168
-rw-r--r--  tests/examplefiles/fennelview.fnl 156
16 files changed, 649 insertions(+), 41 deletions(-)
diff --git a/AUTHORS b/AUTHORS
index f9ba2675..f39bbaa1 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -160,6 +160,7 @@ Other contributors, listed alphabetically, are:
* Elias Rabel -- Fortran fixed form lexer
* raichoo -- Idris lexer
* Kashif Rasul -- CUDA lexer
+* Nathan Reed -- HLSL lexer
* Justin Reidy -- MXML lexer
* Norman Richards -- JSON lexer
* Corey Richardson -- Rust lexer updates
diff --git a/pygments/lexers/_lua_builtins.py b/pygments/lexers/_lua_builtins.py
index c60bf5a2..0561725d 100644
--- a/pygments/lexers/_lua_builtins.py
+++ b/pygments/lexers/_lua_builtins.py
@@ -288,7 +288,7 @@ if __name__ == '__main__': # pragma: no cover
print('>> %s' % full_function_name)
m = get_function_module(full_function_name)
modules.setdefault(m, []).append(full_function_name)
- modules = {k: tuple(v) for k, v in modules.iteritems()}
+ modules = dict((k, tuple(v)) for k, v in modules.iteritems())
regenerate(__file__, modules)
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index b48ee1d1..f9513e28 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -153,6 +153,7 @@ LEXERS = {
'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
+ 'FennelLexer': ('pygments.lexers.lisp', 'Fennel', (), ('*.fnl',), ()),
'FishShellLexer': ('pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)),
'FlatlineLexer': ('pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)),
'ForthLexer': ('pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)),
@@ -174,6 +175,7 @@ LEXERS = {
'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)),
+ 'HLSLShaderLexer': ('pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)),
'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
@@ -367,7 +369,7 @@ LEXERS = {
'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()),
'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')),
- 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust',), ('*.rs', '*.rs.in'), ('text/rust',)),
+ 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust',)),
'SASLexer': ('pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')),
'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
diff --git a/pygments/lexers/ampl.py b/pygments/lexers/ampl.py
index d439cb19..638d025d 100644
--- a/pygments/lexers/ampl.py
+++ b/pygments/lexers/ampl.py
@@ -3,7 +3,7 @@
pygments.lexers.ampl
~~~~~~~~~~~~~~~~~~~~
- Lexers for the ampl language. <http://ampl.com/>
+ Lexers for the AMPL language.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
@@ -18,7 +18,7 @@ __all__ = ['AmplLexer']
class AmplLexer(RegexLexer):
"""
- For AMPL source code.
+ For `AMPL <http://ampl.com/>`_ source code.
.. versionadded:: 2.2
"""
diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py
index c39b1a52..4af2adb6 100644
--- a/pygments/lexers/configs.py
+++ b/pygments/lexers/configs.py
@@ -15,6 +15,7 @@ from pygments.lexer import RegexLexer, default, words, bygroups, include, using
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace, Literal
from pygments.lexers.shell import BashLexer
+from pygments.lexers.data import JsonLexer
__all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
@@ -539,20 +540,25 @@ class DockerLexer(RegexLexer):
filenames = ['Dockerfile', '*.docker']
mimetypes = ['text/x-dockerfile-config']
- _keywords = (r'(?:FROM|MAINTAINER|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|'
- r'VOLUME|WORKDIR)')
-
+ _keywords = (r'(?:FROM|MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
+ _bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
+ _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
- (r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,),
- bygroups(Name.Keyword, Whitespace, Keyword)),
- (r'^(%s)\b(.*)' % (_keywords,), bygroups(Keyword, String)),
(r'#.*', Comment),
- (r'RUN', Keyword), # Rest of line falls through
+ (r'(ONBUILD)(%s)' % (_lb,), bygroups(Keyword, using(BashLexer))),
+ (r'(HEALTHCHECK)((%s--\w+=\w+%s)*)' % (_lb, _lb),
+ bygroups(Keyword, using(BashLexer))),
+ (r'(VOLUME|ENTRYPOINT|CMD|SHELL)(%s)(\[.*?\])' % (_lb,),
+ bygroups(Keyword, using(BashLexer), using(JsonLexer))),
+ (r'(LABEL|ENV|ARG)((%s\w+=\w+%s)*)' % (_lb, _lb),
+ bygroups(Keyword, using(BashLexer))),
+ (r'(%s|VOLUME)\b(.*)' % (_keywords), bygroups(Keyword, String)),
+ (r'(%s)' % (_bash_keywords,), Keyword),
(r'(.*\\\n)*.+', using(BashLexer)),
- ],
+ ]
}
diff --git a/pygments/lexers/css.py b/pygments/lexers/css.py
index 29d83707..ce97730e 100644
--- a/pygments/lexers/css.py
+++ b/pygments/lexers/css.py
@@ -125,7 +125,7 @@ _css_properties = (
'wrap-flow', 'wrap-inside', 'wrap-through', 'writing-mode', 'z-index',
)
-# List of keyword values obtained from:
+# List of keyword values obtained from:
# http://cssvalues.com/
_keyword_values = (
'absolute', 'alias', 'all', 'all-petite-caps', 'all-scroll',
@@ -263,7 +263,7 @@ _time_units = (
's', 'ms',
)
_all_units = _angle_units + _frequency_units + _length_units + \
- _resolution_units + _time_units
+ _resolution_units + _time_units
class CssLexer(RegexLexer):
@@ -322,16 +322,18 @@ class CssLexer(RegexLexer):
include('urls'),
(r'('+r'|'.join(_functional_notation_keyword_values)+r')(\()',
bygroups(Name.Builtin, Punctuation), 'function-start'),
- (r'([a-zA-Z_][\w-]+)(\()', bygroups(Name.Function, Punctuation), 'function-start'),
+ (r'([a-zA-Z_][\w-]+)(\()',
+ bygroups(Name.Function, Punctuation), 'function-start'),
(words(_keyword_values, suffix=r'\b'), Keyword.Constant),
(words(_other_keyword_values, suffix=r'\b'), Keyword.Constant),
(words(_color_keywords, suffix=r'\b'), Keyword.Constant),
- (words(_css_properties, suffix=r'\b'), Keyword), # for transition-property etc.
+ # for transition-property etc.
+ (words(_css_properties, suffix=r'\b'), Keyword),
(r'\!important', Comment.Preproc),
(r'/\*(?:.|\n)*?\*/', Comment),
include('numeric-values'),
-
+
(r'[~^*!%&<>|+=@:./?-]+', Operator),
(r'[\[\](),]+', Punctuation),
(r'"(\\\\|\\"|[^"])*"', String.Double),
@@ -351,7 +353,8 @@ class CssLexer(RegexLexer):
# function-start may be entered recursively
(r'(' + r'|'.join(_functional_notation_keyword_values) + r')(\()',
bygroups(Name.Builtin, Punctuation), 'function-start'),
- (r'([a-zA-Z_][\w-]+)(\()', bygroups(Name.Function, Punctuation), 'function-start'),
+ (r'([a-zA-Z_][\w-]+)(\()',
+ bygroups(Name.Function, Punctuation), 'function-start'),
(r'/\*(?:.|\n)*?\*/', Comment),
include('numeric-values'),
@@ -373,8 +376,8 @@ class CssLexer(RegexLexer):
'numeric-values': [
(r'\#[a-zA-Z0-9]{1,6}', Number.Hex),
(r'[+\-]?[0-9]*[.][0-9]+', Number.Float, 'numeric-end'),
- (r'[+\-]?[0-9]+', Number.Integer, 'numeric-end'),
- ],
+ (r'[+\-]?[0-9]+', Number.Integer, 'numeric-end'),
+ ],
'numeric-end': [
(words(_all_units, suffix=r'\b'), Keyword.Type),
(r'%', Keyword.Type),
@@ -466,9 +469,9 @@ common_sass_tokens = {
],
'string-single': [
- (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double),
+ (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Single),
(r'#\{', String.Interpol, 'interpolation'),
- (r"'", String.Double, '#pop'),
+ (r"'", String.Single, '#pop'),
],
'string-url': [
diff --git a/pygments/lexers/graphics.py b/pygments/lexers/graphics.py
index c8af9f99..5c3ed7ee 100644
--- a/pygments/lexers/graphics.py
+++ b/pygments/lexers/graphics.py
@@ -15,7 +15,7 @@ from pygments.token import Text, Comment, Operator, Keyword, Name, \
Number, Punctuation, String
__all__ = ['GLShaderLexer', 'PostScriptLexer', 'AsymptoteLexer', 'GnuplotLexer',
- 'PovrayLexer']
+ 'PovrayLexer', 'HLSLShaderLexer']
class GLShaderLexer(RegexLexer):
@@ -75,6 +75,160 @@ class GLShaderLexer(RegexLexer):
}
+class HLSLShaderLexer(RegexLexer):
+ """
+ HLSL (Microsoft Direct3D Shader) lexer.
+
+ .. versionadded:: 2.2
+ """
+ name = 'HLSL'
+ aliases = ['hlsl']
+ filenames = ['*.hlsl', '*.hlsli']
+ mimetypes = ['text/x-hlsl']
+
+ tokens = {
+ 'root': [
+ (r'^#.*', Comment.Preproc),
+ (r'//.*', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
+ Operator),
+ (r'[?:]', Operator), # quick hack for ternary
+ (r'\bdefined\b', Operator),
+ (r'[;{}(),.\[\]]', Punctuation),
+ # FIXME when e is present, no decimal point needed
+ (r'[+-]?\d*\.\d+([eE][-+]?\d+)?f?', Number.Float),
+ (r'[+-]?\d+\.\d*([eE][-+]?\d+)?f?', Number.Float),
+ (r'0[xX][0-9a-fA-F]*', Number.Hex),
+ (r'0[0-7]*', Number.Oct),
+ (r'[1-9][0-9]*', Number.Integer),
+ (r'"', String, 'string'),
+ (words((
+ 'asm','asm_fragment','break','case','cbuffer','centroid','class',
+ 'column_major','compile','compile_fragment','const','continue',
+ 'default','discard','do','else','export','extern','for','fxgroup',
+ 'globallycoherent','groupshared','if','in','inline','inout',
+ 'interface','line','lineadj','linear','namespace','nointerpolation',
+ 'noperspective','NULL','out','packoffset','pass','pixelfragment',
+ 'point','precise','return','register','row_major','sample',
+ 'sampler','shared','stateblock','stateblock_state','static',
+ 'struct','switch','tbuffer','technique','technique10',
+ 'technique11','texture','typedef','triangle','triangleadj',
+ 'uniform','vertexfragment','volatile','while'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (words(('true','false'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Constant),
+ (words((
+ 'auto','catch','char','const_cast','delete','dynamic_cast','enum',
+ 'explicit','friend','goto','long','mutable','new','operator',
+ 'private','protected','public','reinterpret_cast','short','signed',
+ 'sizeof','static_cast','template','this','throw','try','typename',
+ 'union','unsigned','using','virtual'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ (words((
+ 'dword','matrix','snorm','string','unorm','unsigned','void','vector',
+ 'BlendState','Buffer','ByteAddressBuffer','ComputeShader',
+ 'DepthStencilState','DepthStencilView','DomainShader',
+ 'GeometryShader','HullShader','InputPatch','LineStream',
+ 'OutputPatch','PixelShader','PointStream','RasterizerState',
+ 'RenderTargetView','RasterizerOrderedBuffer',
+ 'RasterizerOrderedByteAddressBuffer',
+ 'RasterizerOrderedStructuredBuffer','RasterizerOrderedTexture1D',
+ 'RasterizerOrderedTexture1DArray','RasterizerOrderedTexture2D',
+ 'RasterizerOrderedTexture2DArray','RasterizerOrderedTexture3D',
+ 'RWBuffer','RWByteAddressBuffer','RWStructuredBuffer',
+ 'RWTexture1D','RWTexture1DArray','RWTexture2D','RWTexture2DArray',
+ 'RWTexture3D','SamplerState','SamplerComparisonState',
+ 'StructuredBuffer','Texture1D','Texture1DArray','Texture2D',
+ 'Texture2DArray','Texture2DMS','Texture2DMSArray','Texture3D',
+ 'TextureCube','TextureCubeArray','TriangleStream','VertexShader'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword.Type),
+ (words((
+ 'bool','double','float','int','half','min16float','min10float',
+ 'min16int','min12int','min16uint','uint'),
+ prefix=r'\b', suffix=r'([1-4](x[1-4])?)?\b'),
+ Keyword.Type), # vector and matrix types
+ (words((
+ 'abort','abs','acos','all','AllMemoryBarrier',
+ 'AllMemoryBarrierWithGroupSync','any','AppendStructuredBuffer',
+ 'asdouble','asfloat','asin','asint','asuint','asuint','atan',
+ 'atan2','ceil','CheckAccessFullyMapped','clamp','clip',
+ 'CompileShader','ConsumeStructuredBuffer','cos','cosh','countbits',
+ 'cross','D3DCOLORtoUBYTE4','ddx','ddx_coarse','ddx_fine','ddy',
+ 'ddy_coarse','ddy_fine','degrees','determinant',
+ 'DeviceMemoryBarrier','DeviceMemoryBarrierWithGroupSync','distance',
+ 'dot','dst','errorf','EvaluateAttributeAtCentroid',
+ 'EvaluateAttributeAtSample','EvaluateAttributeSnapped','exp',
+ 'exp2','f16tof32','f32tof16','faceforward','firstbithigh',
+ 'firstbitlow','floor','fma','fmod','frac','frexp','fwidth',
+ 'GetRenderTargetSampleCount','GetRenderTargetSamplePosition',
+ 'GlobalOrderedCountIncrement','GroupMemoryBarrier',
+ 'GroupMemoryBarrierWithGroupSync','InterlockedAdd','InterlockedAnd',
+ 'InterlockedCompareExchange','InterlockedCompareStore',
+ 'InterlockedExchange','InterlockedMax','InterlockedMin',
+ 'InterlockedOr','InterlockedXor','isfinite','isinf','isnan',
+ 'ldexp','length','lerp','lit','log','log10','log2','mad','max',
+ 'min','modf','msad4','mul','noise','normalize','pow','printf',
+ 'Process2DQuadTessFactorsAvg','Process2DQuadTessFactorsMax',
+ 'Process2DQuadTessFactorsMin','ProcessIsolineTessFactors',
+ 'ProcessQuadTessFactorsAvg','ProcessQuadTessFactorsMax',
+ 'ProcessQuadTessFactorsMin','ProcessTriTessFactorsAvg',
+ 'ProcessTriTessFactorsMax','ProcessTriTessFactorsMin',
+ 'QuadReadLaneAt','QuadSwapX','QuadSwapY','radians','rcp',
+ 'reflect','refract','reversebits','round','rsqrt','saturate',
+ 'sign','sin','sincos','sinh','smoothstep','sqrt','step','tan',
+ 'tanh','tex1D','tex1D','tex1Dbias','tex1Dgrad','tex1Dlod',
+ 'tex1Dproj','tex2D','tex2D','tex2Dbias','tex2Dgrad','tex2Dlod',
+ 'tex2Dproj','tex3D','tex3D','tex3Dbias','tex3Dgrad','tex3Dlod',
+ 'tex3Dproj','texCUBE','texCUBE','texCUBEbias','texCUBEgrad',
+ 'texCUBElod','texCUBEproj','transpose','trunc','WaveAllBitAnd',
+ 'WaveAllMax','WaveAllMin','WaveAllBitOr','WaveAllBitXor',
+ 'WaveAllEqual','WaveAllProduct','WaveAllSum','WaveAllTrue',
+ 'WaveAnyTrue','WaveBallot','WaveGetLaneCount','WaveGetLaneIndex',
+ 'WaveGetOrderedIndex','WaveIsHelperLane','WaveOnce',
+ 'WavePrefixProduct','WavePrefixSum','WaveReadFirstLane',
+ 'WaveReadLaneAt'),
+ prefix=r'\b', suffix=r'\b'),
+ Name.Builtin), # built-in functions
+ (words((
+ 'SV_ClipDistance','SV_ClipDistance0','SV_ClipDistance1',
+ 'SV_Culldistance','SV_CullDistance0','SV_CullDistance1',
+ 'SV_Coverage','SV_Depth','SV_DepthGreaterEqual',
+ 'SV_DepthLessEqual','SV_DispatchThreadID','SV_DomainLocation',
+ 'SV_GroupID','SV_GroupIndex','SV_GroupThreadID','SV_GSInstanceID',
+ 'SV_InnerCoverage','SV_InsideTessFactor','SV_InstanceID',
+ 'SV_IsFrontFace','SV_OutputControlPointID','SV_Position',
+ 'SV_PrimitiveID','SV_RenderTargetArrayIndex','SV_SampleIndex',
+ 'SV_StencilRef','SV_TessFactor','SV_VertexID',
+ 'SV_ViewportArrayIndex'),
+ prefix=r'\b', suffix=r'\b'),
+ Name.Decorator), # system-value semantics
+ (r'\bSV_Target[0-7]?\b', Name.Decorator),
+ (words((
+ 'allow_uav_condition','branch','call','domain','earlydepthstencil',
+ 'fastopt','flatten','forcecase','instance','loop','maxtessfactor',
+ 'numthreads','outputcontrolpoints','outputtopology','partitioning',
+ 'patchconstantfunc','unroll'),
+ prefix=r'\b', suffix=r'\b'),
+ Name.Decorator), # attributes
+ (r'[a-zA-Z_]\w*', Name),
+ (r'\\$', Comment.Preproc), # backslash at end of line -- usually macro continuation
+ (r'\s+', Text),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
+ r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ }
+
+
class PostScriptLexer(RegexLexer):
"""
Lexer for PostScript files.
diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py
index f4392839..ccff41c1 100644
--- a/pygments/lexers/jvm.py
+++ b/pygments/lexers/jvm.py
@@ -801,7 +801,7 @@ class ClojureLexer(RegexLexer):
# TODO / should divide keywords/symbols into namespace/rest
# but that's hard, so just pretend / is part of the name
- valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
+ valid_name = r'(?!#)[\w!$%*+<=>?/.#|-]+'
tokens = {
'root': [
diff --git a/pygments/lexers/lisp.py b/pygments/lexers/lisp.py
index e258c347..f1494b13 100644
--- a/pygments/lexers/lisp.py
+++ b/pygments/lexers/lisp.py
@@ -19,7 +19,7 @@ from pygments.lexers.python import PythonLexer
__all__ = ['SchemeLexer', 'CommonLispLexer', 'HyLexer', 'RacketLexer',
'NewLispLexer', 'EmacsLispLexer', 'ShenLexer', 'CPSALexer',
- 'XtlangLexer']
+ 'XtlangLexer', 'FennelLexer']
class SchemeLexer(RegexLexer):
@@ -2327,13 +2327,13 @@ class ShenLexer(RegexLexer):
token = Name.Function if token == Literal else token
yield index, token, value
- raise StopIteration
+ return
def _process_signature(self, tokens):
for index, token, value in tokens:
if token == Literal and value == '}':
yield index, Punctuation, value
- raise StopIteration
+ return
elif token in (Literal, Name.Function):
token = Name.Variable if value.istitle() else Keyword.Type
yield index, token, value
@@ -2619,3 +2619,72 @@ class XtlangLexer(RegexLexer):
include('scheme')
],
}
+
+class FennelLexer(RegexLexer):
+ """A lexer for the Fennel programming language <https://fennel-lang.org>
+
+ Fennel compiles to Lua, so all the Lua builtins are recognized as well
+ as the special forms that are particular to the Fennel compiler.
+ """
+ name = 'Fennel'
+ aliases = ['fennel', 'fnl']
+ filenames = ['*.fnl']
+
+ # these two lists are taken from fennel-mode.el:
+ # https://gitlab.com/technomancy/fennel-mode
+ # this list is current as of Fennel version 0.1.0.
+ special_forms = (
+ u'require-macros', u'eval-compiler',
+ u'do', u'values', u'if', u'when', u'each', u'for', u'fn', u'lambda',
+ u'λ', u'set', u'global', u'var', u'local', u'let', u'tset', u'doto',
+ u'set-forcibly!', u'defn', u'partial', u'while', u'or', u'and', u'true',
+ u'false', u'nil', u'.', u'+', u'..', u'^', u'-', u'*', u'%', u'/', u'>',
+ u'<', u'>=', u'<=', u'=', u'~=', u'#', u'...', u':', u'->', u'->>',
+ )
+
+ # Might be nicer to use the list from _lua_builtins.py but it's unclear how?
+ builtins = (
+ u'_G', u'_VERSION', u'arg', u'assert', u'bit32', u'collectgarbage',
+ u'coroutine', u'debug', u'dofile', u'error', u'getfenv',
+ u'getmetatable', u'io', u'ipairs', u'load', u'loadfile', u'loadstring',
+ u'math', u'next', u'os', u'package', u'pairs', u'pcall', u'print',
+ u'rawequal', u'rawget', u'rawlen', u'rawset', u'require', u'select',
+ u'setfenv', u'setmetatable', u'string', u'table', u'tonumber',
+ u'tostring', u'type', u'unpack', u'xpcall'
+ )
+
+ # based on the scheme definition, but disallowing leading digits and commas
+ valid_name = r'[a-zA-Z_!$%&*+/:<=>?@^~|-][\w!$%&*+/:<=>?@^~|\.-]*'
+
+ tokens = {
+ 'root': [
+ # the only comment form is a semicolon; goes to the end of the line
+ (r';.*$', Comment.Single),
+
+ (r'[,\s]+', Text),
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'(\\\\|\\'|[^'])*'", String),
+
+ # these are technically strings, but it's worth visually
+ # distinguishing them because their intent is different
+ # from regular strings.
+ (r':' + valid_name, String.Symbol),
+
+ # special forms are keywords
+ (words(special_forms, suffix=' '), Keyword),
+ # lua standard library are builtins
+ (words(builtins, suffix=' '), Name.Builtin),
+ # special-case the vararg symbol
+ (r'\.\.\.', Name.Variable),
+ # regular identifiers
+ (valid_name, Name.Variable),
+
+ # all your normal paired delimiters for your programming enjoyment
+ (r'(\(|\))', Punctuation),
+ (r'(\[|\])', Punctuation),
+ (r'(\{|\})', Punctuation),
+ ]
+ }
diff --git a/pygments/lexers/rust.py b/pygments/lexers/rust.py
index 6914f54d..10097fba 100644
--- a/pygments/lexers/rust.py
+++ b/pygments/lexers/rust.py
@@ -24,7 +24,7 @@ class RustLexer(RegexLexer):
"""
name = 'Rust'
filenames = ['*.rs', '*.rs.in']
- aliases = ['rust']
+ aliases = ['rust', 'rs']
mimetypes = ['text/rust']
keyword_types = (
diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py
index 7507c0fc..3f7dfdb8 100644
--- a/pygments/lexers/sql.py
+++ b/pygments/lexers/sql.py
@@ -308,14 +308,7 @@ class PostgresConsoleLexer(Lexer):
# and continue until the end of command is detected
curcode = ''
insertions = []
- while 1:
- try:
- line = next(lines)
- except StopIteration:
- # allow the emission of partially collected items
- # the repl loop will be broken below
- break
-
+ for line in lines:
# Identify a shell prompt in case of psql commandline example
if line.startswith('$') and not curcode:
lexer = get_lexer_by_name('console', **self.options)
@@ -346,8 +339,7 @@ class PostgresConsoleLexer(Lexer):
# Emit the output lines
out_token = Generic.Output
- while 1:
- line = next(lines)
+ for line in lines:
mprompt = re_prompt.match(line)
if mprompt is not None:
# push the line back to have it processed by the prompt
@@ -363,6 +355,8 @@ class PostgresConsoleLexer(Lexer):
yield (mmsg.start(2), out_token, mmsg.group(2))
else:
yield (0, out_token, line)
+ else:
+ return
class SqlLexer(RegexLexer):
diff --git a/pygments/plugin.py b/pygments/plugin.py
index 7987d646..08d9b5b4 100644
--- a/pygments/plugin.py
+++ b/pygments/plugin.py
@@ -40,14 +40,16 @@ FORMATTER_ENTRY_POINT = 'pygments.formatters'
STYLE_ENTRY_POINT = 'pygments.styles'
FILTER_ENTRY_POINT = 'pygments.filters'
+
def iter_entry_points(group_name):
try:
import pkg_resources
- except ImportError:
+ except (ImportError, IOError):
return []
return pkg_resources.iter_entry_points(group_name)
+
def find_plugin_lexers():
for entrypoint in iter_entry_points(LEXER_ENTRY_POINT):
yield entrypoint.load()
diff --git a/scripts/release-checklist b/scripts/release-checklist
new file mode 100644
index 00000000..f18e6376
--- /dev/null
+++ b/scripts/release-checklist
@@ -0,0 +1,24 @@
+Release checklist
+=================
+
+* Check hg status
+* Make check
+* Make pylint
+* Make test from clean checkout with all supported Python versions
+* Update ez_setup.py
+* Update version info in setup.py/__init__.py
+* Check setup.py metadata: long description, trove classifiers
+* Update release date/code name in CHANGES
+* hg commit
+* make clean
+* For every supported version:
+ pythonX.Y setup.py release bdist_egg sdist upload
+* Check PyPI release page for obvious errors
+* hg tag
+* Make a maintenance branch if applicable
+* Update homepage (release info), regenerate docs (+printable!)
+* Add new version/milestone to tracker categories
+* Write announcement and send to mailing list/python-announce
+* Update version info, add new CHANGES entry for next version
+* hg commit
+* hg push
diff --git a/tests/examplefiles/docker.docker b/tests/examplefiles/docker.docker
index d65385b6..1ae3c3a1 100644
--- a/tests/examplefiles/docker.docker
+++ b/tests/examplefiles/docker.docker
@@ -1,5 +1,34 @@
-maintainer First O'Last
+FROM alpine:3.5
+MAINTAINER First O'Last
+# comment
run echo \
123 $bar
-# comment
+RUN apk --update add rsync dumb-init
+
+# Test env with both syntax
+ENV FOO = "BAR"
+ENV FOO \
+ "BAR"
+
+COPY foo "bar"
+COPY foo \
+ "bar"
+
+HEALTHCHECK \
+ --interval=5m --timeout=3s \
+ CMD curl -f http://localhost/ || exit 1
+
+# ONBUILD keyword, then with linebreak
+ONBUILD ADD . /app/src
+ONBUILD \
+ RUN echo 123 $bar
+
+# Potential JSON array parsing, mixed with linebreaks
+VOLUME \
+ /foo
+VOLUME \
+ ["/bar"]
+VOLUME ["/bar"]
+VOLUME /foo
+CMD ["foo", "bar"]
diff --git a/tests/examplefiles/example.hlsl b/tests/examplefiles/example.hlsl
new file mode 100644
index 00000000..21d0a672
--- /dev/null
+++ b/tests/examplefiles/example.hlsl
@@ -0,0 +1,168 @@
+// A few random snippets of HLSL shader code I gathered...
+
+[numthreads(256, 1, 1)]
+void cs_main(uint3 threadId : SV_DispatchThreadID)
+{
+ // Seed the PRNG using the thread ID
+ rng_state = threadId.x;
+
+ // Generate a few numbers...
+ uint r0 = rand_xorshift();
+ uint r1 = rand_xorshift();
+ // Do some stuff with them...
+
+ // Generate a random float in [0, 1)...
+ float f0 = float(rand_xorshift()) * (1.0 / 4294967296.0);
+
+ // ...etc.
+}
+
+// Constant buffer of parameters
+cbuffer IntegratorParams : register(b0)
+{
+ float2 specPow; // Spec powers in XY directions (equal for isotropic BRDFs)
+ float3 L; // Unit vector toward light
+ int2 cThread; // Total threads launched in XY dimensions
+ int2 xyOutput; // Where in the output buffer to store the result
+}
+
+static const float pi = 3.141592654;
+
+float AshikhminShirleyNDF(float3 H)
+{
+ float normFactor = sqrt((specPow.x + 2.0f) * (specPow.y + 2.0)) * (0.5f / pi);
+ float NdotH = H.z;
+ float2 Hxy = normalize(H.xy);
+ return normFactor * pow(NdotH, dot(specPow, Hxy * Hxy));
+}
+
+float BeckmannNDF(float3 H)
+{
+ float glossFactor = specPow.x * 0.5f + 1.0f; // This is 1/m^2 in the usual Beckmann formula
+ float normFactor = glossFactor * (1.0f / pi);
+ float NdotHSq = H.z * H.z;
+ return normFactor / (NdotHSq * NdotHSq) * exp(glossFactor * (1.0f - 1.0f / NdotHSq));
+}
+
+// Output buffer for compute shader (actually float, but must be declared as uint
+// for atomic operations to work)
+globallycoherent RWTexture2D<uint> o_data : register(u0);
+
+// Sum up the outputs of all threads and store to the output location
+static const uint threadGroupSize2D = 16;
+static const uint threadGroupSize1D = threadGroupSize2D * threadGroupSize2D;
+groupshared float g_partialSums[threadGroupSize1D];
+void SumAcrossThreadsAndStore(float value, uint iThreadInGroup)
+{
+ // First reduce within the threadgroup: partial sums of 2, 4, 8... elements
+ // are calculated by 1/2, 1/4, 1/8... of the threads, always keeping the
+ // active threads at the front of the group to minimize divergence.
+
+ // NOTE: there are faster ways of doing this...but this is simple to code
+ // and good enough.
+
+ g_partialSums[iThreadInGroup] = value;
+ GroupMemoryBarrierWithGroupSync();
+
+ [unroll] for (uint i = threadGroupSize1D / 2; i > 0; i /= 2)
+ {
+ if (iThreadInGroup < i)
+ {
+ g_partialSums[iThreadInGroup] += g_partialSums[iThreadInGroup + i];
+ }
+ GroupMemoryBarrierWithGroupSync();
+ }
+
+ // Then reduce across threadgroups: one thread from each group adds the group
+ // total to the final output location, using a software transactional memory
+ // style since D3D11 doesn't support atomic add on floats.
+ // (Assumes the output value has been cleared to zero beforehand.)
+
+ if (iThreadInGroup == 0)
+ {
+ float threadGroupSum = g_partialSums[0];
+ uint outputValueRead = o_data[xyOutput];
+ while (true)
+ {
+ uint newOutputValue = asuint(asfloat(outputValueRead) + threadGroupSum);
+ uint previousOutputValue;
+ InterlockedCompareExchange(
+ o_data[xyOutput], outputValueRead, newOutputValue, previousOutputValue);
+ if (previousOutputValue == outputValueRead)
+ break;
+ outputValueRead = previousOutputValue;
+ }
+ }
+}
+
+void main(
+ in Vertex i_vtx,
+ out Vertex o_vtx,
+ out float3 o_vecCamera : CAMERA,
+ out float4 o_uvzwShadow : UVZW_SHADOW,
+ out float4 o_posClip : SV_Position)
+{
+ o_vtx = i_vtx;
+ o_vecCamera = g_posCamera - i_vtx.m_pos;
+ o_uvzwShadow = mul(float4(i_vtx.m_pos, 1.0), g_matWorldToUvzwShadow);
+ o_posClip = mul(float4(i_vtx.m_pos, 1.0), g_matWorldToClip);
+}
+
+#pragma pack_matrix(row_major)
+
+struct Vertex
+{
+ float3 m_pos : POSITION;
+ float3 m_normal : NORMAL;
+ float2 m_uv : UV;
+};
+
+cbuffer CBFrame : CB_FRAME // matches struct CBFrame in test.cpp
+{
+ float4x4 g_matWorldToClip;
+ float4x4 g_matWorldToUvzwShadow;
+ float3x3 g_matWorldToUvzShadowNormal;
+ float3 g_posCamera;
+
+ float3 g_vecDirectionalLight;
+ float3 g_rgbDirectionalLight;
+
+ float2 g_dimsShadowMap;
+ float g_normalOffsetShadow;
+ float g_shadowSharpening;
+
+ float g_exposure; // Exposure multiplier
+}
+
+Texture2D<float3> g_texDiffuse : register(t0);
+SamplerState g_ss : register(s0);
+
+void main(
+ in Vertex i_vtx,
+ in float3 i_vecCamera : CAMERA,
+ in float4 i_uvzwShadow : UVZW_SHADOW,
+ out float3 o_rgb : SV_Target)
+{
+ float3 normal = normalize(i_vtx.m_normal);
+
+ // Sample shadow map
+ float shadow = EvaluateShadow(i_uvzwShadow, normal);
+
+ // Evaluate diffuse lighting
+ float3 diffuseColor = g_texDiffuse.Sample(g_ss, i_vtx.m_uv);
+ float3 diffuseLight = g_rgbDirectionalLight * (shadow * saturate(dot(normal, g_vecDirectionalLight)));
+ diffuseLight += SimpleAmbient(normal);
+
+ o_rgb = diffuseColor * diffuseLight;
+}
+
+[domain("quad")]
+void ds(
+ in float edgeFactors[4] : SV_TessFactor,
+ in float insideFactors[2] : SV_InsideTessFactor,
+ in OutputPatch<VData, 4> inp,
+ in float2 uv : SV_DomainLocation,
+ out float4 o_pos : SV_Position)
+{
+ o_pos = lerp(lerp(inp[0].pos, inp[1].pos, uv.x), lerp(inp[2].pos, inp[3].pos, uv.x), uv.y);
+}
diff --git a/tests/examplefiles/fennelview.fnl b/tests/examplefiles/fennelview.fnl
new file mode 100644
index 00000000..fd0fc648
--- /dev/null
+++ b/tests/examplefiles/fennelview.fnl
@@ -0,0 +1,156 @@
+;; A pretty-printer that outputs tables in Fennel syntax.
+;; Loosely based on inspect.lua: http://github.com/kikito/inspect.lua
+
+(local quote (fn [str] (.. '"' (: str :gsub '"' '\\"') '"')))
+
+(local short-control-char-escapes
+ {"\a" "\\a" "\b" "\\b" "\f" "\\f" "\n" "\\n"
+ "\r" "\\r" "\t" "\\t" "\v" "\\v"})
+
+(local long-control-char-esapes
+ (let [long {}]
+ (for [i 0 31]
+ (let [ch (string.char i)]
+ (when (not (. short-control-char-escapes ch))
+ (tset short-control-char-escapes ch (.. "\\" i))
+ (tset long ch (: "\\%03d" :format i)))))
+ long))
+
+(fn escape [str]
+ (let [str (: str :gsub "\\" "\\\\")
+ str (: str :gsub "(%c)%f[0-9]" long-control-char-esapes)]
+ (: str :gsub "%c" short-control-char-escapes)))
+
+(fn sequence-key? [k len]
+ (and (= (type k) "number")
+ (<= 1 k)
+ (<= k len)
+ (= (math.floor k) k)))
+
+(local type-order {:number 1 :boolean 2 :string 3 :table 4
+ :function 5 :userdata 6 :thread 7})
+
+(fn sort-keys [a b]
+ (let [ta (type a) tb (type b)]
+ (if (and (= ta tb) (~= ta "boolean")
+ (or (= ta "string") (= ta "number")))
+ (< a b)
+ (let [dta (. type-order a)
+ dtb (. type-order b)]
+ (if (and dta dtb)
+ (< dta dtb)
+ dta true
+ dtb false
+ :else (< ta tb))))))
+
+(fn get-sequence-length [t]
+ (var len 1)
+ (each [i (ipairs t)] (set len i))
+ len)
+
+(fn get-nonsequential-keys [t]
+ (let [keys {}
+ sequence-length (get-sequence-length t)]
+ (each [k (pairs t)]
+ (when (not (sequence-key? k sequence-length))
+ (table.insert keys k)))
+ (table.sort keys sort-keys)
+ (values keys sequence-length)))
+
+(fn count-table-appearances [t appearances]
+ (if (= (type t) "table")
+ (when (not (. appearances t))
+ (tset appearances t 1)
+ (each [k v (pairs t)]
+ (count-table-appearances k appearances)
+ (count-table-appearances v appearances)))
+ (when (and t (= t t)) ; no nans please
+ (tset appearances t (+ (or (. appearances t) 0) 1))))
+ appearances)
+
+
+
+(var put-value nil) ; mutual recursion going on; defined below
+
+(fn puts [self ...]
+ (each [_ v (ipairs [...])]
+ (table.insert self.buffer v)))
+
+(fn tabify [self] (puts self "\n" (: self.indent :rep self.level)))
+
+(fn already-visited? [self v] (~= (. self.ids v) nil))
+
+(fn get-id [self v]
+ (var id (. self.ids v))
+ (when (not id)
+ (let [tv (type v)]
+ (set id (+ (or (. self.max-ids tv) 0) 1))
+ (tset self.max-ids tv id)
+ (tset self.ids v id)))
+ (tostring id))
+
+(fn put-sequential-table [self t length]
+ (puts self "[")
+ (set self.level (+ self.level 1))
+ (for [i 1 length]
+ (puts self " ")
+ (put-value self (. t i)))
+ (set self.level (- self.level 1))
+ (puts self " ]"))
+
+(fn put-key [self k]
+ (if (and (= (type k) "string")
+ (: k :find "^[-%w?\\^_`!#$%&*+./@~:|<=>]+$"))
+ (puts self ":" k)
+ (put-value self k)))
+
+(fn put-kv-table [self t]
+ (puts self "{")
+ (set self.level (+ self.level 1))
+ (each [k v (pairs t)]
+ (tabify self)
+ (put-key self k)
+ (puts self " ")
+ (put-value self v))
+ (set self.level (- self.level 1))
+ (tabify self)
+ (puts self "}"))
+
+(fn put-table [self t]
+ (if (already-visited? self t)
+ (puts self "#<table " (get-id self t) ">")
+ (>= self.level self.depth)
+ (puts self "{...}")
+ :else
+ (let [(non-seq-keys length) (get-nonsequential-keys t)
+ id (get-id self t)]
+ (if (> (. self.appearances t) 1)
+ (puts self "#<" id ">")
+ (and (= (# non-seq-keys) 0) (= (# t) 0))
+ (puts self "{}")
+ (= (# non-seq-keys) 0)
+ (put-sequential-table self t length)
+ :else
+ (put-kv-table self t)))))
+
+(set put-value (fn [self v]
+ (let [tv (type v)]
+ (if (= tv "string")
+ (puts self (quote (escape v)))
+ (or (= tv "number") (= tv "boolean") (= tv "nil"))
+ (puts self (tostring v))
+ (= tv "table")
+ (put-table self v)
+ :else
+ (puts self "#<" (tostring v) ">")))))
+
+
+
+(fn fennelview [root options]
+ (let [options (or options {})
+ inspector {:appearances (count-table-appearances root {})
+ :depth (or options.depth 128)
+ :level 0 :buffer {} :ids {} :max-ids {}
+ :indent (or options.indent " ")}]
+ (put-value inspector root)
+ (table.concat inspector.buffer)))