summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGeorg Brandl <georg@python.org>2017-01-22 18:50:35 +0000
committerGeorg Brandl <georg@python.org>2017-01-22 18:50:35 +0000
commit041b80fab1922800ae0618fd4d3d8d78bf7063b4 (patch)
tree79540afc389a29e894804229b6ba2dbc78c67760
parent635f722ec3eb791e19f41b3231d74923d73bd729 (diff)
parentdbfa7bbacf9ec1a987f632f01c040285269f690b (diff)
downloadpygments-041b80fab1922800ae0618fd4d3d8d78bf7063b4.tar.gz
Merged in tprynn/pygments-main/command-line-lexer-formatter (pull request #559)
Custom Lexer/Formatter File Loading
-rw-r--r--AUTHORS14
-rw-r--r--CHANGES21
-rw-r--r--LICENSE2
-rw-r--r--Makefile5
-rw-r--r--doc/_themes/pygments14/layout.html2
-rw-r--r--doc/_themes/pygments14/static/pygments14.css_t2
-rw-r--r--doc/docs/lexerdevelopment.rst2
-rw-r--r--doc/docs/lexers.rst2
-rw-r--r--doc/docs/unicode.rst2
-rw-r--r--doc/languages.rst1
-rwxr-xr-xexternal/autopygmentize40
-rw-r--r--external/markdown-processor.py2
-rw-r--r--external/moin-parser.py2
-rw-r--r--external/rst-directive.py2
-rw-r--r--pygments/__init__.py2
-rw-r--r--pygments/cmdline.py4
-rw-r--r--pygments/console.py2
-rw-r--r--pygments/filter.py2
-rw-r--r--pygments/filters/__init__.py2
-rw-r--r--pygments/formatter.py2
-rw-r--r--pygments/formatters/__init__.py2
-rwxr-xr-xpygments/formatters/_mapping.py2
-rw-r--r--pygments/formatters/bbcode.py2
-rw-r--r--pygments/formatters/html.py2
-rw-r--r--pygments/formatters/img.py39
-rw-r--r--pygments/formatters/irc.py2
-rw-r--r--pygments/formatters/latex.py2
-rw-r--r--pygments/formatters/other.py2
-rw-r--r--pygments/formatters/rtf.py2
-rw-r--r--pygments/formatters/svg.py2
-rw-r--r--pygments/formatters/terminal.py2
-rw-r--r--pygments/formatters/terminal256.py2
-rw-r--r--pygments/lexer.py2
-rw-r--r--pygments/lexers/__init__.py8
-rw-r--r--pygments/lexers/_asy_builtins.py2
-rw-r--r--pygments/lexers/_cl_builtins.py2
-rw-r--r--pygments/lexers/_cocoa_builtins.py2
-rw-r--r--pygments/lexers/_csound_builtins.py2
-rw-r--r--pygments/lexers/_lasso_builtins.py9
-rw-r--r--pygments/lexers/_lua_builtins.py94
-rw-r--r--pygments/lexers/_mapping.py33
-rw-r--r--pygments/lexers/_mql_builtins.py2
-rw-r--r--pygments/lexers/_openedge_builtins.py2
-rw-r--r--pygments/lexers/_php_builtins.py2
-rw-r--r--pygments/lexers/_postgres_builtins.py2
-rw-r--r--pygments/lexers/_scilab_builtins.py2
-rw-r--r--pygments/lexers/_sourcemod_builtins.py2
-rw-r--r--pygments/lexers/_stan_builtins.py2
-rw-r--r--pygments/lexers/_stata_builtins.py419
-rw-r--r--pygments/lexers/_tsql_builtins.py1004
-rw-r--r--pygments/lexers/_vim_builtins.py2
-rw-r--r--pygments/lexers/actionscript.py2
-rw-r--r--pygments/lexers/agile.py2
-rw-r--r--pygments/lexers/algebra.py2
-rw-r--r--pygments/lexers/ambient.py2
-rw-r--r--pygments/lexers/ampl.py2
-rw-r--r--pygments/lexers/apl.py2
-rw-r--r--pygments/lexers/archetype.py2
-rw-r--r--pygments/lexers/asm.py110
-rw-r--r--pygments/lexers/automation.py2
-rw-r--r--pygments/lexers/basic.py2
-rw-r--r--pygments/lexers/bibtex.py160
-rw-r--r--pygments/lexers/business.py88
-rw-r--r--pygments/lexers/c_cpp.py19
-rw-r--r--pygments/lexers/c_like.py2
-rw-r--r--pygments/lexers/capnproto.py78
-rw-r--r--pygments/lexers/chapel.py7
-rw-r--r--pygments/lexers/clean.py39
-rw-r--r--pygments/lexers/compiled.py3
-rw-r--r--pygments/lexers/configs.py14
-rw-r--r--pygments/lexers/console.py2
-rw-r--r--pygments/lexers/crystal.py389
-rw-r--r--pygments/lexers/csound.py2
-rw-r--r--pygments/lexers/css.py479
-rw-r--r--pygments/lexers/d.py2
-rw-r--r--pygments/lexers/dalvik.py2
-rw-r--r--pygments/lexers/data.py39
-rw-r--r--pygments/lexers/diff.py2
-rw-r--r--pygments/lexers/dotnet.py2
-rw-r--r--pygments/lexers/dsls.py125
-rw-r--r--pygments/lexers/dylan.py2
-rw-r--r--pygments/lexers/ecl.py2
-rw-r--r--pygments/lexers/eiffel.py2
-rw-r--r--pygments/lexers/elm.py2
-rw-r--r--pygments/lexers/erlang.py2
-rw-r--r--pygments/lexers/esoteric.py100
-rw-r--r--pygments/lexers/ezhil.py21
-rw-r--r--pygments/lexers/factor.py2
-rw-r--r--pygments/lexers/fantom.py2
-rw-r--r--pygments/lexers/felix.py2
-rw-r--r--pygments/lexers/forth.py177
-rw-r--r--pygments/lexers/fortran.py6
-rw-r--r--pygments/lexers/foxpro.py2
-rw-r--r--pygments/lexers/functional.py2
-rw-r--r--pygments/lexers/go.py2
-rw-r--r--pygments/lexers/grammar_notation.py4
-rw-r--r--pygments/lexers/graph.py2
-rw-r--r--pygments/lexers/graphics.py2
-rw-r--r--pygments/lexers/haskell.py7
-rw-r--r--pygments/lexers/haxe.py2
-rw-r--r--pygments/lexers/hdl.py2
-rw-r--r--pygments/lexers/hexdump.py30
-rw-r--r--pygments/lexers/html.py19
-rw-r--r--pygments/lexers/idl.py2
-rw-r--r--pygments/lexers/igor.py2
-rw-r--r--pygments/lexers/inferno.py2
-rw-r--r--pygments/lexers/installers.py2
-rw-r--r--pygments/lexers/int_fiction.py2
-rw-r--r--pygments/lexers/iolang.py2
-rw-r--r--pygments/lexers/j.py6
-rw-r--r--pygments/lexers/javascript.py169
-rw-r--r--pygments/lexers/julia.py301
-rw-r--r--pygments/lexers/jvm.py32
-rw-r--r--pygments/lexers/lisp.py1496
-rw-r--r--pygments/lexers/make.py6
-rw-r--r--pygments/lexers/markup.py97
-rw-r--r--pygments/lexers/math.py2
-rw-r--r--pygments/lexers/matlab.py2
-rw-r--r--pygments/lexers/ml.py2
-rw-r--r--pygments/lexers/modeling.py2
-rw-r--r--pygments/lexers/modula2.py2
-rw-r--r--pygments/lexers/monte.py204
-rw-r--r--pygments/lexers/ncl.py331
-rw-r--r--pygments/lexers/nimrod.py24
-rw-r--r--pygments/lexers/nit.py2
-rw-r--r--pygments/lexers/nix.py2
-rw-r--r--pygments/lexers/oberon.py2
-rw-r--r--pygments/lexers/objective.py27
-rw-r--r--pygments/lexers/ooc.py2
-rw-r--r--pygments/lexers/other.py3
-rw-r--r--pygments/lexers/parasail.py2
-rw-r--r--pygments/lexers/parsers.py2
-rw-r--r--pygments/lexers/pascal.py4
-rw-r--r--pygments/lexers/pawn.py2
-rw-r--r--pygments/lexers/perl.py26
-rw-r--r--pygments/lexers/php.py4
-rw-r--r--pygments/lexers/praat.py15
-rw-r--r--pygments/lexers/prolog.py2
-rw-r--r--pygments/lexers/python.py7
-rw-r--r--pygments/lexers/qvt.py2
-rw-r--r--pygments/lexers/r.py2
-rw-r--r--pygments/lexers/rdf.py2
-rw-r--r--pygments/lexers/rebol.py2
-rw-r--r--pygments/lexers/resource.py5
-rw-r--r--pygments/lexers/rnc.py67
-rw-r--r--pygments/lexers/roboconf.py2
-rw-r--r--pygments/lexers/robotframework.py2
-rw-r--r--pygments/lexers/ruby.py2
-rw-r--r--pygments/lexers/rust.py110
-rw-r--r--pygments/lexers/sas.py228
-rw-r--r--pygments/lexers/scripting.py64
-rw-r--r--pygments/lexers/shell.py48
-rw-r--r--pygments/lexers/smalltalk.py2
-rw-r--r--pygments/lexers/smv.py75
-rw-r--r--pygments/lexers/snobol.py2
-rw-r--r--pygments/lexers/special.py5
-rw-r--r--pygments/lexers/sql.py70
-rw-r--r--pygments/lexers/stata.py106
-rw-r--r--pygments/lexers/supercollider.py4
-rw-r--r--pygments/lexers/tcl.py2
-rw-r--r--pygments/lexers/templates.py117
-rw-r--r--pygments/lexers/testing.py2
-rw-r--r--pygments/lexers/text.py2
-rw-r--r--pygments/lexers/textedit.py2
-rw-r--r--pygments/lexers/textfmts.py2
-rw-r--r--pygments/lexers/theorem.py15
-rw-r--r--pygments/lexers/trafficscript.py2
-rw-r--r--pygments/lexers/typoscript.py12
-rw-r--r--pygments/lexers/urbi.py2
-rw-r--r--pygments/lexers/varnish.py12
-rw-r--r--pygments/lexers/verification.py11
-rw-r--r--pygments/lexers/web.py2
-rw-r--r--pygments/lexers/webmisc.py31
-rw-r--r--pygments/lexers/whiley.py116
-rw-r--r--pygments/lexers/x10.py2
-rw-r--r--pygments/modeline.py11
-rw-r--r--pygments/plugin.py30
-rw-r--r--pygments/regexopt.py4
-rw-r--r--pygments/scanner.py2
-rw-r--r--pygments/sphinxext.py2
-rw-r--r--pygments/style.py2
-rw-r--r--pygments/styles/__init__.py6
-rw-r--r--pygments/styles/abap.py29
-rw-r--r--pygments/styles/algol.py2
-rw-r--r--pygments/styles/algol_nu.py2
-rw-r--r--pygments/styles/arduino.py2
-rw-r--r--pygments/styles/autumn.py2
-rw-r--r--pygments/styles/borland.py2
-rw-r--r--pygments/styles/bw.py2
-rw-r--r--pygments/styles/colorful.py2
-rw-r--r--pygments/styles/default.py2
-rw-r--r--pygments/styles/emacs.py2
-rw-r--r--pygments/styles/friendly.py2
-rw-r--r--pygments/styles/fruity.py2
-rw-r--r--pygments/styles/igor.py2
-rw-r--r--pygments/styles/lovelace.py2
-rw-r--r--pygments/styles/manni.py2
-rw-r--r--pygments/styles/monokai.py2
-rw-r--r--pygments/styles/murphy.py2
-rw-r--r--pygments/styles/native.py2
-rw-r--r--pygments/styles/paraiso_dark.py2
-rw-r--r--pygments/styles/paraiso_light.py2
-rw-r--r--pygments/styles/pastie.py2
-rw-r--r--pygments/styles/perldoc.py2
-rw-r--r--pygments/styles/rainbow_dash.py89
-rw-r--r--pygments/styles/rrt.py2
-rw-r--r--pygments/styles/sas.py44
-rw-r--r--pygments/styles/stata.py40
-rw-r--r--pygments/styles/tango.py2
-rw-r--r--pygments/styles/trac.py2
-rw-r--r--pygments/styles/vim.py2
-rw-r--r--pygments/styles/vs.py2
-rw-r--r--pygments/styles/xcode.py2
-rw-r--r--pygments/token.py2
-rw-r--r--pygments/unistring.py2
-rw-r--r--pygments/util.py9
-rwxr-xr-xscripts/check_sources.py4
-rwxr-xr-xscripts/debug_lexer.py2
-rw-r--r--scripts/get_vimkw.py2
-rwxr-xr-xsetup.py2
-rw-r--r--tests/examplefiles/capdl_example.cdl64
-rw-r--r--tests/examplefiles/demo.frt3
-rw-r--r--tests/examplefiles/demo.hbs22
-rw-r--r--tests/examplefiles/durexmania.aheui4
-rw-r--r--tests/examplefiles/example.bat6
-rw-r--r--tests/examplefiles/example.hs10
-rw-r--r--tests/examplefiles/example.juttle110
-rw-r--r--tests/examplefiles/example.lua24
-rw-r--r--tests/examplefiles/example.md61
-rw-r--r--tests/examplefiles/example.ng211
-rw-r--r--tests/examplefiles/example.praat51
-rw-r--r--tests/examplefiles/example.sbl109
-rw-r--r--tests/examplefiles/example.tasm527
-rw-r--r--tests/examplefiles/example.whiley296
-rw-r--r--tests/examplefiles/example.yaml9
-rw-r--r--tests/examplefiles/fibonacci.tokigun.aheui4
-rw-r--r--tests/examplefiles/guidance.smv1124
-rw-r--r--tests/examplefiles/hello-world.puzzlet.aheui10
-rw-r--r--tests/examplefiles/plain.bst1097
-rw-r--r--tests/examplefiles/rnc_example.rnc33
-rw-r--r--tests/examplefiles/test.bib77
-rw-r--r--tests/examplefiles/test.cr2871
-rw-r--r--tests/examplefiles/test.mt7
-rw-r--r--tests/examplefiles/tsql_example.sql72
-rw-r--r--tests/run.py2
-rw-r--r--tests/string_asserts.py2
-rw-r--r--tests/test_basic_api.py10
-rw-r--r--tests/test_bibtex.py236
-rw-r--r--tests/test_cfm.py2
-rw-r--r--tests/test_clexer.py2
-rw-r--r--tests/test_cmdline.py2
-rw-r--r--tests/test_cpp.py33
-rw-r--r--tests/test_crystal.py308
-rw-r--r--tests/test_data.py100
-rw-r--r--tests/test_examplefiles.py8
-rw-r--r--tests/test_ezhil.py3
-rw-r--r--tests/test_html_formatter.py2
-rw-r--r--tests/test_inherit.py2
-rw-r--r--tests/test_irc_formatter.py2
-rw-r--r--tests/test_java.py2
-rw-r--r--tests/test_javascript.py84
-rw-r--r--tests/test_julia.py58
-rw-r--r--tests/test_latex_formatter.py6
-rw-r--r--tests/test_lexers_other.py2
-rw-r--r--tests/test_modeline.py26
-rw-r--r--tests/test_objectiveclexer.py2
-rw-r--r--tests/test_perllexer.py24
-rw-r--r--tests/test_php.py36
-rw-r--r--tests/test_praat.py130
-rw-r--r--tests/test_properties.py89
-rw-r--r--tests/test_python.py113
-rw-r--r--tests/test_qbasiclexer.py2
-rw-r--r--tests/test_regexlexer.py2
-rw-r--r--tests/test_regexopt.py36
-rw-r--r--tests/test_rtf_formatter.py2
-rw-r--r--tests/test_ruby.py2
-rw-r--r--tests/test_shell.py57
-rw-r--r--tests/test_smarty.py2
-rw-r--r--tests/test_sql.py74
-rw-r--r--tests/test_string_asserts.py2
-rw-r--r--tests/test_terminal_formatter.py2
-rw-r--r--tests/test_textfmts.py2
-rw-r--r--tests/test_token.py2
-rw-r--r--tests/test_unistring.py2
-rw-r--r--tests/test_using_api.py2
-rw-r--r--tests/test_util.py2
-rw-r--r--tests/test_whiley.py30
287 files changed, 14235 insertions, 1875 deletions
diff --git a/AUTHORS b/AUTHORS
index 204b4efe..efbad179 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -7,7 +7,7 @@ Other contributors, listed alphabetically, are:
* Sam Aaron -- Ioke lexer
* Ali Afshar -- image formatter
-* Thomas Aglassinger -- Easytrieve, JCL and Rexx lexers
+* Thomas Aglassinger -- Easytrieve, JCL, Rexx and Transact-SQL lexers
* Muthiah Annamalai -- Ezhil lexer
* Kumar Appaiah -- Debian control lexer
* Andreas Amann -- AppleScript lexer
@@ -36,8 +36,9 @@ Other contributors, listed alphabetically, are:
* Matthias Bussonnier -- ANSI style handling for terminal-256 formatter
* chebee7i -- Python traceback lexer improvements
* Hiram Chirino -- Scaml and Jade lexers
+* Mauricio Caceres -- SAS and Stata lexers.
* Ian Cooper -- VGL lexer
-* David Corbett -- Inform, Jasmin, JSGF, and TADS 3 lexers
+* David Corbett -- Inform, Jasmin, JSGF, Snowball, and TADS 3 lexers
* Leaf Corcoran -- MoonScript lexer
* Christopher Creutzig -- MuPAD lexer
* Daniël W. Crompton -- Pike lexer
@@ -65,6 +66,7 @@ Other contributors, listed alphabetically, are:
* Alex Gilding -- BlitzBasic lexer
* Bertrand Goetzmann -- Groovy lexer
* Krzysiek Goj -- Scala lexer
+* Andrey Golovizin -- BibTeX lexers
* Matt Good -- Genshi, Cheetah lexers
* Michał Górny -- vim modeline support
* Alex Gosse -- TrafficScript lexer
@@ -139,6 +141,7 @@ Other contributors, listed alphabetically, are:
* Mher Movsisyan -- DTD lexer
* Dejan Muhamedagic -- Crmsh lexer
* Ana Nelson -- Ragel, ANTLR, R console lexers
+* Kurt Neufeld -- Markdown lexer
* Nam T. Nguyen -- Monokai style
* Jesper Noehr -- HTML formatter "anchorlinenos"
* Mike Nolta -- Julia lexer
@@ -152,6 +155,7 @@ Other contributors, listed alphabetically, are:
* Dominik Picheta -- Nimrod lexer
* Andrew Pinkham -- RTF Formatter Refactoring
* Clément Prévost -- UrbiScript lexer
+* Oleh Prypin -- Crystal lexer (based on Ruby lexer)
* Elias Rabel -- Fortran fixed form lexer
* raichoo -- Idris lexer
* Kashif Rasul -- CUDA lexer
@@ -169,15 +173,18 @@ Other contributors, listed alphabetically, are:
* Matteo Sasso -- Common Lisp lexer
* Joe Schafer -- Ada lexer
* Ken Schutte -- Matlab lexers
+* René Schwaiger -- Rainbow Dash style
+* Sebastian Schweizer -- Whiley lexer
* Tassilo Schweyer -- Io, MOOCode lexers
* Ted Shaw -- AutoIt lexer
* Joerg Sieker -- ABAP lexer
* Robert Simmons -- Standard ML lexer
* Kirill Simonov -- YAML lexer
+* Corbin Simpson -- Monte lexer
* Alexander Smishlajev -- Visual FoxPro lexer
* Steve Spigarelli -- XQuery lexer
* Jerome St-Louis -- eC lexer
-* Camil Staps -- Clean lexer
+* Camil Staps -- Clean and NuSMV lexers
* James Strachan -- Kotlin lexer
* Tom Stuart -- Treetop lexer
* Colin Sullivan -- SuperCollider lexer
@@ -187,6 +194,7 @@ Other contributors, listed alphabetically, are:
* Jeremy Thurgood -- Erlang, Squid config lexers
* Brian Tiffin -- OpenCOBOL lexer
* Bob Tolbert -- Hy lexer
+* Matthias Trute -- Forth lexer
* Erick Tryzelaar -- Felix lexer
* Alexander Udalov -- Kotlin lexer improvements
* Thomas Van Doren -- Chapel lexer
diff --git a/CHANGES b/CHANGES
index 8646b157..1e90c270 100644
--- a/CHANGES
+++ b/CHANGES
@@ -23,6 +23,11 @@ Version 2.2
* JSGF (PR#546)
* NCAR command language (PR#536)
* Extempore (PR#530)
+ * Cap'n Proto (PR#595)
+ * Whiley (PR#573)
+ * Monte (PR#592)
+ * Crystal (PR#576)
+ * Snowball (PR#589)
- Added `lexers.find_lexer_class_by_name()`. (#1203)
@@ -38,12 +43,26 @@ Version 2.2
- Styles can now define colors with ANSI colors for use in the 256-color
terminal formatter. (PR#531)
+- Improved the CSS lexer. (#1083, #1130)
+
+- Added "Rainbow Dash" style. (PR#623)
+
+- Delay loading `pkg_resources`, which takes a long while to import. (PR#690)
+
+
+Version 2.1.3
+-------------
+(released Mar 2, 2016)
+
+- Fixed regression in Bash lexer (PR#563)
+
Version 2.1.2
-------------
-(in development)
+(released Feb 29, 2016)
- Fixed Python 3 regression in image formatter (#1215)
+- Fixed regression in Bash lexer (PR#562)
Version 2.1.1
diff --git a/LICENSE b/LICENSE
index 10b8e916..21815527 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,4 +1,4 @@
-Copyright (c) 2006-2015 by the respective authors (see AUTHORS file).
+Copyright (c) 2006-2017 by the respective authors (see AUTHORS file).
All rights reserved.
Redistribution and use in source and binary forms, with or without
diff --git a/Makefile b/Makefile
index efae8577..82c4a124 100644
--- a/Makefile
+++ b/Makefile
@@ -4,7 +4,7 @@
#
# Combines scripts for common tasks.
#
-# :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+# :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
# :license: BSD, see LICENSE for details.
#
@@ -55,6 +55,9 @@ test:
test-coverage:
@$(PYTHON) tests/run.py -d --with-coverage --cover-package=pygments --cover-erase $(TEST)
+test-examplefiles:
+ nosetests tests/test_examplefiles.py
+
tox-test:
@tox -- $(TEST)
diff --git a/doc/_themes/pygments14/layout.html b/doc/_themes/pygments14/layout.html
index 2cc03e03..e8860827 100644
--- a/doc/_themes/pygments14/layout.html
+++ b/doc/_themes/pygments14/layout.html
@@ -82,7 +82,7 @@
{% block footer %}
<div class="footer" role="contentinfo">
- &copy; Copyright 2006-2015, Georg Brandl and Pygments contributors.
+ &copy; Copyright 2006-2017, Georg Brandl and Pygments contributors.
Created using <a href="http://sphinx-doc.org/">Sphinx</a> {{
sphinx_version }}. <br/>
Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
diff --git a/doc/_themes/pygments14/static/pygments14.css_t b/doc/_themes/pygments14/static/pygments14.css_t
index 5c37aaf9..7f09f623 100644
--- a/doc/_themes/pygments14/static/pygments14.css_t
+++ b/doc/_themes/pygments14/static/pygments14.css_t
@@ -4,7 +4,7 @@
*
* Sphinx stylesheet -- pygments14 theme. Heavily copied from sphinx13.
*
- * :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ * :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
* :license: BSD, see LICENSE for details.
*
*/
diff --git a/doc/docs/lexerdevelopment.rst b/doc/docs/lexerdevelopment.rst
index c55c98a9..63bd01a3 100644
--- a/doc/docs/lexerdevelopment.rst
+++ b/doc/docs/lexerdevelopment.rst
@@ -400,7 +400,7 @@ There are a few more things you can do with states:
tokens = {...}
def get_tokens_unprocessed(self, text, stack=('root', 'otherstate')):
- for item in RegexLexer.get_tokens_unprocessed(text, stack):
+ for item in RegexLexer.get_tokens_unprocessed(self, text, stack):
yield item
Some lexers like the `PhpLexer` use this to make the leading ``<?php``
diff --git a/doc/docs/lexers.rst b/doc/docs/lexers.rst
index 9262efb0..ef40f140 100644
--- a/doc/docs/lexers.rst
+++ b/doc/docs/lexers.rst
@@ -31,7 +31,7 @@ Currently, **all lexers** support these options:
If this option is set to ``"guess"``, a simple UTF-8 vs. Latin-1
detection is used, if it is set to ``"chardet"``, the
- `chardet library <http://chardet.feedparser.org/>`_ is used to
+ `chardet library <https://chardet.github.io/>`_ is used to
guess the encoding of the input.
.. versionadded:: 0.6
diff --git a/doc/docs/unicode.rst b/doc/docs/unicode.rst
index 17853a36..dca91116 100644
--- a/doc/docs/unicode.rst
+++ b/doc/docs/unicode.rst
@@ -55,4 +55,4 @@ encoding is handled differently, see :doc:`the command line docs <cmdline>`.
options dict with lexers and formatters, and still have different input and
output encodings.
-.. _chardet: http://chardet.feedparser.org/
+.. _chardet: https://chardet.github.io/
diff --git a/doc/languages.rst b/doc/languages.rst
index ffe1bdb6..7fa8eb2f 100644
--- a/doc/languages.rst
+++ b/doc/languages.rst
@@ -26,6 +26,7 @@ Programming languages
* Common Lisp
* Coq
* Cryptol (incl. Literate Cryptol)
+* `Crystal <http://crystal-lang.org>`_
* `Cython <http://cython.org>`_
* `D <http://dlang.org>`_
* Dart
diff --git a/external/autopygmentize b/external/autopygmentize
index d2f969a1..d2d05970 100755
--- a/external/autopygmentize
+++ b/external/autopygmentize
@@ -1,6 +1,6 @@
#!/bin/bash
# Best effort auto-pygmentization with transparent decompression
-# by Reuben Thomas 2008-2015
+# by Reuben Thomas 2008-2016
# This program is in the public domain.
# Strategy: first see if pygmentize can find a lexer; if not, ask file; if that finds nothing, fail
@@ -25,6 +25,7 @@ if [[ "$lexer" == text ]]; then
text/x-awk) lexer=awk;;
text/x-c) lexer=c;;
text/x-c++) lexer=cpp;;
+ text/x-crystal) lexer=crystal;;
text/x-diff) lexer=diff;;
text/x-fortran) lexer=fortran;;
text/x-gawk) lexer=gawk;;
@@ -65,19 +66,36 @@ if [[ "$lexer" == text ]]; then
esac
fi
+# Find a preprocessor for compressed files
+concat=cat
+case $(file $file_common_opts --mime-type "$file") in
+ application/x-gzip) concat=zcat;;
+ application/x-bzip2) concat=bzcat;;
+ application/x-xz) concat=xzcat;;
+esac
+
+# Find a suitable lexer, preceded by a hex dump for binary files
+prereader=""
encoding=$(file --mime-encoding --uncompress $file_common_opts "$file")
-if [[ $encoding == "us-asciibinarybinary" ]]; then
- encoding="us-ascii"
+if [[ $encoding == "binary" ]]; then
+ prereader="od -x" # POSIX fallback
+ if [[ -n $(which hd) ]]; then
+ prereader="hd" # preferred
+ fi
+ lexer=hexdump
+ encoding=latin1
fi
-
if [[ -n "$lexer" ]]; then
- concat=cat
- case $(file $file_common_opts --mime-type "$file") in
- application/x-gzip) concat=zcat;;
- application/x-bzip2) concat=bzcat;;
- application/x-xz) concat=xzcat;;
- esac
- exec $concat "$file" | pygmentize -O inencoding=$encoding $PYGMENTIZE_OPTS $options -l $lexer
+ reader="pygmentize -O inencoding=$encoding $PYGMENTIZE_OPTS $options -l $lexer"
+fi
+
+# If we found a reader, run it
+if [[ -n "$reader" ]]; then
+ if [[ -n "$prereader" ]]; then
+ exec $concat "$file" | $prereader | $reader
+ else
+ exec $concat "$file" | $reader
+ fi
fi
exit 1
diff --git a/external/markdown-processor.py b/external/markdown-processor.py
index a3e178ec..cb8d793d 100644
--- a/external/markdown-processor.py
+++ b/external/markdown-processor.py
@@ -22,7 +22,7 @@
.. _Markdown: https://pypi.python.org/pypi/Markdown
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/external/moin-parser.py b/external/moin-parser.py
index 9cb082a2..03a7c3c3 100644
--- a/external/moin-parser.py
+++ b/external/moin-parser.py
@@ -31,7 +31,7 @@
If you do not want to do that and are willing to accept larger HTML
output, you can set the INLINESTYLES option below to True.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/external/rst-directive.py b/external/rst-directive.py
index f81677b6..de26dd03 100644
--- a/external/rst-directive.py
+++ b/external/rst-directive.py
@@ -31,7 +31,7 @@
.. _directive documentation:
http://docutils.sourceforge.net/docs/howto/rst-directives.html
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/__init__.py b/pygments/__init__.py
index ffac59ef..cc238e02 100644
--- a/pygments/__init__.py
+++ b/pygments/__init__.py
@@ -22,7 +22,7 @@
.. _Pygments tip:
http://bitbucket.org/birkenfeld/pygments-main/get/tip.zip#egg=Pygments-dev
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
diff --git a/pygments/cmdline.py b/pygments/cmdline.py
index 69604481..98c3ec37 100644
--- a/pygments/cmdline.py
+++ b/pygments/cmdline.py
@@ -5,7 +5,7 @@
Command line interface.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -231,7 +231,7 @@ def main_inner(popts, args, usage):
return 0
if opts.pop('-V', None) is not None:
- print('Pygments version %s, (c) 2006-2015 by Georg Brandl.' % __version__)
+ print('Pygments version %s, (c) 2006-2017 by Georg Brandl.' % __version__)
return 0
# handle ``pygmentize -L``
diff --git a/pygments/console.py b/pygments/console.py
index 4aaf5fcb..31b6839d 100644
--- a/pygments/console.py
+++ b/pygments/console.py
@@ -5,7 +5,7 @@
Format colored console output.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/filter.py b/pygments/filter.py
index f3082037..68be7ad7 100644
--- a/pygments/filter.py
+++ b/pygments/filter.py
@@ -5,7 +5,7 @@
Module that implements the default filter.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/filters/__init__.py b/pygments/filters/__init__.py
index 45bd49d5..45f9608c 100644
--- a/pygments/filters/__init__.py
+++ b/pygments/filters/__init__.py
@@ -6,7 +6,7 @@
Module containing filter lookup functions and default
filters.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatter.py b/pygments/formatter.py
index 9f22b3bc..c0780f62 100644
--- a/pygments/formatter.py
+++ b/pygments/formatter.py
@@ -5,7 +5,7 @@
Base formatter class.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/__init__.py b/pygments/formatters/__init__.py
index 2fe57025..4ea79c5a 100644
--- a/pygments/formatters/__init__.py
+++ b/pygments/formatters/__init__.py
@@ -5,7 +5,7 @@
Pygments formatters.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py
index 01d053dd..7bb3e71c 100755
--- a/pygments/formatters/_mapping.py
+++ b/pygments/formatters/_mapping.py
@@ -9,7 +9,7 @@
Do not alter the FORMATTERS dictionary by hand.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/bbcode.py b/pygments/formatters/bbcode.py
index 580989f0..9fc9476d 100644
--- a/pygments/formatters/bbcode.py
+++ b/pygments/formatters/bbcode.py
@@ -5,7 +5,7 @@
BBcode formatter.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/html.py b/pygments/formatters/html.py
index 2c6bb19e..2969d502 100644
--- a/pygments/formatters/html.py
+++ b/pygments/formatters/html.py
@@ -5,7 +5,7 @@
Formatter for HTML output.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/img.py b/pygments/formatters/img.py
index cc95ce24..2fb0dea5 100644
--- a/pygments/formatters/img.py
+++ b/pygments/formatters/img.py
@@ -5,10 +5,11 @@
Formatter for Pixmap output.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+import os
import sys
from pygments.formatter import Formatter
@@ -47,6 +48,7 @@ STYLES = {
# A sane default for modern systems
DEFAULT_FONT_NAME_NIX = 'Bitstream Vera Sans Mono'
DEFAULT_FONT_NAME_WIN = 'Courier New'
+DEFAULT_FONT_NAME_MAC = 'Courier New'
class PilNotAvailable(ImportError):
@@ -71,6 +73,10 @@ class FontManager(object):
if not font_name:
self.font_name = DEFAULT_FONT_NAME_WIN
self._create_win()
+ elif sys.platform.startswith('darwin'):
+ if not font_name:
+ self.font_name = DEFAULT_FONT_NAME_MAC
+ self._create_mac()
else:
if not font_name:
self.font_name = DEFAULT_FONT_NAME_NIX
@@ -111,6 +117,37 @@ class FontManager(object):
else:
self.fonts[style] = self.fonts['NORMAL']
+ def _get_mac_font_path(self, font_map, name, style):
+ return font_map.get((name + ' ' + style).strip().lower())
+
+ def _create_mac(self):
+ font_map = {}
+ for font_dir in (os.path.join(os.getenv("HOME"), 'Library/Fonts/'),
+ '/Library/Fonts/', '/System/Library/Fonts/'):
+ font_map.update(
+ ((os.path.splitext(f)[0].lower(), os.path.join(font_dir, f))
+ for f in os.listdir(font_dir) if f.lower().endswith('ttf')))
+
+ for name in STYLES['NORMAL']:
+ path = self._get_mac_font_path(font_map, self.font_name, name)
+ if path is not None:
+ self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
+ break
+ else:
+ raise FontNotFound('No usable fonts named: "%s"' %
+ self.font_name)
+ for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
+ for stylename in STYLES[style]:
+ path = self._get_mac_font_path(font_map, self.font_name, stylename)
+ if path is not None:
+ self.fonts[style] = ImageFont.truetype(path, self.font_size)
+ break
+ else:
+ if style == 'BOLDITALIC':
+ self.fonts[style] = self.fonts['BOLD']
+ else:
+ self.fonts[style] = self.fonts['NORMAL']
+
def _lookup_win(self, key, basename, styles, fail=False):
for suffix in ('', ' (TrueType)'):
for style in styles:
diff --git a/pygments/formatters/irc.py b/pygments/formatters/irc.py
index d1eed0ac..eb744d74 100644
--- a/pygments/formatters/irc.py
+++ b/pygments/formatters/irc.py
@@ -5,7 +5,7 @@
Formatter for IRC output
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/latex.py b/pygments/formatters/latex.py
index 66d521f5..336b59de 100644
--- a/pygments/formatters/latex.py
+++ b/pygments/formatters/latex.py
@@ -5,7 +5,7 @@
Formatter for LaTeX fancyvrb output.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/other.py b/pygments/formatters/other.py
index 4945d763..d6bfcacf 100644
--- a/pygments/formatters/other.py
+++ b/pygments/formatters/other.py
@@ -5,7 +5,7 @@
Other formatters: NullFormatter, RawTokenFormatter.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/rtf.py b/pygments/formatters/rtf.py
index 27be225a..c6353c12 100644
--- a/pygments/formatters/rtf.py
+++ b/pygments/formatters/rtf.py
@@ -5,7 +5,7 @@
A formatter that generates RTF files.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/svg.py b/pygments/formatters/svg.py
index 0efe9eea..944b25e0 100644
--- a/pygments/formatters/svg.py
+++ b/pygments/formatters/svg.py
@@ -5,7 +5,7 @@
Formatter for SVG output.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/terminal.py b/pygments/formatters/terminal.py
index 2dbfde7f..b8fec52e 100644
--- a/pygments/formatters/terminal.py
+++ b/pygments/formatters/terminal.py
@@ -5,7 +5,7 @@
Formatter for terminal output with ANSI sequences.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/terminal256.py b/pygments/formatters/terminal256.py
index 5110bc9e..b80dc7dd 100644
--- a/pygments/formatters/terminal256.py
+++ b/pygments/formatters/terminal256.py
@@ -11,7 +11,7 @@
Formatter version 1.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexer.py b/pygments/lexer.py
index f16d8106..90905ba5 100644
--- a/pygments/lexer.py
+++ b/pygments/lexer.py
@@ -5,7 +5,7 @@
Base lexer classes.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/__init__.py b/pygments/lexers/__init__.py
index daedff4b..d5e02e49 100644
--- a/pygments/lexers/__init__.py
+++ b/pygments/lexers/__init__.py
@@ -5,7 +5,7 @@
Pygments lexers.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -72,7 +72,7 @@ def find_lexer_class(name):
return cls
-def find_lexer_class_by_name(alias):
+def find_lexer_class_by_name(_alias):
"""Lookup a lexer class by alias.
Like `get_lexer_by_name`, but does not instantiate the class.
@@ -181,8 +181,8 @@ def find_lexer_class_for_filename(_fn, code=None):
# gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
# to find lexers which need it overridden.
if code:
- return cls.analyse_text(code) + bonus
- return cls.priority + bonus
+ return cls.analyse_text(code) + bonus, cls.__name__
+ return cls.priority + bonus, cls.__name__
if matches:
matches.sort(key=get_rating)
diff --git a/pygments/lexers/_asy_builtins.py b/pygments/lexers/_asy_builtins.py
index 51716866..1f831cdb 100644
--- a/pygments/lexers/_asy_builtins.py
+++ b/pygments/lexers/_asy_builtins.py
@@ -10,7 +10,7 @@
TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
for function and variable names.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_cl_builtins.py b/pygments/lexers/_cl_builtins.py
index a2243647..ce5ad48e 100644
--- a/pygments/lexers/_cl_builtins.py
+++ b/pygments/lexers/_cl_builtins.py
@@ -5,7 +5,7 @@
ANSI Common Lisp builtins.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_cocoa_builtins.py b/pygments/lexers/_cocoa_builtins.py
index a4f00d9d..064167ff 100644
--- a/pygments/lexers/_cocoa_builtins.py
+++ b/pygments/lexers/_cocoa_builtins.py
@@ -8,7 +8,7 @@
File may be also used as standalone generator for aboves.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_csound_builtins.py b/pygments/lexers/_csound_builtins.py
index a88e0a83..e5a9aaf7 100644
--- a/pygments/lexers/_csound_builtins.py
+++ b/pygments/lexers/_csound_builtins.py
@@ -3,7 +3,7 @@
pygments.lexers._csound_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_lasso_builtins.py b/pygments/lexers/_lasso_builtins.py
index 7c6fd6d4..d950cbe8 100644
--- a/pygments/lexers/_lasso_builtins.py
+++ b/pygments/lexers/_lasso_builtins.py
@@ -5,7 +5,7 @@
Built-in Lasso types, traits, methods, and members.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -471,6 +471,10 @@ BUILTINS = {
'curl_netrc_ignored',
'curl_netrc_optional',
'curl_netrc_required',
+ 'curl_sslversion_default',
+ 'curl_sslversion_sslv2',
+ 'curl_sslversion_sslv3',
+ 'curl_sslversion_tlsv1',
'curl_version_asynchdns',
'curl_version_debug',
'curl_version_gssnegotiate',
@@ -1102,6 +1106,7 @@ BUILTINS = {
'json_open_array',
'json_open_object',
'json_period',
+ 'json_positive',
'json_quote_double',
'json_rpccall',
'json_serialize',
@@ -1229,6 +1234,7 @@ BUILTINS = {
'lcapi_loadmodules',
'lcapi_updatedatasourceslist',
'ldap_scope_base',
+ 'ldap_scope_children',
'ldap_scope_onelevel',
'ldap_scope_subtree',
'library_once',
@@ -4044,6 +4050,7 @@ MEMBERS = {
'iscntrl',
'isdigit',
'isdir',
+ 'isdirectory',
'isempty',
'isemptyelement',
'isfirststep',
diff --git a/pygments/lexers/_lua_builtins.py b/pygments/lexers/_lua_builtins.py
index 6d2929b6..c60bf5a2 100644
--- a/pygments/lexers/_lua_builtins.py
+++ b/pygments/lexers/_lua_builtins.py
@@ -9,60 +9,71 @@
Do not edit the MODULES dict by hand.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
-
MODULES = {'basic': ('_G',
'_VERSION',
'assert',
'collectgarbage',
'dofile',
'error',
- 'getfenv',
'getmetatable',
'ipairs',
'load',
'loadfile',
- 'loadstring',
'next',
'pairs',
'pcall',
'print',
'rawequal',
'rawget',
+ 'rawlen',
'rawset',
'select',
- 'setfenv',
'setmetatable',
'tonumber',
'tostring',
'type',
- 'unpack',
'xpcall'),
+ 'bit32': ('bit32.arshift',
+ 'bit32.band',
+ 'bit32.bnot',
+ 'bit32.bor',
+ 'bit32.btest',
+ 'bit32.bxor',
+ 'bit32.extract',
+ 'bit32.lrotate',
+ 'bit32.lshift',
+ 'bit32.replace',
+ 'bit32.rrotate',
+ 'bit32.rshift'),
'coroutine': ('coroutine.create',
+ 'coroutine.isyieldable',
'coroutine.resume',
'coroutine.running',
'coroutine.status',
'coroutine.wrap',
'coroutine.yield'),
'debug': ('debug.debug',
- 'debug.getfenv',
'debug.gethook',
'debug.getinfo',
'debug.getlocal',
'debug.getmetatable',
'debug.getregistry',
'debug.getupvalue',
- 'debug.setfenv',
+ 'debug.getuservalue',
'debug.sethook',
'debug.setlocal',
'debug.setmetatable',
'debug.setupvalue',
- 'debug.traceback'),
+ 'debug.setuservalue',
+ 'debug.traceback',
+ 'debug.upvalueid',
+ 'debug.upvaluejoin'),
'io': ('io.close',
'io.flush',
'io.input',
@@ -71,17 +82,20 @@ MODULES = {'basic': ('_G',
'io.output',
'io.popen',
'io.read',
+ 'io.stderr',
+ 'io.stdin',
+ 'io.stdout',
'io.tmpfile',
'io.type',
'io.write'),
'math': ('math.abs',
'math.acos',
'math.asin',
- 'math.atan2',
'math.atan',
+ 'math.atan2',
'math.ceil',
- 'math.cosh',
'math.cos',
+ 'math.cosh',
'math.deg',
'math.exp',
'math.floor',
@@ -89,29 +103,34 @@ MODULES = {'basic': ('_G',
'math.frexp',
'math.huge',
'math.ldexp',
- 'math.log10',
'math.log',
'math.max',
+ 'math.maxinteger',
'math.min',
+ 'math.mininteger',
'math.modf',
'math.pi',
'math.pow',
'math.rad',
'math.random',
'math.randomseed',
- 'math.sinh',
'math.sin',
+ 'math.sinh',
'math.sqrt',
+ 'math.tan',
'math.tanh',
- 'math.tan'),
- 'modules': ('module',
- 'require',
+ 'math.tointeger',
+ 'math.type',
+ 'math.ult'),
+ 'modules': ('package.config',
'package.cpath',
'package.loaded',
'package.loadlib',
'package.path',
'package.preload',
- 'package.seeall'),
+ 'package.searchers',
+ 'package.searchpath',
+ 'require'),
'os': ('os.clock',
'os.date',
'os.difftime',
@@ -133,19 +152,37 @@ MODULES = {'basic': ('_G',
'string.len',
'string.lower',
'string.match',
+ 'string.pack',
+ 'string.packsize',
'string.rep',
'string.reverse',
'string.sub',
+ 'string.unpack',
'string.upper'),
'table': ('table.concat',
'table.insert',
- 'table.maxn',
+ 'table.move',
+ 'table.pack',
'table.remove',
- 'table.sort')}
-
+ 'table.sort',
+ 'table.unpack'),
+ 'utf8': ('utf8.char',
+ 'utf8.charpattern',
+ 'utf8.codepoint',
+ 'utf8.codes',
+ 'utf8.len',
+ 'utf8.offset')}
if __name__ == '__main__': # pragma: no cover
import re
+ import sys
+
+ # urllib ends up wanting to import a module called 'math' -- if
+ # pygments/lexers is in the path, this ends badly.
+ for i in range(len(sys.path)-1, -1, -1):
+ if sys.path[i].endswith('/lexers'):
+ del sys.path[i]
+
try:
from urllib import urlopen
except ImportError:
@@ -196,7 +233,7 @@ if __name__ == '__main__': # pragma: no cover
def get_newest_version():
f = urlopen('http://www.lua.org/manual/')
- r = re.compile(r'^<A HREF="(\d\.\d)/">Lua \1</A>')
+ r = re.compile(r'^<A HREF="(\d\.\d)/">(Lua )?\1</A>')
for line in f:
m = r.match(line)
if m is not None:
@@ -204,7 +241,7 @@ if __name__ == '__main__': # pragma: no cover
def get_lua_functions(version):
f = urlopen('http://www.lua.org/manual/%s/' % version)
- r = re.compile(r'^<A HREF="manual.html#pdf-(.+)">\1</A>')
+ r = re.compile(r'^<A HREF="manual.html#pdf-(?!lua|LUA)([^:]+)">\1</A>')
functions = []
for line in f:
m = r.match(line)
@@ -236,15 +273,22 @@ if __name__ == '__main__': # pragma: no cover
def run():
version = get_newest_version()
- print('> Downloading function index for Lua %s' % version)
- functions = get_lua_functions(version)
- print('> %d functions found:' % len(functions))
+ functions = set()
+ for v in ('5.2', version):
+ print('> Downloading function index for Lua %s' % v)
+ f = get_lua_functions(v)
+ print('> %d functions found, %d new:' %
+ (len(f), len(set(f) - functions)))
+ functions |= set(f)
+
+ functions = sorted(functions)
modules = {}
for full_function_name in functions:
print('>> %s' % full_function_name)
m = get_function_module(full_function_name)
modules.setdefault(m, []).append(full_function_name)
+ modules = {k: tuple(v) for k, v in modules.iteritems()}
regenerate(__file__, modules)
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index dba6d69a..adbff258 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -24,9 +24,12 @@ LEXERS = {
'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
'AdlLexer': ('pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()),
'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
+ 'AheuiLexer': ('pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()),
'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)),
'AmplLexer': ('pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()),
+ 'Angular2HtmlLexer': ('pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()),
+ 'Angular2Lexer': ('pygments.lexers.templates', 'Angular2', ('ng2',), (), ()),
'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()),
'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
@@ -46,11 +49,13 @@ LEXERS = {
'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
+ 'BSTLexer': ('pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()),
'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
- 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')),
+ 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')),
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
+ 'BibTeXLexer': ('pygments.lexers.bibtex', 'BibTeX', ('bib', 'bibtex'), ('*.bib',), ('text/x-bibtex',)),
'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
'BnfLexer': ('pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)),
@@ -68,6 +73,8 @@ LEXERS = {
'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
'Ca65Lexer': ('pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()),
'CadlLexer': ('pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()),
+ 'CapDLLexer': ('pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()),
+ 'CapnProtoLexer': ('pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()),
'CbmBasicV2Lexer': ('pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
@@ -96,6 +103,7 @@ LEXERS = {
'CrmshLexer': ('pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()),
'CrocLexer': ('pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)),
+ 'CrystalLexer': ('pygments.lexers.crystal', 'Crystal', ('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)),
'CsoundDocumentLexer': ('pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()),
'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc',), ()),
'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()),
@@ -113,7 +121,7 @@ LEXERS = {
'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()),
- 'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
+ 'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)),
'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
@@ -147,6 +155,7 @@ LEXERS = {
'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
'FishShellLexer': ('pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)),
'FlatlineLexer': ('pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)),
+ 'ForthLexer': ('pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)),
'FortranFixedLexer': ('pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()),
'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)),
'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
@@ -193,7 +202,6 @@ LEXERS = {
'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)),
'JLexer': ('pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)),
- 'JadeLexer': ('pygments.lexers.html', 'Jade', ('jade',), ('*.jade',), ('text/x-jade',)),
'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
@@ -205,11 +213,13 @@ LEXERS = {
'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
'JclLexer': ('pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)),
'JsgfLexer': ('pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')),
+ 'JsonBareObjectLexer': ('pygments.lexers.data', 'JSONBareObject', ('json-object',), (), ('application/json-object',)),
'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
'JsonLexer': ('pygments.lexers.data', 'JSON', ('json',), ('*.json',), ('application/json',)),
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()),
'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
+ 'JuttleLexer': ('pygments.lexers.javascript', 'Juttle', ('juttle', 'juttle'), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')),
'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
@@ -243,6 +253,7 @@ LEXERS = {
'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
+ 'MarkdownLexer': ('pygments.lexers.markup', 'markdown', ('md',), ('*.md',), ('text/x-markdown',)),
'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
@@ -253,6 +264,7 @@ LEXERS = {
'Modula2Lexer': ('pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
'MonkeyLexer': ('pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
+ 'MonteLexer': ('pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()),
'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
'MozPreprocCssLexer': ('pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()),
'MozPreprocHashLexer': ('pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()),
@@ -275,12 +287,13 @@ LEXERS = {
'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
'NesCLexer': ('pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
- 'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')),
+ 'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')),
'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)),
- 'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)),
+ 'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nim', 'nimrod'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
+ 'NuSMVLexer': ('pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()),
'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
'ObjectiveCLexer': ('pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
@@ -313,6 +326,7 @@ LEXERS = {
'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
+ 'PugLexer': ('pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')),
'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
'Python3Lexer': ('pygments.lexers.python', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
@@ -324,6 +338,7 @@ LEXERS = {
'QVToLexer': ('pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()),
'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')),
'RConsoleLexer': ('pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
+ 'RNCCompactLexer': ('pygments.lexers.rnc', 'Relax-NG Compact', ('rnc', 'rng-compact'), ('*.rnc',), ()),
'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
@@ -353,6 +368,7 @@ LEXERS = {
'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')),
'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust',), ('*.rs', '*.rs.in'), ('text/rust',)),
+ 'SASLexer': ('pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')),
'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
@@ -362,12 +378,13 @@ LEXERS = {
'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
'ShenLexer': ('pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')),
- 'SilverLexer': ('pygments.lexers.verification', 'Silver', ('silver',), ('*.sil',), ()),
+ 'SilverLexer': ('pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()),
'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
+ 'SnowballLexer': ('pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()),
'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
@@ -376,12 +393,14 @@ LEXERS = {
'SquidConfLexer': ('pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
'StanLexer': ('pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()),
+ 'StataLexer': ('pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')),
'SuperColliderLexer': ('pygments.lexers.supercollider', 'SuperCollider', ('sc', 'supercollider'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')),
'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
'TAPLexer': ('pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
+ 'TasmLexer': ('pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)),
'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
'TcshSessionLexer': ('pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()),
@@ -393,6 +412,7 @@ LEXERS = {
'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)),
'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
+ 'TransactSqlLexer': ('pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)),
'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
@@ -416,6 +436,7 @@ LEXERS = {
'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
'WDiffLexer': ('pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()),
+ 'WhileyLexer': ('pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)),
'X10Lexer': ('pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)),
'XQueryLexer': ('pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
diff --git a/pygments/lexers/_mql_builtins.py b/pygments/lexers/_mql_builtins.py
index 524a2ea2..6eb600c4 100644
--- a/pygments/lexers/_mql_builtins.py
+++ b/pygments/lexers/_mql_builtins.py
@@ -5,7 +5,7 @@
Builtins for the MqlLexer.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
types = (
diff --git a/pygments/lexers/_openedge_builtins.py b/pygments/lexers/_openedge_builtins.py
index 46b6cc42..0fa7d1b2 100644
--- a/pygments/lexers/_openedge_builtins.py
+++ b/pygments/lexers/_openedge_builtins.py
@@ -5,7 +5,7 @@
Builtin list for the OpenEdgeLexer.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_php_builtins.py b/pygments/lexers/_php_builtins.py
index f1b64ced..fec3286a 100644
--- a/pygments/lexers/_php_builtins.py
+++ b/pygments/lexers/_php_builtins.py
@@ -12,7 +12,7 @@
internet connection. don't run that at home, use
a server ;-)
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_postgres_builtins.py b/pygments/lexers/_postgres_builtins.py
index 671fa677..a71360f0 100644
--- a/pygments/lexers/_postgres_builtins.py
+++ b/pygments/lexers/_postgres_builtins.py
@@ -5,7 +5,7 @@
Self-updating data files for PostgreSQL lexer.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_scilab_builtins.py b/pygments/lexers/_scilab_builtins.py
index 85c99966..ce0ac67d 100644
--- a/pygments/lexers/_scilab_builtins.py
+++ b/pygments/lexers/_scilab_builtins.py
@@ -5,7 +5,7 @@
Builtin list for the ScilabLexer.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_sourcemod_builtins.py b/pygments/lexers/_sourcemod_builtins.py
index 9ebb1595..f08ea481 100644
--- a/pygments/lexers/_sourcemod_builtins.py
+++ b/pygments/lexers/_sourcemod_builtins.py
@@ -8,7 +8,7 @@
Do not edit the FUNCTIONS list by hand.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_stan_builtins.py b/pygments/lexers/_stan_builtins.py
index 6585ad71..a189647a 100644
--- a/pygments/lexers/_stan_builtins.py
+++ b/pygments/lexers/_stan_builtins.py
@@ -6,7 +6,7 @@
This file contains the names of functions for Stan used by
``pygments.lexers.math.StanLexer. This is for Stan language version 2.8.0.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_stata_builtins.py b/pygments/lexers/_stata_builtins.py
new file mode 100644
index 00000000..5f5f72a9
--- /dev/null
+++ b/pygments/lexers/_stata_builtins.py
@@ -0,0 +1,419 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._stata_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Builtins for Stata
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+
+builtins_base = (
+ "if", "else", "in", "foreach", "for", "forv", "forva",
+ "forval", "forvalu", "forvalue", "forvalues", "by", "bys",
+ "bysort", "quietly", "qui", "about", "ac",
+ "ac_7", "acprplot", "acprplot_7", "adjust", "ado", "adopath",
+ "adoupdate", "alpha", "ameans", "an", "ano", "anov", "anova",
+ "anova_estat", "anova_terms", "anovadef", "aorder", "ap", "app",
+ "appe", "appen", "append", "arch", "arch_dr", "arch_estat",
+ "arch_p", "archlm", "areg", "areg_p", "args", "arima",
+ "arima_dr", "arima_estat", "arima_p", "as", "asmprobit",
+ "asmprobit_estat", "asmprobit_lf", "asmprobit_mfx__dlg",
+ "asmprobit_p", "ass", "asse", "asser", "assert", "avplot",
+ "avplot_7", "avplots", "avplots_7", "bcskew0", "bgodfrey",
+ "binreg", "bip0_lf", "biplot", "bipp_lf", "bipr_lf",
+ "bipr_p", "biprobit", "bitest", "bitesti", "bitowt", "blogit",
+ "bmemsize", "boot", "bootsamp", "bootstrap", "bootstrap_8",
+ "boxco_l", "boxco_p", "boxcox", "boxcox_6", "boxcox_p",
+ "bprobit", "br", "break", "brier", "bro", "brow", "brows",
+ "browse", "brr", "brrstat", "bs", "bs_7", "bsampl_w",
+ "bsample", "bsample_7", "bsqreg", "bstat", "bstat_7", "bstat_8",
+ "bstrap", "bstrap_7", "ca", "ca_estat", "ca_p", "cabiplot",
+ "camat", "canon", "canon_8", "canon_8_p", "canon_estat",
+ "canon_p", "cap", "caprojection", "capt", "captu", "captur",
+ "capture", "cat", "cc", "cchart", "cchart_7", "cci",
+ "cd", "censobs_table", "centile", "cf", "char", "chdir",
+ "checkdlgfiles", "checkestimationsample", "checkhlpfiles",
+ "checksum", "chelp", "ci", "cii", "cl", "class", "classutil",
+ "clear", "cli", "clis", "clist", "clo", "clog", "clog_lf",
+ "clog_p", "clogi", "clogi_sw", "clogit", "clogit_lf",
+ "clogit_p", "clogitp", "clogl_sw", "cloglog", "clonevar",
+ "clslistarray", "cluster", "cluster_measures", "cluster_stop",
+ "cluster_tree", "cluster_tree_8", "clustermat", "cmdlog",
+ "cnr", "cnre", "cnreg", "cnreg_p", "cnreg_sw", "cnsreg",
+ "codebook", "collaps4", "collapse", "colormult_nb",
+ "colormult_nw", "compare", "compress", "conf", "confi",
+ "confir", "confirm", "conren", "cons", "const", "constr",
+ "constra", "constrai", "constrain", "constraint", "continue",
+ "contract", "copy", "copyright", "copysource", "cor", "corc",
+ "corr", "corr2data", "corr_anti", "corr_kmo", "corr_smc",
+ "corre", "correl", "correla", "correlat", "correlate",
+ "corrgram", "cou", "coun", "count", "cox", "cox_p", "cox_sw",
+ "coxbase", "coxhaz", "coxvar", "cprplot", "cprplot_7",
+ "crc", "cret", "cretu", "cretur", "creturn", "cross", "cs",
+ "cscript", "cscript_log", "csi", "ct", "ct_is", "ctset",
+ "ctst_5", "ctst_st", "cttost", "cumsp", "cumsp_7", "cumul",
+ "cusum", "cusum_7", "cutil", "d", "datasig", "datasign",
+ "datasigna", "datasignat", "datasignatu", "datasignatur",
+ "datasignature", "datetof", "db", "dbeta", "de", "dec",
+ "deco", "decod", "decode", "deff", "des", "desc", "descr",
+ "descri", "describ", "describe", "destring", "dfbeta",
+ "dfgls", "dfuller", "di", "di_g", "dir", "dirstats", "dis",
+ "discard", "disp", "disp_res", "disp_s", "displ", "displa",
+ "display", "distinct", "do", "doe", "doed", "doedi",
+ "doedit", "dotplot", "dotplot_7", "dprobit", "drawnorm",
+ "drop", "ds", "ds_util", "dstdize", "duplicates", "durbina",
+ "dwstat", "dydx", "e", "ed", "edi", "edit", "egen",
+ "eivreg", "emdef", "en", "enc", "enco", "encod", "encode",
+ "eq", "erase", "ereg", "ereg_lf", "ereg_p", "ereg_sw",
+ "ereghet", "ereghet_glf", "ereghet_glf_sh", "ereghet_gp",
+ "ereghet_ilf", "ereghet_ilf_sh", "ereghet_ip", "eret",
+ "eretu", "eretur", "ereturn", "err", "erro", "error", "est",
+ "est_cfexist", "est_cfname", "est_clickable", "est_expand",
+ "est_hold", "est_table", "est_unhold", "est_unholdok",
+ "estat", "estat_default", "estat_summ", "estat_vce_only",
+ "esti", "estimates", "etodow", "etof", "etomdy", "ex",
+ "exi", "exit", "expand", "expandcl", "fac", "fact", "facto",
+ "factor", "factor_estat", "factor_p", "factor_pca_rotated",
+ "factor_rotate", "factormat", "fcast", "fcast_compute",
+ "fcast_graph", "fdades", "fdadesc", "fdadescr", "fdadescri",
+ "fdadescrib", "fdadescribe", "fdasav", "fdasave", "fdause",
+ "fh_st", "open", "read", "close",
+ "file", "filefilter", "fillin", "find_hlp_file", "findfile",
+ "findit", "findit_7", "fit", "fl", "fli", "flis", "flist",
+ "for5_0", "form", "forma", "format", "fpredict", "frac_154",
+ "frac_adj", "frac_chk", "frac_cox", "frac_ddp", "frac_dis",
+ "frac_dv", "frac_in", "frac_mun", "frac_pp", "frac_pq",
+ "frac_pv", "frac_wgt", "frac_xo", "fracgen", "fracplot",
+ "fracplot_7", "fracpoly", "fracpred", "fron_ex", "fron_hn",
+ "fron_p", "fron_tn", "fron_tn2", "frontier", "ftodate", "ftoe",
+ "ftomdy", "ftowdate", "g", "gamhet_glf", "gamhet_gp",
+ "gamhet_ilf", "gamhet_ip", "gamma", "gamma_d2", "gamma_p",
+ "gamma_sw", "gammahet", "gdi_hexagon", "gdi_spokes", "ge",
+ "gen", "gene", "gener", "genera", "generat", "generate",
+ "genrank", "genstd", "genvmean", "gettoken", "gl", "gladder",
+ "gladder_7", "glim_l01", "glim_l02", "glim_l03", "glim_l04",
+ "glim_l05", "glim_l06", "glim_l07", "glim_l08", "glim_l09",
+ "glim_l10", "glim_l11", "glim_l12", "glim_lf", "glim_mu",
+ "glim_nw1", "glim_nw2", "glim_nw3", "glim_p", "glim_v1",
+ "glim_v2", "glim_v3", "glim_v4", "glim_v5", "glim_v6",
+ "glim_v7", "glm", "glm_6", "glm_p", "glm_sw", "glmpred", "glo",
+ "glob", "globa", "global", "glogit", "glogit_8", "glogit_p",
+ "gmeans", "gnbre_lf", "gnbreg", "gnbreg_5", "gnbreg_p",
+ "gomp_lf", "gompe_sw", "gomper_p", "gompertz", "gompertzhet",
+ "gomphet_glf", "gomphet_glf_sh", "gomphet_gp", "gomphet_ilf",
+ "gomphet_ilf_sh", "gomphet_ip", "gphdot", "gphpen",
+ "gphprint", "gprefs", "gprobi_p", "gprobit", "gprobit_8", "gr",
+ "gr7", "gr_copy", "gr_current", "gr_db", "gr_describe",
+ "gr_dir", "gr_draw", "gr_draw_replay", "gr_drop", "gr_edit",
+ "gr_editviewopts", "gr_example", "gr_example2", "gr_export",
+ "gr_print", "gr_qscheme", "gr_query", "gr_read", "gr_rename",
+ "gr_replay", "gr_save", "gr_set", "gr_setscheme", "gr_table",
+ "gr_undo", "gr_use", "graph", "graph7", "grebar", "greigen",
+ "greigen_7", "greigen_8", "grmeanby", "grmeanby_7",
+ "gs_fileinfo", "gs_filetype", "gs_graphinfo", "gs_stat",
+ "gsort", "gwood", "h", "hadimvo", "hareg", "hausman",
+ "haver", "he", "heck_d2", "heckma_p", "heckman", "heckp_lf",
+ "heckpr_p", "heckprob", "hel", "help", "hereg", "hetpr_lf",
+ "hetpr_p", "hetprob", "hettest", "hexdump", "hilite",
+ "hist", "hist_7", "histogram", "hlogit", "hlu", "hmeans",
+ "hotel", "hotelling", "hprobit", "hreg", "hsearch", "icd9",
+ "icd9_ff", "icd9p", "iis", "impute", "imtest", "inbase",
+ "include", "inf", "infi", "infil", "infile", "infix", "inp",
+ "inpu", "input", "ins", "insheet", "insp", "inspe",
+ "inspec", "inspect", "integ", "inten", "intreg", "intreg_7",
+ "intreg_p", "intrg2_ll", "intrg_ll", "intrg_ll2", "ipolate",
+ "iqreg", "ir", "irf", "irf_create", "irfm", "iri", "is_svy",
+ "is_svysum", "isid", "istdize", "ivprob_1_lf", "ivprob_lf",
+ "ivprobit", "ivprobit_p", "ivreg", "ivreg_footnote",
+ "ivtob_1_lf", "ivtob_lf", "ivtobit", "ivtobit_p", "jackknife",
+ "jacknife", "jknife", "jknife_6", "jknife_8", "jkstat",
+ "joinby", "kalarma1", "kap", "kap_3", "kapmeier", "kappa",
+ "kapwgt", "kdensity", "kdensity_7", "keep", "ksm", "ksmirnov",
+ "ktau", "kwallis", "l", "la", "lab", "labe", "label",
+ "labelbook", "ladder", "levels", "levelsof", "leverage",
+ "lfit", "lfit_p", "li", "lincom", "line", "linktest",
+ "lis", "list", "lloghet_glf", "lloghet_glf_sh", "lloghet_gp",
+ "lloghet_ilf", "lloghet_ilf_sh", "lloghet_ip", "llogi_sw",
+ "llogis_p", "llogist", "llogistic", "llogistichet",
+ "lnorm_lf", "lnorm_sw", "lnorma_p", "lnormal", "lnormalhet",
+ "lnormhet_glf", "lnormhet_glf_sh", "lnormhet_gp",
+ "lnormhet_ilf", "lnormhet_ilf_sh", "lnormhet_ip", "lnskew0",
+ "loadingplot", "loc", "loca", "local", "log", "logi",
+ "logis_lf", "logistic", "logistic_p", "logit", "logit_estat",
+ "logit_p", "loglogs", "logrank", "loneway", "lookfor",
+ "lookup", "lowess", "lowess_7", "lpredict", "lrecomp", "lroc",
+ "lroc_7", "lrtest", "ls", "lsens", "lsens_7", "lsens_x",
+ "lstat", "ltable", "ltable_7", "ltriang", "lv", "lvr2plot",
+ "lvr2plot_7", "m", "ma", "mac", "macr", "macro", "makecns",
+ "man", "manova", "manova_estat", "manova_p", "manovatest",
+ "mantel", "mark", "markin", "markout", "marksample", "mat",
+ "mat_capp", "mat_order", "mat_put_rr", "mat_rapp", "mata",
+ "mata_clear", "mata_describe", "mata_drop", "mata_matdescribe",
+ "mata_matsave", "mata_matuse", "mata_memory", "mata_mlib",
+ "mata_mosave", "mata_rename", "mata_which", "matalabel",
+ "matcproc", "matlist", "matname", "matr", "matri",
+ "matrix", "matrix_input__dlg", "matstrik", "mcc", "mcci",
+ "md0_", "md1_", "md1debug_", "md2_", "md2debug_", "mds",
+ "mds_estat", "mds_p", "mdsconfig", "mdslong", "mdsmat",
+ "mdsshepard", "mdytoe", "mdytof", "me_derd", "mean",
+ "means", "median", "memory", "memsize", "meqparse", "mer",
+ "merg", "merge", "mfp", "mfx", "mhelp", "mhodds", "minbound",
+ "mixed_ll", "mixed_ll_reparm", "mkassert", "mkdir",
+ "mkmat", "mkspline", "ml", "ml_5", "ml_adjs", "ml_bhhhs",
+ "ml_c_d", "ml_check", "ml_clear", "ml_cnt", "ml_debug",
+ "ml_defd", "ml_e0", "ml_e0_bfgs", "ml_e0_cycle", "ml_e0_dfp",
+ "ml_e0i", "ml_e1", "ml_e1_bfgs", "ml_e1_bhhh", "ml_e1_cycle",
+ "ml_e1_dfp", "ml_e2", "ml_e2_cycle", "ml_ebfg0", "ml_ebfr0",
+ "ml_ebfr1", "ml_ebh0q", "ml_ebhh0", "ml_ebhr0", "ml_ebr0i",
+ "ml_ecr0i", "ml_edfp0", "ml_edfr0", "ml_edfr1", "ml_edr0i",
+ "ml_eds", "ml_eer0i", "ml_egr0i", "ml_elf", "ml_elf_bfgs",
+ "ml_elf_bhhh", "ml_elf_cycle", "ml_elf_dfp", "ml_elfi",
+ "ml_elfs", "ml_enr0i", "ml_enrr0", "ml_erdu0", "ml_erdu0_bfgs",
+ "ml_erdu0_bhhh", "ml_erdu0_bhhhq", "ml_erdu0_cycle",
+ "ml_erdu0_dfp", "ml_erdu0_nrbfgs", "ml_exde", "ml_footnote",
+ "ml_geqnr", "ml_grad0", "ml_graph", "ml_hbhhh", "ml_hd0",
+ "ml_hold", "ml_init", "ml_inv", "ml_log", "ml_max",
+ "ml_mlout", "ml_mlout_8", "ml_model", "ml_nb0", "ml_opt",
+ "ml_p", "ml_plot", "ml_query", "ml_rdgrd", "ml_repor",
+ "ml_s_e", "ml_score", "ml_searc", "ml_technique", "ml_unhold",
+ "mleval", "mlf_", "mlmatbysum", "mlmatsum", "mlog", "mlogi",
+ "mlogit", "mlogit_footnote", "mlogit_p", "mlopts", "mlsum",
+ "mlvecsum", "mnl0_", "mor", "more", "mov", "move", "mprobit",
+ "mprobit_lf", "mprobit_p", "mrdu0_", "mrdu1_", "mvdecode",
+ "mvencode", "mvreg", "mvreg_estat", "n", "nbreg",
+ "nbreg_al", "nbreg_lf", "nbreg_p", "nbreg_sw", "nestreg", "net",
+ "newey", "newey_7", "newey_p", "news", "nl", "nl_7", "nl_9",
+ "nl_9_p", "nl_p", "nl_p_7", "nlcom", "nlcom_p", "nlexp2",
+ "nlexp2_7", "nlexp2a", "nlexp2a_7", "nlexp3", "nlexp3_7",
+ "nlgom3", "nlgom3_7", "nlgom4", "nlgom4_7", "nlinit", "nllog3",
+ "nllog3_7", "nllog4", "nllog4_7", "nlog_rd", "nlogit",
+ "nlogit_p", "nlogitgen", "nlogittree", "nlpred", "no",
+ "nobreak", "noi", "nois", "noisi", "noisil", "noisily", "note",
+ "notes", "notes_dlg", "nptrend", "numlabel", "numlist", "odbc",
+ "old_ver", "olo", "olog", "ologi", "ologi_sw", "ologit",
+ "ologit_p", "ologitp", "on", "one", "onew", "onewa", "oneway",
+ "op_colnm", "op_comp", "op_diff", "op_inv", "op_str", "opr",
+ "opro", "oprob", "oprob_sw", "oprobi", "oprobi_p", "oprobit",
+ "oprobitp", "opts_exclusive", "order", "orthog", "orthpoly",
+ "ou", "out", "outf", "outfi", "outfil", "outfile", "outs",
+ "outsh", "outshe", "outshee", "outsheet", "ovtest", "pac",
+ "pac_7", "palette", "parse", "parse_dissim", "pause", "pca",
+ "pca_8", "pca_display", "pca_estat", "pca_p", "pca_rotate",
+ "pcamat", "pchart", "pchart_7", "pchi", "pchi_7", "pcorr",
+ "pctile", "pentium", "pergram", "pergram_7", "permute",
+ "permute_8", "personal", "peto_st", "pkcollapse", "pkcross",
+ "pkequiv", "pkexamine", "pkexamine_7", "pkshape", "pksumm",
+ "pksumm_7", "pl", "plo", "plot", "plugin", "pnorm",
+ "pnorm_7", "poisgof", "poiss_lf", "poiss_sw", "poisso_p",
+ "poisson", "poisson_estat", "post", "postclose", "postfile",
+ "postutil", "pperron", "pr", "prais", "prais_e", "prais_e2",
+ "prais_p", "predict", "predictnl", "preserve", "print",
+ "pro", "prob", "probi", "probit", "probit_estat", "probit_p",
+ "proc_time", "procoverlay", "procrustes", "procrustes_estat",
+ "procrustes_p", "profiler", "prog", "progr", "progra",
+ "program", "prop", "proportion", "prtest", "prtesti", "pwcorr",
+ "pwd", "q", "s", "qby", "qbys", "qchi", "qchi_7", "qladder",
+ "qladder_7", "qnorm", "qnorm_7", "qqplot", "qqplot_7", "qreg",
+ "qreg_c", "qreg_p", "qreg_sw", "qu", "quadchk", "quantile",
+ "quantile_7", "que", "quer", "query", "range", "ranksum",
+ "ratio", "rchart", "rchart_7", "rcof", "recast", "reclink",
+ "recode", "reg", "reg3", "reg3_p", "regdw", "regr", "regre",
+ "regre_p2", "regres", "regres_p", "regress", "regress_estat",
+ "regriv_p", "remap", "ren", "rena", "renam", "rename",
+ "renpfix", "repeat", "replace", "report", "reshape",
+ "restore", "ret", "retu", "retur", "return", "rm", "rmdir",
+ "robvar", "roccomp", "roccomp_7", "roccomp_8", "rocf_lf",
+ "rocfit", "rocfit_8", "rocgold", "rocplot", "rocplot_7",
+ "roctab", "roctab_7", "rolling", "rologit", "rologit_p",
+ "rot", "rota", "rotat", "rotate", "rotatemat", "rreg",
+ "rreg_p", "ru", "run", "runtest", "rvfplot", "rvfplot_7",
+ "rvpplot", "rvpplot_7", "sa", "safesum", "sample",
+ "sampsi", "sav", "save", "savedresults", "saveold", "sc",
+ "sca", "scal", "scala", "scalar", "scatter", "scm_mine",
+ "sco", "scob_lf", "scob_p", "scobi_sw", "scobit", "scor",
+ "score", "scoreplot", "scoreplot_help", "scree", "screeplot",
+ "screeplot_help", "sdtest", "sdtesti", "se", "search",
+ "separate", "seperate", "serrbar", "serrbar_7", "serset", "set",
+ "set_defaults", "sfrancia", "sh", "she", "shel", "shell",
+ "shewhart", "shewhart_7", "signestimationsample", "signrank",
+ "signtest", "simul", "simul_7", "simulate", "simulate_8",
+ "sktest", "sleep", "slogit", "slogit_d2", "slogit_p", "smooth",
+ "snapspan", "so", "sor", "sort", "spearman", "spikeplot",
+ "spikeplot_7", "spikeplt", "spline_x", "split", "sqreg",
+ "sqreg_p", "sret", "sretu", "sretur", "sreturn", "ssc", "st",
+ "st_ct", "st_hc", "st_hcd", "st_hcd_sh", "st_is", "st_issys",
+ "st_note", "st_promo", "st_set", "st_show", "st_smpl",
+ "st_subid", "stack", "statsby", "statsby_8", "stbase", "stci",
+ "stci_7", "stcox", "stcox_estat", "stcox_fr", "stcox_fr_ll",
+ "stcox_p", "stcox_sw", "stcoxkm", "stcoxkm_7", "stcstat",
+ "stcurv", "stcurve", "stcurve_7", "stdes", "stem", "stepwise",
+ "stereg", "stfill", "stgen", "stir", "stjoin", "stmc", "stmh",
+ "stphplot", "stphplot_7", "stphtest", "stphtest_7",
+ "stptime", "strate", "strate_7", "streg", "streg_sw", "streset",
+ "sts", "sts_7", "stset", "stsplit", "stsum", "sttocc",
+ "sttoct", "stvary", "stweib", "su", "suest", "suest_8",
+ "sum", "summ", "summa", "summar", "summari", "summariz",
+ "summarize", "sunflower", "sureg", "survcurv", "survsum",
+ "svar", "svar_p", "svmat", "svy", "svy_disp", "svy_dreg",
+ "svy_est", "svy_est_7", "svy_estat", "svy_get", "svy_gnbreg_p",
+ "svy_head", "svy_header", "svy_heckman_p", "svy_heckprob_p",
+ "svy_intreg_p", "svy_ivreg_p", "svy_logistic_p", "svy_logit_p",
+ "svy_mlogit_p", "svy_nbreg_p", "svy_ologit_p", "svy_oprobit_p",
+ "svy_poisson_p", "svy_probit_p", "svy_regress_p", "svy_sub",
+ "svy_sub_7", "svy_x", "svy_x_7", "svy_x_p", "svydes",
+ "svydes_8", "svygen", "svygnbreg", "svyheckman", "svyheckprob",
+ "svyintreg", "svyintreg_7", "svyintrg", "svyivreg", "svylc",
+ "svylog_p", "svylogit", "svymarkout", "svymarkout_8",
+ "svymean", "svymlog", "svymlogit", "svynbreg", "svyolog",
+ "svyologit", "svyoprob", "svyoprobit", "svyopts",
+ "svypois", "svypois_7", "svypoisson", "svyprobit", "svyprobt",
+ "svyprop", "svyprop_7", "svyratio", "svyreg", "svyreg_p",
+ "svyregress", "svyset", "svyset_7", "svyset_8", "svytab",
+ "svytab_7", "svytest", "svytotal", "sw", "sw_8", "swcnreg",
+ "swcox", "swereg", "swilk", "swlogis", "swlogit",
+ "swologit", "swoprbt", "swpois", "swprobit", "swqreg",
+ "swtobit", "swweib", "symmetry", "symmi", "symplot",
+ "symplot_7", "syntax", "sysdescribe", "sysdir", "sysuse",
+ "szroeter", "ta", "tab", "tab1", "tab2", "tab_or", "tabd",
+ "tabdi", "tabdis", "tabdisp", "tabi", "table", "tabodds",
+ "tabodds_7", "tabstat", "tabu", "tabul", "tabula", "tabulat",
+ "tabulate", "te", "tempfile", "tempname", "tempvar", "tes",
+ "test", "testnl", "testparm", "teststd", "tetrachoric",
+ "time_it", "timer", "tis", "tob", "tobi", "tobit", "tobit_p",
+ "tobit_sw", "token", "tokeni", "tokeniz", "tokenize",
+ "tostring", "total", "translate", "translator", "transmap",
+ "treat_ll", "treatr_p", "treatreg", "trim", "trnb_cons",
+ "trnb_mean", "trpoiss_d2", "trunc_ll", "truncr_p", "truncreg",
+ "tsappend", "tset", "tsfill", "tsline", "tsline_ex",
+ "tsreport", "tsrevar", "tsrline", "tsset", "tssmooth",
+ "tsunab", "ttest", "ttesti", "tut_chk", "tut_wait", "tutorial",
+ "tw", "tware_st", "two", "twoway", "twoway__fpfit_serset",
+ "twoway__function_gen", "twoway__histogram_gen",
+ "twoway__ipoint_serset", "twoway__ipoints_serset",
+ "twoway__kdensity_gen", "twoway__lfit_serset",
+ "twoway__normgen_gen", "twoway__pci_serset",
+ "twoway__qfit_serset", "twoway__scatteri_serset",
+ "twoway__sunflower_gen", "twoway_ksm_serset", "ty", "typ",
+ "type", "typeof", "u", "unab", "unabbrev", "unabcmd",
+ "update", "us", "use", "uselabel", "var", "var_mkcompanion",
+ "var_p", "varbasic", "varfcast", "vargranger", "varirf",
+ "varirf_add", "varirf_cgraph", "varirf_create", "varirf_ctable",
+ "varirf_describe", "varirf_dir", "varirf_drop", "varirf_erase",
+ "varirf_graph", "varirf_ograph", "varirf_rename", "varirf_set",
+ "varirf_table", "varlist", "varlmar", "varnorm", "varsoc",
+ "varstable", "varstable_w", "varstable_w2", "varwle",
+ "vce", "vec", "vec_fevd", "vec_mkphi", "vec_p", "vec_p_w",
+ "vecirf_create", "veclmar", "veclmar_w", "vecnorm",
+ "vecnorm_w", "vecrank", "vecstable", "verinst", "vers",
+ "versi", "versio", "version", "view", "viewsource", "vif",
+ "vwls", "wdatetof", "webdescribe", "webseek", "webuse",
+ "weib1_lf", "weib2_lf", "weib_lf", "weib_lf0", "weibhet_glf",
+ "weibhet_glf_sh", "weibhet_glfa", "weibhet_glfa_sh",
+ "weibhet_gp", "weibhet_ilf", "weibhet_ilf_sh", "weibhet_ilfa",
+ "weibhet_ilfa_sh", "weibhet_ip", "weibu_sw", "weibul_p",
+ "weibull", "weibull_c", "weibull_s", "weibullhet",
+ "wh", "whelp", "whi", "which", "whil", "while", "wilc_st",
+ "wilcoxon", "win", "wind", "windo", "window", "winexec",
+ "wntestb", "wntestb_7", "wntestq", "xchart", "xchart_7",
+ "xcorr", "xcorr_7", "xi", "xi_6", "xmlsav", "xmlsave",
+ "xmluse", "xpose", "xsh", "xshe", "xshel", "xshell",
+ "xt_iis", "xt_tis", "xtab_p", "xtabond", "xtbin_p",
+ "xtclog", "xtcloglog", "xtcloglog_8", "xtcloglog_d2",
+ "xtcloglog_pa_p", "xtcloglog_re_p", "xtcnt_p", "xtcorr",
+ "xtdata", "xtdes", "xtfront_p", "xtfrontier", "xtgee",
+ "xtgee_elink", "xtgee_estat", "xtgee_makeivar", "xtgee_p",
+ "xtgee_plink", "xtgls", "xtgls_p", "xthaus", "xthausman",
+ "xtht_p", "xthtaylor", "xtile", "xtint_p", "xtintreg",
+ "xtintreg_8", "xtintreg_d2", "xtintreg_p", "xtivp_1",
+ "xtivp_2", "xtivreg", "xtline", "xtline_ex", "xtlogit",
+ "xtlogit_8", "xtlogit_d2", "xtlogit_fe_p", "xtlogit_pa_p",
+ "xtlogit_re_p", "xtmixed", "xtmixed_estat", "xtmixed_p",
+ "xtnb_fe", "xtnb_lf", "xtnbreg", "xtnbreg_pa_p",
+ "xtnbreg_refe_p", "xtpcse", "xtpcse_p", "xtpois", "xtpoisson",
+ "xtpoisson_d2", "xtpoisson_pa_p", "xtpoisson_refe_p", "xtpred",
+ "xtprobit", "xtprobit_8", "xtprobit_d2", "xtprobit_re_p",
+ "xtps_fe", "xtps_lf", "xtps_ren", "xtps_ren_8", "xtrar_p",
+ "xtrc", "xtrc_p", "xtrchh", "xtrefe_p", "xtreg", "xtreg_be",
+ "xtreg_fe", "xtreg_ml", "xtreg_pa_p", "xtreg_re",
+ "xtregar", "xtrere_p", "xtset", "xtsf_ll", "xtsf_llti",
+ "xtsum", "xttab", "xttest0", "xttobit", "xttobit_8",
+ "xttobit_p", "xttrans", "yx", "yxview__barlike_draw",
+ "yxview_area_draw", "yxview_bar_draw", "yxview_dot_draw",
+ "yxview_dropline_draw", "yxview_function_draw",
+ "yxview_iarrow_draw", "yxview_ilabels_draw",
+ "yxview_normal_draw", "yxview_pcarrow_draw",
+ "yxview_pcbarrow_draw", "yxview_pccapsym_draw",
+ "yxview_pcscatter_draw", "yxview_pcspike_draw",
+ "yxview_rarea_draw", "yxview_rbar_draw", "yxview_rbarm_draw",
+ "yxview_rcap_draw", "yxview_rcapsym_draw",
+ "yxview_rconnected_draw", "yxview_rline_draw",
+ "yxview_rscatter_draw", "yxview_rspike_draw",
+ "yxview_spike_draw", "yxview_sunflower_draw", "zap_s", "zinb",
+ "zinb_llf", "zinb_plf", "zip", "zip_llf", "zip_p", "zip_plf",
+ "zt_ct_5", "zt_hc_5", "zt_hcd_5", "zt_is_5", "zt_iss_5",
+ "zt_sho_5", "zt_smp_5", "ztbase_5", "ztcox_5", "ztdes_5",
+ "ztereg_5", "ztfill_5", "ztgen_5", "ztir_5", "ztjoin_5", "ztnb",
+ "ztnb_p", "ztp", "ztp_p", "zts_5", "ztset_5", "ztspli_5",
+ "ztsum_5", "zttoct_5", "ztvary_5", "ztweib_5"
+)
+
+builtins_functions = (
+ "Cdhms", "Chms", "Clock", "Cmdyhms", "Cofc", "Cofd", "F",
+ "Fden", "Ftail", "I", "J", "_caller", "abbrev", "abs", "acos",
+ "acosh", "asin", "asinh", "atan", "atan2", "atanh",
+ "autocode", "betaden", "binomial", "binomialp", "binomialtail",
+ "binormal", "bofd", "byteorder", "c", "ceil", "char",
+ "chi2", "chi2den", "chi2tail", "cholesky", "chop", "clip",
+ "clock", "cloglog", "cofC", "cofd", "colnumb", "colsof", "comb",
+ "cond", "corr", "cos", "cosh", "d", "daily", "date", "day",
+ "det", "dgammapda", "dgammapdada", "dgammapdadx", "dgammapdx",
+ "dgammapdxdx", "dhms", "diag", "diag0cnt", "digamma",
+ "dofC", "dofb", "dofc", "dofh", "dofm", "dofq", "dofw",
+ "dofy", "dow", "doy", "dunnettprob", "e", "el", "epsdouble",
+ "epsfloat", "exp", "fileexists", "fileread", "filereaderror",
+ "filewrite", "float", "floor", "fmtwidth", "gammaden",
+ "gammap", "gammaptail", "get", "group", "h", "hadamard",
+ "halfyear", "halfyearly", "has_eprop", "hh", "hhC", "hms",
+ "hofd", "hours", "hypergeometric", "hypergeometricp", "ibeta",
+ "ibetatail", "index", "indexnot", "inlist", "inrange", "int",
+ "inv", "invF", "invFtail", "invbinomial", "invbinomialtail",
+ "invchi2", "invchi2tail", "invcloglog", "invdunnettprob",
+ "invgammap", "invgammaptail", "invibeta", "invibetatail",
+ "invlogit", "invnFtail", "invnbinomial", "invnbinomialtail",
+ "invnchi2", "invnchi2tail", "invnibeta", "invnorm", "invnormal",
+ "invnttail", "invpoisson", "invpoissontail", "invsym", "invt",
+ "invttail", "invtukeyprob", "irecode", "issym", "issymmetric",
+ "itrim", "length", "ln", "lnfact", "lnfactorial", "lngamma",
+ "lnnormal", "lnnormalden", "log", "log10", "logit", "lower",
+ "ltrim", "m", "match", "matmissing", "matrix", "matuniform",
+ "max", "maxbyte", "maxdouble", "maxfloat", "maxint", "maxlong",
+ "mdy", "mdyhms", "mi", "min", "minbyte", "mindouble",
+ "minfloat", "minint", "minlong", "minutes", "missing", "mm",
+ "mmC", "mod", "mofd", "month", "monthly", "mreldif",
+ "msofhours", "msofminutes", "msofseconds", "nF", "nFden",
+ "nFtail", "nbetaden", "nbinomial", "nbinomialp", "nbinomialtail",
+ "nchi2", "nchi2den", "nchi2tail", "nibeta", "norm", "normal",
+ "normalden", "normd", "npnF", "npnchi2", "npnt", "nt", "ntden",
+ "nttail", "nullmat", "plural", "poisson", "poissonp",
+ "poissontail", "proper", "q", "qofd", "quarter", "quarterly",
+ "r", "rbeta", "rbinomial", "rchi2", "real", "recode", "regexm",
+ "regexr", "regexs", "reldif", "replay", "return", "reverse",
+ "rgamma", "rhypergeometric", "rnbinomial", "rnormal", "round",
+ "rownumb", "rowsof", "rpoisson", "rt", "rtrim", "runiform", "s",
+ "scalar", "seconds", "sign", "sin", "sinh", "smallestdouble",
+ "soundex", "soundex_nara", "sqrt", "ss", "ssC", "strcat",
+ "strdup", "string", "strlen", "strlower", "strltrim", "strmatch",
+ "strofreal", "strpos", "strproper", "strreverse", "strrtrim",
+ "strtoname", "strtrim", "strupper", "subinstr", "subinword",
+ "substr", "sum", "sweep", "syminv", "t", "tC", "tan", "tanh",
+ "tc", "td", "tden", "th", "tin", "tm", "tq", "trace",
+ "trigamma", "trim", "trunc", "ttail", "tukeyprob", "tw",
+ "twithin", "uniform", "upper", "vec", "vecdiag", "w", "week",
+ "weekly", "wofd", "word", "wordcount", "year", "yearly",
+ "yh", "ym", "yofd", "yq", "yw"
+)
+
+
diff --git a/pygments/lexers/_tsql_builtins.py b/pygments/lexers/_tsql_builtins.py
new file mode 100644
index 00000000..e29ed34b
--- /dev/null
+++ b/pygments/lexers/_tsql_builtins.py
@@ -0,0 +1,1004 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._tsql_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ These are manually translated lists from https://msdn.microsoft.com.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+# See https://msdn.microsoft.com/en-us/library/ms174986.aspx.
+OPERATORS = (
+ '!<',
+ '!=',
+ '!>',
+ '<',
+ '<=',
+ '<>',
+ '=',
+ '>',
+ '>=',
+ '+',
+ '+=',
+ '-',
+ '-=',
+ '*',
+ '*=',
+ '/',
+ '/=',
+ '%',
+ '%=',
+ '&',
+ '&=',
+ '|',
+ '|=',
+ '^',
+ '^=',
+ '~',
+ '::',
+)
+
+OPERATOR_WORDS = (
+ 'all',
+ 'and',
+ 'any',
+ 'between',
+ 'except',
+ 'exists',
+ 'in',
+ 'intersect',
+ 'like',
+ 'not',
+ 'or',
+ 'some',
+ 'union',
+)
+
+_KEYWORDS_SERVER = (
+ 'add',
+ 'all',
+ 'alter',
+ 'and',
+ 'any',
+ 'as',
+ 'asc',
+ 'authorization',
+ 'backup',
+ 'begin',
+ 'between',
+ 'break',
+ 'browse',
+ 'bulk',
+ 'by',
+ 'cascade',
+ 'case',
+ 'catch',
+ 'check',
+ 'checkpoint',
+ 'close',
+ 'clustered',
+ 'coalesce',
+ 'collate',
+ 'column',
+ 'commit',
+ 'compute',
+ 'constraint',
+ 'contains',
+ 'containstable',
+ 'continue',
+ 'convert',
+ 'create',
+ 'cross',
+ 'current',
+ 'current_date',
+ 'current_time',
+ 'current_timestamp',
+ 'current_user',
+ 'cursor',
+ 'database',
+ 'dbcc',
+ 'deallocate',
+ 'declare',
+ 'default',
+ 'delete',
+ 'deny',
+ 'desc',
+ 'disk',
+ 'distinct',
+ 'distributed',
+ 'double',
+ 'drop',
+ 'dump',
+ 'else',
+ 'end',
+ 'errlvl',
+ 'escape',
+ 'except',
+ 'exec',
+ 'execute',
+ 'exists',
+ 'exit',
+ 'external',
+ 'fetch',
+ 'file',
+ 'fillfactor',
+ 'for',
+ 'foreign',
+ 'freetext',
+ 'freetexttable',
+ 'from',
+ 'full',
+ 'function',
+ 'goto',
+ 'grant',
+ 'group',
+ 'having',
+ 'holdlock',
+ 'identity',
+ 'identity_insert',
+ 'identitycol',
+ 'if',
+ 'in',
+ 'index',
+ 'inner',
+ 'insert',
+ 'intersect',
+ 'into',
+ 'is',
+ 'join',
+ 'key',
+ 'kill',
+ 'left',
+ 'like',
+ 'lineno',
+ 'load',
+ 'merge',
+ 'national',
+ 'nocheck',
+ 'nonclustered',
+ 'not',
+ 'null',
+ 'nullif',
+ 'of',
+ 'off',
+ 'offsets',
+ 'on',
+ 'open',
+ 'opendatasource',
+ 'openquery',
+ 'openrowset',
+ 'openxml',
+ 'option',
+ 'or',
+ 'order',
+ 'outer',
+ 'over',
+ 'percent',
+ 'pivot',
+ 'plan',
+ 'precision',
+ 'primary',
+ 'print',
+ 'proc',
+ 'procedure',
+ 'public',
+ 'raiserror',
+ 'read',
+ 'readtext',
+ 'reconfigure',
+ 'references',
+ 'replication',
+ 'restore',
+ 'restrict',
+ 'return',
+ 'revert',
+ 'revoke',
+ 'right',
+ 'rollback',
+ 'rowcount',
+ 'rowguidcol',
+ 'rule',
+ 'save',
+ 'schema',
+ 'securityaudit',
+ 'select',
+ 'semantickeyphrasetable',
+ 'semanticsimilaritydetailstable',
+ 'semanticsimilaritytable',
+ 'session_user',
+ 'set',
+ 'setuser',
+ 'shutdown',
+ 'some',
+ 'statistics',
+ 'system_user',
+ 'table',
+ 'tablesample',
+ 'textsize',
+ 'then',
+ 'throw',
+ 'to',
+ 'top',
+ 'tran',
+ 'transaction',
+ 'trigger',
+ 'truncate',
+ 'try',
+ 'try_convert',
+ 'tsequal',
+ 'union',
+ 'unique',
+ 'unpivot',
+ 'update',
+ 'updatetext',
+ 'use',
+ 'user',
+ 'values',
+ 'varying',
+ 'view',
+ 'waitfor',
+ 'when',
+ 'where',
+ 'while',
+ 'with',
+ 'within',
+ 'writetext',
+)
+
+_KEYWORDS_FUTURE = (
+ 'absolute',
+ 'action',
+ 'admin',
+ 'after',
+ 'aggregate',
+ 'alias',
+ 'allocate',
+ 'are',
+ 'array',
+ 'asensitive',
+ 'assertion',
+ 'asymmetric',
+ 'at',
+ 'atomic',
+ 'before',
+ 'binary',
+ 'bit',
+ 'blob',
+ 'boolean',
+ 'both',
+ 'breadth',
+ 'call',
+ 'called',
+ 'cardinality',
+ 'cascaded',
+ 'cast',
+ 'catalog',
+ 'char',
+ 'character',
+ 'class',
+ 'clob',
+ 'collation',
+ 'collect',
+ 'completion',
+ 'condition',
+ 'connect',
+ 'connection',
+ 'constraints',
+ 'constructor',
+ 'corr',
+ 'corresponding',
+ 'covar_pop',
+ 'covar_samp',
+ 'cube',
+ 'cume_dist',
+ 'current_catalog',
+ 'current_default_transform_group',
+ 'current_path',
+ 'current_role',
+ 'current_schema',
+ 'current_transform_group_for_type',
+ 'cycle',
+ 'data',
+ 'date',
+ 'day',
+ 'dec',
+ 'decimal',
+ 'deferrable',
+ 'deferred',
+ 'depth',
+ 'deref',
+ 'describe',
+ 'descriptor',
+ 'destroy',
+ 'destructor',
+ 'deterministic',
+ 'diagnostics',
+ 'dictionary',
+ 'disconnect',
+ 'domain',
+ 'dynamic',
+ 'each',
+ 'element',
+ 'end-exec',
+ 'equals',
+ 'every',
+ 'exception',
+ 'false',
+ 'filter',
+ 'first',
+ 'float',
+ 'found',
+ 'free',
+ 'fulltexttable',
+ 'fusion',
+ 'general',
+ 'get',
+ 'global',
+ 'go',
+ 'grouping',
+ 'hold',
+ 'host',
+ 'hour',
+ 'ignore',
+ 'immediate',
+ 'indicator',
+ 'initialize',
+ 'initially',
+ 'inout',
+ 'input',
+ 'int',
+ 'integer',
+ 'intersection',
+ 'interval',
+ 'isolation',
+ 'iterate',
+ 'language',
+ 'large',
+ 'last',
+ 'lateral',
+ 'leading',
+ 'less',
+ 'level',
+ 'like_regex',
+ 'limit',
+ 'ln',
+ 'local',
+ 'localtime',
+ 'localtimestamp',
+ 'locator',
+ 'map',
+ 'match',
+ 'member',
+ 'method',
+ 'minute',
+ 'mod',
+ 'modifies',
+ 'modify',
+ 'module',
+ 'month',
+ 'multiset',
+ 'names',
+ 'natural',
+ 'nchar',
+ 'nclob',
+ 'new',
+ 'next',
+ 'no',
+ 'none',
+ 'normalize',
+ 'numeric',
+ 'object',
+ 'occurrences_regex',
+ 'old',
+ 'only',
+ 'operation',
+ 'ordinality',
+ 'out',
+ 'output',
+ 'overlay',
+ 'pad',
+ 'parameter',
+ 'parameters',
+ 'partial',
+ 'partition',
+ 'path',
+ 'percent_rank',
+ 'percentile_cont',
+ 'percentile_disc',
+ 'position_regex',
+ 'postfix',
+ 'prefix',
+ 'preorder',
+ 'prepare',
+ 'preserve',
+ 'prior',
+ 'privileges',
+ 'range',
+ 'reads',
+ 'real',
+ 'recursive',
+ 'ref',
+ 'referencing',
+ 'regr_avgx',
+ 'regr_avgy',
+ 'regr_count',
+ 'regr_intercept',
+ 'regr_r2',
+ 'regr_slope',
+ 'regr_sxx',
+ 'regr_sxy',
+ 'regr_syy',
+ 'relative',
+ 'release',
+ 'result',
+ 'returns',
+ 'role',
+ 'rollup',
+ 'routine',
+ 'row',
+ 'rows',
+ 'savepoint',
+ 'scope',
+ 'scroll',
+ 'search',
+ 'second',
+ 'section',
+ 'sensitive',
+ 'sequence',
+ 'session',
+ 'sets',
+ 'similar',
+ 'size',
+ 'smallint',
+ 'space',
+ 'specific',
+ 'specifictype',
+ 'sql',
+ 'sqlexception',
+ 'sqlstate',
+ 'sqlwarning',
+ 'start',
+ 'state',
+ 'statement',
+ 'static',
+ 'stddev_pop',
+ 'stddev_samp',
+ 'structure',
+ 'submultiset',
+ 'substring_regex',
+ 'symmetric',
+ 'system',
+ 'temporary',
+ 'terminate',
+ 'than',
+ 'time',
+ 'timestamp',
+ 'timezone_hour',
+ 'timezone_minute',
+ 'trailing',
+ 'translate_regex',
+ 'translation',
+ 'treat',
+ 'true',
+ 'uescape',
+ 'under',
+ 'unknown',
+ 'unnest',
+ 'usage',
+ 'using',
+ 'value',
+ 'var_pop',
+ 'var_samp',
+ 'varchar',
+ 'variable',
+ 'whenever',
+ 'width_bucket',
+ 'window',
+ 'within',
+ 'without',
+ 'work',
+ 'write',
+ 'xmlagg',
+ 'xmlattributes',
+ 'xmlbinary',
+ 'xmlcast',
+ 'xmlcomment',
+ 'xmlconcat',
+ 'xmldocument',
+ 'xmlelement',
+ 'xmlexists',
+ 'xmlforest',
+ 'xmliterate',
+ 'xmlnamespaces',
+ 'xmlparse',
+ 'xmlpi',
+ 'xmlquery',
+ 'xmlserialize',
+ 'xmltable',
+ 'xmltext',
+ 'xmlvalidate',
+ 'year',
+ 'zone',
+)
+
+_KEYWORDS_ODBC = (
+ 'absolute',
+ 'action',
+ 'ada',
+ 'add',
+ 'all',
+ 'allocate',
+ 'alter',
+ 'and',
+ 'any',
+ 'are',
+ 'as',
+ 'asc',
+ 'assertion',
+ 'at',
+ 'authorization',
+ 'avg',
+ 'begin',
+ 'between',
+ 'bit',
+ 'bit_length',
+ 'both',
+ 'by',
+ 'cascade',
+ 'cascaded',
+ 'case',
+ 'cast',
+ 'catalog',
+ 'char',
+ 'char_length',
+ 'character',
+ 'character_length',
+ 'check',
+ 'close',
+ 'coalesce',
+ 'collate',
+ 'collation',
+ 'column',
+ 'commit',
+ 'connect',
+ 'connection',
+ 'constraint',
+ 'constraints',
+ 'continue',
+ 'convert',
+ 'corresponding',
+ 'count',
+ 'create',
+ 'cross',
+ 'current',
+ 'current_date',
+ 'current_time',
+ 'current_timestamp',
+ 'current_user',
+ 'cursor',
+ 'date',
+ 'day',
+ 'deallocate',
+ 'dec',
+ 'decimal',
+ 'declare',
+ 'default',
+ 'deferrable',
+ 'deferred',
+ 'delete',
+ 'desc',
+ 'describe',
+ 'descriptor',
+ 'diagnostics',
+ 'disconnect',
+ 'distinct',
+ 'domain',
+ 'double',
+ 'drop',
+ 'else',
+ 'end',
+ 'end-exec',
+ 'escape',
+ 'except',
+ 'exception',
+ 'exec',
+ 'execute',
+ 'exists',
+ 'external',
+ 'extract',
+ 'false',
+ 'fetch',
+ 'first',
+ 'float',
+ 'for',
+ 'foreign',
+ 'fortran',
+ 'found',
+ 'from',
+ 'full',
+ 'get',
+ 'global',
+ 'go',
+ 'goto',
+ 'grant',
+ 'group',
+ 'having',
+ 'hour',
+ 'identity',
+ 'immediate',
+ 'in',
+ 'include',
+ 'index',
+ 'indicator',
+ 'initially',
+ 'inner',
+ 'input',
+ 'insensitive',
+ 'insert',
+ 'int',
+ 'integer',
+ 'intersect',
+ 'interval',
+ 'into',
+ 'is',
+ 'isolation',
+ 'join',
+ 'key',
+ 'language',
+ 'last',
+ 'leading',
+ 'left',
+ 'level',
+ 'like',
+ 'local',
+ 'lower',
+ 'match',
+ 'max',
+ 'min',
+ 'minute',
+ 'module',
+ 'month',
+ 'names',
+ 'national',
+ 'natural',
+ 'nchar',
+ 'next',
+ 'no',
+ 'none',
+ 'not',
+ 'null',
+ 'nullif',
+ 'numeric',
+ 'octet_length',
+ 'of',
+ 'on',
+ 'only',
+ 'open',
+ 'option',
+ 'or',
+ 'order',
+ 'outer',
+ 'output',
+ 'overlaps',
+ 'pad',
+ 'partial',
+ 'pascal',
+ 'position',
+ 'precision',
+ 'prepare',
+ 'preserve',
+ 'primary',
+ 'prior',
+ 'privileges',
+ 'procedure',
+ 'public',
+ 'read',
+ 'real',
+ 'references',
+ 'relative',
+ 'restrict',
+ 'revoke',
+ 'right',
+ 'rollback',
+ 'rows',
+ 'schema',
+ 'scroll',
+ 'second',
+ 'section',
+ 'select',
+ 'session',
+ 'session_user',
+ 'set',
+ 'size',
+ 'smallint',
+ 'some',
+ 'space',
+ 'sql',
+ 'sqlca',
+ 'sqlcode',
+ 'sqlerror',
+ 'sqlstate',
+ 'sqlwarning',
+ 'substring',
+ 'sum',
+ 'system_user',
+ 'table',
+ 'temporary',
+ 'then',
+ 'time',
+ 'timestamp',
+ 'timezone_hour',
+ 'timezone_minute',
+ 'to',
+ 'trailing',
+ 'transaction',
+ 'translate',
+ 'translation',
+ 'trim',
+ 'true',
+ 'union',
+ 'unique',
+ 'unknown',
+ 'update',
+ 'upper',
+ 'usage',
+ 'user',
+ 'using',
+ 'value',
+ 'values',
+ 'varchar',
+ 'varying',
+ 'view',
+ 'when',
+ 'whenever',
+ 'where',
+ 'with',
+ 'work',
+ 'write',
+ 'year',
+ 'zone',
+)
+
+# See https://msdn.microsoft.com/en-us/library/ms189822.aspx.
+KEYWORDS = sorted(set(_KEYWORDS_FUTURE + _KEYWORDS_ODBC + _KEYWORDS_SERVER))
+
+# See https://msdn.microsoft.com/en-us/library/ms187752.aspx.
+TYPES = (
+ 'bigint',
+ 'binary',
+ 'bit',
+ 'char',
+ 'cursor',
+ 'date',
+ 'datetime',
+ 'datetime2',
+ 'datetimeoffset',
+ 'decimal',
+ 'float',
+ 'hierarchyid',
+ 'image',
+ 'int',
+ 'money',
+ 'nchar',
+ 'ntext',
+ 'numeric',
+ 'nvarchar',
+ 'real',
+ 'smalldatetime',
+ 'smallint',
+ 'smallmoney',
+ 'sql_variant',
+ 'table',
+ 'text',
+ 'time',
+ 'timestamp',
+ 'tinyint',
+ 'uniqueidentifier',
+ 'varbinary',
+ 'varchar',
+ 'xml',
+)
+
+# See https://msdn.microsoft.com/en-us/library/ms174318.aspx.
+FUNCTIONS = (
+ '$partition',
+ 'abs',
+ 'acos',
+ 'app_name',
+ 'applock_mode',
+ 'applock_test',
+ 'ascii',
+ 'asin',
+ 'assemblyproperty',
+ 'atan',
+ 'atn2',
+ 'avg',
+ 'binary_checksum',
+ 'cast',
+ 'ceiling',
+ 'certencoded',
+ 'certprivatekey',
+ 'char',
+ 'charindex',
+ 'checksum',
+ 'checksum_agg',
+ 'choose',
+ 'col_length',
+ 'col_name',
+ 'columnproperty',
+ 'compress',
+ 'concat',
+ 'connectionproperty',
+ 'context_info',
+ 'convert',
+ 'cos',
+ 'cot',
+ 'count',
+ 'count_big',
+ 'current_request_id',
+ 'current_timestamp',
+ 'current_transaction_id',
+ 'current_user',
+ 'cursor_status',
+ 'database_principal_id',
+ 'databasepropertyex',
+ 'dateadd',
+ 'datediff',
+ 'datediff_big',
+ 'datefromparts',
+ 'datename',
+ 'datepart',
+ 'datetime2fromparts',
+ 'datetimefromparts',
+ 'datetimeoffsetfromparts',
+ 'day',
+ 'db_id',
+ 'db_name',
+ 'decompress',
+ 'degrees',
+ 'dense_rank',
+ 'difference',
+ 'eomonth',
+ 'error_line',
+ 'error_message',
+ 'error_number',
+ 'error_procedure',
+ 'error_severity',
+ 'error_state',
+ 'exp',
+ 'file_id',
+ 'file_idex',
+ 'file_name',
+ 'filegroup_id',
+ 'filegroup_name',
+ 'filegroupproperty',
+ 'fileproperty',
+ 'floor',
+ 'format',
+ 'formatmessage',
+ 'fulltextcatalogproperty',
+ 'fulltextserviceproperty',
+ 'get_filestream_transaction_context',
+ 'getansinull',
+ 'getdate',
+ 'getutcdate',
+ 'grouping',
+ 'grouping_id',
+ 'has_perms_by_name',
+ 'host_id',
+ 'host_name',
+ 'iif',
+ 'index_col',
+ 'indexkey_property',
+ 'indexproperty',
+ 'is_member',
+ 'is_rolemember',
+ 'is_srvrolemember',
+ 'isdate',
+ 'isjson',
+ 'isnull',
+ 'isnumeric',
+ 'json_modify',
+ 'json_query',
+ 'json_value',
+ 'left',
+ 'len',
+ 'log',
+ 'log10',
+ 'lower',
+ 'ltrim',
+ 'max',
+ 'min',
+ 'min_active_rowversion',
+ 'month',
+ 'nchar',
+ 'newid',
+ 'newsequentialid',
+ 'ntile',
+ 'object_definition',
+ 'object_id',
+ 'object_name',
+ 'object_schema_name',
+ 'objectproperty',
+ 'objectpropertyex',
+ 'opendatasource',
+ 'openjson',
+ 'openquery',
+ 'openrowset',
+ 'openxml',
+ 'original_db_name',
+ 'original_login',
+ 'parse',
+ 'parsename',
+ 'patindex',
+ 'permissions',
+ 'pi',
+ 'power',
+ 'pwdcompare',
+ 'pwdencrypt',
+ 'quotename',
+ 'radians',
+ 'rand',
+ 'rank',
+ 'replace',
+ 'replicate',
+ 'reverse',
+ 'right',
+ 'round',
+ 'row_number',
+ 'rowcount_big',
+ 'rtrim',
+ 'schema_id',
+ 'schema_name',
+ 'scope_identity',
+ 'serverproperty',
+ 'session_context',
+ 'session_user',
+ 'sign',
+ 'sin',
+ 'smalldatetimefromparts',
+ 'soundex',
+ 'sp_helplanguage',
+ 'space',
+ 'sqrt',
+ 'square',
+ 'stats_date',
+ 'stdev',
+ 'stdevp',
+ 'str',
+ 'string_escape',
+ 'string_split',
+ 'stuff',
+ 'substring',
+ 'sum',
+ 'suser_id',
+ 'suser_name',
+ 'suser_sid',
+ 'suser_sname',
+ 'switchoffset',
+ 'sysdatetime',
+ 'sysdatetimeoffset',
+ 'system_user',
+ 'sysutcdatetime',
+ 'tan',
+ 'textptr',
+ 'textvalid',
+ 'timefromparts',
+ 'todatetimeoffset',
+ 'try_cast',
+ 'try_convert',
+ 'try_parse',
+ 'type_id',
+ 'type_name',
+ 'typeproperty',
+ 'unicode',
+ 'upper',
+ 'user_id',
+ 'user_name',
+ 'var',
+ 'varp',
+ 'xact_state',
+ 'year',
+)
diff --git a/pygments/lexers/_vim_builtins.py b/pygments/lexers/_vim_builtins.py
index e9b5fa1e..82586289 100644
--- a/pygments/lexers/_vim_builtins.py
+++ b/pygments/lexers/_vim_builtins.py
@@ -5,7 +5,7 @@
This file is autogenerated by scripts/get_vimkw.py
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/actionscript.py b/pygments/lexers/actionscript.py
index 9c687a57..84607e68 100644
--- a/pygments/lexers/actionscript.py
+++ b/pygments/lexers/actionscript.py
@@ -5,7 +5,7 @@
Lexers for ActionScript and MXML.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
index defa7b6e..cb200b9e 100644
--- a/pygments/lexers/agile.py
+++ b/pygments/lexers/agile.py
@@ -5,7 +5,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/algebra.py b/pygments/lexers/algebra.py
index 79460ad4..15d68842 100644
--- a/pygments/lexers/algebra.py
+++ b/pygments/lexers/algebra.py
@@ -5,7 +5,7 @@
Lexers for computer algebra systems.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/ambient.py b/pygments/lexers/ambient.py
index 7f622fbc..53f3a5e1 100644
--- a/pygments/lexers/ambient.py
+++ b/pygments/lexers/ambient.py
@@ -5,7 +5,7 @@
Lexers for AmbientTalk language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/ampl.py b/pygments/lexers/ampl.py
index c3ca80d4..d439cb19 100644
--- a/pygments/lexers/ampl.py
+++ b/pygments/lexers/ampl.py
@@ -5,7 +5,7 @@
Lexers for the ampl language. <http://ampl.com/>
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/apl.py b/pygments/lexers/apl.py
index 61ea4c4b..b3414cc0 100644
--- a/pygments/lexers/apl.py
+++ b/pygments/lexers/apl.py
@@ -5,7 +5,7 @@
Lexers for APL.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/archetype.py b/pygments/lexers/archetype.py
index e596b7be..5d4eb9aa 100644
--- a/pygments/lexers/archetype.py
+++ b/pygments/lexers/archetype.py
@@ -14,7 +14,7 @@
Contributed by Thomas Beale <https://github.com/wolandscat>,
<https://bitbucket.org/thomas_beale>.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/asm.py b/pygments/lexers/asm.py
index 325cbbed..9c58478e 100644
--- a/pygments/lexers/asm.py
+++ b/pygments/lexers/asm.py
@@ -5,7 +5,7 @@
Lexers for assembly languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,7 +20,7 @@ from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
__all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer',
'CObjdumpLexer', 'HsailLexer', 'LlvmLexer', 'NasmLexer',
- 'NasmObjdumpLexer', 'Ca65Lexer']
+ 'NasmObjdumpLexer', 'TasmLexer', 'Ca65Lexer']
class GasLexer(RegexLexer):
@@ -54,8 +54,6 @@ class GasLexer(RegexLexer):
(number, Number.Integer),
(r'[\r\n]+', Text, '#pop'),
- (r'#.*?$', Comment, '#pop'),
-
include('punctuation'),
include('whitespace')
],
@@ -78,14 +76,14 @@ class GasLexer(RegexLexer):
('$'+number, Number.Integer),
(r"$'(.|\\')'", String.Char),
(r'[\r\n]+', Text, '#pop'),
- (r'#.*?$', Comment, '#pop'),
+
include('punctuation'),
include('whitespace')
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
- (r'#.*?\n', Comment)
+ (r'[;#].*?\n', Comment)
],
'punctuation': [
(r'[-*,.()\[\]!:]+', Punctuation)
@@ -281,7 +279,7 @@ class HsailLexer(RegexLexer):
'enabledetectexceptions', 'maxdynamicgroupsize', 'maxflatgridsize',
'maxflatworkgroupsize', 'requireddim', 'requiredgridsize',
'requiredworkgroupsize', 'requirenopartialworkgroups'),
- suffix=r'\b'), Keyword),
+ suffix=r'\b'), Keyword),
# instructions
(roundingMod, Keyword),
@@ -412,12 +410,24 @@ class LlvmLexer(RegexLexer):
'unwind', 'unreachable', 'indirectbr', 'landingpad', 'resume',
'malloc', 'alloca', 'free', 'load', 'store', 'getelementptr',
'extractelement', 'insertelement', 'shufflevector', 'getresult',
- 'extractvalue', 'insertvalue', 'atomicrmw', 'cmpxchg', 'fence'),
- suffix=r'\b'), Keyword),
+ 'extractvalue', 'insertvalue', 'atomicrmw', 'cmpxchg', 'fence',
+ 'allocsize', 'amdgpu_cs', 'amdgpu_gs', 'amdgpu_kernel', 'amdgpu_ps',
+ 'amdgpu_vs', 'any', 'anyregcc', 'argmemonly', 'avr_intrcc',
+ 'avr_signalcc', 'caller', 'catchpad', 'catchret', 'catchswitch',
+ 'cleanuppad', 'cleanupret', 'comdat', 'convergent', 'cxx_fast_tlscc',
+ 'deplibs', 'dereferenceable', 'dereferenceable_or_null', 'distinct',
+ 'exactmatch', 'externally_initialized', 'from', 'ghccc', 'hhvm_ccc',
+ 'hhvmcc', 'ifunc', 'inaccessiblemem_or_argmemonly', 'inaccessiblememonly',
+ 'inalloca', 'jumptable', 'largest', 'local_unnamed_addr', 'minsize',
+ 'musttail', 'noduplicates', 'none', 'nonnull', 'norecurse', 'notail',
+ 'preserve_allcc', 'preserve_mostcc', 'prologue', 'safestack', 'samesize',
+ 'source_filename', 'swiftcc', 'swifterror', 'swiftself', 'webkit_jscc',
+ 'within', 'writeonly', 'x86_intrcc', 'x86_vectorcallcc'),
+ suffix=r'\b'), Keyword),
# Types
(words(('void', 'half', 'float', 'double', 'x86_fp80', 'fp128',
- 'ppc_fp128', 'label', 'metadata')), Keyword.Type),
+ 'ppc_fp128', 'label', 'metadata', 'token')), Keyword.Type),
# Integer types
(r'i[1-9]\d*', Keyword)
@@ -512,6 +522,86 @@ class NasmObjdumpLexer(ObjdumpLexer):
tokens = _objdump_lexer_tokens(NasmLexer)
+class TasmLexer(RegexLexer):
+ """
+ For Tasm (Turbo Assembler) assembly code.
+ """
+ name = 'TASM'
+ aliases = ['tasm']
+ filenames = ['*.asm', '*.ASM', '*.tasm']
+ mimetypes = ['text/x-tasm']
+
+ identifier = r'[@a-z$._?][\w$.?#@~]*'
+ hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
+ octn = r'[0-7]+q'
+ binn = r'[01]+b'
+ decn = r'[0-9]+'
+ floatn = decn + r'\.e?' + decn
+ string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`"
+ declkw = r'(?:res|d)[bwdqt]|times'
+ register = (r'r[0-9][0-5]?[bwd]|'
+ r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
+ r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]')
+ wordop = r'seg|wrt|strict'
+ type = r'byte|[dq]?word'
+ directives = (r'BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|'
+ r'ORG|ALIGN|STRUC|ENDSTRUC|ENDS|COMMON|CPU|GROUP|UPPERCASE|INCLUDE|'
+ r'EXPORT|LIBRARY|MODULE|PROC|ENDP|USES|ARG|DATASEG|UDATASEG|END|IDEAL|'
+ r'P386|MODEL|ASSUME|CODESEG|SIZE')
+ # T[A-Z][a-z] is more of a convention. Lexer should filter out STRUC definitions
+ # and then 'add' them to datatype somehow.
+ datatype = (r'db|dd|dw|T[A-Z][a-z]+')
+
+ flags = re.IGNORECASE | re.MULTILINE
+ tokens = {
+ 'root': [
+ (r'^\s*%', Comment.Preproc, 'preproc'),
+ include('whitespace'),
+ (identifier + ':', Name.Label),
+ (directives, Keyword, 'instruction-args'),
+ (r'(%s)(\s+)(%s)' % (identifier, datatype),
+ bygroups(Name.Constant, Keyword.Declaration, Keyword.Declaration),
+ 'instruction-args'),
+ (declkw, Keyword.Declaration, 'instruction-args'),
+ (identifier, Name.Function, 'instruction-args'),
+ (r'[\r\n]+', Text)
+ ],
+ 'instruction-args': [
+ (string, String),
+ (hexn, Number.Hex),
+ (octn, Number.Oct),
+ (binn, Number.Bin),
+ (floatn, Number.Float),
+ (decn, Number.Integer),
+ include('punctuation'),
+ (register, Name.Builtin),
+ (identifier, Name.Variable),
+            # Do not match newline when it's preceded by a backslash
+ (r'(\\\s*)(;.*)([\r\n])', bygroups(Text, Comment.Single, Text)),
+ (r'[\r\n]+', Text, '#pop'),
+ include('whitespace')
+ ],
+ 'preproc': [
+ (r'[^;\n]+', Comment.Preproc),
+ (r';.*?\n', Comment.Single, '#pop'),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'whitespace': [
+ (r'[\n\r]', Text),
+ (r'\\[\n\r]', Text),
+ (r'[ \t]+', Text),
+ (r';.*', Comment.Single)
+ ],
+ 'punctuation': [
+ (r'[,():\[\]]+', Punctuation),
+ (r'[&|^<>+*=/%~-]+', Operator),
+ (r'[$]+', Keyword.Constant),
+ (wordop, Operator.Word),
+ (type, Keyword.Type)
+ ],
+ }
+
+
class Ca65Lexer(RegexLexer):
"""
For ca65 assembler sources.
diff --git a/pygments/lexers/automation.py b/pygments/lexers/automation.py
index 2ebc4d24..be1ec129 100644
--- a/pygments/lexers/automation.py
+++ b/pygments/lexers/automation.py
@@ -5,7 +5,7 @@
Lexers for automation scripting languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/basic.py b/pygments/lexers/basic.py
index a73ad8b4..e6545ee6 100644
--- a/pygments/lexers/basic.py
+++ b/pygments/lexers/basic.py
@@ -5,7 +5,7 @@
Lexers for BASIC like languages (other than VB.net).
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/bibtex.py b/pygments/lexers/bibtex.py
new file mode 100644
index 00000000..a6159f81
--- /dev/null
+++ b/pygments/lexers/bibtex.py
@@ -0,0 +1,160 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.bibtex
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for BibTeX bibliography data and styles.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, default, \
+ words
+from pygments.token import Name, Comment, String, Error, Number, Text, \
+ Keyword, Punctuation
+
+__all__ = ['BibTeXLexer', 'BSTLexer']
+
+
+class BibTeXLexer(ExtendedRegexLexer):
+ """
+ A lexer for BibTeX bibliography data format.
+
+ .. versionadded:: 2.2
+ """
+
+ name = 'BibTeX'
+ aliases = ['bib', 'bibtex']
+ filenames = ['*.bib']
+ mimetypes = ["text/x-bibtex"]
+ flags = re.IGNORECASE
+
+ ALLOWED_CHARS = r'@!$&*+\-./:;<>?\[\\\]^`|~'
+ IDENTIFIER = '[{0}][{1}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS)
+
+ def open_brace_callback(self, match, ctx):
+ opening_brace = match.group()
+ ctx.opening_brace = opening_brace
+ yield match.start(), Punctuation, opening_brace
+ ctx.pos = match.end()
+
+ def close_brace_callback(self, match, ctx):
+ closing_brace = match.group()
+ if (
+ ctx.opening_brace == '{' and closing_brace != '}' or
+ ctx.opening_brace == '(' and closing_brace != ')'
+ ):
+ yield match.start(), Error, closing_brace
+ else:
+ yield match.start(), Punctuation, closing_brace
+ del ctx.opening_brace
+ ctx.pos = match.end()
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ ('@comment', Comment),
+ ('@preamble', Name.Class, ('closing-brace', 'value', 'opening-brace')),
+ ('@string', Name.Class, ('closing-brace', 'field', 'opening-brace')),
+ ('@' + IDENTIFIER, Name.Class,
+ ('closing-brace', 'command-body', 'opening-brace')),
+ ('.+', Comment),
+ ],
+ 'opening-brace': [
+ include('whitespace'),
+ (r'[{(]', open_brace_callback, '#pop'),
+ ],
+ 'closing-brace': [
+ include('whitespace'),
+ (r'[})]', close_brace_callback, '#pop'),
+ ],
+ 'command-body': [
+ include('whitespace'),
+ (r'[^\s\,\}]+', Name.Label, ('#pop', 'fields')),
+ ],
+ 'fields': [
+ include('whitespace'),
+ (',', Punctuation, 'field'),
+ default('#pop'),
+ ],
+ 'field': [
+ include('whitespace'),
+ (IDENTIFIER, Name.Attribute, ('value', '=')),
+ default('#pop'),
+ ],
+ '=': [
+ include('whitespace'),
+ ('=', Punctuation, '#pop'),
+ ],
+ 'value': [
+ include('whitespace'),
+ (IDENTIFIER, Name.Variable),
+ ('"', String, 'quoted-string'),
+ (r'\{', String, 'braced-string'),
+ (r'[\d]+', Number),
+ ('#', Punctuation),
+ default('#pop'),
+ ],
+ 'quoted-string': [
+ (r'\{', String, 'braced-string'),
+ ('"', String, '#pop'),
+ ('[^\{\"]+', String),
+ ],
+ 'braced-string': [
+ (r'\{', String, '#push'),
+ (r'\}', String, '#pop'),
+ ('[^\{\}]+', String),
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+ }
+
+
+class BSTLexer(RegexLexer):
+ """
+ A lexer for BibTeX bibliography styles.
+
+ .. versionadded:: 2.2
+ """
+
+ name = 'BST'
+ aliases = ['bst', 'bst-pybtex']
+ filenames = ['*.bst']
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ (words(['read', 'sort']), Keyword),
+ (words(['execute', 'integers', 'iterate', 'reverse', 'strings']),
+ Keyword, ('group')),
+ (words(['function', 'macro']), Keyword, ('group', 'group')),
+ (words(['entry']), Keyword, ('group', 'group', 'group')),
+ ],
+ 'group': [
+ include('whitespace'),
+ (r'\{', Punctuation, ('#pop', 'group-end', 'body')),
+ ],
+ 'group-end': [
+ include('whitespace'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'body': [
+ include('whitespace'),
+ (r"\'[^#\"\{\}\s]+", Name.Function),
+ (r'[^#\"\{\}\s]+\$', Name.Builtin),
+ (r'[^#\"\{\}\s]+', Name.Variable),
+ (r'"[^\"]*"', String),
+ (r'#-?\d+', Number),
+ (r'\{', Punctuation, ('group-end', 'body')),
+ default('#pop'),
+ ],
+ 'whitespace': [
+ ('\s+', Text),
+ ('%.*?$', Comment.SingleLine),
+ ],
+ }
diff --git a/pygments/lexers/business.py b/pygments/lexers/business.py
index 43978690..12ed6925 100644
--- a/pygments/lexers/business.py
+++ b/pygments/lexers/business.py
@@ -5,7 +5,7 @@
Lexers for "business-oriented" languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -256,6 +256,7 @@ class ABAPLexer(RegexLexer):
(r'\s+', Text),
(r'^\*.*$', Comment.Single),
(r'\".*?\n', Comment.Single),
+ (r'##\w+', Comment.Special),
],
'variable-names': [
(r'<\S+>', Name.Variable),
@@ -264,8 +265,8 @@ class ABAPLexer(RegexLexer):
'root': [
include('common'),
# function calls
- (r'(CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION))(\s+)(\'?\S+\'?)',
- bygroups(Keyword, Text, Name.Function)),
+ (r'CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)',
+ Keyword),
(r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
r'TRANSACTION|TRANSFORMATION))\b',
Keyword),
@@ -285,6 +286,12 @@ class ABAPLexer(RegexLexer):
# call methodnames returning style
(r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function),
+ # text elements
+ (r'(TEXT)(-)(\d{3})',
+ bygroups(Keyword, Punctuation, Number.Integer)),
+ (r'(TEXT)(-)(\w{3})',
+ bygroups(Keyword, Punctuation, Name.Variable)),
+
# keywords with dashes in them.
# these need to be first, because for instance the -ID part
# of MESSAGE-ID wouldn't get highlighted if MESSAGE was
@@ -301,13 +308,13 @@ class ABAPLexer(RegexLexer):
r'OUTPUT-LENGTH|PRINT-CONTROL|'
r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
- r'TYPE-POOL|TYPE-POOLS'
+ r'TYPE-POOL|TYPE-POOLS|NO-DISPLAY'
r')\b', Keyword),
# keyword kombinations
- (r'CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
- r'((PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
- r'(TYPE|LIKE)(\s+(LINE\s+OF|REF\s+TO|'
+ (r'(?<![-\>])(CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
+ r'(PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
+ r'(TYPE|LIKE)\s+((LINE\s+OF|REF\s+TO|'
r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
@@ -343,10 +350,16 @@ class ABAPLexer(RegexLexer):
r'(BEGIN|END)\s+OF|'
r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
r'COMPARING(\s+ALL\s+FIELDS)?|'
- r'INSERT(\s+INITIAL\s+LINE\s+INTO|\s+LINES\s+OF)?|'
+ r'(INSERT|APPEND)(\s+INITIAL\s+LINE\s+(IN)?TO|\s+LINES\s+OF)?|'
r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
r'END-OF-(DEFINITION|PAGE|SELECTION)|'
r'WITH\s+FRAME(\s+TITLE)|'
+ r'(REPLACE|FIND)\s+((FIRST|ALL)\s+OCCURRENCES?\s+OF\s+)?(SUBSTRING|REGEX)?|'
+ r'MATCH\s+(LENGTH|COUNT|LINE|OFFSET)|'
+ r'(RESPECTING|IGNORING)\s+CASE|'
+ r'IN\s+UPDATE\s+TASK|'
+ r'(SOURCE|RESULT)\s+(XML)?|'
+ r'REFERENCE\s+INTO|'
# simple kombinations
r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
@@ -355,39 +368,41 @@ class ABAPLexer(RegexLexer):
r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
- r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE)\b', Keyword),
+ r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE|COMMON\s+PART)\b', Keyword),
# single word keywords.
- (r'(^|(?<=(\s|\.)))(ABBREVIATED|ADD|ALIASES|APPEND|ASSERT|'
- r'ASSIGN(ING)?|AT(\s+FIRST)?|'
+ (r'(^|(?<=(\s|\.)))(ABBREVIATED|ABSTRACT|ADD|ALIASES|ALIGN|ALPHA|'
+ r'ASSERT|AS|ASSIGN(ING)?|AT(\s+FIRST)?|'
r'BACK|BLOCK|BREAK-POINT|'
r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|'
r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
- r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|'
- r'DATA|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
- r'DETAIL|DIRECTORY|DIVIDE|DO|'
- r'ELSE(IF)?|ENDAT|ENDCASE|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
- r'ENDIF|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|'
- r'ENHANCEMENT|EVENTS|EXCEPTIONS|EXIT|EXPORT|EXPORTING|EXTRACT|'
- r'FETCH|FIELDS?|FIND|FOR|FORM|FORMAT|FREE|FROM|'
+ r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|COUNTRY|CURRENCY|'
+ r'DATA|DATE|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
+ r'DETAIL|DIRECTORY|DIVIDE|DO|DUMMY|'
+ r'ELSE(IF)?|ENDAT|ENDCASE|ENDCATCH|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
+ r'ENDIF|ENDINTERFACE|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|ENDWHILE|'
+ r'ENHANCEMENT|EVENTS|EXACT|EXCEPTIONS?|EXIT|EXPONENT|EXPORT|EXPORTING|EXTRACT|'
+ r'FETCH|FIELDS?|FOR|FORM|FORMAT|FREE|FROM|FUNCTION|'
r'HIDE|'
r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
- r'LENGTH|LINES|LOAD|LOCAL|'
+ r'LANGUAGE|LEAVE|LENGTH|LINES|LOAD|LOCAL|'
r'JOIN|'
r'KEY|'
- r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFY|MOVE|MULTIPLY|'
- r'NODES|'
- r'OBLIGATORY|OF|OFF|ON|OVERLAY|'
- r'PACK|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|'
- r'RAISE|RAISING|RANGES|READ|RECEIVE|REFRESH|REJECT|REPORT|RESERVE|'
- r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|'
- r'SCROLL|SEARCH|SELECT|SHIFT|SINGLE|SKIP|SORT|SPLIT|STATICS|STOP|'
- r'SUBMIT|SUBTRACT|SUM|SUMMARY|SUMMING|SUPPLY|'
- r'TABLE|TABLES|TIMES|TITLE|TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
+ r'NEXT|'
+ r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFIER|MODIFY|MOVE|MULTIPLY|'
+ r'NODES|NUMBER|'
+ r'OBLIGATORY|OBJECT|OF|OFF|ON|OTHERS|OVERLAY|'
+ r'PACK|PAD|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|PF\d\d|'
+ r'RAISE|RAISING|RANGES?|READ|RECEIVE|REDEFINITION|REFRESH|REJECT|REPORT|RESERVE|'
+ r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|REPLACE|'
+ r'SCROLL|SEARCH|SELECT|SHIFT|SIGN|SINGLE|SIZE|SKIP|SORT|SPLIT|STATICS|STOP|'
+ r'STYLE|SUBMATCHES|SUBMIT|SUBTRACT|SUM(?!\()|SUMMARY|SUMMING|SUPPLY|'
+ r'TABLE|TABLES|TIMESTAMP|TIMES?|TIMEZONE|TITLE|\??TO|'
+ r'TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
r'ULINE|UNDER|UNPACK|UPDATE|USING|'
- r'VALUE|VALUES|VIA|'
- r'WAIT|WHEN|WHERE|WHILE|WITH|WINDOW|WRITE)\b', Keyword),
+ r'VALUE|VALUES|VIA|VARYING|VARY|'
+ r'WAIT|WHEN|WHERE|WIDTH|WHILE|WITH|WINDOW|WRITE|XSD|ZERO)\b', Keyword),
# builtins
(r'(abs|acos|asin|atan|'
@@ -413,18 +428,21 @@ class ABAPLexer(RegexLexer):
# operators which look like variable names before
# parsing variable names.
- (r'(?<=(\s|.))(AND|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
+ (r'(?<=(\s|.))(AND|OR|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
- r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator),
+ r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator.Word),
include('variable-names'),
- # standard oparators after variable names,
+ # standard operators after variable names,
# because < and > are part of field symbols.
- (r'[?*<>=\-+]', Operator),
+ (r'[?*<>=\-+&]', Operator),
(r"'(''|[^'])*'", String.Single),
(r"`([^`])*`", String.Single),
- (r'[/;:()\[\],.]', Punctuation)
+ (r"([\|\}])([^\{\}\|]*?)([\|\{])",
+ bygroups(Punctuation, String.Single, Punctuation)),
+ (r'[/;:()\[\],.]', Punctuation),
+ (r'(!)(\w+)', bygroups(Operator, Name)),
],
}
diff --git a/pygments/lexers/c_cpp.py b/pygments/lexers/c_cpp.py
index 632871ba..691f5ab4 100644
--- a/pygments/lexers/c_cpp.py
+++ b/pygments/lexers/c_cpp.py
@@ -5,7 +5,7 @@
Lexers for C/C++ languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -46,8 +46,10 @@ class CFamilyLexer(RegexLexer):
(r'\n', Text),
(r'\s+', Text),
(r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
+            # Open until EOF, so no ending delimiter
+ (r'/(\\\n)?[*][\w\W]*', Comment.Multiline),
],
'statements': [
(r'(L?)(")', bygroups(String.Affix, String), 'string'),
@@ -61,10 +63,11 @@ class CFamilyLexer(RegexLexer):
(r'\*/', Error),
(r'[~!%^&*+=|?:<>/-]', Operator),
(r'[()\[\],.]', Punctuation),
- (words(('auto', 'break', 'case', 'const', 'continue', 'default', 'do',
- 'else', 'enum', 'extern', 'for', 'goto', 'if', 'register',
- 'restricted', 'return', 'sizeof', 'static', 'struct',
- 'switch', 'typedef', 'union', 'volatile', 'while'),
+ (words(('asm', 'auto', 'break', 'case', 'const', 'continue',
+ 'default', 'do', 'else', 'enum', 'extern', 'for', 'goto',
+ 'if', 'register', 'restricted', 'return', 'sizeof',
+ 'static', 'struct', 'switch', 'typedef', 'union',
+ 'volatile', 'while'),
suffix=r'\b'), Keyword),
(r'(bool|int|long|float|short|double|char|unsigned|signed|void)\b',
Keyword.Type),
@@ -208,7 +211,7 @@ class CppLexer(CFamilyLexer):
tokens = {
'statements': [
(words((
- 'asm', 'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
+ 'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
'export', 'friend', 'mutable', 'namespace', 'new', 'operator',
'private', 'protected', 'public', 'reinterpret_cast',
'restrict', 'static_cast', 'template', 'this', 'throw', 'throws',
diff --git a/pygments/lexers/c_like.py b/pygments/lexers/c_like.py
index f4a9c299..f7ba7e8f 100644
--- a/pygments/lexers/c_like.py
+++ b/pygments/lexers/c_like.py
@@ -5,7 +5,7 @@
Lexers for other C-like languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/capnproto.py b/pygments/lexers/capnproto.py
new file mode 100644
index 00000000..49fd3d3a
--- /dev/null
+++ b/pygments/lexers/capnproto.py
@@ -0,0 +1,78 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.capnproto
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Cap'n Proto schema language.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Keyword, Name, Literal
+
+__all__ = ['CapnProtoLexer']
+
+
+class CapnProtoLexer(RegexLexer):
+ """
+ For `Cap'n Proto <https://capnproto.org>`_ source.
+
+ .. versionadded:: 2.2
+ """
+ name = 'Cap\'n Proto'
+ filenames = ['*.capnp']
+ aliases = ['capnp']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ tokens = {
+ 'root': [
+ (r'#.*?$', Comment.Single),
+ (r'@[0-9a-zA-Z]*', Name.Decorator),
+ (r'=', Literal, 'expression'),
+ (r':', Name.Class, 'type'),
+ (r'\$', Name.Attribute, 'annotation'),
+ (r'(struct|enum|interface|union|import|using|const|annotation|'
+ r'extends|in|of|on|as|with|from|fixed)\b',
+ Keyword),
+ (r'[a-zA-Z0-9_.]+', Name),
+ (r'[^#@=:$a-zA-Z0-9_]+', Text),
+ ],
+ 'type': [
+ (r'[^][=;,(){}$]+', Name.Class),
+ (r'[[(]', Name.Class, 'parentype'),
+ (r'', Name.Class, '#pop')
+ ],
+ 'parentype': [
+ (r'[^][;()]+', Name.Class),
+ (r'[[(]', Name.Class, '#push'),
+ (r'[])]', Name.Class, '#pop'),
+ (r'', Name.Class, '#pop')
+ ],
+ 'expression': [
+ (r'[^][;,(){}$]+', Literal),
+ (r'[[(]', Literal, 'parenexp'),
+ (r'', Literal, '#pop')
+ ],
+ 'parenexp': [
+ (r'[^][;()]+', Literal),
+ (r'[[(]', Literal, '#push'),
+ (r'[])]', Literal, '#pop'),
+ (r'', Literal, '#pop')
+ ],
+ 'annotation': [
+ (r'[^][;,(){}=:]+', Name.Attribute),
+ (r'[[(]', Name.Attribute, 'annexp'),
+ (r'', Name.Attribute, '#pop')
+ ],
+ 'annexp': [
+ (r'[^][;()]+', Name.Attribute),
+ (r'[[(]', Name.Attribute, '#push'),
+ (r'[])]', Name.Attribute, '#pop'),
+ (r'', Name.Attribute, '#pop')
+ ],
+ }
diff --git a/pygments/lexers/chapel.py b/pygments/lexers/chapel.py
index e6507394..55bf0e1e 100644
--- a/pygments/lexers/chapel.py
+++ b/pygments/lexers/chapel.py
@@ -5,7 +5,7 @@
Lexer for the Chapel language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -52,7 +52,7 @@ class ChapelLexer(RegexLexer):
'then', 'use', 'when', 'where', 'while', 'with', 'yield',
'zip'), suffix=r'\b'),
Keyword),
- (r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'),
+ (r'(proc)((?:\s)+)', bygroups(Keyword, Text), 'procname'),
(r'(class|module|record|union)(\s+)', bygroups(Keyword, Text),
'classname'),
@@ -96,6 +96,7 @@ class ChapelLexer(RegexLexer):
(r'[a-zA-Z_][\w$]*', Name.Class, '#pop'),
],
'procname': [
- (r'[a-zA-Z_][\w$]*', Name.Function, '#pop'),
+ (r'([a-zA-Z_][\w$]+|\~[a-zA-Z_][\w$]+|[+*/!~%<>=&^|\-]{1,2})',
+ Name.Function, '#pop'),
],
}
diff --git a/pygments/lexers/clean.py b/pygments/lexers/clean.py
index a3e81534..5c8be8d4 100644
--- a/pygments/lexers/clean.py
+++ b/pygments/lexers/clean.py
@@ -5,7 +5,7 @@
Lexer for the Clean language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -100,7 +100,7 @@ class CleanLexer(ExtendedRegexLexer):
ctx.pos = match.end()
yield match.start(), Comment, match.group(0)
- keywords = ('class', 'instance', 'where', 'with', 'let', 'let!', 'with',
+ keywords = ('class', 'instance', 'where', 'with', 'let', 'let!',
'in', 'case', 'of', 'infix', 'infixr', 'infixl', 'generic',
'derive', 'otherwise', 'code', 'inline')
@@ -116,7 +116,7 @@ class CleanLexer(ExtendedRegexLexer):
(r'(?s)/\*.*?\*/', Comment.Multi),
# Modules, imports, etc.
- (r'\b((?:implementation|definition|system)\s+)?(module)(\s+)([\w`]+)',
+ (r'\b((?:implementation|definition|system)\s+)?(module)(\s+)([\w`\.]+)',
bygroups(Keyword.Namespace, Keyword.Namespace, Text, Name.Class)),
(r'(?<=\n)import(?=\s)', Keyword.Namespace, 'import'),
(r'(?<=\n)from(?=\s)', Keyword.Namespace, 'fromimport'),
@@ -128,7 +128,7 @@ class CleanLexer(ExtendedRegexLexer):
# Function definitions
(r'(?=\{\|)', Whitespace, 'genericfunction'),
- (r'(?<=\n)([ \t]*)([\w`$()=\-<>~*\^|+&%]+)((?:\s+[\w])*)(\s*)(::)',
+ (r'(?<=\n)([ \t]*)([\w`$()=\-<>~*\^|+&%]+)((?:\s+\w)*)(\s*)(::)',
bygroups(store_indent, Name.Function, Keyword.Type, Whitespace,
Punctuation),
'functiondefargs'),
@@ -140,7 +140,7 @@ class CleanLexer(ExtendedRegexLexer):
# Literals
(r'\'\\?.(?<!\\)\'', String.Char),
(r'\'\\\d+\'', String.Char),
- (r'\'\\\\\'', String.Char), # (special case for '\\')
+ (r'\'\\\\\'', String.Char), # (special case for '\\')
(r'[+\-~]?\s*\d+\.\d+(E[+\-~]?\d+)?\b', Number.Float),
(r'[+\-~]?\s*0[0-7]\b', Number.Oct),
(r'[+\-~]?\s*0x[0-9a-fA-F]\b', Number.Hex),
@@ -149,8 +149,12 @@ class CleanLexer(ExtendedRegexLexer):
(words(('True', 'False'), prefix=r'(?<=\s)', suffix=r'(?=\s)'),
Literal),
+ # Qualified names
+ (r'(\')([\w\.]+)(\'\.)',
+ bygroups(Punctuation, Name.Namespace, Punctuation)),
+
# Everything else is some name
- (r'([\w`$%]+\.?)*[\w`$%]+', Name),
+ (r'([\w`$%\/\?@]+\.?)*[\w`$%\/\?@]+', Name),
# Punctuation
(r'[{}()\[\],:;.#]', Punctuation),
@@ -167,13 +171,14 @@ class CleanLexer(ExtendedRegexLexer):
],
'fromimport': [
include('common'),
- (r'([\w`]+)', check_class_not_import),
+ (r'([\w`\.]+)', check_class_not_import),
(r'\n', Whitespace, '#pop'),
(r'\s', Whitespace),
],
'fromimportfunc': [
include('common'),
- (r'([\w`$()=\-<>~*\^|+&%]+)', check_instance_class),
+ (r'(::)\s+([^,\s]+)', bygroups(Punctuation, Keyword.Type)),
+ (r'([\w`$()=\-<>~*\^|+&%\/]+)', check_instance_class),
(r',', Punctuation),
(r'\n', Whitespace, '#pop'),
(r'\s', Whitespace),
@@ -199,7 +204,7 @@ class CleanLexer(ExtendedRegexLexer):
include('common'),
(words(('from', 'import', 'as', 'qualified'),
prefix='(?<=\s)', suffix='(?=\s)'), Keyword.Namespace),
- (r'[\w`]+', Name.Class),
+ (r'[\w`\.]+', Name.Class),
(r'\n', Whitespace, '#pop'),
(r',', Punctuation),
(r'[^\S\n]+', Whitespace),
@@ -230,7 +235,7 @@ class CleanLexer(ExtendedRegexLexer):
(r'->', Punctuation),
(r'(\s+of\s+)(\{)', bygroups(Keyword, Punctuation), 'genericftypes'),
(r'\s', Whitespace),
- (r'[\w`]+', Keyword.Type),
+ (r'[\w`\[\]{}!]+', Keyword.Type),
(r'[*()]', Punctuation),
],
'genericftypes': [
@@ -263,12 +268,20 @@ class CleanLexer(ExtendedRegexLexer):
(r'\n(\s*)', check_indent3),
(r'^(?=\S)', Whitespace, '#pop:3'),
(r'[,&]', Punctuation),
- (r'[\w`$()=\-<>~*\^|+&%]', Name.Function, 'functionname'),
- (r'\s', Whitespace),
+ (r'\[', Punctuation, 'functiondefuniquneq'),
+ (r'[\w`$()=\-<>~*\^|+&%\/{}\[\]@]', Name.Function, 'functionname'),
+ (r'\s+', Whitespace),
+ ],
+ 'functiondefuniquneq': [
+ include('common'),
+ (r'[a-z]+', Keyword.Type),
+ (r'\s+', Whitespace),
+ (r'<=|,', Punctuation),
+ (r'\]', Punctuation, '#pop')
],
'functionname': [
include('common'),
- (r'[\w`$()=\-<>~*\^|+&%]+', Name.Function),
+ (r'[\w`$()=\-<>~*\^|+&%\/]+', Name.Function),
(r'(?=\{\|)', Punctuation, 'genericfunction'),
default('#pop'),
]
diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
index 1cf83d7f..ab52a370 100644
--- a/pygments/lexers/compiled.py
+++ b/pygments/lexers/compiled.py
@@ -5,7 +5,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -29,5 +29,6 @@ from pygments.lexers.dylan import DylanLexer, DylanLidLexer, DylanConsoleLexer
from pygments.lexers.ooc import OocLexer
from pygments.lexers.felix import FelixLexer
from pygments.lexers.nimrod import NimrodLexer
+from pygments.lexers.crystal import CrystalLexer
__all__ = []
diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py
index 9cc291e5..1717a563 100644
--- a/pygments/lexers/configs.py
+++ b/pygments/lexers/configs.py
@@ -5,7 +5,7 @@
Lexers for configuration file formats.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -100,6 +100,8 @@ class PropertiesLexer(RegexLexer):
"""
Lexer for configuration files in Java's properties format.
+ Note: trailing whitespace counts as part of the value as per spec
+
.. versionadded:: 1.4
"""
@@ -110,10 +112,14 @@ class PropertiesLexer(RegexLexer):
tokens = {
'root': [
- (r'\s+', Text),
- (r'(?:[;#]|//).*$', Comment),
+ (r'^(\w+)([ \t])(\w+\s*)$', bygroups(Name.Attribute, Text, String)),
+ (r'^\w+(\\[ \t]\w*)*$', Name.Attribute),
+ (r'(^ *)([#!].*)', bygroups(Text, Comment)),
+ # More controversial comments
+ (r'(^ *)((?:;|//).*)', bygroups(Text, Comment)),
(r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
bygroups(Name.Attribute, Text, Operator, Text, String)),
+ (r'\s', Text),
],
}
@@ -456,7 +462,7 @@ class NginxConfLexer(RegexLexer):
"""
name = 'Nginx configuration file'
aliases = ['nginx']
- filenames = []
+ filenames = ['nginx.conf']
mimetypes = ['text/x-nginx-conf']
tokens = {
diff --git a/pygments/lexers/console.py b/pygments/lexers/console.py
index 1d89b770..77bb72e5 100644
--- a/pygments/lexers/console.py
+++ b/pygments/lexers/console.py
@@ -5,7 +5,7 @@
Lexers for misc console output.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/crystal.py b/pygments/lexers/crystal.py
new file mode 100644
index 00000000..7aecaf3e
--- /dev/null
+++ b/pygments/lexers/crystal.py
@@ -0,0 +1,389 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.crystal
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Crystal.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import ExtendedRegexLexer, include, \
+ bygroups, default, LexerContext, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['CrystalLexer']
+
# Matches one source line including its trailing newline; used by the
# heredoc scanner in CrystalLexer.heredoc_callback below.
line_re = re.compile('.*?\n')


# Operator method names.  Referenced by CrystalLexer for symbol literals
# (":<op>") and for operator calls after "." or "::".  Longer operators
# come before their prefixes so the words() alternation matches greedily.
CRYSTAL_OPERATORS = [
    '!=', '!~', '!', '%', '&&', '&', '**', '*', '+', '-', '/', '<=>', '<<', '<=', '<',
    '===', '==', '=~', '=', '>=', '>>', '>', '[]=', '[]?', '[]', '^', '||', '|', '~'
]
+
+
class CrystalLexer(ExtendedRegexLexer):
    """
    For `Crystal <http://crystal-lang.org>`_ source code.

    Uses ExtendedRegexLexer because heredocs require context callbacks
    rather than plain state transitions.

    .. versionadded:: 2.2
    """

    name = 'Crystal'
    aliases = ['cr', 'crystal']
    filenames = ['*.cr']
    mimetypes = ['text/x-crystal']

    flags = re.DOTALL | re.MULTILINE

    def heredoc_callback(self, match, ctx):
        # okay, this is the hardest part of parsing Crystal...
        # match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
        #
        # The heredoc *body* starts only after the current line, so the
        # rest of the line (group 5) is lexed normally first; the bodies
        # of all heredocs opened on this line are then consumed in order.

        start = match.start(1)
        yield start, Operator, match.group(1)  # <<-?
        yield match.start(2), String.Heredoc, match.group(2)  # quote ", ', `
        yield match.start(3), String.Delimiter, match.group(3)  # heredoc name
        yield match.start(4), String.Heredoc, match.group(4)  # quote again

        # Stack shared across nested calls via the lexer context; each entry
        # is (tolerant, name) where tolerant means "<<-" (the terminator may
        # be indented -- see strip() vs. rstrip() below).
        heredocstack = ctx.__dict__.setdefault('heredocstack', [])
        outermost = not bool(heredocstack)
        heredocstack.append((match.group(1) == '<<-', match.group(3)))

        ctx.pos = match.start(5)
        ctx.end = match.end(5)
        # this may find other heredocs
        for i, t, v in self.get_tokens_unprocessed(context=ctx):
            yield i, t, v
        ctx.pos = match.end()

        if outermost:
            # this is the outer heredoc again, now we can process them all
            for tolerant, hdname in heredocstack:
                lines = []
                for match in line_re.finditer(ctx.text, ctx.pos):
                    if tolerant:
                        check = match.group().strip()
                    else:
                        check = match.group().rstrip()
                    if check == hdname:
                        # Terminator found: emit the buffered body lines,
                        # then the terminator itself.
                        for amatch in lines:
                            yield amatch.start(), String.Heredoc, amatch.group()
                        yield match.start(), String.Delimiter, match.group()
                        ctx.pos = match.end()
                        break
                    else:
                        lines.append(match)
                else:
                    # end of heredoc not found -- error!
                    for amatch in lines:
                        yield amatch.start(), Error, amatch.group()
            ctx.end = len(ctx.text)
            del heredocstack[:]

    # NOTE: no `self` -- this runs once at class-definition time; its result
    # is merged into `tokens` by the tokens.update() call at the bottom.
    def gen_crystalstrings_rules():
        def intp_regex_callback(self, match, ctx):
            # Re-lex the regex body (group 3) for #{...} interpolation.
            yield match.start(1), String.Regex, match.group(1)  # begin
            nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
            for i, t, v in self.get_tokens_unprocessed(context=nctx):
                yield match.start(3)+i, t, v
            yield match.start(4), String.Regex, match.group(4)  # end[imsx]*
            ctx.pos = match.end()

        def intp_string_callback(self, match, ctx):
            # Same as above, but for %-delimited strings.
            yield match.start(1), String.Other, match.group(1)
            nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
            for i, t, v in self.get_tokens_unprocessed(context=nctx):
                yield match.start(3)+i, t, v
            yield match.start(4), String.Other, match.group(4)  # end
            ctx.pos = match.end()

        states = {}
        states['strings'] = [
            (r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
            (words(CRYSTAL_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
            (r":'(\\\\|\\'|[^'])*'", String.Symbol),
            # This allows arbitrary text after '\ for simplicity
            (r"'(\\\\|\\'|[^']|\\[^'\\]+)'", String.Char),
            (r':"', String.Symbol, 'simple-sym'),
            # Crystal doesn't have "symbol:"s but this simplifies function args
            (r'([a-zA-Z_]\w*)(:)(?!:)', bygroups(String.Symbol, Punctuation)),
            (r'"', String.Double, 'simple-string'),
            (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
        ]

        # double-quoted string and symbol
        for name, ttype, end in ('string', String.Double, '"'), \
                                ('sym', String.Symbol, '"'), \
                                ('backtick', String.Backtick, '`'):
            states['simple-'+name] = [
                include('string-escaped' if name == 'sym' else 'string-intp-escaped'),
                (r'[^\\%s#]+' % end, ttype),
                (r'[\\#]', ttype),
                (end, ttype, '#pop'),
            ]

        # braced quoted strings: generate one state family per bracket pair
        # so that nested brackets can be tracked with #push/#pop.
        for lbrace, rbrace, bracecc, name in \
                ('\\{', '\\}', '{}', 'cb'), \
                ('\\[', '\\]', '\\[\\]', 'sb'), \
                ('\\(', '\\)', '()', 'pa'), \
                ('<', '>', '<>', 'ab'):
            states[name+'-intp-string'] = [
                (r'\\[' + lbrace + ']', String.Other),
                (lbrace, String.Other, '#push'),
                (rbrace, String.Other, '#pop'),
                include('string-intp-escaped'),
                (r'[\\#' + bracecc + ']', String.Other),
                (r'[^\\#' + bracecc + ']+', String.Other),
            ]
            states['strings'].append((r'%' + lbrace, String.Other,
                                      name+'-intp-string'))
            states[name+'-string'] = [
                (r'\\[\\' + bracecc + ']', String.Other),
                (lbrace, String.Other, '#push'),
                (rbrace, String.Other, '#pop'),
                (r'[\\#' + bracecc + ']', String.Other),
                (r'[^\\#' + bracecc + ']+', String.Other),
            ]
            # http://crystal-lang.org/docs/syntax_and_semantics/literals/array.html
            states['strings'].append((r'%[wi]' + lbrace, String.Other,
                                      name+'-string'))
            states[name+'-regex'] = [
                (r'\\[\\' + bracecc + ']', String.Regex),
                (lbrace, String.Regex, '#push'),
                (rbrace + '[imsx]*', String.Regex, '#pop'),
                include('string-intp'),
                (r'[\\#' + bracecc + ']', String.Regex),
                (r'[^\\#' + bracecc + ']+', String.Regex),
            ]
            states['strings'].append((r'%r' + lbrace, String.Regex,
                                      name+'-regex'))

        # these must come after %<brace>!
        states['strings'] += [
            # %r regex
            (r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[imsx]*)',
             intp_regex_callback),
            # regular fancy strings with qsw
            (r'(%[wi]([\W_]))((?:\\\2|(?!\2).)*)(\2)',
             intp_string_callback),
            # special forms of fancy strings after operators or
            # in method calls with braces
            (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
             bygroups(Text, String.Other, None)),
            # and because of fixed width lookbehinds the whole thing a
            # second time for line startings...
            (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
             bygroups(Text, String.Other, None)),
            # all regular fancy strings without qsw
            (r'(%([\[{(<]))((?:\\\2|(?!\2).)*)(\2)',
             intp_string_callback),
        ]

        return states

    tokens = {
        'root': [
            (r'#.*?$', Comment.Single),
            # keywords
            (words('''
                abstract asm as begin break case do else elsif end ensure extend ifdef if
                include instance_sizeof next of pointerof private protected rescue return
                require sizeof super then typeof unless until when while with yield
            '''.split(), suffix=r'\b'), Keyword),
            (words(['true', 'false', 'nil'], suffix=r'\b'), Keyword.Constant),
            # start of function, class and module names
            (r'(module|lib)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
             bygroups(Keyword, Text, Name.Namespace)),
            (r'(def|fun|macro)(\s+)((?:[a-zA-Z_]\w*::)*)',
             bygroups(Keyword, Text, Name.Namespace), 'funcname'),
            # "def" immediately followed by an operator method name
            (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
            (r'(class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)',
             bygroups(Keyword, Text, Name.Namespace), 'classname'),
            (r'(self|out|uninitialized)\b|(is_a|responds_to)\?', Keyword.Pseudo),
            # macros
            (words('''
                debugger record pp assert_responds_to spawn parallel
                getter setter property delegate def_hash def_equals def_equals_and_hash
                forward_missing_to
            '''.split(), suffix=r'\b'), Name.Builtin.Pseudo),
            (r'getter[!?]|property[!?]|__(DIR|FILE|LINE)__\b', Name.Builtin.Pseudo),
            # builtins
            # http://crystal-lang.org/api/toplevel.html
            (words('''
                Object Value Struct Reference Proc Class Nil Symbol Enum Void
                Bool Number Int Int8 Int16 Int32 Int64 UInt8 UInt16 UInt32 UInt64
                Float Float32 Float64 Char String
                Pointer Slice Range Exception Regex
                Mutex StaticArray Array Hash Set Tuple Deque Box Process File
                Dir Time Channel Concurrent Scheduler
                abort at_exit caller delay exit fork future get_stack_top gets
                lazy loop main p print printf puts
                raise rand read_line sleep sprintf system with_color
            '''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Name.Builtin),
            # normal heredocs
            (r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
             heredoc_callback),
            # empty string heredocs
            (r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
            (r'__END__', Comment.Preproc, 'end-part'),
            # multiline regex (after keywords or assignments)
            (r'(?:^|(?<=[=<>~!:])|'
             r'(?<=(?:\s|;)when\s)|'
             r'(?<=(?:\s|;)or\s)|'
             r'(?<=(?:\s|;)and\s)|'
             r'(?<=\.index\s)|'
             r'(?<=\.scan\s)|'
             r'(?<=\.sub\s)|'
             r'(?<=\.sub!\s)|'
             r'(?<=\.gsub\s)|'
             r'(?<=\.gsub!\s)|'
             r'(?<=\.match\s)|'
             r'(?<=(?:\s|;)if\s)|'
             r'(?<=(?:\s|;)elsif\s)|'
             r'(?<=^when\s)|'
             r'(?<=^index\s)|'
             r'(?<=^scan\s)|'
             r'(?<=^sub\s)|'
             r'(?<=^gsub\s)|'
             r'(?<=^sub!\s)|'
             r'(?<=^gsub!\s)|'
             r'(?<=^match\s)|'
             r'(?<=^if\s)|'
             r'(?<=^elsif\s)'
             r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
            # multiline regex (in method calls or subscripts)
            (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
            # multiline regex (this time the funny no whitespace rule)
            (r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex),
             'multiline-regex'),
            # lex numbers and ignore following regular expressions which
            # are division operators in fact (grrrr. i hate that. any
            # better ideas?)
            # since pygments 0.7 we also eat a "?" operator after numbers
            # so that the char operator does not work. Chars are not allowed
            # there so that you can use the ternary operator.
            # stupid example:
            #   x>=0?n[x]:""
            (r'(0o[0-7]+(?:_[0-7]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
             bygroups(Number.Oct, Text, Operator)),
            (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
             bygroups(Number.Hex, Text, Operator)),
            (r'(0b[01]+(?:_[01]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
             bygroups(Number.Bin, Text, Operator)),
            # 3 separate expressions for floats because any of the 3 optional parts makes it a float
            (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?(?:_?[f][0-9]+)?)(\s*)([/?])?',
             bygroups(Number.Float, Text, Operator)),
            (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)(?:_?[f][0-9]+)?)(\s*)([/?])?',
             bygroups(Number.Float, Text, Operator)),
            (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?(?:_?[f][0-9]+))(\s*)([/?])?',
             bygroups(Number.Float, Text, Operator)),
            (r'(0\b|[1-9][\d]*(?:_\d+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
             bygroups(Number.Integer, Text, Operator)),
            # Names
            (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
            (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
            (r'\$\w+', Name.Variable.Global),
            (r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
            (r'\$-[0adFiIlpvw]', Name.Variable.Global),
            (r'::', Operator),
            include('strings'),
            # chars
            (r'\?(\\[MC]-)*'  # modifiers
             r'(\\([\\befnrtv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
             r'(?!\w)',
             String.Char),
            (r'[A-Z][A-Z_]+\b', Name.Constant),
            # macro expansion
            (r'\{%', String.Interpol, 'in-macro-control'),
            (r'\{\{', String.Interpol, 'in-macro-expr'),
            # attributes
            (r'(@\[)(\s*)([A-Z]\w*)',
             bygroups(Operator, Text, Name.Decorator), 'in-attr'),
            # this is needed because Crystal attributes can look
            # like keywords (class) or like this: ` ?!?
            (words(CRYSTAL_OPERATORS, prefix=r'(\.|::)'),
             bygroups(Operator, Name.Operator)),
            (r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
             bygroups(Operator, Name)),
            # Names can end with [!?] unless it's "!="
            (r'[a-zA-Z_]\w*(?:[!?](?!=))?', Name),
            (r'(\[|\]\??|\*\*|<=>?|>=|<<?|>>?|=~|===|'
             r'!~|&&?|\|\||\.{1,3})', Operator),
            (r'[-+/*%=<>&!^|~]=?', Operator),
            (r'[(){};,/?:\\]', Punctuation),
            (r'\s+', Text)
        ],
        # After "def"/"fun"/"macro": a method name, optionally "Receiver.",
        # including operator method names.
        'funcname': [
            (r'(?:([a-zA-Z_]\w*)(\.))?'
             r'([a-zA-Z_]\w*[!?]?|\*\*?|[-+]@?|'
             r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)',
             bygroups(Name.Class, Operator, Name.Function), '#pop'),
            default('#pop')
        ],
        # After "class"/"struct"/...: a constant name, possibly with a
        # parenthesized superclass/generic argument.
        'classname': [
            (r'[A-Z_]\w*', Name.Class),
            (r'(\()(\s*)([A-Z_]\w*)(\s*)(\))',
             bygroups(Punctuation, Text, Name.Class, Text, Punctuation)),
            default('#pop')
        ],
        # Inside a #{...} interpolation: full expression syntax, with
        # brace counting so nested { } do not end the interpolation.
        'in-intp': [
            (r'\{', String.Interpol, '#push'),
            (r'\}', String.Interpol, '#pop'),
            include('root'),
        ],
        'string-intp': [
            (r'#\{', String.Interpol, 'in-intp'),
        ],
        'string-escaped': [
            (r'\\([\\befnstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})', String.Escape)
        ],
        'string-intp-escaped': [
            include('string-intp'),
            include('string-escaped'),
        ],
        # Helper states used only via LexerContext in the two callbacks
        # defined in gen_crystalstrings_rules above.
        'interpolated-regex': [
            include('string-intp'),
            (r'[\\#]', String.Regex),
            (r'[^\\#]+', String.Regex),
        ],
        'interpolated-string': [
            include('string-intp'),
            (r'[\\#]', String.Other),
            (r'[^\\#]+', String.Other),
        ],
        'multiline-regex': [
            include('string-intp'),
            (r'\\\\', String.Regex),
            (r'\\/', String.Regex),
            (r'[\\#]', String.Regex),
            (r'[^\\/#]+', String.Regex),
            (r'/[imsx]*', String.Regex, '#pop'),
        ],
        # Everything after __END__ is data, lexed as one big token.
        'end-part': [
            (r'.+', Comment.Preproc, '#pop')
        ],
        # {% ... %} macro control blocks and {{ ... }} macro expressions.
        'in-macro-control': [
            (r'\{%', String.Interpol, '#push'),
            (r'%\}', String.Interpol, '#pop'),
            (r'for\b|in\b', Keyword),
            include('root'),
        ],
        'in-macro-expr': [
            (r'\{\{', String.Interpol, '#push'),
            (r'\}\}', String.Interpol, '#pop'),
            include('root'),
        ],
        # @[Attribute(...)] bodies, with bracket counting.
        'in-attr': [
            (r'\[', Operator, '#push'),
            (r'\]', Operator, '#pop'),
            include('root'),
        ],
    }
    tokens.update(gen_crystalstrings_rules())
diff --git a/pygments/lexers/csound.py b/pygments/lexers/csound.py
index 95ee73d8..858aa348 100644
--- a/pygments/lexers/csound.py
+++ b/pygments/lexers/csound.py
@@ -5,7 +5,7 @@
Lexers for CSound languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/css.py b/pygments/lexers/css.py
index 6c585dfa..29d83707 100644
--- a/pygments/lexers/css.py
+++ b/pygments/lexers/css.py
@@ -5,7 +5,7 @@
Lexers for CSS and related stylesheet formats.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,6 +21,251 @@ from pygments.util import iteritems
__all__ = ['CssLexer', 'SassLexer', 'ScssLexer', 'LessCssLexer']
+# List of vendor prefixes obtained from:
+# https://www.w3.org/TR/CSS21/syndata.html#vendor-keyword-history
+_vendor_prefixes = (
+ '-ms-', 'mso-', '-moz-', '-o-', '-xv-', '-atsc-', '-wap-', '-khtml-',
+ '-webkit-', 'prince-', '-ah-', '-hp-', '-ro-', '-rim-', '-tc-',
+)
+
+# List of CSS properties obtained from:
+# https://www.w3.org/Style/CSS/all-properties.en.html
+# Note: handle --* separately
+_css_properties = (
+ 'align-content', 'align-items', 'align-self', 'alignment-baseline', 'all',
+ 'animation', 'animation-delay', 'animation-direction',
+ 'animation-duration', 'animation-fill-mode', 'animation-iteration-count',
+ 'animation-name', 'animation-play-state', 'animation-timing-function',
+ 'appearance', 'azimuth', 'backface-visibility', 'background',
+ 'background-attachment', 'background-blend-mode', 'background-clip',
+ 'background-color', 'background-image', 'background-origin',
+ 'background-position', 'background-repeat', 'background-size',
+ 'baseline-shift', 'bookmark-label', 'bookmark-level', 'bookmark-state',
+ 'border', 'border-bottom', 'border-bottom-color',
+ 'border-bottom-left-radius', 'border-bottom-right-radius',
+ 'border-bottom-style', 'border-bottom-width', 'border-boundary',
+ 'border-collapse', 'border-color', 'border-image', 'border-image-outset',
+ 'border-image-repeat', 'border-image-slice', 'border-image-source',
+ 'border-image-width', 'border-left', 'border-left-color',
+ 'border-left-style', 'border-left-width', 'border-radius', 'border-right',
+ 'border-right-color', 'border-right-style', 'border-right-width',
+ 'border-spacing', 'border-style', 'border-top', 'border-top-color',
+ 'border-top-left-radius', 'border-top-right-radius', 'border-top-style',
+ 'border-top-width', 'border-width', 'bottom', 'box-decoration-break',
+ 'box-shadow', 'box-sizing', 'box-snap', 'box-suppress', 'break-after',
+ 'break-before', 'break-inside', 'caption-side', 'caret', 'caret-animation',
+ 'caret-color', 'caret-shape', 'chains', 'clear', 'clip', 'clip-path',
+ 'clip-rule', 'color', 'color-interpolation-filters', 'column-count',
+ 'column-fill', 'column-gap', 'column-rule', 'column-rule-color',
+ 'column-rule-style', 'column-rule-width', 'column-span', 'column-width',
+ 'columns', 'content', 'counter-increment', 'counter-reset', 'counter-set',
+ 'crop', 'cue', 'cue-after', 'cue-before', 'cursor', 'direction', 'display',
+ 'dominant-baseline', 'elevation', 'empty-cells', 'filter', 'flex',
+ 'flex-basis', 'flex-direction', 'flex-flow', 'flex-grow', 'flex-shrink',
+ 'flex-wrap', 'float', 'float-defer', 'float-offset', 'float-reference',
+ 'flood-color', 'flood-opacity', 'flow', 'flow-from', 'flow-into', 'font',
+ 'font-family', 'font-feature-settings', 'font-kerning',
+ 'font-language-override', 'font-size', 'font-size-adjust', 'font-stretch',
+ 'font-style', 'font-synthesis', 'font-variant', 'font-variant-alternates',
+ 'font-variant-caps', 'font-variant-east-asian', 'font-variant-ligatures',
+ 'font-variant-numeric', 'font-variant-position', 'font-weight',
+ 'footnote-display', 'footnote-policy', 'glyph-orientation-vertical',
+ 'grid', 'grid-area', 'grid-auto-columns', 'grid-auto-flow',
+ 'grid-auto-rows', 'grid-column', 'grid-column-end', 'grid-column-gap',
+ 'grid-column-start', 'grid-gap', 'grid-row', 'grid-row-end',
+ 'grid-row-gap', 'grid-row-start', 'grid-template', 'grid-template-areas',
+ 'grid-template-columns', 'grid-template-rows', 'hanging-punctuation',
+ 'height', 'hyphenate-character', 'hyphenate-limit-chars',
+ 'hyphenate-limit-last', 'hyphenate-limit-lines', 'hyphenate-limit-zone',
+ 'hyphens', 'image-orientation', 'image-resolution', 'initial-letter',
+ 'initial-letter-align', 'initial-letter-wrap', 'isolation',
+ 'justify-content', 'justify-items', 'justify-self', 'left',
+ 'letter-spacing', 'lighting-color', 'line-break', 'line-grid',
+ 'line-height', 'line-snap', 'list-style', 'list-style-image',
+ 'list-style-position', 'list-style-type', 'margin', 'margin-bottom',
+ 'margin-left', 'margin-right', 'margin-top', 'marker-side',
+ 'marquee-direction', 'marquee-loop', 'marquee-speed', 'marquee-style',
+ 'mask', 'mask-border', 'mask-border-mode', 'mask-border-outset',
+ 'mask-border-repeat', 'mask-border-slice', 'mask-border-source',
+ 'mask-border-width', 'mask-clip', 'mask-composite', 'mask-image',
+ 'mask-mode', 'mask-origin', 'mask-position', 'mask-repeat', 'mask-size',
+ 'mask-type', 'max-height', 'max-lines', 'max-width', 'min-height',
+ 'min-width', 'mix-blend-mode', 'motion', 'motion-offset', 'motion-path',
+ 'motion-rotation', 'move-to', 'nav-down', 'nav-left', 'nav-right',
+ 'nav-up', 'object-fit', 'object-position', 'offset-after', 'offset-before',
+ 'offset-end', 'offset-start', 'opacity', 'order', 'orphans', 'outline',
+ 'outline-color', 'outline-offset', 'outline-style', 'outline-width',
+ 'overflow', 'overflow-style', 'overflow-wrap', 'overflow-x', 'overflow-y',
+ 'padding', 'padding-bottom', 'padding-left', 'padding-right', 'padding-top',
+ 'page', 'page-break-after', 'page-break-before', 'page-break-inside',
+ 'page-policy', 'pause', 'pause-after', 'pause-before', 'perspective',
+ 'perspective-origin', 'pitch', 'pitch-range', 'play-during', 'polar-angle',
+ 'polar-distance', 'position', 'presentation-level', 'quotes',
+ 'region-fragment', 'resize', 'rest', 'rest-after', 'rest-before',
+ 'richness', 'right', 'rotation', 'rotation-point', 'ruby-align',
+ 'ruby-merge', 'ruby-position', 'running', 'scroll-snap-coordinate',
+ 'scroll-snap-destination', 'scroll-snap-points-x', 'scroll-snap-points-y',
+ 'scroll-snap-type', 'shape-image-threshold', 'shape-inside', 'shape-margin',
+ 'shape-outside', 'size', 'speak', 'speak-as', 'speak-header',
+ 'speak-numeral', 'speak-punctuation', 'speech-rate', 'stress', 'string-set',
+ 'tab-size', 'table-layout', 'text-align', 'text-align-last',
+ 'text-combine-upright', 'text-decoration', 'text-decoration-color',
+ 'text-decoration-line', 'text-decoration-skip', 'text-decoration-style',
+ 'text-emphasis', 'text-emphasis-color', 'text-emphasis-position',
+ 'text-emphasis-style', 'text-indent', 'text-justify', 'text-orientation',
+ 'text-overflow', 'text-shadow', 'text-space-collapse', 'text-space-trim',
+ 'text-spacing', 'text-transform', 'text-underline-position', 'text-wrap',
+ 'top', 'transform', 'transform-origin', 'transform-style', 'transition',
+ 'transition-delay', 'transition-duration', 'transition-property',
+ 'transition-timing-function', 'unicode-bidi', 'user-select',
+ 'vertical-align', 'visibility', 'voice-balance', 'voice-duration',
+ 'voice-family', 'voice-pitch', 'voice-range', 'voice-rate', 'voice-stress',
+ 'voice-volume', 'volume', 'white-space', 'widows', 'width', 'will-change',
+ 'word-break', 'word-spacing', 'word-wrap', 'wrap-after', 'wrap-before',
+ 'wrap-flow', 'wrap-inside', 'wrap-through', 'writing-mode', 'z-index',
+)
+
+# List of keyword values obtained from:
+# http://cssvalues.com/
+_keyword_values = (
+ 'absolute', 'alias', 'all', 'all-petite-caps', 'all-scroll',
+ 'all-small-caps', 'allow-end', 'alpha', 'alternate', 'alternate-reverse',
+ 'always', 'armenian', 'auto', 'avoid', 'avoid-column', 'avoid-page',
+ 'backwards', 'balance', 'baseline', 'below', 'blink', 'block', 'bold',
+ 'bolder', 'border-box', 'both', 'bottom', 'box-decoration', 'break-word',
+ 'capitalize', 'cell', 'center', 'circle', 'clip', 'clone', 'close-quote',
+ 'col-resize', 'collapse', 'color', 'color-burn', 'color-dodge', 'column',
+ 'column-reverse', 'compact', 'condensed', 'contain', 'container',
+ 'content-box', 'context-menu', 'copy', 'cover', 'crisp-edges', 'crosshair',
+ 'currentColor', 'cursive', 'darken', 'dashed', 'decimal',
+ 'decimal-leading-zero', 'default', 'descendants', 'difference', 'digits',
+ 'disc', 'distribute', 'dot', 'dotted', 'double', 'double-circle', 'e-resize',
+ 'each-line', 'ease', 'ease-in', 'ease-in-out', 'ease-out', 'edges',
+ 'ellipsis', 'end', 'ew-resize', 'exclusion', 'expanded', 'extra-condensed',
+ 'extra-expanded', 'fantasy', 'fill', 'fill-box', 'filled', 'first', 'fixed',
+ 'flat', 'flex', 'flex-end', 'flex-start', 'flip', 'force-end', 'forwards',
+ 'from-image', 'full-width', 'geometricPrecision', 'georgian', 'groove',
+ 'hanging', 'hard-light', 'help', 'hidden', 'hide', 'horizontal', 'hue',
+ 'icon', 'infinite', 'inherit', 'initial', 'ink', 'inline', 'inline-block',
+ 'inline-flex', 'inline-table', 'inset', 'inside', 'inter-word', 'invert',
+ 'isolate', 'italic', 'justify', 'large', 'larger', 'last', 'left',
+ 'lighten', 'lighter', 'line-through', 'linear', 'list-item', 'local',
+ 'loose', 'lower-alpha', 'lower-greek', 'lower-latin', 'lower-roman',
+ 'lowercase', 'ltr', 'luminance', 'luminosity', 'mandatory', 'manipulation',
+ 'manual', 'margin-box', 'match-parent', 'medium', 'mixed', 'monospace',
+ 'move', 'multiply', 'n-resize', 'ne-resize', 'nesw-resize',
+ 'no-close-quote', 'no-drop', 'no-open-quote', 'no-repeat', 'none', 'normal',
+ 'not-allowed', 'nowrap', 'ns-resize', 'nw-resize', 'nwse-resize', 'objects',
+ 'oblique', 'off', 'on', 'open', 'open-quote', 'optimizeLegibility',
+ 'optimizeSpeed', 'outset', 'outside', 'over', 'overlay', 'overline',
+ 'padding-box', 'page', 'pan-down', 'pan-left', 'pan-right', 'pan-up',
+ 'pan-x', 'pan-y', 'paused', 'petite-caps', 'pixelated', 'pointer',
+ 'preserve-3d', 'progress', 'proximity', 'relative', 'repeat',
+ 'repeat no-repeat', 'repeat-x', 'repeat-y', 'reverse', 'ridge', 'right',
+ 'round', 'row', 'row-resize', 'row-reverse', 'rtl', 'ruby', 'ruby-base',
+ 'ruby-base-container', 'ruby-text', 'ruby-text-container', 'run-in',
+ 'running', 's-resize', 'sans-serif', 'saturation', 'scale-down', 'screen',
+ 'scroll', 'se-resize', 'semi-condensed', 'semi-expanded', 'separate',
+ 'serif', 'sesame', 'show', 'sideways', 'sideways-left', 'sideways-right',
+ 'slice', 'small', 'small-caps', 'smaller', 'smooth', 'snap', 'soft-light',
+ 'solid', 'space', 'space-around', 'space-between', 'spaces', 'square',
+ 'start', 'static', 'step-end', 'step-start', 'sticky', 'stretch', 'strict',
+ 'stroke-box', 'style', 'sw-resize', 'table', 'table-caption', 'table-cell',
+ 'table-column', 'table-column-group', 'table-footer-group',
+ 'table-header-group', 'table-row', 'table-row-group', 'text', 'thick',
+ 'thin', 'titling-caps', 'to', 'top', 'triangle', 'ultra-condensed',
+ 'ultra-expanded', 'under', 'underline', 'unicase', 'unset', 'upper-alpha',
+ 'upper-latin', 'upper-roman', 'uppercase', 'upright', 'use-glyph-orientation',
+ 'vertical', 'vertical-text', 'view-box', 'visible', 'w-resize', 'wait',
+ 'wavy', 'weight', 'weight style', 'wrap', 'wrap-reverse', 'x-large',
+ 'x-small', 'xx-large', 'xx-small', 'zoom-in', 'zoom-out',
+)
+
+# List of extended color keywords obtained from:
+# https://drafts.csswg.org/css-color/#named-colors
+_color_keywords = (
+ 'aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige',
+ 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown',
+ 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral',
+ 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan',
+ 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki',
+ 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred',
+ 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray',
+ 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue',
+ 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite',
+ 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod',
+ 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred',
+ 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen',
+ 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan',
+ 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey',
+ 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue',
+ 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow',
+ 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine',
+ 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen',
+ 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise',
+ 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin',
+ 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange',
+ 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise',
+ 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum',
+ 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue',
+ 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna',
+ 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow',
+ 'springgreen', 'steelblue', 'tan', 'teal', 'thistle', 'tomato', 'turquoise',
+ 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen',
+) + ('transparent',)
+
+# List of other keyword values from other sources:
+_other_keyword_values = (
+ 'above', 'aural', 'behind', 'bidi-override', 'center-left', 'center-right',
+ 'cjk-ideographic', 'continuous', 'crop', 'cross', 'embed', 'far-left',
+ 'far-right', 'fast', 'faster', 'hebrew', 'high', 'higher', 'hiragana',
+ 'hiragana-iroha', 'katakana', 'katakana-iroha', 'landscape', 'left-side',
+ 'leftwards', 'level', 'loud', 'low', 'lower', 'message-box', 'middle',
+ 'mix', 'narrower', 'once', 'portrait', 'right-side', 'rightwards', 'silent',
+ 'slow', 'slower', 'small-caption', 'soft', 'spell-out', 'status-bar',
+ 'super', 'text-bottom', 'text-top', 'wider', 'x-fast', 'x-high', 'x-loud',
+ 'x-low', 'x-soft', 'yes', 'pre', 'pre-wrap', 'pre-line',
+)
+
+# List of functional notation and function keyword values:
+_functional_notation_keyword_values = (
+ 'attr', 'blackness', 'blend', 'blenda', 'blur', 'brightness', 'calc',
+ 'circle', 'color-mod', 'contrast', 'counter', 'cubic-bezier', 'device-cmyk',
+ 'drop-shadow', 'ellipse', 'gray', 'grayscale', 'hsl', 'hsla', 'hue',
+ 'hue-rotate', 'hwb', 'image', 'inset', 'invert', 'lightness',
+ 'linear-gradient', 'matrix', 'matrix3d', 'opacity', 'perspective',
+ 'polygon', 'radial-gradient', 'rect', 'repeating-linear-gradient',
+ 'repeating-radial-gradient', 'rgb', 'rgba', 'rotate', 'rotate3d', 'rotateX',
+ 'rotateY', 'rotateZ', 'saturate', 'saturation', 'scale', 'scale3d',
+ 'scaleX', 'scaleY', 'scaleZ', 'sepia', 'shade', 'skewX', 'skewY', 'steps',
+ 'tint', 'toggle', 'translate', 'translate3d', 'translateX', 'translateY',
+ 'translateZ', 'whiteness',
+)
+# Note! Handle url(...) separately.
+
+# List of units obtained from:
+# https://www.w3.org/TR/css3-values/
+_angle_units = (
+ 'deg', 'grad', 'rad', 'turn',
+)
+_frequency_units = (
+ 'Hz', 'kHz',
+)
+_length_units = (
+ 'em', 'ex', 'ch', 'rem',
+ 'vh', 'vw', 'vmin', 'vmax',
+ 'px', 'mm', 'cm', 'in', 'pt', 'pc', 'q',
+)
+_resolution_units = (
+ 'dpi', 'dpcm', 'dppx',
+)
+_time_units = (
+ 's', 'ms',
+)
+_all_units = _angle_units + _frequency_units + _length_units + \
+ _resolution_units + _time_units
+
+
class CssLexer(RegexLexer):
"""
For CSS (Cascading Style Sheets).
@@ -39,10 +284,10 @@ class CssLexer(RegexLexer):
(r'\s+', Text),
(r'/\*(?:.|\n)*?\*/', Comment),
(r'\{', Punctuation, 'content'),
- (r'\:[\w-]+', Name.Decorator),
- (r'\.[\w-]+', Name.Class),
- (r'\#[\w-]+', Name.Namespace),
- (r'@[\w-]+', Keyword, 'atrule'),
+ (r'(\:{1,2})([\w-]+)', bygroups(Punctuation, Name.Decorator)),
+ (r'(\.)([\w-]+)', bygroups(Punctuation, Name.Class)),
+ (r'(\#)([\w-]+)', bygroups(Punctuation, Name.Namespace)),
+ (r'(@)([\w-]+)', bygroups(Punctuation, Keyword), 'atrule'),
(r'[\w-]+', Name.Tag),
(r'[~^*!%&$\[\]()<>|+=@:;,./?-]', Operator),
(r'"(\\\\|\\"|[^"])*"', String.Double),
@@ -60,107 +305,81 @@ class CssLexer(RegexLexer):
'content': [
(r'\s+', Text),
(r'\}', Punctuation, '#pop'),
- (r'url\(.*?\)', String.Other),
+ (r';', Punctuation),
(r'^@.*?$', Comment.Preproc),
- (words((
- 'azimuth', 'background-attachment', 'background-color',
- 'background-image', 'background-position', 'background-repeat',
- 'background', 'border-bottom-color', 'border-bottom-style',
- 'border-bottom-width', 'border-left-color', 'border-left-style',
- 'border-left-width', 'border-right', 'border-right-color',
- 'border-right-style', 'border-right-width', 'border-top-color',
- 'border-top-style', 'border-top-width', 'border-bottom',
- 'border-collapse', 'border-left', 'border-width', 'border-color',
- 'border-spacing', 'border-style', 'border-top', 'border', 'caption-side',
- 'clear', 'clip', 'color', 'content', 'counter-increment', 'counter-reset',
- 'cue-after', 'cue-before', 'cue', 'cursor', 'direction', 'display',
- 'elevation', 'empty-cells', 'float', 'font-family', 'font-size',
- 'font-size-adjust', 'font-stretch', 'font-style', 'font-variant',
- 'font-weight', 'font', 'height', 'letter-spacing', 'line-height',
- 'list-style-type', 'list-style-image', 'list-style-position',
- 'list-style', 'margin-bottom', 'margin-left', 'margin-right',
- 'margin-top', 'margin', 'marker-offset', 'marks', 'max-height', 'max-width',
- 'min-height', 'min-width', 'opacity', 'orphans', 'outline-color',
- 'outline-style', 'outline-width', 'outline', 'overflow', 'overflow-x',
- 'overflow-y', 'padding-bottom', 'padding-left', 'padding-right', 'padding-top',
- 'padding', 'page', 'page-break-after', 'page-break-before', 'page-break-inside',
- 'pause-after', 'pause-before', 'pause', 'pitch-range', 'pitch',
- 'play-during', 'position', 'quotes', 'richness', 'right', 'size',
- 'speak-header', 'speak-numeral', 'speak-punctuation', 'speak',
- 'speech-rate', 'stress', 'table-layout', 'text-align', 'text-decoration',
- 'text-indent', 'text-shadow', 'text-transform', 'top', 'unicode-bidi',
- 'vertical-align', 'visibility', 'voice-family', 'volume', 'white-space',
- 'widows', 'width', 'word-spacing', 'z-index', 'bottom',
- 'above', 'absolute', 'always', 'armenian', 'aural', 'auto', 'avoid', 'baseline',
- 'behind', 'below', 'bidi-override', 'blink', 'block', 'bolder', 'bold', 'both',
- 'capitalize', 'center-left', 'center-right', 'center', 'circle',
- 'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous',
- 'crop', 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
- 'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed',
- 'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left',
- 'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help',
- 'hidden', 'hide', 'higher', 'high', 'hiragana-iroha', 'hiragana', 'icon',
- 'inherit', 'inline-table', 'inline', 'inset', 'inside', 'invert', 'italic',
- 'justify', 'katakana-iroha', 'katakana', 'landscape', 'larger', 'large',
- 'left-side', 'leftwards', 'left', 'level', 'lighter', 'line-through', 'list-item',
- 'loud', 'lower-alpha', 'lower-greek', 'lower-roman', 'lowercase', 'ltr',
- 'lower', 'low', 'medium', 'message-box', 'middle', 'mix', 'monospace',
- 'n-resize', 'narrower', 'ne-resize', 'no-close-quote', 'no-open-quote',
- 'no-repeat', 'none', 'normal', 'nowrap', 'nw-resize', 'oblique', 'once',
- 'open-quote', 'outset', 'outside', 'overline', 'pointer', 'portrait', 'px',
- 'relative', 'repeat-x', 'repeat-y', 'repeat', 'rgb', 'ridge', 'right-side',
- 'rightwards', 's-resize', 'sans-serif', 'scroll', 'se-resize',
- 'semi-condensed', 'semi-expanded', 'separate', 'serif', 'show', 'silent',
- 'slower', 'slow', 'small-caps', 'small-caption', 'smaller', 'soft', 'solid',
- 'spell-out', 'square', 'static', 'status-bar', 'super', 'sw-resize',
- 'table-caption', 'table-cell', 'table-column', 'table-column-group',
- 'table-footer-group', 'table-header-group', 'table-row',
- 'table-row-group', 'text-bottom', 'text-top', 'text', 'thick', 'thin',
- 'transparent', 'ultra-condensed', 'ultra-expanded', 'underline',
- 'upper-alpha', 'upper-latin', 'upper-roman', 'uppercase', 'url',
- 'visible', 'w-resize', 'wait', 'wider', 'x-fast', 'x-high', 'x-large', 'x-loud',
- 'x-low', 'x-small', 'x-soft', 'xx-large', 'xx-small', 'yes'), suffix=r'\b'),
- Name.Builtin),
- (words((
- 'indigo', 'gold', 'firebrick', 'indianred', 'yellow', 'darkolivegreen',
- 'darkseagreen', 'mediumvioletred', 'mediumorchid', 'chartreuse',
- 'mediumslateblue', 'black', 'springgreen', 'crimson', 'lightsalmon', 'brown',
- 'turquoise', 'olivedrab', 'cyan', 'silver', 'skyblue', 'gray', 'darkturquoise',
- 'goldenrod', 'darkgreen', 'darkviolet', 'darkgray', 'lightpink', 'teal',
- 'darkmagenta', 'lightgoldenrodyellow', 'lavender', 'yellowgreen', 'thistle',
- 'violet', 'navy', 'orchid', 'blue', 'ghostwhite', 'honeydew', 'cornflowerblue',
- 'darkblue', 'darkkhaki', 'mediumpurple', 'cornsilk', 'red', 'bisque', 'slategray',
- 'darkcyan', 'khaki', 'wheat', 'deepskyblue', 'darkred', 'steelblue', 'aliceblue',
- 'gainsboro', 'mediumturquoise', 'floralwhite', 'coral', 'purple', 'lightgrey',
- 'lightcyan', 'darksalmon', 'beige', 'azure', 'lightsteelblue', 'oldlace',
- 'greenyellow', 'royalblue', 'lightseagreen', 'mistyrose', 'sienna',
- 'lightcoral', 'orangered', 'navajowhite', 'lime', 'palegreen', 'burlywood',
- 'seashell', 'mediumspringgreen', 'fuchsia', 'papayawhip', 'blanchedalmond',
- 'peru', 'aquamarine', 'white', 'darkslategray', 'ivory', 'dodgerblue',
- 'lemonchiffon', 'chocolate', 'orange', 'forestgreen', 'slateblue', 'olive',
- 'mintcream', 'antiquewhite', 'darkorange', 'cadetblue', 'moccasin',
- 'limegreen', 'saddlebrown', 'darkslateblue', 'lightskyblue', 'deeppink',
- 'plum', 'aqua', 'darkgoldenrod', 'maroon', 'sandybrown', 'magenta', 'tan',
- 'rosybrown', 'pink', 'lightblue', 'palevioletred', 'mediumseagreen',
- 'dimgray', 'powderblue', 'seagreen', 'snow', 'mediumblue', 'midnightblue',
- 'paleturquoise', 'palegoldenrod', 'whitesmoke', 'darkorchid', 'salmon',
- 'lightslategray', 'lawngreen', 'lightgreen', 'tomato', 'hotpink',
- 'lightyellow', 'lavenderblush', 'linen', 'mediumaquamarine', 'green',
- 'blueviolet', 'peachpuff'), suffix=r'\b'),
- Name.Builtin),
+
+ (words(_vendor_prefixes,), Keyword.Pseudo),
+ (r'('+r'|'.join(_css_properties)+r')(\s*)(\:)',
+ bygroups(Keyword, Text, Punctuation), 'value-start'),
+ (r'([a-zA-Z_][\w-]*)(\s*)(\:)', bygroups(Name, Text, Punctuation),
+ 'value-start'),
+
+ (r'/\*(?:.|\n)*?\*/', Comment),
+ ],
+ 'value-start': [
+ (r'\s+', Text),
+ (words(_vendor_prefixes,), Name.Builtin.Pseudo),
+ include('urls'),
+ (r'('+r'|'.join(_functional_notation_keyword_values)+r')(\()',
+ bygroups(Name.Builtin, Punctuation), 'function-start'),
+ (r'([a-zA-Z_][\w-]+)(\()', bygroups(Name.Function, Punctuation), 'function-start'),
+ (words(_keyword_values, suffix=r'\b'), Keyword.Constant),
+ (words(_other_keyword_values, suffix=r'\b'), Keyword.Constant),
+ (words(_color_keywords, suffix=r'\b'), Keyword.Constant),
+ (words(_css_properties, suffix=r'\b'), Keyword), # for transition-property etc.
(r'\!important', Comment.Preproc),
(r'/\*(?:.|\n)*?\*/', Comment),
- (r'\#[a-zA-Z0-9]{1,6}', Number),
- (r'[.-]?[0-9]*[.]?[0-9]+(em|px|pt|pc|in|mm|cm|ex|s)\b', Number),
- # Separate regex for percentages, as can't do word boundaries with %
- (r'[.-]?[0-9]*[.]?[0-9]+%', Number),
- (r'-?[0-9]+', Number),
- (r'[~^*!%&<>|+=@:,./?-]+', Operator),
- (r'[\[\]();]+', Punctuation),
+
+ include('numeric-values'),
+
+ (r'[~^*!%&<>|+=@:./?-]+', Operator),
+ (r'[\[\](),]+', Punctuation),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'[a-zA-Z_]\w*', Name)
- ]
+ (r'[a-zA-Z_][\w-]*', Name),
+ (r';', Punctuation, '#pop'),
+ (r'\}', Punctuation, '#pop:2'),
+ ],
+ 'function-start': [
+ (r'\s+', Text),
+ include('urls'),
+ (words(_vendor_prefixes,), Keyword.Pseudo),
+ (words(_keyword_values, suffix=r'\b'), Keyword.Constant),
+ (words(_other_keyword_values, suffix=r'\b'), Keyword.Constant),
+ (words(_color_keywords, suffix=r'\b'), Keyword.Constant),
+
+ # function-start may be entered recursively
+ (r'(' + r'|'.join(_functional_notation_keyword_values) + r')(\()',
+ bygroups(Name.Builtin, Punctuation), 'function-start'),
+ (r'([a-zA-Z_][\w-]+)(\()', bygroups(Name.Function, Punctuation), 'function-start'),
+
+ (r'/\*(?:.|\n)*?\*/', Comment),
+ include('numeric-values'),
+ (r'[*+/-]', Operator),
+ (r'[,]', Punctuation),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[a-zA-Z_-]\w*', Name),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ 'urls': [
+ (r'(url)(\()(".*?")(\))', bygroups(Name.Builtin, Punctuation,
+ String.Double, Punctuation)),
+ (r"(url)(\()('.*?')(\))", bygroups(Name.Builtin, Punctuation,
+ String.Single, Punctuation)),
+ (r'(url)(\()(.*?)(\))', bygroups(Name.Builtin, Punctuation,
+ String.Other, Punctuation)),
+ ],
+ 'numeric-values': [
+ (r'\#[a-zA-Z0-9]{1,6}', Number.Hex),
+ (r'[+\-]?[0-9]*[.][0-9]+', Number.Float, 'numeric-end'),
+ (r'[+\-]?[0-9]+', Number.Integer, 'numeric-end'),
+ ],
+ 'numeric-end': [
+ (words(_all_units, suffix=r'\b'), Keyword.Type),
+ (r'%', Keyword.Type),
+ default('#pop'),
+ ],
}
@@ -170,35 +389,7 @@ common_sass_tokens = {
(r'[!$][\w-]+', Name.Variable),
(r'url\(', String.Other, 'string-url'),
(r'[a-z_-][\w-]*(?=\()', Name.Function),
- (words((
- 'azimuth', 'background-attachment', 'background-color',
- 'background-image', 'background-position', 'background-repeat',
- 'background', 'border-bottom-color', 'border-bottom-style',
- 'border-bottom-width', 'border-left-color', 'border-left-style',
- 'border-left-width', 'border-right', 'border-right-color',
- 'border-right-style', 'border-right-width', 'border-top-color',
- 'border-top-style', 'border-top-width', 'border-bottom',
- 'border-collapse', 'border-left', 'border-width', 'border-color',
- 'border-spacing', 'border-style', 'border-top', 'border', 'caption-side',
- 'clear', 'clip', 'color', 'content', 'counter-increment', 'counter-reset',
- 'cue-after', 'cue-before', 'cue', 'cursor', 'direction', 'display',
- 'elevation', 'empty-cells', 'float', 'font-family', 'font-size',
- 'font-size-adjust', 'font-stretch', 'font-style', 'font-variant',
- 'font-weight', 'font', 'height', 'letter-spacing', 'line-height',
- 'list-style-type', 'list-style-image', 'list-style-position',
- 'list-style', 'margin-bottom', 'margin-left', 'margin-right',
- 'margin-top', 'margin', 'marker-offset', 'marks', 'max-height', 'max-width',
- 'min-height', 'min-width', 'opacity', 'orphans', 'outline', 'outline-color',
- 'outline-style', 'outline-width', 'overflow', 'padding-bottom',
- 'padding-left', 'padding-right', 'padding-top', 'padding', 'page',
- 'page-break-after', 'page-break-before', 'page-break-inside',
- 'pause-after', 'pause-before', 'pause', 'pitch', 'pitch-range',
- 'play-during', 'position', 'quotes', 'richness', 'right', 'size',
- 'speak-header', 'speak-numeral', 'speak-punctuation', 'speak',
- 'speech-rate', 'stress', 'table-layout', 'text-align', 'text-decoration',
- 'text-indent', 'text-shadow', 'text-transform', 'top', 'unicode-bidi',
- 'vertical-align', 'visibility', 'voice-family', 'volume', 'white-space',
- 'widows', 'width', 'word-spacing', 'z-index', 'bottom', 'left',
+ (words(_css_properties + (
'above', 'absolute', 'always', 'armenian', 'aural', 'auto', 'avoid', 'baseline',
'behind', 'below', 'bidi-override', 'blink', 'block', 'bold', 'bolder', 'both',
'capitalize', 'center-left', 'center-right', 'center', 'circle',
@@ -229,33 +420,7 @@ common_sass_tokens = {
'visible', 'w-resize', 'wait', 'wider', 'x-fast', 'x-high', 'x-large', 'x-loud',
'x-low', 'x-small', 'x-soft', 'xx-large', 'xx-small', 'yes'), suffix=r'\b'),
Name.Constant),
- (words((
- 'indigo', 'gold', 'firebrick', 'indianred', 'darkolivegreen',
- 'darkseagreen', 'mediumvioletred', 'mediumorchid', 'chartreuse',
- 'mediumslateblue', 'springgreen', 'crimson', 'lightsalmon', 'brown',
- 'turquoise', 'olivedrab', 'cyan', 'skyblue', 'darkturquoise',
- 'goldenrod', 'darkgreen', 'darkviolet', 'darkgray', 'lightpink',
- 'darkmagenta', 'lightgoldenrodyellow', 'lavender', 'yellowgreen', 'thistle',
- 'violet', 'orchid', 'ghostwhite', 'honeydew', 'cornflowerblue',
- 'darkblue', 'darkkhaki', 'mediumpurple', 'cornsilk', 'bisque', 'slategray',
- 'darkcyan', 'khaki', 'wheat', 'deepskyblue', 'darkred', 'steelblue', 'aliceblue',
- 'gainsboro', 'mediumturquoise', 'floralwhite', 'coral', 'lightgrey',
- 'lightcyan', 'darksalmon', 'beige', 'azure', 'lightsteelblue', 'oldlace',
- 'greenyellow', 'royalblue', 'lightseagreen', 'mistyrose', 'sienna',
- 'lightcoral', 'orangered', 'navajowhite', 'palegreen', 'burlywood',
- 'seashell', 'mediumspringgreen', 'papayawhip', 'blanchedalmond',
- 'peru', 'aquamarine', 'darkslategray', 'ivory', 'dodgerblue',
- 'lemonchiffon', 'chocolate', 'orange', 'forestgreen', 'slateblue',
- 'mintcream', 'antiquewhite', 'darkorange', 'cadetblue', 'moccasin',
- 'limegreen', 'saddlebrown', 'darkslateblue', 'lightskyblue', 'deeppink',
- 'plum', 'darkgoldenrod', 'sandybrown', 'magenta', 'tan',
- 'rosybrown', 'pink', 'lightblue', 'palevioletred', 'mediumseagreen',
- 'dimgray', 'powderblue', 'seagreen', 'snow', 'mediumblue', 'midnightblue',
- 'paleturquoise', 'palegoldenrod', 'whitesmoke', 'darkorchid', 'salmon',
- 'lightslategray', 'lawngreen', 'lightgreen', 'tomato', 'hotpink',
- 'lightyellow', 'lavenderblush', 'linen', 'mediumaquamarine',
- 'blueviolet', 'peachpuff'), suffix=r'\b'),
- Name.Entity),
+ (words(_color_keywords, suffix=r'\b'), Name.Entity),
(words((
'black', 'silver', 'gray', 'white', 'maroon', 'red', 'purple', 'fuchsia', 'green',
'lime', 'olive', 'yellow', 'navy', 'blue', 'teal', 'aqua'), suffix=r'\b'),
diff --git a/pygments/lexers/d.py b/pygments/lexers/d.py
index 98e01dcf..09e6fe87 100644
--- a/pygments/lexers/d.py
+++ b/pygments/lexers/d.py
@@ -5,7 +5,7 @@
Lexers for D languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/dalvik.py b/pygments/lexers/dalvik.py
index 2f26fa04..c211f13e 100644
--- a/pygments/lexers/dalvik.py
+++ b/pygments/lexers/dalvik.py
@@ -5,7 +5,7 @@
Pygments lexers for Dalvik VM-related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/data.py b/pygments/lexers/data.py
index c88375d5..296366c2 100644
--- a/pygments/lexers/data.py
+++ b/pygments/lexers/data.py
@@ -5,7 +5,7 @@
Lexers for data file format.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -14,9 +14,9 @@ import re
from pygments.lexer import RegexLexer, ExtendedRegexLexer, LexerContext, \
include, bygroups, inherit
from pygments.token import Text, Comment, Keyword, Name, String, Number, \
- Punctuation, Literal
+ Punctuation, Literal, Error
-__all__ = ['YamlLexer', 'JsonLexer', 'JsonLdLexer']
+__all__ = ['YamlLexer', 'JsonLexer', 'JsonBareObjectLexer', 'JsonLdLexer']
class YamlLexerContext(LexerContext):
@@ -247,10 +247,10 @@ class YamlLexer(ExtendedRegexLexer):
# tags, anchors, aliases
'descriptors': [
# a full-form tag
- (r'!<[\w;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type),
+ (r'!<[\w#;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type),
# a tag in the form '!', '!suffix' or '!handle!suffix'
- (r'!(?:[\w-]+)?'
- r'(?:![\w;/?:@&=+$,.!~*\'()\[\]%-]+)?', Keyword.Type),
+ (r'!(?:[\w-]+!)?'
+ r'[\w#;/?:@&=+$,.!~*\'()\[\]%-]+', Keyword.Type),
# an anchor
(r'&[\w-]+', Name.Label),
# an alias
@@ -476,7 +476,7 @@ class JsonLexer(RegexLexer):
# comma terminates the attribute but expects more
(r',', Punctuation, '#pop'),
# a closing bracket terminates the entire object, so pop twice
- (r'\}', Punctuation, ('#pop', '#pop')),
+ (r'\}', Punctuation, '#pop:2'),
],
# a json object - { attr, attr, ... }
@@ -508,6 +508,31 @@ class JsonLexer(RegexLexer):
],
}
+
+class JsonBareObjectLexer(JsonLexer):
+ """
+ For JSON data structures (with missing object curly braces).
+
+ .. versionadded:: 2.2
+ """
+
+ name = 'JSONBareObject'
+ aliases = ['json-object']
+ filenames = []
+ mimetypes = ['application/json-object']
+
+ tokens = {
+ 'root': [
+ (r'\}', Error),
+ include('objectvalue'),
+ ],
+ 'objectattribute': [
+ (r'\}', Error),
+ inherit,
+ ],
+ }
+
+
class JsonLdLexer(JsonLexer):
"""
For `JSON-LD <http://json-ld.org/>`_ linked data.
diff --git a/pygments/lexers/diff.py b/pygments/lexers/diff.py
index 726b49ad..f7019440 100644
--- a/pygments/lexers/diff.py
+++ b/pygments/lexers/diff.py
@@ -5,7 +5,7 @@
Lexers for diff/patch formats.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index 11b4573e..4e2bc8ab 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -5,7 +5,7 @@
Lexers for .net languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py
index 6032017f..a1426bd6 100644
--- a/pygments/lexers/dsls.py
+++ b/pygments/lexers/dsls.py
@@ -5,20 +5,20 @@
Lexers for various domain-specific languages.
- :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import RegexLexer, bygroups, words, include, default, \
- this, using, combined
+from pygments.lexer import ExtendedRegexLexer, RegexLexer, bygroups, words, \
+ include, default, this, using, combined
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Literal, Whitespace
__all__ = ['ProtoBufLexer', 'BroLexer', 'PuppetLexer', 'RslLexer',
'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer',
- 'CrmshLexer', 'ThriftLexer', 'FlatlineLexer']
+ 'CrmshLexer', 'ThriftLexer', 'FlatlineLexer', 'SnowballLexer']
class ProtoBufLexer(RegexLexer):
@@ -36,7 +36,7 @@ class ProtoBufLexer(RegexLexer):
tokens = {
'root': [
(r'[ \t]+', Text),
- (r'[,;{}\[\]()]', Punctuation),
+ (r'[,;{}\[\]()<>]', Punctuation),
(r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
(r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
(words((
@@ -156,7 +156,7 @@ class ThriftLexer(RegexLexer):
Keyword.Namespace),
(words((
'void', 'bool', 'byte', 'i16', 'i32', 'i64', 'double',
- 'string', 'binary', 'void', 'map', 'list', 'set', 'slist',
+ 'string', 'binary', 'map', 'list', 'set', 'slist',
'senum'), suffix=r'\b'),
Keyword.Type),
(words((
@@ -581,7 +581,7 @@ class PanLexer(RegexLexer):
'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final',
'prefix', 'unique', 'object', 'foreach', 'include', 'template',
'function', 'variable', 'structure', 'extensible', 'declaration'),
- prefix=r'\b', suffix=r'\s*\b'),
+ prefix=r'\b', suffix=r'\s*\b'),
Keyword),
(words((
'file_contents', 'format', 'index', 'length', 'match', 'matches',
@@ -593,7 +593,7 @@ class PanLexer(RegexLexer):
'is_number', 'is_property', 'is_resource', 'is_string', 'to_boolean',
'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists',
'path_exists', 'if_exists', 'return', 'value'),
- prefix=r'\b', suffix=r'\s*\b'),
+ prefix=r'\b', suffix=r'\s*\b'),
Name.Builtin),
(r'#.*', Comment),
(r'\\[\w\W]', String.Escape),
@@ -767,3 +767,112 @@ class FlatlineLexer(RegexLexer):
(r'(\(|\))', Punctuation),
],
}
+
+
+class SnowballLexer(ExtendedRegexLexer):
+ """
+ Lexer for `Snowball <http://snowballstem.org/>`_ source code.
+
+ .. versionadded:: 2.2
+ """
+
+ name = 'Snowball'
+ aliases = ['snowball']
+ filenames = ['*.sbl']
+
+ _ws = r'\n\r\t '
+
+ def __init__(self, **options):
+ self._reset_stringescapes()
+ ExtendedRegexLexer.__init__(self, **options)
+
+ def _reset_stringescapes(self):
+ self._start = "'"
+ self._end = "'"
+
+ def _string(do_string_first):
+ def callback(lexer, match, ctx):
+ s = match.start()
+ text = match.group()
+ string = re.compile(r'([^%s]*)(.)' % re.escape(lexer._start)).match
+ escape = re.compile(r'([^%s]*)(.)' % re.escape(lexer._end)).match
+ pos = 0
+ do_string = do_string_first
+ while pos < len(text):
+ if do_string:
+ match = string(text, pos)
+ yield s + match.start(1), String.Single, match.group(1)
+ if match.group(2) == "'":
+ yield s + match.start(2), String.Single, match.group(2)
+ ctx.stack.pop()
+ break
+ yield s + match.start(2), String.Escape, match.group(2)
+ pos = match.end()
+ match = escape(text, pos)
+ yield s + match.start(), String.Escape, match.group()
+ if match.group(2) != lexer._end:
+ ctx.stack[-1] = 'escape'
+ break
+ pos = match.end()
+ do_string = True
+ ctx.pos = s + match.end()
+ return callback
+
+ def _stringescapes(lexer, match, ctx):
+ lexer._start = match.group(3)
+ lexer._end = match.group(5)
+ return bygroups(Keyword.Reserved, Text, String.Escape, Text,
+ String.Escape)(lexer, match, ctx)
+
+ tokens = {
+ 'root': [
+ (words(('len', 'lenof'), suffix=r'\b'), Operator.Word),
+ include('root1'),
+ ],
+ 'root1': [
+ (r'[%s]+' % _ws, Text),
+ (r'\d+', Number.Integer),
+ (r"'", String.Single, 'string'),
+ (r'[()]', Punctuation),
+ (r'/\*[\w\W]*?\*/', Comment.Multiline),
+ (r'//.*', Comment.Single),
+ (r'[!*+\-/<=>]=|[-=]>|<[+-]|[$*+\-/<=>?\[\]]', Operator),
+ (words(('as', 'get', 'hex', 'among', 'define', 'decimal',
+ 'backwardmode'), suffix=r'\b'),
+ Keyword.Reserved),
+ (words(('strings', 'booleans', 'integers', 'routines', 'externals',
+ 'groupings'), suffix=r'\b'),
+ Keyword.Reserved, 'declaration'),
+ (words(('do', 'or', 'and', 'for', 'hop', 'non', 'not', 'set', 'try',
+ 'fail', 'goto', 'loop', 'next', 'test', 'true',
+ 'false', 'unset', 'atmark', 'attach', 'delete', 'gopast',
+ 'insert', 'repeat', 'sizeof', 'tomark', 'atleast',
+ 'atlimit', 'reverse', 'setmark', 'tolimit', 'setlimit',
+ 'backwards', 'substring'), suffix=r'\b'),
+ Operator.Word),
+ (words(('size', 'limit', 'cursor', 'maxint', 'minint'),
+ suffix=r'\b'),
+ Name.Builtin),
+ (r'(stringdef\b)([%s]*)([^%s]+)' % (_ws, _ws),
+ bygroups(Keyword.Reserved, Text, String.Escape)),
+ (r'(stringescapes\b)([%s]*)(.)([%s]*)(.)' % (_ws, _ws),
+ _stringescapes),
+ (r'[A-Za-z]\w*', Name),
+ ],
+ 'declaration': [
+ (r'\)', Punctuation, '#pop'),
+ (words(('len', 'lenof'), suffix=r'\b'), Name,
+ ('root1', 'declaration')),
+ include('root1'),
+ ],
+ 'string': [
+ (r"[^']*'", _string(True)),
+ ],
+ 'escape': [
+ (r"[^']*'", _string(False)),
+ ],
+ }
+
+ def get_tokens_unprocessed(self, text=None, context=None):
+ self._reset_stringescapes()
+ return ExtendedRegexLexer.get_tokens_unprocessed(self, text, context)
diff --git a/pygments/lexers/dylan.py b/pygments/lexers/dylan.py
index 600a78e5..f61bb60d 100644
--- a/pygments/lexers/dylan.py
+++ b/pygments/lexers/dylan.py
@@ -5,7 +5,7 @@
Lexers for the Dylan language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/ecl.py b/pygments/lexers/ecl.py
index 95572ba7..bd80ad19 100644
--- a/pygments/lexers/ecl.py
+++ b/pygments/lexers/ecl.py
@@ -5,7 +5,7 @@
Lexers for the ECL language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/eiffel.py b/pygments/lexers/eiffel.py
index 8a244613..a90ab0a5 100644
--- a/pygments/lexers/eiffel.py
+++ b/pygments/lexers/eiffel.py
@@ -5,7 +5,7 @@
Lexer for the Eiffel language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/elm.py b/pygments/lexers/elm.py
index cd1fb98e..0fa36367 100644
--- a/pygments/lexers/elm.py
+++ b/pygments/lexers/elm.py
@@ -5,7 +5,7 @@
Lexer for the Elm programming language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/erlang.py b/pygments/lexers/erlang.py
index 93ddd2c2..9e7f85c1 100644
--- a/pygments/lexers/erlang.py
+++ b/pygments/lexers/erlang.py
@@ -5,7 +5,7 @@
Lexers for Erlang.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/esoteric.py b/pygments/lexers/esoteric.py
index c9db26b5..54577bf9 100644
--- a/pygments/lexers/esoteric.py
+++ b/pygments/lexers/esoteric.py
@@ -5,7 +5,7 @@
Lexers for esoteric languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -13,7 +13,8 @@ from pygments.lexer import RegexLexer, include, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error
-__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'CAmkESLexer']
+__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'CAmkESLexer',
+ 'CapDLLexer', 'AheuiLexer']
class BrainfuckLexer(RegexLexer):
@@ -143,6 +144,63 @@ class CAmkESLexer(RegexLexer):
}
+class CapDLLexer(RegexLexer):
+ """
+ Basic lexer for
+ `CapDL <https://ssrg.nicta.com.au/publications/nictaabstracts/Kuz_KLW_10.abstract.pml>`_.
+
+ The source of the primary tool that reads such specifications is available
+ at https://github.com/seL4/capdl/tree/master/capDL-tool. Note that this
+ lexer only supports a subset of the grammar. For example, identifiers can
+ shadow type names, but these instances are currently incorrectly
+ highlighted as types. Supporting this would need a stateful lexer that is
+ considered unnecessarily complex for now.
+ """
+ name = 'CapDL'
+ aliases = ['capdl']
+ filenames = ['*.cdl']
+
+ tokens = {
+ 'root':[
+
+ # C pre-processor directive
+ (r'^\s*#.*\n', Comment.Preproc),
+
+ # Whitespace, comments
+ (r'\s+', Text),
+ (r'/\*(.|\n)*?\*/', Comment),
+ (r'(//|--).*\n', Comment),
+
+ (r'[<>\[\(\)\{\},:;=\]]', Punctuation),
+ (r'\.\.', Punctuation),
+
+ (words(('arch', 'arm11', 'caps', 'child_of', 'ia32', 'irq', 'maps',
+ 'objects'), suffix=r'\b'), Keyword),
+
+ (words(('aep', 'asid_pool', 'cnode', 'ep', 'frame', 'io_device',
+ 'io_ports', 'io_pt', 'notification', 'pd', 'pt', 'tcb',
+ 'ut', 'vcpu'), suffix=r'\b'), Keyword.Type),
+
+ # Properties
+ (words(('asid', 'addr', 'badge', 'cached', 'dom', 'domainID', 'elf',
+ 'fault_ep', 'G', 'guard', 'guard_size', 'init', 'ip',
+ 'prio', 'sp', 'R', 'RG', 'RX', 'RW', 'RWG', 'RWX', 'W',
+ 'WG', 'WX', 'level', 'masked', 'master_reply', 'paddr',
+ 'ports', 'reply', 'uncached'), suffix=r'\b'),
+ Keyword.Reserved),
+
+ # Literals
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+(\.\d+)?(k|M)?', Number),
+ (words(('bits',), suffix=r'\b'), Number),
+ (words(('cspace', 'vspace', 'reply_slot', 'caller_slot',
+ 'ipc_buffer_slot'), suffix=r'\b'), Number),
+
+ # Identifiers
+ (r'[a-zA-Z_][-_@\.\w]*', Name),
+ ],
+ }
+
class RedcodeLexer(RegexLexer):
"""
A simple Redcode lexer based on ICWS'94.
@@ -177,3 +235,41 @@ class RedcodeLexer(RegexLexer):
(r'[-+]?\d+', Number.Integer),
],
}
+
+
+class AheuiLexer(RegexLexer):
+ """
+ Aheui_ Lexer.
+
+    Aheui_ is an esoteric language based on the Korean alphabet.
+
+    .. _Aheui: http://aheui.github.io/
+
+ """
+
+ name = 'Aheui'
+ aliases = ['aheui']
+ filenames = ['*.aheui']
+
+ tokens = {
+ 'root': [
+ (u'['
+ u'나-낳냐-냫너-넣녀-녛노-놓뇨-눟뉴-닇'
+ u'다-닿댜-댷더-덯뎌-뎧도-돟됴-둫듀-딓'
+ u'따-땋땨-떃떠-떻뗘-뗳또-똫뚀-뚷뜌-띟'
+ u'라-랗랴-럏러-렇려-렿로-롷료-뤃류-릫'
+ u'마-맣먀-먛머-멓며-몋모-뫃묘-뭏뮤-믷'
+ u'바-밯뱌-뱧버-벟벼-볗보-봏뵤-붛뷰-빃'
+ u'빠-빻뺘-뺳뻐-뻫뼈-뼣뽀-뽛뾰-뿧쀼-삏'
+ u'사-샇샤-샿서-섷셔-셯소-솧쇼-숳슈-싛'
+ u'싸-쌓쌰-썋써-쎃쎠-쎻쏘-쏳쑈-쑿쓔-씧'
+ u'자-잫쟈-쟣저-젛져-졓조-좋죠-줗쥬-즿'
+ u'차-챃챠-챻처-첳쳐-쳫초-촣쵸-춯츄-칗'
+ u'카-캏캬-컇커-컿켜-켷코-콯쿄-쿻큐-킣'
+ u'타-탛탸-턓터-텋텨-톃토-톻툐-퉇튜-틯'
+ u'파-팧퍄-퍟퍼-펗펴-폏포-퐇표-풓퓨-픻'
+ u'하-핳햐-햫허-헣혀-혛호-홓효-훟휴-힇'
+ u']', Operator),
+ ('.', Comment),
+ ],
+ }
diff --git a/pygments/lexers/ezhil.py b/pygments/lexers/ezhil.py
index a5468a0f..ce1cdb2d 100644
--- a/pygments/lexers/ezhil.py
+++ b/pygments/lexers/ezhil.py
@@ -4,8 +4,8 @@
~~~~~~~~~~~~~~~~~~~~~
Pygments lexers for Ezhil language.
-
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -16,6 +16,7 @@ from pygments.token import String, Number, Punctuation, Operator
__all__ = ['EzhilLexer']
+
class EzhilLexer(RegexLexer):
"""
Lexer for `Ezhil, a Tamil script-based programming language <http://ezhillang.org>`_
@@ -36,13 +37,13 @@ class EzhilLexer(RegexLexer):
(r'#.*\n', Comment.Single),
(r'[@+/*,^\-%]|[!<>=]=?|&&?|\|\|?', Operator),
(u'இல்', Operator.Word),
- (words(('assert', 'max', 'min',
- 'நீளம்','சரம்_இடமாற்று','சரம்_கண்டுபிடி',
- 'பட்டியல்','பின்இணை','வரிசைப்படுத்து',
- 'எடு','தலைகீழ்','நீட்டிக்க','நுழைக்க','வை',
- 'கோப்பை_திற','கோப்பை_எழுது','கோப்பை_மூடு',
- 'pi','sin','cos','tan','sqrt','hypot','pow','exp','log','log10'
- 'min','max','exit',
+ (words((u'assert', u'max', u'min',
+ u'நீளம்', u'சரம்_இடமாற்று', u'சரம்_கண்டுபிடி',
+ u'பட்டியல்', u'பின்இணை', u'வரிசைப்படுத்து',
+ u'எடு', u'தலைகீழ்', u'நீட்டிக்க', u'நுழைக்க', u'வை',
+ u'கோப்பை_திற', u'கோப்பை_எழுது', u'கோப்பை_மூடு',
+ u'pi', u'sin', u'cos', u'tan', u'sqrt', u'hypot', u'pow',
+ u'exp', u'log', u'log10', u'exit',
), suffix=r'\b'), Name.Builtin),
(r'(True|False)\b', Keyword.Constant),
(r'[^\S\n]+', Text),
@@ -62,7 +63,7 @@ class EzhilLexer(RegexLexer):
(r'(?u)\d+', Number.Integer),
]
}
-
+
def __init__(self, **options):
super(EzhilLexer, self).__init__(**options)
self.encoding = options.get('encoding', 'utf-8')
diff --git a/pygments/lexers/factor.py b/pygments/lexers/factor.py
index 6a39a1d4..09d85c27 100644
--- a/pygments/lexers/factor.py
+++ b/pygments/lexers/factor.py
@@ -5,7 +5,7 @@
Lexers for the Factor language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/fantom.py b/pygments/lexers/fantom.py
index c20a3f38..3ea2177c 100644
--- a/pygments/lexers/fantom.py
+++ b/pygments/lexers/fantom.py
@@ -5,7 +5,7 @@
Lexer for the Fantom language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/felix.py b/pygments/lexers/felix.py
index 9631bcc1..8f0695b5 100644
--- a/pygments/lexers/felix.py
+++ b/pygments/lexers/felix.py
@@ -5,7 +5,7 @@
Lexer for the Felix language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/forth.py b/pygments/lexers/forth.py
new file mode 100644
index 00000000..a51f1b57
--- /dev/null
+++ b/pygments/lexers/forth.py
@@ -0,0 +1,177 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.forth
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Error, Punctuation, Literal, Token, \
+ Text, Comment, Operator, Keyword, Name, String, Number, Generic
+
+
+__all__ = ['ForthLexer']
+
+
+class ForthLexer(RegexLexer):
+ """
+ Lexer for Forth files.
+
+ .. versionadded:: 2.2
+ """
+ name = 'Forth'
+ aliases = ['forth']
+ filenames = ['*.frt', '*.fs']
+ mimetypes = ['application/x-forth']
+
+ delimiter = r'\s'
+ delimiter_end = r'(?=[%s])' % delimiter
+
+ valid_name_chars = r'[^%s]' % delimiter
+ valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ # All comment types
+ (r'\\.*?\n', Comment.Single),
+ (r'\([\s].*?\)', Comment.Single),
+ # defining words. The next word is a new command name
+ (r'(:|variable|constant|value|buffer:)(\s+)',
+ bygroups(Keyword.Namespace, Text), 'worddef'),
+ # strings are rather simple
+ (r'([.sc]")(\s+?)', bygroups(String, Text), 'stringdef'),
+ # keywords from the various wordsets
+ # *** Wordset BLOCK
+ (r'(blk|block|buffer|evaluate|flush|load|save-buffers|update|'
+ # *** Wordset BLOCK-EXT
+ r'empty-buffers|list|refill|scr|thru|'
+ # *** Wordset CORE
+ r'\#s|\*\/mod|\+loop|\/mod|0<|0=|1\+|1-|2!|'
+ r'2\*|2\/|2@|2drop|2dup|2over|2swap|>body|'
+ r'>in|>number|>r|\?dup|abort|abort\"|abs|'
+ r'accept|align|aligned|allot|and|base|begin|'
+ r'bl|c!|c,|c@|cell\+|cells|char|char\+|'
+ r'chars|constant|count|cr|create|decimal|'
+ r'depth|do|does>|drop|dup|else|emit|environment\?|'
+ r'evaluate|execute|exit|fill|find|fm\/mod|'
+ r'here|hold|i|if|immediate|invert|j|key|'
+ r'leave|literal|loop|lshift|m\*|max|min|'
+ r'mod|move|negate|or|over|postpone|quit|'
+ r'r>|r@|recurse|repeat|rot|rshift|s\"|s>d|'
+ r'sign|sm\/rem|source|space|spaces|state|swap|'
+ r'then|type|u\.|u\<|um\*|um\/mod|unloop|until|'
+ r'variable|while|word|xor|\[char\]|\[\'\]|'
+ r'@|!|\#|<\#|\#>|:|;|\+|-|\*|\/|,|<|>|\|1\+|1-|\.|'
+ # *** Wordset CORE-EXT
+ r'\.r|0<>|'
+ r'0>|2>r|2r>|2r@|:noname|\?do|again|c\"|'
+ r'case|compile,|endcase|endof|erase|false|'
+ r'hex|marker|nip|of|pad|parse|pick|refill|'
+ r'restore-input|roll|save-input|source-id|to|'
+ r'true|tuck|u\.r|u>|unused|value|within|'
+ r'\[compile\]|'
+ # *** Wordset CORE-EXT-obsolescent
+ r'\#tib|convert|expect|query|span|'
+ r'tib|'
+ # *** Wordset DOUBLE
+ r'2constant|2literal|2variable|d\+|d-|'
+ r'd\.|d\.r|d0<|d0=|d2\*|d2\/|d<|d=|d>s|'
+ r'dabs|dmax|dmin|dnegate|m\*\/|m\+|'
+ # *** Wordset DOUBLE-EXT
+ r'2rot|du<|'
+ # *** Wordset EXCEPTION
+ r'catch|throw|'
+ # *** Wordset EXCEPTION-EXT
+ r'abort|abort\"|'
+ # *** Wordset FACILITY
+ r'at-xy|key\?|page|'
+ # *** Wordset FACILITY-EXT
+ r'ekey|ekey>char|ekey\?|emit\?|ms|time&date|'
+ # *** Wordset FILE
+ r'BIN|CLOSE-FILE|CREATE-FILE|DELETE-FILE|FILE-POSITION|'
+ r'FILE-SIZE|INCLUDE-FILE|INCLUDED|OPEN-FILE|R\/O|'
+ r'R\/W|READ-FILE|READ-LINE|REPOSITION-FILE|RESIZE-FILE|'
+ r'S\"|SOURCE-ID|W/O|WRITE-FILE|WRITE-LINE|'
+ # *** Wordset FILE-EXT
+ r'FILE-STATUS|FLUSH-FILE|REFILL|RENAME-FILE|'
+ # *** Wordset FLOAT
+ r'>float|d>f|'
+ r'f!|f\*|f\+|f-|f\/|f0<|f0=|f<|f>d|f@|'
+ r'falign|faligned|fconstant|fdepth|fdrop|fdup|'
+ r'fliteral|float\+|floats|floor|fmax|fmin|'
+ r'fnegate|fover|frot|fround|fswap|fvariable|'
+ r'represent|'
+ # *** Wordset FLOAT-EXT
+ r'df!|df@|dfalign|dfaligned|dfloat\+|'
+ r'dfloats|f\*\*|f\.|fabs|facos|facosh|falog|'
+ r'fasin|fasinh|fatan|fatan2|fatanh|fcos|fcosh|'
+ r'fe\.|fexp|fexpm1|fln|flnp1|flog|fs\.|fsin|'
+ r'fsincos|fsinh|fsqrt|ftan|ftanh|f~|precision|'
+ r'set-precision|sf!|sf@|sfalign|sfaligned|sfloat\+|'
+ r'sfloats|'
+ # *** Wordset LOCAL
+ r'\(local\)|to|'
+ # *** Wordset LOCAL-EXT
+ r'locals\||'
+ # *** Wordset MEMORY
+ r'allocate|free|resize|'
+ # *** Wordset SEARCH
+ r'definitions|find|forth-wordlist|get-current|'
+ r'get-order|search-wordlist|set-current|set-order|'
+ r'wordlist|'
+ # *** Wordset SEARCH-EXT
+ r'also|forth|only|order|previous|'
+ # *** Wordset STRING
+ r'-trailing|\/string|blank|cmove|cmove>|compare|'
+ r'search|sliteral|'
+ # *** Wordset TOOLS
+ r'.s|dump|see|words|'
+ # *** Wordset TOOLS-EXT
+ r';code|'
+ r'ahead|assembler|bye|code|cs-pick|cs-roll|'
+ r'editor|state|\[else\]|\[if\]|\[then\]|'
+ # *** Wordset TOOLS-EXT-obsolescent
+ r'forget|'
+ # Forth 2012
+ r'defer|defer@|defer!|action-of|begin-structure|field:|buffer:|'
+ r'parse-name|buffer:|traverse-wordlist|n>r|nr>|2value|fvalue|'
+ r'name>interpret|name>compile|name>string|'
+ r'cfield:|end-structure)'+delimiter, Keyword),
+
+ # Numbers
+ (r'(\$[0-9A-F]+)', Number.Hex),
+ (r'(\#|%|&|\-|\+)?[0-9]+', Number.Integer),
+ (r'(\#|%|&|\-|\+)?[0-9.]+', Keyword.Type),
+ # amforth specific
+ (r'(@i|!i|@e|!e|pause|noop|turnkey|sleep|'
+ r'itype|icompare|sp@|sp!|rp@|rp!|up@|up!|'
+ r'>a|a>|a@|a!|a@+|a@-|>b|b>|b@|b!|b@+|b@-|'
+ r'find-name|1ms|'
+ r'sp0|rp0|\(evaluate\)|int-trap|int!)' + delimiter,
+ Name.Constant),
+ # a proposal
+ (r'(do-recognizer|r:fail|recognizer:|get-recognizers|'
+ r'set-recognizers|r:float|r>comp|r>int|r>post|'
+ r'r:name|r:word|r:dnum|r:num|recognizer|forth-recognizer|'
+ r'rec:num|rec:float|rec:word)' + delimiter, Name.Decorator),
+ # defining words. The next word is a new command name
+ (r'(Evalue|Rvalue|Uvalue|Edefer|Rdefer|Udefer)(\s+)',
+ bygroups(Keyword.Namespace, Text), 'worddef'),
+
+ (valid_name, Name.Function), # Anything else is executed
+
+ ],
+ 'worddef': [
+ (r'\S+', Name.Class, '#pop'),
+ ],
+ 'stringdef': [
+ (r'[^"]+', String, '#pop'),
+ ],
+ }
diff --git a/pygments/lexers/fortran.py b/pygments/lexers/fortran.py
index e2f95b11..1a611c9d 100644
--- a/pygments/lexers/fortran.py
+++ b/pygments/lexers/fortran.py
@@ -5,7 +5,7 @@
Lexers for Fortran languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -156,8 +156,8 @@ class FortranLexer(RegexLexer):
'nums': [
(r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer),
- (r'[+-]?\d*\.\d+(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
- (r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
+ (r'[+-]?\d*\.\d+([ed][-+]?\d+)?(_[a-z]\w+)?', Number.Float),
+ (r'[+-]?\d+\.\d*([ed][-+]?\d+)?(_[a-z]\w+)?', Number.Float),
],
}
diff --git a/pygments/lexers/foxpro.py b/pygments/lexers/foxpro.py
index c7f368c7..7c0d2621 100644
--- a/pygments/lexers/foxpro.py
+++ b/pygments/lexers/foxpro.py
@@ -5,7 +5,7 @@
Simple lexer for Microsoft Visual FoxPro source code.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py
index 13c72b1e..254df795 100644
--- a/pygments/lexers/functional.py
+++ b/pygments/lexers/functional.py
@@ -5,7 +5,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/go.py b/pygments/lexers/go.py
index 8bd6c7fb..cc2a6d63 100644
--- a/pygments/lexers/go.py
+++ b/pygments/lexers/go.py
@@ -5,7 +5,7 @@
Lexers for the Google Go language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/grammar_notation.py b/pygments/lexers/grammar_notation.py
index d59cc61c..076249d3 100644
--- a/pygments/lexers/grammar_notation.py
+++ b/pygments/lexers/grammar_notation.py
@@ -5,7 +5,7 @@
Lexers for grammer notations like BNF.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -13,7 +13,7 @@ import re
from pygments.lexer import RegexLexer, bygroups, include, this, using, words
from pygments.token import Comment, Keyword, Literal, Name, Number, \
- Operator, Punctuation, String, Text
+ Operator, Punctuation, String, Text
__all__ = ['BnfLexer', 'AbnfLexer', 'JsgfLexer']
diff --git a/pygments/lexers/graph.py b/pygments/lexers/graph.py
index 8315898c..1a338246 100644
--- a/pygments/lexers/graph.py
+++ b/pygments/lexers/graph.py
@@ -5,7 +5,7 @@
Lexers for graph query languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/graphics.py b/pygments/lexers/graphics.py
index b40e0286..c8af9f99 100644
--- a/pygments/lexers/graphics.py
+++ b/pygments/lexers/graphics.py
@@ -5,7 +5,7 @@
Lexers for computer graphics and plotting related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/haskell.py b/pygments/lexers/haskell.py
index ffc3a3a2..9020ceb6 100644
--- a/pygments/lexers/haskell.py
+++ b/pygments/lexers/haskell.py
@@ -5,7 +5,7 @@
Lexers for Haskell and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -39,7 +39,7 @@ class HaskellLexer(RegexLexer):
flags = re.MULTILINE | re.UNICODE
reserved = ('case', 'class', 'data', 'default', 'deriving', 'do', 'else',
- 'if', 'in', 'infix[lr]?', 'instance',
+ 'family', 'if', 'in', 'infix[lr]?', 'instance',
'let', 'newtype', 'of', 'then', 'type', 'where', '_')
ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
@@ -63,6 +63,9 @@ class HaskellLexer(RegexLexer):
(r'^[_' + uni.Ll + r'][\w\']*', Name.Function),
(r"'?[_" + uni.Ll + r"][\w']*", Name),
(r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type),
+ (r"(')[" + uni.Lu + r"][\w\']*", Keyword.Type),
+ (r"(')\[[^\]]*\]", Keyword.Type), # tuples and lists get special treatment in GHC
+ (r"(')\([^\)]*\)", Keyword.Type), # ..
# Operators
(r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
(r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
diff --git a/pygments/lexers/haxe.py b/pygments/lexers/haxe.py
index e0e15c11..6f5c3599 100644
--- a/pygments/lexers/haxe.py
+++ b/pygments/lexers/haxe.py
@@ -5,7 +5,7 @@
Lexers for Haxe and related stuff.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/hdl.py b/pygments/lexers/hdl.py
index 04cef14e..57fb7ac9 100644
--- a/pygments/lexers/hdl.py
+++ b/pygments/lexers/hdl.py
@@ -5,7 +5,7 @@
Lexers for hardware descriptor languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/hexdump.py b/pygments/lexers/hexdump.py
index efe16fa7..cba49be7 100644
--- a/pygments/lexers/hexdump.py
+++ b/pygments/lexers/hexdump.py
@@ -5,12 +5,10 @@
Lexers for hexadecimal dumps.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-
from pygments.lexer import RegexLexer, bygroups, include
from pygments.token import Text, Name, Number, String, Punctuation
@@ -36,7 +34,7 @@ class HexdumpLexer(RegexLexer):
* ``od -t x1z FILE``
* ``xxd FILE``
* ``DEBUG.EXE FILE.COM`` and entering ``d`` to the prompt.
-
+
.. versionadded:: 2.1
"""
name = 'Hexdump'
@@ -48,12 +46,17 @@ class HexdumpLexer(RegexLexer):
'root': [
(r'\n', Text),
include('offset'),
- (r'('+hd+r'{2})(\-)('+hd+r'{2})', bygroups(Number.Hex, Punctuation, Number.Hex)),
+ (r'('+hd+r'{2})(\-)('+hd+r'{2})',
+ bygroups(Number.Hex, Punctuation, Number.Hex)),
(hd+r'{2}', Number.Hex),
- (r'(\s{2,3})(\>)(.{16})(\<)$', bygroups(Text, Punctuation, String, Punctuation), 'bracket-strings'),
- (r'(\s{2,3})(\|)(.{16})(\|)$', bygroups(Text, Punctuation, String, Punctuation), 'piped-strings'),
- (r'(\s{2,3})(\>)(.{1,15})(\<)$', bygroups(Text, Punctuation, String, Punctuation)),
- (r'(\s{2,3})(\|)(.{1,15})(\|)$', bygroups(Text, Punctuation, String, Punctuation)),
+ (r'(\s{2,3})(\>)(.{16})(\<)$',
+ bygroups(Text, Punctuation, String, Punctuation), 'bracket-strings'),
+ (r'(\s{2,3})(\|)(.{16})(\|)$',
+ bygroups(Text, Punctuation, String, Punctuation), 'piped-strings'),
+ (r'(\s{2,3})(\>)(.{1,15})(\<)$',
+ bygroups(Text, Punctuation, String, Punctuation)),
+ (r'(\s{2,3})(\|)(.{1,15})(\|)$',
+ bygroups(Text, Punctuation, String, Punctuation)),
(r'(\s{2,3})(.{1,15})$', bygroups(Text, String)),
(r'(\s{2,3})(.{16}|.{20})$', bygroups(Text, String), 'nonpiped-strings'),
(r'\s', Text),
@@ -72,7 +75,8 @@ class HexdumpLexer(RegexLexer):
(r'\n', Text),
include('offset'),
(hd+r'{2}', Number.Hex),
- (r'(\s{2,3})(\|)(.{1,16})(\|)$', bygroups(Text, Punctuation, String, Punctuation)),
+ (r'(\s{2,3})(\|)(.{1,16})(\|)$',
+ bygroups(Text, Punctuation, String, Punctuation)),
(r'\s', Text),
(r'^\*', Punctuation),
],
@@ -80,14 +84,16 @@ class HexdumpLexer(RegexLexer):
(r'\n', Text),
include('offset'),
(hd+r'{2}', Number.Hex),
- (r'(\s{2,3})(\>)(.{1,16})(\<)$', bygroups(Text, Punctuation, String, Punctuation)),
+ (r'(\s{2,3})(\>)(.{1,16})(\<)$',
+ bygroups(Text, Punctuation, String, Punctuation)),
(r'\s', Text),
(r'^\*', Punctuation),
],
'nonpiped-strings': [
(r'\n', Text),
include('offset'),
- (r'('+hd+r'{2})(\-)('+hd+r'{2})', bygroups(Number.Hex, Punctuation, Number.Hex)),
+ (r'('+hd+r'{2})(\-)('+hd+r'{2})',
+ bygroups(Number.Hex, Punctuation, Number.Hex)),
(hd+r'{2}', Number.Hex),
(r'(\s{19,})(.{1,20}?)$', bygroups(Text, String)),
(r'(\s{2,3})(.{1,20})$', bygroups(Text, String)),
diff --git a/pygments/lexers/html.py b/pygments/lexers/html.py
index 7893952f..73f020fa 100644
--- a/pygments/lexers/html.py
+++ b/pygments/lexers/html.py
@@ -5,7 +5,7 @@
Lexers for HTML, XML and related markup.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,7 +23,7 @@ from pygments.lexers.css import CssLexer, _indentation, _starts_block
from pygments.lexers.ruby import RubyLexer
__all__ = ['HtmlLexer', 'DtdLexer', 'XmlLexer', 'XsltLexer', 'HamlLexer',
- 'ScamlLexer', 'JadeLexer']
+ 'ScamlLexer', 'PugLexer']
class HtmlLexer(RegexLexer):
@@ -492,19 +492,19 @@ class ScamlLexer(ExtendedRegexLexer):
}
-class JadeLexer(ExtendedRegexLexer):
+class PugLexer(ExtendedRegexLexer):
"""
- For Jade markup.
- Jade is a variant of Scaml, see:
+ For Pug markup.
+ Pug is a variant of Scaml, see:
http://scalate.fusesource.org/documentation/scaml-reference.html
.. versionadded:: 1.4
"""
- name = 'Jade'
- aliases = ['jade']
- filenames = ['*.jade']
- mimetypes = ['text/x-jade']
+ name = 'Pug'
+ aliases = ['pug', 'jade']
+ filenames = ['*.pug', '*.jade']
+ mimetypes = ['text/x-pug', 'text/x-jade']
flags = re.IGNORECASE
_dot = r'.'
@@ -599,3 +599,4 @@ class JadeLexer(ExtendedRegexLexer):
(r'\n', Text, 'root'),
],
}
+JadeLexer = PugLexer # compat
diff --git a/pygments/lexers/idl.py b/pygments/lexers/idl.py
index a0b39492..99078970 100644
--- a/pygments/lexers/idl.py
+++ b/pygments/lexers/idl.py
@@ -5,7 +5,7 @@
Lexers for IDL.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/igor.py b/pygments/lexers/igor.py
index 17fedf88..1a21fe87 100644
--- a/pygments/lexers/igor.py
+++ b/pygments/lexers/igor.py
@@ -5,7 +5,7 @@
Lexers for Igor Pro.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/inferno.py b/pygments/lexers/inferno.py
index bfbea571..5fc5a0ba 100644
--- a/pygments/lexers/inferno.py
+++ b/pygments/lexers/inferno.py
@@ -5,7 +5,7 @@
Lexers for Inferno os and all the related stuff.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/installers.py b/pygments/lexers/installers.py
index c436afed..0323d140 100644
--- a/pygments/lexers/installers.py
+++ b/pygments/lexers/installers.py
@@ -5,7 +5,7 @@
Lexers for installer/packager DSLs and formats.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/int_fiction.py b/pygments/lexers/int_fiction.py
index 724f9b27..f280a56d 100644
--- a/pygments/lexers/int_fiction.py
+++ b/pygments/lexers/int_fiction.py
@@ -5,7 +5,7 @@
Lexers for interactive fiction languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/iolang.py b/pygments/lexers/iolang.py
index e62dd434..bbc17faf 100644
--- a/pygments/lexers/iolang.py
+++ b/pygments/lexers/iolang.py
@@ -5,7 +5,7 @@
Lexers for the Io language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/j.py b/pygments/lexers/j.py
index f15595f8..434964fe 100644
--- a/pygments/lexers/j.py
+++ b/pygments/lexers/j.py
@@ -5,7 +5,7 @@
Lexer for the J programming language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -75,8 +75,8 @@ class JLexer(RegexLexer):
'fetch', 'file2url', 'fixdotdot', 'fliprgb', 'getargs',
'getenv', 'hfd', 'inv', 'inverse', 'iospath',
'isatty', 'isutf8', 'items', 'leaf', 'list',
- 'nameclass', 'namelist', 'namelist', 'names', 'nc',
- 'nl', 'on', 'pick', 'pick', 'rows',
+ 'nameclass', 'namelist', 'names', 'nc',
+ 'nl', 'on', 'pick', 'rows',
'script', 'scriptd', 'sign', 'sminfo', 'smoutput',
'sort', 'split', 'stderr', 'stdin', 'stdout',
'table', 'take', 'timespacex', 'timex', 'tmoutput',
diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py
index 5dca6832..7dbaaff6 100644
--- a/pygments/lexers/javascript.py
+++ b/pygments/lexers/javascript.py
@@ -5,7 +5,7 @@
Lexers for JavaScript and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,7 +20,7 @@ import pygments.unistring as uni
__all__ = ['JavascriptLexer', 'KalLexer', 'LiveScriptLexer', 'DartLexer',
'TypeScriptLexer', 'LassoLexer', 'ObjectiveJLexer',
- 'CoffeeScriptLexer', 'MaskLexer', 'EarlGreyLexer']
+ 'CoffeeScriptLexer', 'MaskLexer', 'EarlGreyLexer', 'JuttleLexer']
JS_IDENT_START = ('(?:[$_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') +
']|\\\\u[a-fA-F0-9]{4})')
@@ -53,7 +53,7 @@ class JavascriptLexer(RegexLexer):
'slashstartsregex': [
include('commentsandwhitespace'),
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
+ r'([gimuy]+\b|\B)', String.Regex, '#pop'),
(r'(?=/)', Text, ('#pop', 'badregex')),
default('#pop')
],
@@ -64,9 +64,14 @@ class JavascriptLexer(RegexLexer):
(r'\A#! ?/.*?\n', Comment.Hashbang), # recognized by node.js
(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
+ (r'(\.\d+|[0-9]+\.[0-9]*)([eE][-+]?[0-9]+)?', Number.Float),
+ (r'0[bB][01]+', Number.Bin),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'\.\.\.|=>', Punctuation),
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|=>|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'\.\.\.', Punctuation),
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
(r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
@@ -84,11 +89,6 @@ class JavascriptLexer(RegexLexer):
r'Error|eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|'
r'document|this|window)\b', Name.Builtin),
(JS_IDENT, Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'`', String.Backtick, 'interp'),
@@ -366,9 +366,10 @@ class DartLexer(RegexLexer):
(r'\b(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|in|is|new|return|super|switch|this|throw|try|while)\b',
Keyword),
- (r'\b(abstract|const|extends|factory|final|get|implements|'
- r'native|operator|set|static|typedef|var)\b', Keyword.Declaration),
- (r'\b(bool|double|Dynamic|int|num|Object|String|void)\b', Keyword.Type),
+ (r'\b(abstract|async|await|const|extends|factory|final|get|'
+ r'implements|native|operator|set|static|sync|typedef|var|with|'
+ r'yield)\b', Keyword.Declaration),
+ (r'\b(bool|double|dynamic|int|num|Object|String|void)\b', Keyword.Type),
(r'\b(false|null|true)\b', Keyword.Constant),
(r'[~!%^&*+=|?:<>/-]|as\b', Operator),
(r'[a-zA-Z_$]\w*:', Name.Label),
@@ -511,9 +512,26 @@ class TypeScriptLexer(RegexLexer):
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'`', String.Backtick, 'interp'),
# Match stuff like: Decorators
(r'@\w+', Keyword.Declaration),
- ]
+ ],
+
+ # The 'interp*' rules match those in JavascriptLexer. Changes made
+ # there should be reflected here as well.
+ 'interp': [
+ (r'`', String.Backtick, '#pop'),
+ (r'\\\\', String.Backtick),
+ (r'\\`', String.Backtick),
+ (r'\$\{', String.Interpol, 'interp-inside'),
+ (r'\$', String.Backtick),
+ (r'[^`\\$]+', String.Backtick),
+ ],
+ 'interp-inside': [
+ # TODO: should this include single-line comments and allow nesting strings?
+ (r'\}', String.Interpol, '#pop'),
+ include('root'),
+ ],
}
@@ -545,12 +563,7 @@ class LassoLexer(RegexLexer):
tokens = {
'root': [
(r'^#![ \S]+lasso9\b', Comment.Preproc, 'lasso'),
- (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
- (r'\[noprocess\]', Comment.Preproc, ('delimiters', 'noprocess')),
- (r'\[', Comment.Preproc, ('delimiters', 'squarebrackets')),
- (r'<\?(LassoScript|lasso|=)', Comment.Preproc,
- ('delimiters', 'anglebrackets')),
- (r'<(!--.*?-->)?', Other, 'delimiters'),
+ (r'(?=\[|<)', Other, 'delimiters'),
(r'\s+', Other),
default(('delimiters', 'lassofile')),
],
@@ -558,14 +571,14 @@ class LassoLexer(RegexLexer):
(r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
(r'\[noprocess\]', Comment.Preproc, 'noprocess'),
(r'\[', Comment.Preproc, 'squarebrackets'),
- (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
+ (r'<\?(lasso(script)?|=)', Comment.Preproc, 'anglebrackets'),
(r'<(!--.*?-->)?', Other),
(r'[^[<]+', Other),
],
'nosquarebrackets': [
(r'\[noprocess\]', Comment.Preproc, 'noprocess'),
(r'\[', Other),
- (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
+ (r'<\?(lasso(script)?|=)', Comment.Preproc, 'anglebrackets'),
(r'<(!--.*?-->)?', Other),
(r'[^[<]+', Other),
],
@@ -607,7 +620,7 @@ class LassoLexer(RegexLexer):
# names
(r'\$[a-z_][\w.]*', Name.Variable),
- (r'#([a-z_][\w.]*|\d+)', Name.Variable.Instance),
+ (r'#([a-z_][\w.]*|\d+\b)', Name.Variable.Instance),
(r"(\.\s*)('[a-z_][\w.]*')",
bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
(r"(self)(\s*->\s*)('[a-z_][\w.]*')",
@@ -658,20 +671,20 @@ class LassoLexer(RegexLexer):
r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
- r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|'
- r'Link_FirstRecord|Link_LastGroup|Link_LastRecord|Link_NextGroup|'
- r'Link_NextRecord|Link_PrevGroup|Link_PrevRecord|Log|Loop|'
- r'NoProcess|Output_None|Portal|Private|Protect|Records|Referer|'
- r'Referrer|Repeating|ResultSet|Rows|Search_Args|Search_Arguments|'
- r'Select|Sort_Args|Sort_Arguments|Thread_Atomic|Value_List|While|'
- r'Abort|Case|Else|If_Empty|If_False|If_Null|If_True|Loop_Abort|'
- r'Loop_Continue|Loop_Count|Params|Params_Up|Return|Return_Value|'
- r'Run_Children|SOAP_DefineTag|SOAP_LastRequest|SOAP_LastResponse|'
- r'Tag_Name|ascending|average|by|define|descending|do|equals|'
- r'frozen|group|handle_failure|import|in|into|join|let|match|max|'
- r'min|on|order|parent|protected|provide|public|require|returnhome|'
- r'skip|split_thread|sum|take|thread|to|trait|type|where|with|'
- r'yield|yieldhome)\b',
+ r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|Link_FirstRecord|'
+ r'Link_LastGroup|Link_LastRecord|Link_NextGroup|Link_NextRecord|'
+ r'Link_PrevGroup|Link_PrevRecord|Log|Loop|Output_None|Portal|'
+ r'Private|Protect|Records|Referer|Referrer|Repeating|ResultSet|'
+ r'Rows|Search_Args|Search_Arguments|Select|Sort_Args|'
+ r'Sort_Arguments|Thread_Atomic|Value_List|While|Abort|Case|Else|'
+ r'Fail_If|Fail_IfNot|Fail|If_Empty|If_False|If_Null|If_True|'
+ r'Loop_Abort|Loop_Continue|Loop_Count|Params|Params_Up|Return|'
+ r'Return_Value|Run_Children|SOAP_DefineTag|SOAP_LastRequest|'
+ r'SOAP_LastResponse|Tag_Name|ascending|average|by|define|'
+ r'descending|do|equals|frozen|group|handle_failure|import|in|into|'
+ r'join|let|match|max|min|on|order|parent|protected|provide|public|'
+ r'require|returnhome|skip|split_thread|sum|take|thread|to|trait|'
+ r'type|where|with|yield|yieldhome)\b',
bygroups(Punctuation, Keyword)),
# other
@@ -1016,6 +1029,12 @@ class CoffeeScriptLexer(RegexLexer):
filenames = ['*.coffee']
mimetypes = ['text/coffeescript']
+
+ _operator_re = (
+ r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
+ r'\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?')
+
flags = re.DOTALL
tokens = {
'commentsandwhitespace': [
@@ -1034,17 +1053,17 @@ class CoffeeScriptLexer(RegexLexer):
(r'///', String.Regex, ('#pop', 'multilineregex')),
(r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gim]+\b|\B)', String.Regex, '#pop'),
+ # This isn't really guarding against mishighlighting well-formed
+ # code, just the ability to infinite-loop between root and
+ # slashstartsregex.
+ (r'/', Operator),
default('#pop'),
],
'root': [
- # this next expr leads to infinite loops root -> slashstartsregex
- # (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
- (r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
- r'\|\||\\(?=\n)|'
- r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&|^/])=?',
- Operator, 'slashstartsregex'),
- (r'(?:\([^()]*\))?\s*[=-]>', Name.Function),
+ (r'^(?=\s|/)', Text, 'slashstartsregex'),
+ (_operator_re, Operator, 'slashstartsregex'),
+ (r'(?:\([^()]*\))?\s*[=-]>', Name.Function, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
(r'(?<![.$])(for|own|in|of|while|until|'
@@ -1065,7 +1084,7 @@ class CoffeeScriptLexer(RegexLexer):
(r'@[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable.Instance,
'slashstartsregex'),
(r'@', Name.Other, 'slashstartsregex'),
- (r'@?[$a-zA-Z_][\w$]*', Name.Other, 'slashstartsregex'),
+ (r'@?[$a-zA-Z_][\w$]*', Name.Other),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
@@ -1438,3 +1457,63 @@ class EarlGreyLexer(RegexLexer):
(r'\d+', Number.Integer)
],
}
+
+class JuttleLexer(RegexLexer):
+ """
+ For `Juttle`_ source code.
+
+ .. _Juttle: https://github.com/juttle/juttle
+
+ """
+
+ name = 'Juttle'
+ aliases = ['juttle', 'juttle']
+ filenames = ['*.juttle']
+ mimetypes = ['application/juttle', 'application/x-juttle',
+ 'text/x-juttle', 'text/juttle']
+
+ flags = re.DOTALL | re.UNICODE | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r':\d{2}:\d{2}:\d{2}(\.\d*)?:', String.Moment),
+ (r':(now|beginning|end|forever|yesterday|today|tomorrow|(\d+(\.\d*)?|\.\d+)(ms|[smhdwMy])?):', String.Moment),
+ (r':\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(\.\d*)?)?(Z|[+-]\d{2}:\d{2}|[+-]\d{4})?:', String.Moment),
+ (r':((\d+(\.\d*)?|\.\d+)[ ]+)?(millisecond|second|minute|hour|day|week|month|year)[s]?'
+ r'(([ ]+and[ ]+(\d+[ ]+)?(millisecond|second|minute|hour|day|week|month|year)[s]?)'
+ r'|[ ]+(ago|from[ ]+now))*:', String.Moment),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(import|return|continue|if|else)\b', Keyword, 'slashstartsregex'),
+ (r'(var|const|function|reducer|sub|input)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(batch|emit|filter|head|join|keep|pace|pass|put|read|reduce|remove|'
+ r'sequence|skip|sort|split|tail|unbatch|uniq|view|write)\b', Keyword.Reserved),
+ (r'(true|false|null|Infinity)\b', Keyword.Constant),
+ (r'(Array|Date|Juttle|Math|Number|Object|RegExp|String)\b', Name.Builtin),
+ (JS_IDENT, Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single)
+ ]
+
+ }
diff --git a/pygments/lexers/julia.py b/pygments/lexers/julia.py
index 9f84b8d9..d946554b 100644
--- a/pygments/lexers/julia.py
+++ b/pygments/lexers/julia.py
@@ -5,21 +5,23 @@
Lexers for the Julia language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import Lexer, RegexLexer, bygroups, combined, \
- do_insertions, words
+from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
+ words, include
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic
from pygments.util import shebang_matches, unirange
__all__ = ['JuliaLexer', 'JuliaConsoleLexer']
-line_re = re.compile('.*?\n')
+allowed_variable = (
+ u'(?:[a-zA-Z_\u00A1-\uffff]|%s)(?:[a-zA-Z_0-9\u00A1-\uffff]|%s)*!*' %
+ ((unirange(0x10000, 0x10ffff),) * 2))
class JuliaLexer(RegexLexer):
@@ -28,6 +30,7 @@ class JuliaLexer(RegexLexer):
.. versionadded:: 1.6
"""
+
name = 'Julia'
aliases = ['julia', 'jl']
filenames = ['*.jl']
@@ -35,59 +38,151 @@ class JuliaLexer(RegexLexer):
flags = re.MULTILINE | re.UNICODE
- builtins = (
- 'exit', 'whos', 'edit', 'load', 'is', 'isa', 'isequal', 'typeof', 'tuple',
- 'ntuple', 'uid', 'hash', 'finalizer', 'convert', 'promote', 'subtype',
- 'typemin', 'typemax', 'realmin', 'realmax', 'sizeof', 'eps', 'promote_type',
- 'method_exists', 'applicable', 'invoke', 'dlopen', 'dlsym', 'system',
- 'error', 'throw', 'assert', 'new', 'Inf', 'Nan', 'pi', 'im',
- )
-
- keywords = (
- 'begin', 'while', 'for', 'in', 'return', 'break', 'continue',
- 'macro', 'quote', 'let', 'if', 'elseif', 'else', 'try', 'catch', 'end',
- 'bitstype', 'ccall', 'do', 'using', 'module', 'import', 'export',
- 'importall', 'baremodule', 'immutable',
- )
-
- types = (
- 'Bool', 'Int', 'Int8', 'Int16', 'Int32', 'Int64', 'Uint', 'Uint8', 'Uint16',
- 'Uint32', 'Uint64', 'Float32', 'Float64', 'Complex64', 'Complex128', 'Any',
- 'Nothing', 'None',
- )
-
tokens = {
'root': [
(r'\n', Text),
(r'[^\S\n]+', Text),
(r'#=', Comment.Multiline, "blockcomment"),
(r'#.*$', Comment),
- (r'[\[\]{}:(),;@]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
+ (r'[\[\]{}(),;]', Punctuation),
# keywords
+ (r'in\b', Keyword.Pseudo),
+ (r'(true|false)\b', Keyword.Constant),
(r'(local|global|const)\b', Keyword.Declaration),
- (words(keywords, suffix=r'\b'), Keyword),
- (words(types, suffix=r'\b'), Keyword.Type),
-
+ (words([
+ 'function', 'type', 'typealias', 'abstract', 'immutable',
+ 'baremodule', 'begin', 'bitstype', 'break', 'catch', 'ccall',
+ 'continue', 'do', 'else', 'elseif', 'end', 'export', 'finally',
+ 'for', 'if', 'import', 'importall', 'let', 'macro', 'module',
+ 'quote', 'return', 'try', 'using', 'while'],
+ suffix=r'\b'), Keyword),
+
+ # NOTE
+ # Patterns below work only for definition sites and thus hardly reliable.
+ #
# functions
- (r'(function)((?:\s|\\\s)+)',
- bygroups(Keyword, Name.Function), 'funcname'),
-
+ # (r'(function)(\s+)(' + allowed_variable + ')',
+ # bygroups(Keyword, Text, Name.Function)),
+ #
# types
- (r'(type|typealias|abstract|immutable)((?:\s|\\\s)+)',
- bygroups(Keyword, Name.Class), 'typename'),
-
- # operators
- (r'==|!=|<=|>=|->|&&|\|\||::|<:|[-~+/*%=<>&^|.?!$]', Operator),
- (r'\.\*|\.\^|\.\\|\.\/|\\', Operator),
+ # (r'(type|typealias|abstract|immutable)(\s+)(' + allowed_variable + ')',
+ # bygroups(Keyword, Text, Name.Class)),
+
+ # type names
+ (words([
+ 'ANY', 'ASCIIString', 'AbstractArray', 'AbstractChannel',
+ 'AbstractFloat', 'AbstractMatrix', 'AbstractRNG',
+ 'AbstractSparseArray', 'AbstractSparseMatrix',
+ 'AbstractSparseVector', 'AbstractString', 'AbstractVecOrMat',
+ 'AbstractVector', 'Any', 'ArgumentError', 'Array',
+ 'AssertionError', 'Associative', 'Base64DecodePipe',
+ 'Base64EncodePipe', 'Bidiagonal', 'BigFloat', 'BigInt',
+ 'BitArray', 'BitMatrix', 'BitVector', 'Bool', 'BoundsError',
+ 'Box', 'BufferStream', 'CapturedException', 'CartesianIndex',
+ 'CartesianRange', 'Cchar', 'Cdouble', 'Cfloat', 'Channel',
+ 'Char', 'Cint', 'Cintmax_t', 'Clong', 'Clonglong',
+ 'ClusterManager', 'Cmd', 'Coff_t', 'Colon', 'Complex',
+ 'Complex128', 'Complex32', 'Complex64', 'CompositeException',
+ 'Condition', 'Cptrdiff_t', 'Cshort', 'Csize_t', 'Cssize_t',
+ 'Cstring', 'Cuchar', 'Cuint', 'Cuintmax_t', 'Culong',
+ 'Culonglong', 'Cushort', 'Cwchar_t', 'Cwstring', 'DataType',
+ 'Date', 'DateTime', 'DenseArray', 'DenseMatrix',
+ 'DenseVecOrMat', 'DenseVector', 'Diagonal', 'Dict',
+ 'DimensionMismatch', 'Dims', 'DirectIndexString', 'Display',
+ 'DivideError', 'DomainError', 'EOFError', 'EachLine', 'Enum',
+ 'Enumerate', 'ErrorException', 'Exception', 'Expr',
+ 'Factorization', 'FileMonitor', 'FileOffset', 'Filter',
+ 'Float16', 'Float32', 'Float64', 'FloatRange', 'Function',
+ 'GenSym', 'GlobalRef', 'GotoNode', 'HTML', 'Hermitian', 'IO',
+ 'IOBuffer', 'IOStream', 'IPv4', 'IPv6', 'InexactError',
+ 'InitError', 'Int', 'Int128', 'Int16', 'Int32', 'Int64', 'Int8',
+ 'IntSet', 'Integer', 'InterruptException', 'IntrinsicFunction',
+ 'InvalidStateException', 'Irrational', 'KeyError', 'LabelNode',
+ 'LambdaStaticData', 'LinSpace', 'LineNumberNode', 'LoadError',
+ 'LocalProcess', 'LowerTriangular', 'MIME', 'Matrix',
+ 'MersenneTwister', 'Method', 'MethodError', 'MethodTable',
+ 'Module', 'NTuple', 'NewvarNode', 'NullException', 'Nullable',
+ 'Number', 'ObjectIdDict', 'OrdinalRange', 'OutOfMemoryError',
+ 'OverflowError', 'Pair', 'ParseError', 'PartialQuickSort',
+ 'Pipe', 'PollingFileWatcher', 'ProcessExitedException',
+ 'ProcessGroup', 'Ptr', 'QuoteNode', 'RandomDevice', 'Range',
+ 'Rational', 'RawFD', 'ReadOnlyMemoryError', 'Real',
+ 'ReentrantLock', 'Ref', 'Regex', 'RegexMatch',
+ 'RemoteException', 'RemoteRef', 'RepString', 'RevString',
+ 'RopeString', 'RoundingMode', 'SegmentationFault',
+ 'SerializationState', 'Set', 'SharedArray', 'SharedMatrix',
+ 'SharedVector', 'Signed', 'SimpleVector', 'SparseMatrixCSC',
+ 'StackOverflowError', 'StatStruct', 'StepRange', 'StridedArray',
+ 'StridedMatrix', 'StridedVecOrMat', 'StridedVector', 'SubArray',
+ 'SubString', 'SymTridiagonal', 'Symbol', 'SymbolNode',
+ 'Symmetric', 'SystemError', 'TCPSocket', 'Task', 'Text',
+ 'TextDisplay', 'Timer', 'TopNode', 'Tridiagonal', 'Tuple',
+ 'Type', 'TypeConstructor', 'TypeError', 'TypeName', 'TypeVar',
+ 'UDPSocket', 'UInt', 'UInt128', 'UInt16', 'UInt32', 'UInt64',
+ 'UInt8', 'UTF16String', 'UTF32String', 'UTF8String',
+ 'UndefRefError', 'UndefVarError', 'UnicodeError', 'UniformScaling',
+ 'Union', 'UnitRange', 'Unsigned', 'UpperTriangular', 'Val',
+ 'Vararg', 'VecOrMat', 'Vector', 'VersionNumber', 'Void', 'WString',
+ 'WeakKeyDict', 'WeakRef', 'WorkerConfig', 'Zip'], suffix=r'\b'),
+ Keyword.Type),
# builtins
- (words(builtins, suffix=r'\b'), Name.Builtin),
+ (words([
+ u'ARGS', u'CPU_CORES', u'C_NULL', u'DevNull', u'ENDIAN_BOM',
+ u'ENV', u'I', u'Inf', u'Inf16', u'Inf32', u'Inf64',
+ u'InsertionSort', u'JULIA_HOME', u'LOAD_PATH', u'MergeSort',
+ u'NaN', u'NaN16', u'NaN32', u'NaN64', u'OS_NAME',
+ u'QuickSort', u'RoundDown', u'RoundFromZero', u'RoundNearest',
+ u'RoundNearestTiesAway', u'RoundNearestTiesUp',
+ u'RoundToZero', u'RoundUp', u'STDERR', u'STDIN', u'STDOUT',
+ u'VERSION', u'WORD_SIZE', u'catalan', u'e', u'eu',
+ u'eulergamma', u'golden', u'im', u'nothing', u'pi', u'γ',
+ u'π', u'φ'],
+ suffix=r'\b'), Name.Builtin),
- # backticks
- (r'`(?s).*?`', String.Backtick),
+ # operators
+ # see: https://github.com/JuliaLang/julia/blob/master/src/julia-parser.scm
+ (words([
+ # prec-assignment
+ u'=', u':=', u'+=', u'-=', u'*=', u'/=', u'//=', u'.//=', u'.*=', u'./=',
+ u'\=', u'.\=', u'^=', u'.^=', u'÷=', u'.÷=', u'%=', u'.%=', u'|=', u'&=',
+ u'$=', u'=>', u'<<=', u'>>=', u'>>>=', u'~', u'.+=', u'.-=',
+ # prec-conditional
+ u'?',
+ # prec-arrow
+ u'--', u'-->',
+ # prec-lazy-or
+ u'||',
+ # prec-lazy-and
+ u'&&',
+ # prec-comparison
+ u'>', u'<', u'>=', u'≥', u'<=', u'≤', u'==', u'===', u'≡', u'!=', u'≠',
+ u'!==', u'≢', u'.>', u'.<', u'.>=', u'.≥', u'.<=', u'.≤', u'.==', u'.!=',
+ u'.≠', u'.=', u'.!', u'<:', u'>:', u'∈', u'∉', u'∋', u'∌', u'⊆',
+ u'⊈', u'⊂',
+ u'⊄', u'⊊',
+ # prec-pipe
+ u'|>', u'<|',
+ # prec-colon
+ u':',
+ # prec-plus
+ u'+', u'-', u'.+', u'.-', u'|', u'∪', u'$',
+ # prec-bitshift
+ u'<<', u'>>', u'>>>', u'.<<', u'.>>', u'.>>>',
+ # prec-times
+ u'*', u'/', u'./', u'÷', u'.÷', u'%', u'⋅', u'.%', u'.*', u'\\', u'.\\', u'&', u'∩',
+ # prec-rational
+ u'//', u'.//',
+ # prec-power
+ u'^', u'.^',
+ # prec-decl
+ u'::',
+ # prec-dot
+ u'.',
+ # unary op
+ u'+', u'-', u'!', u'~', u'√', u'∛', u'∜'
+ ]), Operator),
# chars
(r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
@@ -97,13 +192,19 @@ class JuliaLexer(RegexLexer):
(r'(?<=[.\w)\]])\'+', Operator),
# strings
- (r'(?:[IL])"', String, 'string'),
- (r'[E]?"', String, combined('stringescape', 'string')),
+ (r'"""', String, 'tqstring'),
+ (r'"', String, 'string'),
+
+ # regular expressions
+ (r'r"""', String.Regex, 'tqregex'),
+ (r'r"', String.Regex, 'regex'),
+
+ # backticks
+ (r'`', String.Backtick, 'command'),
# names
- (r'@[\w.]+', Name.Decorator),
- (u'(?:[a-zA-Z_\u00A1-\uffff]|%s)(?:[a-zA-Z_0-9\u00A1-\uffff]|%s)*!*' %
- ((unirange(0x10000, 0x10ffff),)*2), Name),
+ (allowed_variable, Name),
+ (r'@' + allowed_variable, Name.Decorator),
# numbers
(r'(\d+(_\d+)+\.\d*|\d*\.\d+(_\d+)+)([eEf][+-]?[0-9]+)?', Number.Float),
@@ -120,45 +221,59 @@ class JuliaLexer(RegexLexer):
(r'\d+', Number.Integer)
],
- 'funcname': [
- ('[a-zA-Z_]\w*', Name.Function, '#pop'),
- ('\([^\s\w{]{1,2}\)', Operator, '#pop'),
- ('[^\s\w{]{1,2}', Operator, '#pop'),
- ],
-
- 'typename': [
- ('[a-zA-Z_]\w*', Name.Class, '#pop'),
- ],
-
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape),
- ],
"blockcomment": [
(r'[^=#]', Comment.Multiline),
(r'#=', Comment.Multiline, '#push'),
(r'=#', Comment.Multiline, '#pop'),
(r'[=#]', Comment.Multiline),
],
+
'string': [
(r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ # FIXME: This escape pattern is not perfect.
+ (r'\\([\\"\'\$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
# Interpolation is defined as "$" followed by the shortest full
# expression, which is something we can't parse.
# Include the most common cases here: $word, and $(paren'd expr).
- (r'\$[a-zA-Z_]+', String.Interpol),
- (r'\$\(', String.Interpol, 'in-intp'),
+ (r'\$' + allowed_variable, String.Interpol),
+ # (r'\$[a-zA-Z_]+', String.Interpol),
+ (r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
# @printf and @sprintf formats
(r'%[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]',
String.Interpol),
- (r'[^$%"\\]+', String),
- # unhandled special signs
- (r'[$%"\\]', String),
+ (r'.|\s', String),
],
+
+ 'tqstring': [
+ (r'"""', String, '#pop'),
+ (r'\\([\\"\'\$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
+ (r'\$' + allowed_variable, String.Interpol),
+ (r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
+ (r'.|\s', String),
+ ],
+
+ 'regex': [
+ (r'"', String.Regex, '#pop'),
+ (r'\\"', String.Regex),
+ (r'.|\s', String.Regex),
+ ],
+
+ 'tqregex': [
+ (r'"""', String.Regex, '#pop'),
+ (r'.|\s', String.Regex),
+ ],
+
+ 'command': [
+ (r'`', String.Backtick, '#pop'),
+ (r'\$' + allowed_variable, String.Interpol),
+ (r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
+ (r'.|\s', String.Backtick)
+ ],
+
'in-intp': [
- (r'[^()]+', String.Interpol),
- (r'\(', String.Interpol, '#push'),
- (r'\)', String.Interpol, '#pop'),
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
]
}
@@ -177,27 +292,26 @@ class JuliaConsoleLexer(Lexer):
def get_tokens_unprocessed(self, text):
jllexer = JuliaLexer(**self.options)
-
+ start = 0
curcode = ''
insertions = []
+ output = False
+ error = False
- for match in line_re.finditer(text):
- line = match.group()
-
+ for line in text.splitlines(True):
if line.startswith('julia>'):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:6])]))
+ insertions.append((len(curcode), [(0, Generic.Prompt, line[:6])]))
+ curcode += line[6:]
+ output = False
+ error = False
+ elif line.startswith('help?>') or line.startswith('shell>'):
+ yield start, Generic.Prompt, line[:6]
+ yield start + 6, Text, line[6:]
+ output = False
+ error = False
+ elif line.startswith(' ') and not output:
+ insertions.append((len(curcode), [(0, Text, line[:6])]))
curcode += line[6:]
-
- elif line.startswith(' '):
-
- idx = len(curcode)
-
- # without is showing error on same line as before...?
- line = "\n" + line
- token = (0, Generic.Traceback, line)
- insertions.append((idx, [token]))
-
else:
if curcode:
for item in do_insertions(
@@ -205,10 +319,15 @@ class JuliaConsoleLexer(Lexer):
yield item
curcode = ''
insertions = []
-
- yield match.start(), Generic.Output, line
-
- if curcode: # or item:
+ if line.startswith('ERROR: ') or error:
+ yield start, Generic.Error, line
+ error = True
+ else:
+ yield start, Generic.Output, line
+ output = True
+ start += len(line)
+
+ if curcode:
for item in do_insertions(
insertions, jllexer.get_tokens_unprocessed(curcode)):
yield item
diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py
index af7f8105..f4392839 100644
--- a/pygments/lexers/jvm.py
+++ b/pygments/lexers/jvm.py
@@ -5,7 +5,7 @@
Pygments lexers for JVM languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -59,25 +59,27 @@ class JavaLexer(RegexLexer):
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
+ 'class'),
+ (r'(import(?:\s+static)?)(\s+)', bygroups(Keyword.Namespace, Text),
+ 'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Operator, Name.Attribute)),
(r'^\s*([^\W\d]|\$)[\w$]*:', Name.Label),
(r'([^\W\d]|\$)[\w$]*', Name),
- (r'([0-9](_*[0-9]+)*\.([0-9](_*[0-9]+)*)?|'
- r'([0-9](_*[0-9]+)*)?\.[0-9](_*[0-9]+)*)'
- r'([eE][+\-]?[0-9](_*[0-9]+)*)?[fFdD]?|'
- r'[0-9][eE][+\-]?[0-9](_*[0-9]+)*[fFdD]?|'
- r'[0-9]([eE][+\-]?[0-9](_*[0-9]+)*)?[fFdD]|'
- r'0[xX]([0-9a-fA-F](_*[0-9a-fA-F]+)*\.?|'
- r'([0-9a-fA-F](_*[0-9a-fA-F]+)*)?\.[0-9a-fA-F](_*[0-9a-fA-F]+)*)'
- r'[pP][+\-]?[0-9](_*[0-9]+)*[fFdD]?', Number.Float),
- (r'0[xX][0-9a-fA-F](_*[0-9a-fA-F]+)*[lL]?', Number.Hex),
- (r'0[bB][01](_*[01]+)*[lL]?', Number.Bin),
- (r'0(_*[0-7]+)+[lL]?', Number.Oct),
- (r'0|[1-9](_*[0-9]+)*[lL]?', Number.Integer),
+ (r'([0-9][0-9_]*\.([0-9][0-9_]*)?|'
+ r'\.[0-9][0-9_]*)'
+ r'([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|'
+ r'[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|'
+ r'[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|'
+ r'0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|'
+ r'([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)'
+ r'[pP][+\-]?[0-9][0-9_]*[fFdD]?', Number.Float),
+ (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?', Number.Hex),
+ (r'0[bB][01][01_]*[lL]?', Number.Bin),
+ (r'0[0-7_]+[lL]?', Number.Oct),
+ (r'0|[1-9][0-9_]*[lL]?', Number.Integer),
(r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
(r'\n', Text)
],
diff --git a/pygments/lexers/lisp.py b/pygments/lexers/lisp.py
index b15fd0c0..e258c347 100644
--- a/pygments/lexers/lisp.py
+++ b/pygments/lexers/lisp.py
@@ -5,7 +5,7 @@
Lexers for Lispy languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -471,731 +471,779 @@ class RacketLexer(RegexLexer):
# Generated by example.rkt
_keywords = (
- '#%app', '#%datum', '#%declare', '#%expression', '#%module-begin',
- '#%plain-app', '#%plain-lambda', '#%plain-module-begin',
- '#%printing-module-begin', '#%provide', '#%require',
- '#%stratified-body', '#%top', '#%top-interaction',
- '#%variable-reference', '->', '->*', '->*m', '->d', '->dm', '->i',
- '->m', '...', ':do-in', '==', '=>', '_', 'absent', 'abstract',
- 'all-defined-out', 'all-from-out', 'and', 'any', 'augment', 'augment*',
- 'augment-final', 'augment-final*', 'augride', 'augride*', 'begin',
- 'begin-for-syntax', 'begin0', 'case', 'case->', 'case->m',
- 'case-lambda', 'class', 'class*', 'class-field-accessor',
- 'class-field-mutator', 'class/c', 'class/derived', 'combine-in',
- 'combine-out', 'command-line', 'compound-unit', 'compound-unit/infer',
- 'cond', 'contract', 'contract-out', 'contract-struct', 'contracted',
- 'define', 'define-compound-unit', 'define-compound-unit/infer',
- 'define-contract-struct', 'define-custom-hash-types',
- 'define-custom-set-types', 'define-for-syntax',
- 'define-local-member-name', 'define-logger', 'define-match-expander',
- 'define-member-name', 'define-module-boundary-contract',
- 'define-namespace-anchor', 'define-opt/c', 'define-sequence-syntax',
- 'define-serializable-class', 'define-serializable-class*',
- 'define-signature', 'define-signature-form', 'define-struct',
- 'define-struct/contract', 'define-struct/derived', 'define-syntax',
- 'define-syntax-rule', 'define-syntaxes', 'define-unit',
- 'define-unit-binding', 'define-unit-from-context',
- 'define-unit/contract', 'define-unit/new-import-export',
- 'define-unit/s', 'define-values', 'define-values-for-export',
- 'define-values-for-syntax', 'define-values/invoke-unit',
- 'define-values/invoke-unit/infer', 'define/augment',
- 'define/augment-final', 'define/augride', 'define/contract',
- 'define/final-prop', 'define/match', 'define/overment',
- 'define/override', 'define/override-final', 'define/private',
- 'define/public', 'define/public-final', 'define/pubment',
- 'define/subexpression-pos-prop', 'delay', 'delay/idle', 'delay/name',
- 'delay/strict', 'delay/sync', 'delay/thread', 'do', 'else', 'except',
- 'except-in', 'except-out', 'export', 'extends', 'failure-cont',
- 'false', 'false/c', 'field', 'field-bound?', 'file',
- 'flat-murec-contract', 'flat-rec-contract', 'for', 'for*', 'for*/and',
- 'for*/first', 'for*/fold', 'for*/fold/derived', 'for*/hash',
- 'for*/hasheq', 'for*/hasheqv', 'for*/last', 'for*/list', 'for*/lists',
- 'for*/mutable-set', 'for*/mutable-seteq', 'for*/mutable-seteqv',
- 'for*/or', 'for*/product', 'for*/set', 'for*/seteq', 'for*/seteqv',
- 'for*/sum', 'for*/vector', 'for*/weak-set', 'for*/weak-seteq',
- 'for*/weak-seteqv', 'for-label', 'for-meta', 'for-syntax',
- 'for-template', 'for/and', 'for/first', 'for/fold', 'for/fold/derived',
- 'for/hash', 'for/hasheq', 'for/hasheqv', 'for/last', 'for/list',
- 'for/lists', 'for/mutable-set', 'for/mutable-seteq',
- 'for/mutable-seteqv', 'for/or', 'for/product', 'for/set', 'for/seteq',
- 'for/seteqv', 'for/sum', 'for/vector', 'for/weak-set',
- 'for/weak-seteq', 'for/weak-seteqv', 'gen:custom-write', 'gen:dict',
- 'gen:equal+hash', 'gen:set', 'gen:stream', 'generic', 'get-field',
- 'if', 'implies', 'import', 'include', 'include-at/relative-to',
- 'include-at/relative-to/reader', 'include/reader', 'inherit',
- 'inherit-field', 'inherit/inner', 'inherit/super', 'init',
- 'init-depend', 'init-field', 'init-rest', 'inner', 'inspect',
- 'instantiate', 'interface', 'interface*', 'invoke-unit',
- 'invoke-unit/infer', 'lambda', 'lazy', 'let', 'let*', 'let*-values',
- 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc', 'let/ec',
- 'letrec', 'letrec-syntax', 'letrec-syntaxes', 'letrec-syntaxes+values',
- 'letrec-values', 'lib', 'link', 'local', 'local-require', 'log-debug',
- 'log-error', 'log-fatal', 'log-info', 'log-warning', 'match', 'match*',
- 'match*/derived', 'match-define', 'match-define-values',
- 'match-lambda', 'match-lambda*', 'match-lambda**', 'match-let',
- 'match-let*', 'match-let*-values', 'match-let-values', 'match-letrec',
- 'match/derived', 'match/values', 'member-name-key', 'method-contract?',
- 'mixin', 'module', 'module*', 'module+', 'nand', 'new', 'nor',
- 'object-contract', 'object/c', 'only', 'only-in', 'only-meta-in',
- 'open', 'opt/c', 'or', 'overment', 'overment*', 'override',
- 'override*', 'override-final', 'override-final*', 'parameterize',
- 'parameterize*', 'parameterize-break', 'parametric->/c', 'place',
- 'place*', 'planet', 'prefix', 'prefix-in', 'prefix-out', 'private',
- 'private*', 'prompt-tag/c', 'protect-out', 'provide',
- 'provide-signature-elements', 'provide/contract', 'public', 'public*',
- 'public-final', 'public-final*', 'pubment', 'pubment*', 'quasiquote',
- 'quasisyntax', 'quasisyntax/loc', 'quote', 'quote-syntax',
- 'quote-syntax/prune', 'recontract-out', 'recursive-contract',
- 'relative-in', 'rename', 'rename-in', 'rename-inner', 'rename-out',
- 'rename-super', 'require', 'send', 'send*', 'send+', 'send-generic',
- 'send/apply', 'send/keyword-apply', 'set!', 'set!-values',
- 'set-field!', 'shared', 'stream', 'stream-cons', 'struct', 'struct*',
- 'struct-copy', 'struct-field-index', 'struct-out', 'struct/c',
- 'struct/ctc', 'struct/dc', 'submod', 'super', 'super-instantiate',
- 'super-make-object', 'super-new', 'syntax', 'syntax-case',
- 'syntax-case*', 'syntax-id-rules', 'syntax-rules', 'syntax/loc', 'tag',
- 'this', 'this%', 'thunk', 'thunk*', 'time', 'unconstrained-domain->',
- 'unit', 'unit-from-context', 'unit/c', 'unit/new-import-export',
- 'unit/s', 'unless', 'unquote', 'unquote-splicing', 'unsyntax',
- 'unsyntax-splicing', 'values/drop', 'when', 'with-continuation-mark',
- 'with-contract', 'with-handlers', 'with-handlers*', 'with-method',
- 'with-syntax', u'λ'
+ u'#%app', u'#%datum', u'#%declare', u'#%expression', u'#%module-begin',
+ u'#%plain-app', u'#%plain-lambda', u'#%plain-module-begin',
+ u'#%printing-module-begin', u'#%provide', u'#%require',
+ u'#%stratified-body', u'#%top', u'#%top-interaction',
+ u'#%variable-reference', u'->', u'->*', u'->*m', u'->d', u'->dm', u'->i',
+ u'->m', u'...', u':do-in', u'==', u'=>', u'_', u'absent', u'abstract',
+ u'all-defined-out', u'all-from-out', u'and', u'any', u'augment', u'augment*',
+ u'augment-final', u'augment-final*', u'augride', u'augride*', u'begin',
+ u'begin-for-syntax', u'begin0', u'case', u'case->', u'case->m',
+ u'case-lambda', u'class', u'class*', u'class-field-accessor',
+ u'class-field-mutator', u'class/c', u'class/derived', u'combine-in',
+ u'combine-out', u'command-line', u'compound-unit', u'compound-unit/infer',
+ u'cond', u'cons/dc', u'contract', u'contract-out', u'contract-struct',
+ u'contracted', u'define', u'define-compound-unit',
+ u'define-compound-unit/infer', u'define-contract-struct',
+ u'define-custom-hash-types', u'define-custom-set-types',
+ u'define-for-syntax', u'define-local-member-name', u'define-logger',
+ u'define-match-expander', u'define-member-name',
+ u'define-module-boundary-contract', u'define-namespace-anchor',
+ u'define-opt/c', u'define-sequence-syntax', u'define-serializable-class',
+ u'define-serializable-class*', u'define-signature',
+ u'define-signature-form', u'define-struct', u'define-struct/contract',
+ u'define-struct/derived', u'define-syntax', u'define-syntax-rule',
+ u'define-syntaxes', u'define-unit', u'define-unit-binding',
+ u'define-unit-from-context', u'define-unit/contract',
+ u'define-unit/new-import-export', u'define-unit/s', u'define-values',
+ u'define-values-for-export', u'define-values-for-syntax',
+ u'define-values/invoke-unit', u'define-values/invoke-unit/infer',
+ u'define/augment', u'define/augment-final', u'define/augride',
+ u'define/contract', u'define/final-prop', u'define/match',
+ u'define/overment', u'define/override', u'define/override-final',
+ u'define/private', u'define/public', u'define/public-final',
+ u'define/pubment', u'define/subexpression-pos-prop',
+ u'define/subexpression-pos-prop/name', u'delay', u'delay/idle',
+ u'delay/name', u'delay/strict', u'delay/sync', u'delay/thread', u'do',
+ u'else', u'except', u'except-in', u'except-out', u'export', u'extends',
+ u'failure-cont', u'false', u'false/c', u'field', u'field-bound?', u'file',
+ u'flat-murec-contract', u'flat-rec-contract', u'for', u'for*', u'for*/and',
+ u'for*/async', u'for*/first', u'for*/fold', u'for*/fold/derived',
+ u'for*/hash', u'for*/hasheq', u'for*/hasheqv', u'for*/last', u'for*/list',
+ u'for*/lists', u'for*/mutable-set', u'for*/mutable-seteq',
+ u'for*/mutable-seteqv', u'for*/or', u'for*/product', u'for*/set',
+ u'for*/seteq', u'for*/seteqv', u'for*/stream', u'for*/sum', u'for*/vector',
+ u'for*/weak-set', u'for*/weak-seteq', u'for*/weak-seteqv', u'for-label',
+ u'for-meta', u'for-syntax', u'for-template', u'for/and', u'for/async',
+ u'for/first', u'for/fold', u'for/fold/derived', u'for/hash', u'for/hasheq',
+ u'for/hasheqv', u'for/last', u'for/list', u'for/lists', u'for/mutable-set',
+ u'for/mutable-seteq', u'for/mutable-seteqv', u'for/or', u'for/product',
+ u'for/set', u'for/seteq', u'for/seteqv', u'for/stream', u'for/sum',
+ u'for/vector', u'for/weak-set', u'for/weak-seteq', u'for/weak-seteqv',
+ u'gen:custom-write', u'gen:dict', u'gen:equal+hash', u'gen:set',
+ u'gen:stream', u'generic', u'get-field', u'hash/dc', u'if', u'implies',
+ u'import', u'include', u'include-at/relative-to',
+ u'include-at/relative-to/reader', u'include/reader', u'inherit',
+ u'inherit-field', u'inherit/inner', u'inherit/super', u'init',
+ u'init-depend', u'init-field', u'init-rest', u'inner', u'inspect',
+ u'instantiate', u'interface', u'interface*', u'invariant-assertion',
+ u'invoke-unit', u'invoke-unit/infer', u'lambda', u'lazy', u'let', u'let*',
+ u'let*-values', u'let-syntax', u'let-syntaxes', u'let-values', u'let/cc',
+ u'let/ec', u'letrec', u'letrec-syntax', u'letrec-syntaxes',
+ u'letrec-syntaxes+values', u'letrec-values', u'lib', u'link', u'local',
+ u'local-require', u'log-debug', u'log-error', u'log-fatal', u'log-info',
+ u'log-warning', u'match', u'match*', u'match*/derived', u'match-define',
+ u'match-define-values', u'match-lambda', u'match-lambda*',
+ u'match-lambda**', u'match-let', u'match-let*', u'match-let*-values',
+ u'match-let-values', u'match-letrec', u'match-letrec-values',
+ u'match/derived', u'match/values', u'member-name-key', u'mixin', u'module',
+ u'module*', u'module+', u'nand', u'new', u'nor', u'object-contract',
+ u'object/c', u'only', u'only-in', u'only-meta-in', u'open', u'opt/c', u'or',
+ u'overment', u'overment*', u'override', u'override*', u'override-final',
+ u'override-final*', u'parameterize', u'parameterize*',
+ u'parameterize-break', u'parametric->/c', u'place', u'place*',
+ u'place/context', u'planet', u'prefix', u'prefix-in', u'prefix-out',
+ u'private', u'private*', u'prompt-tag/c', u'protect-out', u'provide',
+ u'provide-signature-elements', u'provide/contract', u'public', u'public*',
+ u'public-final', u'public-final*', u'pubment', u'pubment*', u'quasiquote',
+ u'quasisyntax', u'quasisyntax/loc', u'quote', u'quote-syntax',
+ u'quote-syntax/prune', u'recontract-out', u'recursive-contract',
+ u'relative-in', u'rename', u'rename-in', u'rename-inner', u'rename-out',
+ u'rename-super', u'require', u'send', u'send*', u'send+', u'send-generic',
+ u'send/apply', u'send/keyword-apply', u'set!', u'set!-values',
+ u'set-field!', u'shared', u'stream', u'stream*', u'stream-cons', u'struct',
+ u'struct*', u'struct-copy', u'struct-field-index', u'struct-out',
+ u'struct/c', u'struct/ctc', u'struct/dc', u'submod', u'super',
+ u'super-instantiate', u'super-make-object', u'super-new', u'syntax',
+ u'syntax-case', u'syntax-case*', u'syntax-id-rules', u'syntax-rules',
+ u'syntax/loc', u'tag', u'this', u'this%', u'thunk', u'thunk*', u'time',
+ u'unconstrained-domain->', u'unit', u'unit-from-context', u'unit/c',
+ u'unit/new-import-export', u'unit/s', u'unless', u'unquote',
+ u'unquote-splicing', u'unsyntax', u'unsyntax-splicing', u'values/drop',
+ u'when', u'with-continuation-mark', u'with-contract',
+ u'with-contract-continuation-mark', u'with-handlers', u'with-handlers*',
+ u'with-method', u'with-syntax', u'λ'
)
# Generated by example.rkt
_builtins = (
- '*', '+', '-', '/', '<', '</c', '<=', '<=/c', '=', '=/c', '>', '>/c',
- '>=', '>=/c', 'abort-current-continuation', 'abs', 'absolute-path?',
- 'acos', 'add-between', 'add1', 'alarm-evt', 'always-evt', 'and/c',
- 'andmap', 'angle', 'any/c', 'append', 'append*', 'append-map', 'apply',
- 'argmax', 'argmin', 'arithmetic-shift', 'arity-at-least',
- 'arity-at-least-value', 'arity-at-least?', 'arity-checking-wrapper',
- 'arity-includes?', 'arity=?', 'asin', 'assf', 'assoc', 'assq', 'assv',
- 'atan', 'bad-number-of-results', 'banner', 'base->-doms/c',
- 'base->-rngs/c', 'base->?', 'between/c', 'bitwise-and',
- 'bitwise-bit-field', 'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not',
- 'bitwise-xor', 'blame-add-car-context', 'blame-add-cdr-context',
- 'blame-add-context', 'blame-add-missing-party',
- 'blame-add-nth-arg-context', 'blame-add-or-context',
- 'blame-add-range-context', 'blame-add-unknown-context',
- 'blame-context', 'blame-contract', 'blame-fmt->-string',
- 'blame-negative', 'blame-original?', 'blame-positive',
- 'blame-replace-negative', 'blame-source', 'blame-swap',
- 'blame-swapped?', 'blame-update', 'blame-value', 'blame?', 'boolean=?',
- 'boolean?', 'bound-identifier=?', 'box', 'box-cas!', 'box-immutable',
- 'box-immutable/c', 'box/c', 'box?', 'break-enabled', 'break-thread',
- 'build-chaperone-contract-property', 'build-compound-type-name',
- 'build-contract-property', 'build-flat-contract-property',
- 'build-list', 'build-path', 'build-path/convention-type',
- 'build-string', 'build-vector', 'byte-pregexp', 'byte-pregexp?',
- 'byte-ready?', 'byte-regexp', 'byte-regexp?', 'byte?', 'bytes',
- 'bytes->immutable-bytes', 'bytes->list', 'bytes->path',
- 'bytes->path-element', 'bytes->string/latin-1', 'bytes->string/locale',
- 'bytes->string/utf-8', 'bytes-append', 'bytes-append*',
- 'bytes-close-converter', 'bytes-convert', 'bytes-convert-end',
- 'bytes-converter?', 'bytes-copy', 'bytes-copy!',
- 'bytes-environment-variable-name?', 'bytes-fill!', 'bytes-join',
- 'bytes-length', 'bytes-no-nuls?', 'bytes-open-converter', 'bytes-ref',
- 'bytes-set!', 'bytes-utf-8-index', 'bytes-utf-8-length',
- 'bytes-utf-8-ref', 'bytes<?', 'bytes=?', 'bytes>?', 'bytes?', 'caaaar',
- 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar',
- 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr',
- 'call-in-nested-thread', 'call-with-atomic-output-file',
- 'call-with-break-parameterization',
- 'call-with-composable-continuation', 'call-with-continuation-barrier',
- 'call-with-continuation-prompt', 'call-with-current-continuation',
- 'call-with-default-reading-parameterization',
- 'call-with-escape-continuation', 'call-with-exception-handler',
- 'call-with-file-lock/timeout', 'call-with-immediate-continuation-mark',
- 'call-with-input-bytes', 'call-with-input-file',
- 'call-with-input-file*', 'call-with-input-string',
- 'call-with-output-bytes', 'call-with-output-file',
- 'call-with-output-file*', 'call-with-output-string',
- 'call-with-parameterization', 'call-with-semaphore',
- 'call-with-semaphore/enable-break', 'call-with-values', 'call/cc',
- 'call/ec', 'car', 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr',
- 'cdadr', 'cdar', 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr',
- 'cdddr', 'cddr', 'cdr', 'ceiling', 'channel-get', 'channel-put',
- 'channel-put-evt', 'channel-put-evt?', 'channel-try-get', 'channel/c',
- 'channel?', 'chaperone-box', 'chaperone-channel',
- 'chaperone-continuation-mark-key', 'chaperone-contract-property?',
- 'chaperone-contract?', 'chaperone-evt', 'chaperone-hash',
- 'chaperone-of?', 'chaperone-procedure', 'chaperone-prompt-tag',
- 'chaperone-struct', 'chaperone-struct-type', 'chaperone-vector',
- 'chaperone?', 'char->integer', 'char-alphabetic?', 'char-blank?',
- 'char-ci<=?', 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?',
- 'char-downcase', 'char-foldcase', 'char-general-category',
- 'char-graphic?', 'char-iso-control?', 'char-lower-case?',
- 'char-numeric?', 'char-punctuation?', 'char-ready?', 'char-symbolic?',
- 'char-title-case?', 'char-titlecase', 'char-upcase',
- 'char-upper-case?', 'char-utf-8-length', 'char-whitespace?', 'char<=?',
- 'char<?', 'char=?', 'char>=?', 'char>?', 'char?',
- 'check-duplicate-identifier', 'checked-procedure-check-and-extract',
- 'choice-evt', 'class->interface', 'class-info', 'class?',
- 'cleanse-path', 'close-input-port', 'close-output-port',
- 'coerce-chaperone-contract', 'coerce-chaperone-contracts',
- 'coerce-contract', 'coerce-contract/f', 'coerce-contracts',
- 'coerce-flat-contract', 'coerce-flat-contracts', 'collect-garbage',
- 'collection-file-path', 'collection-path', 'compile',
- 'compile-allow-set!-undefined', 'compile-context-preservation-enabled',
- 'compile-enforce-module-constants', 'compile-syntax',
- 'compiled-expression?', 'compiled-module-expression?',
- 'complete-path?', 'complex?', 'compose', 'compose1', 'conjugate',
- 'cons', 'cons/c', 'cons?', 'const', 'continuation-mark-key/c',
- 'continuation-mark-key?', 'continuation-mark-set->context',
- 'continuation-mark-set->list', 'continuation-mark-set->list*',
- 'continuation-mark-set-first', 'continuation-mark-set?',
- 'continuation-marks', 'continuation-prompt-available?',
- 'continuation-prompt-tag?', 'continuation?',
- 'contract-continuation-mark-key', 'contract-first-order',
- 'contract-first-order-passes?', 'contract-name', 'contract-proc',
- 'contract-projection', 'contract-property?',
- 'contract-random-generate', 'contract-stronger?',
- 'contract-struct-exercise', 'contract-struct-generate',
- 'contract-val-first-projection', 'contract?', 'convert-stream',
- 'copy-directory/files', 'copy-file', 'copy-port', 'cos', 'cosh',
- 'count', 'current-blame-format', 'current-break-parameterization',
- 'current-code-inspector', 'current-command-line-arguments',
- 'current-compile', 'current-compiled-file-roots',
- 'current-continuation-marks', 'current-contract-region',
- 'current-custodian', 'current-directory', 'current-directory-for-user',
- 'current-drive', 'current-environment-variables', 'current-error-port',
- 'current-eval', 'current-evt-pseudo-random-generator',
- 'current-future', 'current-gc-milliseconds',
- 'current-get-interaction-input-port', 'current-inexact-milliseconds',
- 'current-input-port', 'current-inspector',
- 'current-library-collection-links', 'current-library-collection-paths',
- 'current-load', 'current-load-extension',
- 'current-load-relative-directory', 'current-load/use-compiled',
- 'current-locale', 'current-logger', 'current-memory-use',
- 'current-milliseconds', 'current-module-declare-name',
- 'current-module-declare-source', 'current-module-name-resolver',
- 'current-module-path-for-load', 'current-namespace',
- 'current-output-port', 'current-parameterization',
- 'current-preserved-thread-cell-values', 'current-print',
- 'current-process-milliseconds', 'current-prompt-read',
- 'current-pseudo-random-generator', 'current-read-interaction',
- 'current-reader-guard', 'current-readtable', 'current-seconds',
- 'current-security-guard', 'current-subprocess-custodian-mode',
- 'current-thread', 'current-thread-group',
- 'current-thread-initial-stack-size',
- 'current-write-relative-directory', 'curry', 'curryr',
- 'custodian-box-value', 'custodian-box?', 'custodian-limit-memory',
- 'custodian-managed-list', 'custodian-memory-accounting-available?',
- 'custodian-require-memory', 'custodian-shutdown-all', 'custodian?',
- 'custom-print-quotable-accessor', 'custom-print-quotable?',
- 'custom-write-accessor', 'custom-write-property-proc', 'custom-write?',
- 'date', 'date*', 'date*-nanosecond', 'date*-time-zone-name', 'date*?',
- 'date-day', 'date-dst?', 'date-hour', 'date-minute', 'date-month',
- 'date-second', 'date-time-zone-offset', 'date-week-day', 'date-year',
- 'date-year-day', 'date?', 'datum->syntax', 'datum-intern-literal',
- 'default-continuation-prompt-tag', 'degrees->radians',
- 'delete-directory', 'delete-directory/files', 'delete-file',
- 'denominator', 'dict->list', 'dict-can-functional-set?',
- 'dict-can-remove-keys?', 'dict-clear', 'dict-clear!', 'dict-copy',
- 'dict-count', 'dict-empty?', 'dict-for-each', 'dict-has-key?',
- 'dict-implements/c', 'dict-implements?', 'dict-iter-contract',
- 'dict-iterate-first', 'dict-iterate-key', 'dict-iterate-next',
- 'dict-iterate-value', 'dict-key-contract', 'dict-keys', 'dict-map',
- 'dict-mutable?', 'dict-ref', 'dict-ref!', 'dict-remove',
- 'dict-remove!', 'dict-set', 'dict-set!', 'dict-set*', 'dict-set*!',
- 'dict-update', 'dict-update!', 'dict-value-contract', 'dict-values',
- 'dict?', 'directory-exists?', 'directory-list', 'display',
- 'display-lines', 'display-lines-to-file', 'display-to-file',
- 'displayln', 'double-flonum?', 'drop', 'drop-right', 'dropf',
- 'dropf-right', 'dump-memory-stats', 'dup-input-port',
- 'dup-output-port', 'dynamic-get-field', 'dynamic-place',
- 'dynamic-place*', 'dynamic-require', 'dynamic-require-for-syntax',
- 'dynamic-send', 'dynamic-set-field!', 'dynamic-wind', 'eighth',
- 'empty', 'empty-sequence', 'empty-stream', 'empty?',
- 'environment-variables-copy', 'environment-variables-names',
- 'environment-variables-ref', 'environment-variables-set!',
- 'environment-variables?', 'eof', 'eof-evt', 'eof-object?',
- 'ephemeron-value', 'ephemeron?', 'eprintf', 'eq-contract-val',
- 'eq-contract?', 'eq-hash-code', 'eq?', 'equal-contract-val',
- 'equal-contract?', 'equal-hash-code', 'equal-secondary-hash-code',
- 'equal<%>', 'equal?', 'equal?/recur', 'eqv-hash-code', 'eqv?', 'error',
- 'error-display-handler', 'error-escape-handler',
- 'error-print-context-length', 'error-print-source-location',
- 'error-print-width', 'error-value->string-handler', 'eval',
- 'eval-jit-enabled', 'eval-syntax', 'even?', 'evt/c', 'evt?',
- 'exact->inexact', 'exact-ceiling', 'exact-floor', 'exact-integer?',
- 'exact-nonnegative-integer?', 'exact-positive-integer?', 'exact-round',
- 'exact-truncate', 'exact?', 'executable-yield-handler', 'exit',
- 'exit-handler', 'exn', 'exn-continuation-marks', 'exn-message',
- 'exn:break', 'exn:break-continuation', 'exn:break:hang-up',
- 'exn:break:hang-up?', 'exn:break:terminate', 'exn:break:terminate?',
- 'exn:break?', 'exn:fail', 'exn:fail:contract',
- 'exn:fail:contract:arity', 'exn:fail:contract:arity?',
- 'exn:fail:contract:blame', 'exn:fail:contract:blame-object',
- 'exn:fail:contract:blame?', 'exn:fail:contract:continuation',
- 'exn:fail:contract:continuation?', 'exn:fail:contract:divide-by-zero',
- 'exn:fail:contract:divide-by-zero?',
- 'exn:fail:contract:non-fixnum-result',
- 'exn:fail:contract:non-fixnum-result?', 'exn:fail:contract:variable',
- 'exn:fail:contract:variable-id', 'exn:fail:contract:variable?',
- 'exn:fail:contract?', 'exn:fail:filesystem',
- 'exn:fail:filesystem:errno', 'exn:fail:filesystem:errno-errno',
- 'exn:fail:filesystem:errno?', 'exn:fail:filesystem:exists',
- 'exn:fail:filesystem:exists?', 'exn:fail:filesystem:missing-module',
- 'exn:fail:filesystem:missing-module-path',
- 'exn:fail:filesystem:missing-module?', 'exn:fail:filesystem:version',
- 'exn:fail:filesystem:version?', 'exn:fail:filesystem?',
- 'exn:fail:network', 'exn:fail:network:errno',
- 'exn:fail:network:errno-errno', 'exn:fail:network:errno?',
- 'exn:fail:network?', 'exn:fail:object', 'exn:fail:object?',
- 'exn:fail:out-of-memory', 'exn:fail:out-of-memory?', 'exn:fail:read',
- 'exn:fail:read-srclocs', 'exn:fail:read:eof', 'exn:fail:read:eof?',
- 'exn:fail:read:non-char', 'exn:fail:read:non-char?', 'exn:fail:read?',
- 'exn:fail:syntax', 'exn:fail:syntax-exprs',
- 'exn:fail:syntax:missing-module',
- 'exn:fail:syntax:missing-module-path',
- 'exn:fail:syntax:missing-module?', 'exn:fail:syntax:unbound',
- 'exn:fail:syntax:unbound?', 'exn:fail:syntax?', 'exn:fail:unsupported',
- 'exn:fail:unsupported?', 'exn:fail:user', 'exn:fail:user?',
- 'exn:fail?', 'exn:misc:match?', 'exn:missing-module-accessor',
- 'exn:missing-module?', 'exn:srclocs-accessor', 'exn:srclocs?', 'exn?',
- 'exp', 'expand', 'expand-once', 'expand-syntax', 'expand-syntax-once',
- 'expand-syntax-to-top-form', 'expand-to-top-form', 'expand-user-path',
- 'explode-path', 'expt', 'externalizable<%>', 'false?', 'field-names',
- 'fifth', 'file->bytes', 'file->bytes-lines', 'file->lines',
- 'file->list', 'file->string', 'file->value', 'file-exists?',
- 'file-name-from-path', 'file-or-directory-identity',
- 'file-or-directory-modify-seconds', 'file-or-directory-permissions',
- 'file-position', 'file-position*', 'file-size',
- 'file-stream-buffer-mode', 'file-stream-port?', 'file-truncate',
- 'filename-extension', 'filesystem-change-evt',
- 'filesystem-change-evt-cancel', 'filesystem-change-evt?',
- 'filesystem-root-list', 'filter', 'filter-map', 'filter-not',
- 'filter-read-input-port', 'find-executable-path', 'find-files',
- 'find-library-collection-links', 'find-library-collection-paths',
- 'find-relative-path', 'find-system-path', 'findf', 'first', 'fixnum?',
- 'flat-contract', 'flat-contract-predicate', 'flat-contract-property?',
- 'flat-contract?', 'flat-named-contract', 'flatten',
- 'floating-point-bytes->real', 'flonum?', 'floor', 'flush-output',
- 'fold-files', 'foldl', 'foldr', 'for-each', 'force', 'format',
- 'fourth', 'fprintf', 'free-identifier=?', 'free-label-identifier=?',
- 'free-template-identifier=?', 'free-transformer-identifier=?',
- 'fsemaphore-count', 'fsemaphore-post', 'fsemaphore-try-wait?',
- 'fsemaphore-wait', 'fsemaphore?', 'future', 'future?',
- 'futures-enabled?', 'gcd', 'generate-member-key',
- 'generate-temporaries', 'generic-set?', 'generic?', 'gensym',
- 'get-output-bytes', 'get-output-string', 'get-preference',
- 'get/build-val-first-projection', 'getenv',
- 'global-port-print-handler', 'group-execute-bit', 'group-read-bit',
- 'group-write-bit', 'guard-evt', 'handle-evt', 'handle-evt?',
- 'has-contract?', 'hash', 'hash->list', 'hash-clear', 'hash-clear!',
- 'hash-copy', 'hash-copy-clear', 'hash-count', 'hash-empty?',
- 'hash-eq?', 'hash-equal?', 'hash-eqv?', 'hash-for-each',
- 'hash-has-key?', 'hash-iterate-first', 'hash-iterate-key',
- 'hash-iterate-next', 'hash-iterate-value', 'hash-keys', 'hash-map',
- 'hash-placeholder?', 'hash-ref', 'hash-ref!', 'hash-remove',
- 'hash-remove!', 'hash-set', 'hash-set!', 'hash-set*', 'hash-set*!',
- 'hash-update', 'hash-update!', 'hash-values', 'hash-weak?', 'hash/c',
- 'hash?', 'hasheq', 'hasheqv', 'identifier-binding',
- 'identifier-binding-symbol', 'identifier-label-binding',
- 'identifier-prune-lexical-context',
- 'identifier-prune-to-source-module',
- 'identifier-remove-from-definition-context',
- 'identifier-template-binding', 'identifier-transformer-binding',
- 'identifier?', 'identity', 'imag-part', 'immutable?',
- 'impersonate-box', 'impersonate-channel',
- 'impersonate-continuation-mark-key', 'impersonate-hash',
- 'impersonate-procedure', 'impersonate-prompt-tag',
- 'impersonate-struct', 'impersonate-vector', 'impersonator-contract?',
- 'impersonator-ephemeron', 'impersonator-of?',
- 'impersonator-prop:application-mark', 'impersonator-prop:contracted',
- 'impersonator-property-accessor-procedure?', 'impersonator-property?',
- 'impersonator?', 'implementation?', 'implementation?/c', 'in-bytes',
- 'in-bytes-lines', 'in-cycle', 'in-dict', 'in-dict-keys',
- 'in-dict-pairs', 'in-dict-values', 'in-directory', 'in-hash',
- 'in-hash-keys', 'in-hash-pairs', 'in-hash-values', 'in-indexed',
- 'in-input-port-bytes', 'in-input-port-chars', 'in-lines', 'in-list',
- 'in-mlist', 'in-naturals', 'in-parallel', 'in-permutations', 'in-port',
- 'in-producer', 'in-range', 'in-sequences', 'in-set', 'in-stream',
- 'in-string', 'in-value', 'in-values*-sequence', 'in-values-sequence',
- 'in-vector', 'inexact->exact', 'inexact-real?', 'inexact?',
- 'infinite?', 'input-port-append', 'input-port?', 'inspector?',
- 'instanceof/c', 'integer->char', 'integer->integer-bytes',
- 'integer-bytes->integer', 'integer-in', 'integer-length',
- 'integer-sqrt', 'integer-sqrt/remainder', 'integer?',
- 'interface->method-names', 'interface-extension?', 'interface?',
- 'internal-definition-context-seal', 'internal-definition-context?',
- 'is-a?', 'is-a?/c', 'keyword->string', 'keyword-apply', 'keyword<?',
- 'keyword?', 'keywords-match', 'kill-thread', 'last', 'last-pair',
- 'lcm', 'length', 'liberal-define-context?', 'link-exists?', 'list',
- 'list*', 'list->bytes', 'list->mutable-set', 'list->mutable-seteq',
- 'list->mutable-seteqv', 'list->set', 'list->seteq', 'list->seteqv',
- 'list->string', 'list->vector', 'list->weak-set', 'list->weak-seteq',
- 'list->weak-seteqv', 'list-ref', 'list-tail', 'list/c', 'list?',
- 'listof', 'load', 'load-extension', 'load-on-demand-enabled',
- 'load-relative', 'load-relative-extension', 'load/cd',
- 'load/use-compiled', 'local-expand', 'local-expand/capture-lifts',
- 'local-transformer-expand', 'local-transformer-expand/capture-lifts',
- 'locale-string-encoding', 'log', 'log-level?', 'log-max-level',
- 'log-message', 'log-receiver?', 'logger-name', 'logger?', 'magnitude',
- 'make-arity-at-least', 'make-base-empty-namespace',
- 'make-base-namespace', 'make-bytes', 'make-channel',
- 'make-chaperone-contract', 'make-continuation-mark-key',
- 'make-continuation-prompt-tag', 'make-contract', 'make-custodian',
- 'make-custodian-box', 'make-custom-hash', 'make-custom-hash-types',
- 'make-custom-set', 'make-custom-set-types', 'make-date', 'make-date*',
- 'make-derived-parameter', 'make-directory', 'make-directory*',
- 'make-do-sequence', 'make-empty-namespace',
- 'make-environment-variables', 'make-ephemeron', 'make-exn',
- 'make-exn:break', 'make-exn:break:hang-up', 'make-exn:break:terminate',
- 'make-exn:fail', 'make-exn:fail:contract',
- 'make-exn:fail:contract:arity', 'make-exn:fail:contract:blame',
- 'make-exn:fail:contract:continuation',
- 'make-exn:fail:contract:divide-by-zero',
- 'make-exn:fail:contract:non-fixnum-result',
- 'make-exn:fail:contract:variable', 'make-exn:fail:filesystem',
- 'make-exn:fail:filesystem:errno', 'make-exn:fail:filesystem:exists',
- 'make-exn:fail:filesystem:missing-module',
- 'make-exn:fail:filesystem:version', 'make-exn:fail:network',
- 'make-exn:fail:network:errno', 'make-exn:fail:object',
- 'make-exn:fail:out-of-memory', 'make-exn:fail:read',
- 'make-exn:fail:read:eof', 'make-exn:fail:read:non-char',
- 'make-exn:fail:syntax', 'make-exn:fail:syntax:missing-module',
- 'make-exn:fail:syntax:unbound', 'make-exn:fail:unsupported',
- 'make-exn:fail:user', 'make-file-or-directory-link',
- 'make-flat-contract', 'make-fsemaphore', 'make-generic',
- 'make-handle-get-preference-locked', 'make-hash',
- 'make-hash-placeholder', 'make-hasheq', 'make-hasheq-placeholder',
- 'make-hasheqv', 'make-hasheqv-placeholder',
- 'make-immutable-custom-hash', 'make-immutable-hash',
- 'make-immutable-hasheq', 'make-immutable-hasheqv',
- 'make-impersonator-property', 'make-input-port',
- 'make-input-port/read-to-peek', 'make-inspector',
- 'make-keyword-procedure', 'make-known-char-range-list',
- 'make-limited-input-port', 'make-list', 'make-lock-file-name',
- 'make-log-receiver', 'make-logger', 'make-mixin-contract',
- 'make-mutable-custom-set', 'make-none/c', 'make-object',
- 'make-output-port', 'make-parameter', 'make-phantom-bytes',
- 'make-pipe', 'make-pipe-with-specials', 'make-placeholder',
- 'make-polar', 'make-prefab-struct', 'make-primitive-class',
- 'make-proj-contract', 'make-pseudo-random-generator',
- 'make-reader-graph', 'make-readtable', 'make-rectangular',
- 'make-rename-transformer', 'make-resolved-module-path',
- 'make-security-guard', 'make-semaphore', 'make-set!-transformer',
- 'make-shared-bytes', 'make-sibling-inspector', 'make-special-comment',
- 'make-srcloc', 'make-string', 'make-struct-field-accessor',
- 'make-struct-field-mutator', 'make-struct-type',
- 'make-struct-type-property', 'make-syntax-delta-introducer',
- 'make-syntax-introducer', 'make-temporary-file',
- 'make-tentative-pretty-print-output-port', 'make-thread-cell',
- 'make-thread-group', 'make-vector', 'make-weak-box',
- 'make-weak-custom-hash', 'make-weak-custom-set', 'make-weak-hash',
- 'make-weak-hasheq', 'make-weak-hasheqv', 'make-will-executor', 'map',
- 'match-equality-test', 'matches-arity-exactly?', 'max', 'mcar', 'mcdr',
- 'mcons', 'member', 'member-name-key-hash-code', 'member-name-key=?',
- 'member-name-key?', 'memf', 'memq', 'memv', 'merge-input',
- 'method-in-interface?', 'min', 'mixin-contract', 'module->exports',
- 'module->imports', 'module->language-info', 'module->namespace',
- 'module-compiled-cross-phase-persistent?', 'module-compiled-exports',
- 'module-compiled-imports', 'module-compiled-language-info',
- 'module-compiled-name', 'module-compiled-submodules',
- 'module-declared?', 'module-path-index-join',
- 'module-path-index-resolve', 'module-path-index-split',
- 'module-path-index-submodule', 'module-path-index?', 'module-path?',
- 'module-predefined?', 'module-provide-protected?', 'modulo', 'mpair?',
- 'mutable-set', 'mutable-seteq', 'mutable-seteqv', 'n->th',
- 'nack-guard-evt', 'namespace-anchor->empty-namespace',
- 'namespace-anchor->namespace', 'namespace-anchor?',
- 'namespace-attach-module', 'namespace-attach-module-declaration',
- 'namespace-base-phase', 'namespace-mapped-symbols',
- 'namespace-module-identifier', 'namespace-module-registry',
- 'namespace-require', 'namespace-require/constant',
- 'namespace-require/copy', 'namespace-require/expansion-time',
- 'namespace-set-variable-value!', 'namespace-symbol->identifier',
- 'namespace-syntax-introduce', 'namespace-undefine-variable!',
- 'namespace-unprotect-module', 'namespace-variable-value', 'namespace?',
- 'nan?', 'natural-number/c', 'negate', 'negative?', 'never-evt',
- u'new-∀/c', u'new-∃/c', 'newline', 'ninth', 'non-empty-listof',
- 'none/c', 'normal-case-path', 'normalize-arity', 'normalize-path',
- 'normalized-arity?', 'not', 'not/c', 'null', 'null?', 'number->string',
- 'number?', 'numerator', 'object%', 'object->vector', 'object-info',
- 'object-interface', 'object-method-arity-includes?', 'object-name',
- 'object=?', 'object?', 'odd?', 'one-of/c', 'open-input-bytes',
- 'open-input-file', 'open-input-output-file', 'open-input-string',
- 'open-output-bytes', 'open-output-file', 'open-output-nowhere',
- 'open-output-string', 'or/c', 'order-of-magnitude', 'ormap',
- 'other-execute-bit', 'other-read-bit', 'other-write-bit',
- 'output-port?', 'pair?', 'parameter-procedure=?', 'parameter/c',
- 'parameter?', 'parameterization?', 'parse-command-line', 'partition',
- 'path->bytes', 'path->complete-path', 'path->directory-path',
- 'path->string', 'path-add-suffix', 'path-convention-type',
- 'path-element->bytes', 'path-element->string', 'path-element?',
- 'path-for-some-system?', 'path-list-string->path-list', 'path-only',
- 'path-replace-suffix', 'path-string?', 'path<?', 'path?',
- 'pathlist-closure', 'peek-byte', 'peek-byte-or-special', 'peek-bytes',
- 'peek-bytes!', 'peek-bytes!-evt', 'peek-bytes-avail!',
- 'peek-bytes-avail!*', 'peek-bytes-avail!-evt',
- 'peek-bytes-avail!/enable-break', 'peek-bytes-evt', 'peek-char',
- 'peek-char-or-special', 'peek-string', 'peek-string!',
- 'peek-string!-evt', 'peek-string-evt', 'peeking-input-port',
- 'permutations', 'phantom-bytes?', 'pi', 'pi.f', 'pipe-content-length',
- 'place-break', 'place-channel', 'place-channel-get',
- 'place-channel-put', 'place-channel-put/get', 'place-channel?',
- 'place-dead-evt', 'place-enabled?', 'place-kill', 'place-location?',
- 'place-message-allowed?', 'place-sleep', 'place-wait', 'place?',
- 'placeholder-get', 'placeholder-set!', 'placeholder?',
- 'poll-guard-evt', 'port->bytes', 'port->bytes-lines', 'port->lines',
- 'port->list', 'port->string', 'port-closed-evt', 'port-closed?',
- 'port-commit-peeked', 'port-count-lines!', 'port-count-lines-enabled',
- 'port-counts-lines?', 'port-display-handler', 'port-file-identity',
- 'port-file-unlock', 'port-next-location', 'port-print-handler',
- 'port-progress-evt', 'port-provides-progress-evts?',
- 'port-read-handler', 'port-try-file-lock?', 'port-write-handler',
- 'port-writes-atomic?', 'port-writes-special?', 'port?', 'positive?',
- 'predicate/c', 'prefab-key->struct-type', 'prefab-key?',
- 'prefab-struct-key', 'preferences-lock-file-mode', 'pregexp',
- 'pregexp?', 'pretty-display', 'pretty-format', 'pretty-print',
- 'pretty-print-.-symbol-without-bars',
- 'pretty-print-abbreviate-read-macros', 'pretty-print-columns',
- 'pretty-print-current-style-table', 'pretty-print-depth',
- 'pretty-print-exact-as-decimal', 'pretty-print-extend-style-table',
- 'pretty-print-handler', 'pretty-print-newline',
- 'pretty-print-post-print-hook', 'pretty-print-pre-print-hook',
- 'pretty-print-print-hook', 'pretty-print-print-line',
- 'pretty-print-remap-stylable', 'pretty-print-show-inexactness',
- 'pretty-print-size-hook', 'pretty-print-style-table?',
- 'pretty-printing', 'pretty-write', 'primitive-closure?',
- 'primitive-result-arity', 'primitive?', 'print', 'print-as-expression',
- 'print-boolean-long-form', 'print-box', 'print-graph',
- 'print-hash-table', 'print-mpair-curly-braces',
- 'print-pair-curly-braces', 'print-reader-abbreviations',
- 'print-struct', 'print-syntax-width', 'print-unreadable',
- 'print-vector-length', 'printable/c', 'printable<%>', 'printf',
- 'procedure->method', 'procedure-arity', 'procedure-arity-includes/c',
- 'procedure-arity-includes?', 'procedure-arity?',
- 'procedure-closure-contents-eq?', 'procedure-extract-target',
- 'procedure-keywords', 'procedure-reduce-arity',
- 'procedure-reduce-keyword-arity', 'procedure-rename',
- 'procedure-struct-type?', 'procedure?', 'process', 'process*',
- 'process*/ports', 'process/ports', 'processor-count', 'progress-evt?',
- 'promise-forced?', 'promise-running?', 'promise/c', 'promise?',
- 'prop:arity-string', 'prop:chaperone-contract',
- 'prop:checked-procedure', 'prop:contract', 'prop:contracted',
- 'prop:custom-print-quotable', 'prop:custom-write', 'prop:dict',
- 'prop:dict/contract', 'prop:equal+hash', 'prop:evt',
- 'prop:exn:missing-module', 'prop:exn:srclocs', 'prop:flat-contract',
- 'prop:impersonator-of', 'prop:input-port',
- 'prop:liberal-define-context', 'prop:opt-chaperone-contract',
- 'prop:opt-chaperone-contract-get-test', 'prop:opt-chaperone-contract?',
- 'prop:output-port', 'prop:place-location', 'prop:procedure',
- 'prop:rename-transformer', 'prop:sequence', 'prop:set!-transformer',
- 'prop:stream', 'proper-subset?', 'pseudo-random-generator->vector',
- 'pseudo-random-generator-vector?', 'pseudo-random-generator?',
- 'put-preferences', 'putenv', 'quotient', 'quotient/remainder',
- 'radians->degrees', 'raise', 'raise-argument-error',
- 'raise-arguments-error', 'raise-arity-error', 'raise-blame-error',
- 'raise-contract-error', 'raise-mismatch-error',
- 'raise-not-cons-blame-error', 'raise-range-error',
- 'raise-result-error', 'raise-syntax-error', 'raise-type-error',
- 'raise-user-error', 'random', 'random-seed', 'range', 'rational?',
- 'rationalize', 'read', 'read-accept-bar-quote', 'read-accept-box',
- 'read-accept-compiled', 'read-accept-dot', 'read-accept-graph',
- 'read-accept-infix-dot', 'read-accept-lang', 'read-accept-quasiquote',
- 'read-accept-reader', 'read-byte', 'read-byte-or-special',
- 'read-bytes', 'read-bytes!', 'read-bytes!-evt', 'read-bytes-avail!',
- 'read-bytes-avail!*', 'read-bytes-avail!-evt',
- 'read-bytes-avail!/enable-break', 'read-bytes-evt', 'read-bytes-line',
- 'read-bytes-line-evt', 'read-case-sensitive', 'read-char',
- 'read-char-or-special', 'read-curly-brace-as-paren',
- 'read-decimal-as-inexact', 'read-eval-print-loop', 'read-language',
- 'read-line', 'read-line-evt', 'read-on-demand-source',
- 'read-square-bracket-as-paren', 'read-string', 'read-string!',
- 'read-string!-evt', 'read-string-evt', 'read-syntax',
- 'read-syntax/recursive', 'read/recursive', 'readtable-mapping',
- 'readtable?', 'real->decimal-string', 'real->double-flonum',
- 'real->floating-point-bytes', 'real->single-flonum', 'real-in',
- 'real-part', 'real?', 'reencode-input-port', 'reencode-output-port',
- 'regexp', 'regexp-match', 'regexp-match*', 'regexp-match-evt',
- 'regexp-match-exact?', 'regexp-match-peek',
- 'regexp-match-peek-immediate', 'regexp-match-peek-positions',
- 'regexp-match-peek-positions*',
- 'regexp-match-peek-positions-immediate',
- 'regexp-match-peek-positions-immediate/end',
- 'regexp-match-peek-positions/end', 'regexp-match-positions',
- 'regexp-match-positions*', 'regexp-match-positions/end',
- 'regexp-match/end', 'regexp-match?', 'regexp-max-lookbehind',
- 'regexp-quote', 'regexp-replace', 'regexp-replace*',
- 'regexp-replace-quote', 'regexp-replaces', 'regexp-split',
- 'regexp-try-match', 'regexp?', 'relative-path?', 'relocate-input-port',
- 'relocate-output-port', 'remainder', 'remove', 'remove*',
- 'remove-duplicates', 'remq', 'remq*', 'remv', 'remv*',
- 'rename-file-or-directory', 'rename-transformer-target',
- 'rename-transformer?', 'reroot-path', 'resolve-path',
- 'resolved-module-path-name', 'resolved-module-path?', 'rest',
- 'reverse', 'round', 'second', 'seconds->date', 'security-guard?',
- 'semaphore-peek-evt', 'semaphore-peek-evt?', 'semaphore-post',
- 'semaphore-try-wait?', 'semaphore-wait', 'semaphore-wait/enable-break',
- 'semaphore?', 'sequence->list', 'sequence->stream',
- 'sequence-add-between', 'sequence-andmap', 'sequence-append',
- 'sequence-count', 'sequence-filter', 'sequence-fold',
- 'sequence-for-each', 'sequence-generate', 'sequence-generate*',
- 'sequence-length', 'sequence-map', 'sequence-ormap', 'sequence-ref',
- 'sequence-tail', 'sequence?', 'set', 'set!-transformer-procedure',
- 'set!-transformer?', 'set->list', 'set->stream', 'set-add', 'set-add!',
- 'set-box!', 'set-clear', 'set-clear!', 'set-copy', 'set-copy-clear',
- 'set-count', 'set-empty?', 'set-eq?', 'set-equal?', 'set-eqv?',
- 'set-first', 'set-for-each', 'set-implements/c', 'set-implements?',
- 'set-intersect', 'set-intersect!', 'set-map', 'set-mcar!', 'set-mcdr!',
- 'set-member?', 'set-mutable?', 'set-phantom-bytes!',
- 'set-port-next-location!', 'set-remove', 'set-remove!', 'set-rest',
- 'set-subtract', 'set-subtract!', 'set-symmetric-difference',
- 'set-symmetric-difference!', 'set-union', 'set-union!', 'set-weak?',
- 'set/c', 'set=?', 'set?', 'seteq', 'seteqv', 'seventh', 'sgn',
- 'shared-bytes', 'shell-execute', 'shrink-path-wrt', 'shuffle',
- 'simple-form-path', 'simplify-path', 'sin', 'single-flonum?', 'sinh',
- 'sixth', 'skip-projection-wrapper?', 'sleep',
- 'some-system-path->string', 'sort', 'special-comment-value',
- 'special-comment?', 'special-filter-input-port', 'split-at',
- 'split-at-right', 'split-path', 'splitf-at', 'splitf-at-right', 'sqr',
- 'sqrt', 'srcloc', 'srcloc->string', 'srcloc-column', 'srcloc-line',
- 'srcloc-position', 'srcloc-source', 'srcloc-span', 'srcloc?',
- 'stop-after', 'stop-before', 'stream->list', 'stream-add-between',
- 'stream-andmap', 'stream-append', 'stream-count', 'stream-empty?',
- 'stream-filter', 'stream-first', 'stream-fold', 'stream-for-each',
- 'stream-length', 'stream-map', 'stream-ormap', 'stream-ref',
- 'stream-rest', 'stream-tail', 'stream?', 'string',
- 'string->bytes/latin-1', 'string->bytes/locale', 'string->bytes/utf-8',
- 'string->immutable-string', 'string->keyword', 'string->list',
- 'string->number', 'string->path', 'string->path-element',
- 'string->some-system-path', 'string->symbol',
- 'string->uninterned-symbol', 'string->unreadable-symbol',
- 'string-append', 'string-append*', 'string-ci<=?', 'string-ci<?',
- 'string-ci=?', 'string-ci>=?', 'string-ci>?', 'string-copy',
- 'string-copy!', 'string-downcase', 'string-environment-variable-name?',
- 'string-fill!', 'string-foldcase', 'string-join', 'string-len/c',
- 'string-length', 'string-locale-ci<?', 'string-locale-ci=?',
- 'string-locale-ci>?', 'string-locale-downcase', 'string-locale-upcase',
- 'string-locale<?', 'string-locale=?', 'string-locale>?',
- 'string-no-nuls?', 'string-normalize-nfc', 'string-normalize-nfd',
- 'string-normalize-nfkc', 'string-normalize-nfkd',
- 'string-normalize-spaces', 'string-ref', 'string-replace',
- 'string-set!', 'string-split', 'string-titlecase', 'string-trim',
- 'string-upcase', 'string-utf-8-length', 'string<=?', 'string<?',
- 'string=?', 'string>=?', 'string>?', 'string?', 'struct->vector',
- 'struct-accessor-procedure?', 'struct-constructor-procedure?',
- 'struct-info', 'struct-mutator-procedure?',
- 'struct-predicate-procedure?', 'struct-type-info',
- 'struct-type-make-constructor', 'struct-type-make-predicate',
- 'struct-type-property-accessor-procedure?', 'struct-type-property/c',
- 'struct-type-property?', 'struct-type?', 'struct:arity-at-least',
- 'struct:date', 'struct:date*', 'struct:exn', 'struct:exn:break',
- 'struct:exn:break:hang-up', 'struct:exn:break:terminate',
- 'struct:exn:fail', 'struct:exn:fail:contract',
- 'struct:exn:fail:contract:arity', 'struct:exn:fail:contract:blame',
- 'struct:exn:fail:contract:continuation',
- 'struct:exn:fail:contract:divide-by-zero',
- 'struct:exn:fail:contract:non-fixnum-result',
- 'struct:exn:fail:contract:variable', 'struct:exn:fail:filesystem',
- 'struct:exn:fail:filesystem:errno',
- 'struct:exn:fail:filesystem:exists',
- 'struct:exn:fail:filesystem:missing-module',
- 'struct:exn:fail:filesystem:version', 'struct:exn:fail:network',
- 'struct:exn:fail:network:errno', 'struct:exn:fail:object',
- 'struct:exn:fail:out-of-memory', 'struct:exn:fail:read',
- 'struct:exn:fail:read:eof', 'struct:exn:fail:read:non-char',
- 'struct:exn:fail:syntax', 'struct:exn:fail:syntax:missing-module',
- 'struct:exn:fail:syntax:unbound', 'struct:exn:fail:unsupported',
- 'struct:exn:fail:user', 'struct:srcloc',
- 'struct:wrapped-extra-arg-arrow', 'struct?', 'sub1', 'subbytes',
- 'subclass?', 'subclass?/c', 'subprocess', 'subprocess-group-enabled',
- 'subprocess-kill', 'subprocess-pid', 'subprocess-status',
- 'subprocess-wait', 'subprocess?', 'subset?', 'substring',
- 'symbol->string', 'symbol-interned?', 'symbol-unreadable?', 'symbol<?',
- 'symbol=?', 'symbol?', 'symbols', 'sync', 'sync/enable-break',
- 'sync/timeout', 'sync/timeout/enable-break', 'syntax->datum',
- 'syntax->list', 'syntax-arm', 'syntax-column', 'syntax-disarm',
- 'syntax-e', 'syntax-line', 'syntax-local-bind-syntaxes',
- 'syntax-local-certifier', 'syntax-local-context',
- 'syntax-local-expand-expression', 'syntax-local-get-shadower',
- 'syntax-local-introduce', 'syntax-local-lift-context',
- 'syntax-local-lift-expression',
- 'syntax-local-lift-module-end-declaration',
- 'syntax-local-lift-provide', 'syntax-local-lift-require',
- 'syntax-local-lift-values-expression',
- 'syntax-local-make-definition-context',
- 'syntax-local-make-delta-introducer',
- 'syntax-local-module-defined-identifiers',
- 'syntax-local-module-exports',
- 'syntax-local-module-required-identifiers', 'syntax-local-name',
- 'syntax-local-phase-level', 'syntax-local-submodules',
- 'syntax-local-transforming-module-provides?', 'syntax-local-value',
- 'syntax-local-value/immediate', 'syntax-original?', 'syntax-position',
- 'syntax-property', 'syntax-property-symbol-keys', 'syntax-protect',
- 'syntax-rearm', 'syntax-recertify', 'syntax-shift-phase-level',
- 'syntax-source', 'syntax-source-module', 'syntax-span', 'syntax-taint',
- 'syntax-tainted?', 'syntax-track-origin',
- 'syntax-transforming-module-expression?', 'syntax-transforming?',
- 'syntax/c', 'syntax?', 'system', 'system*', 'system*/exit-code',
- 'system-big-endian?', 'system-idle-evt', 'system-language+country',
- 'system-library-subpath', 'system-path-convention-type', 'system-type',
- 'system/exit-code', 'tail-marks-match?', 'take', 'take-right', 'takef',
- 'takef-right', 'tan', 'tanh', 'tcp-abandon-port', 'tcp-accept',
- 'tcp-accept-evt', 'tcp-accept-ready?', 'tcp-accept/enable-break',
- 'tcp-addresses', 'tcp-close', 'tcp-connect',
- 'tcp-connect/enable-break', 'tcp-listen', 'tcp-listener?', 'tcp-port?',
- 'tentative-pretty-print-port-cancel',
- 'tentative-pretty-print-port-transfer', 'tenth', 'terminal-port?',
- 'the-unsupplied-arg', 'third', 'thread', 'thread-cell-ref',
- 'thread-cell-set!', 'thread-cell-values?', 'thread-cell?',
- 'thread-dead-evt', 'thread-dead?', 'thread-group?', 'thread-receive',
- 'thread-receive-evt', 'thread-resume', 'thread-resume-evt',
- 'thread-rewind-receive', 'thread-running?', 'thread-send',
- 'thread-suspend', 'thread-suspend-evt', 'thread-try-receive',
- 'thread-wait', 'thread/suspend-to-kill', 'thread?', 'time-apply',
- 'touch', 'transplant-input-port', 'transplant-output-port', 'true',
- 'truncate', 'udp-addresses', 'udp-bind!', 'udp-bound?', 'udp-close',
- 'udp-connect!', 'udp-connected?', 'udp-multicast-interface',
- 'udp-multicast-join-group!', 'udp-multicast-leave-group!',
- 'udp-multicast-loopback?', 'udp-multicast-set-interface!',
- 'udp-multicast-set-loopback!', 'udp-multicast-set-ttl!',
- 'udp-multicast-ttl', 'udp-open-socket', 'udp-receive!',
- 'udp-receive!*', 'udp-receive!-evt', 'udp-receive!/enable-break',
- 'udp-receive-ready-evt', 'udp-send', 'udp-send*', 'udp-send-evt',
- 'udp-send-ready-evt', 'udp-send-to', 'udp-send-to*', 'udp-send-to-evt',
- 'udp-send-to/enable-break', 'udp-send/enable-break', 'udp?', 'unbox',
- 'uncaught-exception-handler', 'unit?', 'unspecified-dom',
- 'unsupplied-arg?', 'use-collection-link-paths',
- 'use-compiled-file-paths', 'use-user-specific-search-paths',
- 'user-execute-bit', 'user-read-bit', 'user-write-bit',
- 'value-contract', 'values', 'variable-reference->empty-namespace',
- 'variable-reference->module-base-phase',
- 'variable-reference->module-declaration-inspector',
- 'variable-reference->module-path-index',
- 'variable-reference->module-source', 'variable-reference->namespace',
- 'variable-reference->phase',
- 'variable-reference->resolved-module-path',
- 'variable-reference-constant?', 'variable-reference?', 'vector',
- 'vector->immutable-vector', 'vector->list',
- 'vector->pseudo-random-generator', 'vector->pseudo-random-generator!',
- 'vector->values', 'vector-append', 'vector-argmax', 'vector-argmin',
- 'vector-copy', 'vector-copy!', 'vector-count', 'vector-drop',
- 'vector-drop-right', 'vector-fill!', 'vector-filter',
- 'vector-filter-not', 'vector-immutable', 'vector-immutable/c',
- 'vector-immutableof', 'vector-length', 'vector-map', 'vector-map!',
- 'vector-member', 'vector-memq', 'vector-memv', 'vector-ref',
- 'vector-set!', 'vector-set*!', 'vector-set-performance-stats!',
- 'vector-split-at', 'vector-split-at-right', 'vector-take',
- 'vector-take-right', 'vector/c', 'vector?', 'vectorof', 'version',
- 'void', 'void?', 'weak-box-value', 'weak-box?', 'weak-set',
- 'weak-seteq', 'weak-seteqv', 'will-execute', 'will-executor?',
- 'will-register', 'will-try-execute', 'with-input-from-bytes',
- 'with-input-from-file', 'with-input-from-string',
- 'with-output-to-bytes', 'with-output-to-file', 'with-output-to-string',
- 'would-be-future', 'wrap-evt', 'wrapped-extra-arg-arrow',
- 'wrapped-extra-arg-arrow-extra-neg-party-argument',
- 'wrapped-extra-arg-arrow-real-func', 'wrapped-extra-arg-arrow?',
- 'writable<%>', 'write', 'write-byte', 'write-bytes',
- 'write-bytes-avail', 'write-bytes-avail*', 'write-bytes-avail-evt',
- 'write-bytes-avail/enable-break', 'write-char', 'write-special',
- 'write-special-avail*', 'write-special-evt', 'write-string',
- 'write-to-file', 'xor', 'zero?', '~.a', '~.s', '~.v', '~a', '~e', '~r',
- '~s', '~v'
+ u'*', u'*list/c', u'+', u'-', u'/', u'<', u'</c', u'<=', u'<=/c', u'=', u'=/c',
+ u'>', u'>/c', u'>=', u'>=/c', u'abort-current-continuation', u'abs',
+ u'absolute-path?', u'acos', u'add-between', u'add1', u'alarm-evt',
+ u'always-evt', u'and/c', u'andmap', u'angle', u'any/c', u'append', u'append*',
+ u'append-map', u'apply', u'argmax', u'argmin', u'arithmetic-shift',
+ u'arity-at-least', u'arity-at-least-value', u'arity-at-least?',
+ u'arity-checking-wrapper', u'arity-includes?', u'arity=?',
+ u'arrow-contract-info', u'arrow-contract-info-accepts-arglist',
+ u'arrow-contract-info-chaperone-procedure',
+ u'arrow-contract-info-check-first-order', u'arrow-contract-info?',
+ u'asin', u'assf', u'assoc', u'assq', u'assv', u'atan',
+ u'bad-number-of-results', u'banner', u'base->-doms/c', u'base->-rngs/c',
+ u'base->?', u'between/c', u'bitwise-and', u'bitwise-bit-field',
+ u'bitwise-bit-set?', u'bitwise-ior', u'bitwise-not', u'bitwise-xor',
+ u'blame-add-car-context', u'blame-add-cdr-context', u'blame-add-context',
+ u'blame-add-missing-party', u'blame-add-nth-arg-context',
+ u'blame-add-range-context', u'blame-add-unknown-context',
+ u'blame-context', u'blame-contract', u'blame-fmt->-string',
+ u'blame-missing-party?', u'blame-negative', u'blame-original?',
+ u'blame-positive', u'blame-replace-negative', u'blame-source',
+ u'blame-swap', u'blame-swapped?', u'blame-update', u'blame-value',
+ u'blame?', u'boolean=?', u'boolean?', u'bound-identifier=?', u'box',
+ u'box-cas!', u'box-immutable', u'box-immutable/c', u'box/c', u'box?',
+ u'break-enabled', u'break-parameterization?', u'break-thread',
+ u'build-chaperone-contract-property', u'build-compound-type-name',
+ u'build-contract-property', u'build-flat-contract-property',
+ u'build-list', u'build-path', u'build-path/convention-type',
+ u'build-string', u'build-vector', u'byte-pregexp', u'byte-pregexp?',
+ u'byte-ready?', u'byte-regexp', u'byte-regexp?', u'byte?', u'bytes',
+ u'bytes->immutable-bytes', u'bytes->list', u'bytes->path',
+ u'bytes->path-element', u'bytes->string/latin-1', u'bytes->string/locale',
+ u'bytes->string/utf-8', u'bytes-append', u'bytes-append*',
+ u'bytes-close-converter', u'bytes-convert', u'bytes-convert-end',
+ u'bytes-converter?', u'bytes-copy', u'bytes-copy!',
+ u'bytes-environment-variable-name?', u'bytes-fill!', u'bytes-join',
+ u'bytes-length', u'bytes-no-nuls?', u'bytes-open-converter', u'bytes-ref',
+ u'bytes-set!', u'bytes-utf-8-index', u'bytes-utf-8-length',
+ u'bytes-utf-8-ref', u'bytes<?', u'bytes=?', u'bytes>?', u'bytes?', u'caaaar',
+ u'caaadr', u'caaar', u'caadar', u'caaddr', u'caadr', u'caar', u'cadaar',
+ u'cadadr', u'cadar', u'caddar', u'cadddr', u'caddr', u'cadr',
+ u'call-in-nested-thread', u'call-with-atomic-output-file',
+ u'call-with-break-parameterization',
+ u'call-with-composable-continuation', u'call-with-continuation-barrier',
+ u'call-with-continuation-prompt', u'call-with-current-continuation',
+ u'call-with-default-reading-parameterization',
+ u'call-with-escape-continuation', u'call-with-exception-handler',
+ u'call-with-file-lock/timeout', u'call-with-immediate-continuation-mark',
+ u'call-with-input-bytes', u'call-with-input-file',
+ u'call-with-input-file*', u'call-with-input-string',
+ u'call-with-output-bytes', u'call-with-output-file',
+ u'call-with-output-file*', u'call-with-output-string',
+ u'call-with-parameterization', u'call-with-semaphore',
+ u'call-with-semaphore/enable-break', u'call-with-values', u'call/cc',
+ u'call/ec', u'car', u'cartesian-product', u'cdaaar', u'cdaadr', u'cdaar',
+ u'cdadar', u'cdaddr', u'cdadr', u'cdar', u'cddaar', u'cddadr', u'cddar',
+ u'cdddar', u'cddddr', u'cdddr', u'cddr', u'cdr', u'ceiling', u'channel-get',
+ u'channel-put', u'channel-put-evt', u'channel-put-evt?',
+ u'channel-try-get', u'channel/c', u'channel?', u'chaperone-box',
+ u'chaperone-channel', u'chaperone-continuation-mark-key',
+ u'chaperone-contract-property?', u'chaperone-contract?', u'chaperone-evt',
+ u'chaperone-hash', u'chaperone-hash-set', u'chaperone-of?',
+ u'chaperone-procedure', u'chaperone-procedure*', u'chaperone-prompt-tag',
+ u'chaperone-struct', u'chaperone-struct-type', u'chaperone-vector',
+ u'chaperone?', u'char->integer', u'char-alphabetic?', u'char-blank?',
+ u'char-ci<=?', u'char-ci<?', u'char-ci=?', u'char-ci>=?', u'char-ci>?',
+ u'char-downcase', u'char-foldcase', u'char-general-category',
+ u'char-graphic?', u'char-in', u'char-in/c', u'char-iso-control?',
+ u'char-lower-case?', u'char-numeric?', u'char-punctuation?',
+ u'char-ready?', u'char-symbolic?', u'char-title-case?', u'char-titlecase',
+ u'char-upcase', u'char-upper-case?', u'char-utf-8-length',
+ u'char-whitespace?', u'char<=?', u'char<?', u'char=?', u'char>=?', u'char>?',
+ u'char?', u'check-duplicate-identifier', u'check-duplicates',
+ u'checked-procedure-check-and-extract', u'choice-evt',
+ u'class->interface', u'class-info', u'class-seal', u'class-unseal',
+ u'class?', u'cleanse-path', u'close-input-port', u'close-output-port',
+ u'coerce-chaperone-contract', u'coerce-chaperone-contracts',
+ u'coerce-contract', u'coerce-contract/f', u'coerce-contracts',
+ u'coerce-flat-contract', u'coerce-flat-contracts', u'collect-garbage',
+ u'collection-file-path', u'collection-path', u'combinations', u'compile',
+ u'compile-allow-set!-undefined', u'compile-context-preservation-enabled',
+ u'compile-enforce-module-constants', u'compile-syntax',
+ u'compiled-expression-recompile', u'compiled-expression?',
+ u'compiled-module-expression?', u'complete-path?', u'complex?', u'compose',
+ u'compose1', u'conjoin', u'conjugate', u'cons', u'cons/c', u'cons?', u'const',
+ u'continuation-mark-key/c', u'continuation-mark-key?',
+ u'continuation-mark-set->context', u'continuation-mark-set->list',
+ u'continuation-mark-set->list*', u'continuation-mark-set-first',
+ u'continuation-mark-set?', u'continuation-marks',
+ u'continuation-prompt-available?', u'continuation-prompt-tag?',
+ u'continuation?', u'contract-continuation-mark-key',
+ u'contract-custom-write-property-proc', u'contract-exercise',
+ u'contract-first-order', u'contract-first-order-passes?',
+ u'contract-late-neg-projection', u'contract-name', u'contract-proc',
+ u'contract-projection', u'contract-property?',
+ u'contract-random-generate', u'contract-random-generate-fail',
+ u'contract-random-generate-fail?',
+ u'contract-random-generate-get-current-environment',
+ u'contract-random-generate-stash', u'contract-random-generate/choose',
+ u'contract-stronger?', u'contract-struct-exercise',
+ u'contract-struct-generate', u'contract-struct-late-neg-projection',
+ u'contract-struct-list-contract?', u'contract-val-first-projection',
+ u'contract?', u'convert-stream', u'copy-directory/files', u'copy-file',
+ u'copy-port', u'cos', u'cosh', u'count', u'current-blame-format',
+ u'current-break-parameterization', u'current-code-inspector',
+ u'current-command-line-arguments', u'current-compile',
+ u'current-compiled-file-roots', u'current-continuation-marks',
+ u'current-contract-region', u'current-custodian', u'current-directory',
+ u'current-directory-for-user', u'current-drive',
+ u'current-environment-variables', u'current-error-port', u'current-eval',
+ u'current-evt-pseudo-random-generator',
+ u'current-force-delete-permissions', u'current-future',
+ u'current-gc-milliseconds', u'current-get-interaction-input-port',
+ u'current-inexact-milliseconds', u'current-input-port',
+ u'current-inspector', u'current-library-collection-links',
+ u'current-library-collection-paths', u'current-load',
+ u'current-load-extension', u'current-load-relative-directory',
+ u'current-load/use-compiled', u'current-locale', u'current-logger',
+ u'current-memory-use', u'current-milliseconds',
+ u'current-module-declare-name', u'current-module-declare-source',
+ u'current-module-name-resolver', u'current-module-path-for-load',
+ u'current-namespace', u'current-output-port', u'current-parameterization',
+ u'current-plumber', u'current-preserved-thread-cell-values',
+ u'current-print', u'current-process-milliseconds', u'current-prompt-read',
+ u'current-pseudo-random-generator', u'current-read-interaction',
+ u'current-reader-guard', u'current-readtable', u'current-seconds',
+ u'current-security-guard', u'current-subprocess-custodian-mode',
+ u'current-thread', u'current-thread-group',
+ u'current-thread-initial-stack-size',
+ u'current-write-relative-directory', u'curry', u'curryr',
+ u'custodian-box-value', u'custodian-box?', u'custodian-limit-memory',
+ u'custodian-managed-list', u'custodian-memory-accounting-available?',
+ u'custodian-require-memory', u'custodian-shutdown-all', u'custodian?',
+ u'custom-print-quotable-accessor', u'custom-print-quotable?',
+ u'custom-write-accessor', u'custom-write-property-proc', u'custom-write?',
+ u'date', u'date*', u'date*-nanosecond', u'date*-time-zone-name', u'date*?',
+ u'date-day', u'date-dst?', u'date-hour', u'date-minute', u'date-month',
+ u'date-second', u'date-time-zone-offset', u'date-week-day', u'date-year',
+ u'date-year-day', u'date?', u'datum->syntax', u'datum-intern-literal',
+ u'default-continuation-prompt-tag', u'degrees->radians',
+ u'delete-directory', u'delete-directory/files', u'delete-file',
+ u'denominator', u'dict->list', u'dict-can-functional-set?',
+ u'dict-can-remove-keys?', u'dict-clear', u'dict-clear!', u'dict-copy',
+ u'dict-count', u'dict-empty?', u'dict-for-each', u'dict-has-key?',
+ u'dict-implements/c', u'dict-implements?', u'dict-iter-contract',
+ u'dict-iterate-first', u'dict-iterate-key', u'dict-iterate-next',
+ u'dict-iterate-value', u'dict-key-contract', u'dict-keys', u'dict-map',
+ u'dict-mutable?', u'dict-ref', u'dict-ref!', u'dict-remove',
+ u'dict-remove!', u'dict-set', u'dict-set!', u'dict-set*', u'dict-set*!',
+ u'dict-update', u'dict-update!', u'dict-value-contract', u'dict-values',
+ u'dict?', u'directory-exists?', u'directory-list', u'disjoin', u'display',
+ u'display-lines', u'display-lines-to-file', u'display-to-file',
+ u'displayln', u'double-flonum?', u'drop', u'drop-common-prefix',
+ u'drop-right', u'dropf', u'dropf-right', u'dump-memory-stats',
+ u'dup-input-port', u'dup-output-port', u'dynamic->*', u'dynamic-get-field',
+ u'dynamic-object/c', u'dynamic-place', u'dynamic-place*',
+ u'dynamic-require', u'dynamic-require-for-syntax', u'dynamic-send',
+ u'dynamic-set-field!', u'dynamic-wind', u'eighth', u'empty',
+ u'empty-sequence', u'empty-stream', u'empty?',
+ u'environment-variables-copy', u'environment-variables-names',
+ u'environment-variables-ref', u'environment-variables-set!',
+ u'environment-variables?', u'eof', u'eof-evt', u'eof-object?',
+ u'ephemeron-value', u'ephemeron?', u'eprintf', u'eq-contract-val',
+ u'eq-contract?', u'eq-hash-code', u'eq?', u'equal-contract-val',
+ u'equal-contract?', u'equal-hash-code', u'equal-secondary-hash-code',
+ u'equal<%>', u'equal?', u'equal?/recur', u'eqv-hash-code', u'eqv?', u'error',
+ u'error-display-handler', u'error-escape-handler',
+ u'error-print-context-length', u'error-print-source-location',
+ u'error-print-width', u'error-value->string-handler', u'eval',
+ u'eval-jit-enabled', u'eval-syntax', u'even?', u'evt/c', u'evt?',
+ u'exact->inexact', u'exact-ceiling', u'exact-floor', u'exact-integer?',
+ u'exact-nonnegative-integer?', u'exact-positive-integer?', u'exact-round',
+ u'exact-truncate', u'exact?', u'executable-yield-handler', u'exit',
+ u'exit-handler', u'exn', u'exn-continuation-marks', u'exn-message',
+ u'exn:break', u'exn:break-continuation', u'exn:break:hang-up',
+ u'exn:break:hang-up?', u'exn:break:terminate', u'exn:break:terminate?',
+ u'exn:break?', u'exn:fail', u'exn:fail:contract',
+ u'exn:fail:contract:arity', u'exn:fail:contract:arity?',
+ u'exn:fail:contract:blame', u'exn:fail:contract:blame-object',
+ u'exn:fail:contract:blame?', u'exn:fail:contract:continuation',
+ u'exn:fail:contract:continuation?', u'exn:fail:contract:divide-by-zero',
+ u'exn:fail:contract:divide-by-zero?',
+ u'exn:fail:contract:non-fixnum-result',
+ u'exn:fail:contract:non-fixnum-result?', u'exn:fail:contract:variable',
+ u'exn:fail:contract:variable-id', u'exn:fail:contract:variable?',
+ u'exn:fail:contract?', u'exn:fail:filesystem',
+ u'exn:fail:filesystem:errno', u'exn:fail:filesystem:errno-errno',
+ u'exn:fail:filesystem:errno?', u'exn:fail:filesystem:exists',
+ u'exn:fail:filesystem:exists?', u'exn:fail:filesystem:missing-module',
+ u'exn:fail:filesystem:missing-module-path',
+ u'exn:fail:filesystem:missing-module?', u'exn:fail:filesystem:version',
+ u'exn:fail:filesystem:version?', u'exn:fail:filesystem?',
+ u'exn:fail:network', u'exn:fail:network:errno',
+ u'exn:fail:network:errno-errno', u'exn:fail:network:errno?',
+ u'exn:fail:network?', u'exn:fail:object', u'exn:fail:object?',
+ u'exn:fail:out-of-memory', u'exn:fail:out-of-memory?', u'exn:fail:read',
+ u'exn:fail:read-srclocs', u'exn:fail:read:eof', u'exn:fail:read:eof?',
+ u'exn:fail:read:non-char', u'exn:fail:read:non-char?', u'exn:fail:read?',
+ u'exn:fail:syntax', u'exn:fail:syntax-exprs',
+ u'exn:fail:syntax:missing-module',
+ u'exn:fail:syntax:missing-module-path',
+ u'exn:fail:syntax:missing-module?', u'exn:fail:syntax:unbound',
+ u'exn:fail:syntax:unbound?', u'exn:fail:syntax?', u'exn:fail:unsupported',
+ u'exn:fail:unsupported?', u'exn:fail:user', u'exn:fail:user?',
+ u'exn:fail?', u'exn:misc:match?', u'exn:missing-module-accessor',
+ u'exn:missing-module?', u'exn:srclocs-accessor', u'exn:srclocs?', u'exn?',
+ u'exp', u'expand', u'expand-once', u'expand-syntax', u'expand-syntax-once',
+ u'expand-syntax-to-top-form', u'expand-to-top-form', u'expand-user-path',
+ u'explode-path', u'expt', u'externalizable<%>', u'failure-result/c',
+ u'false?', u'field-names', u'fifth', u'file->bytes', u'file->bytes-lines',
+ u'file->lines', u'file->list', u'file->string', u'file->value',
+ u'file-exists?', u'file-name-from-path', u'file-or-directory-identity',
+ u'file-or-directory-modify-seconds', u'file-or-directory-permissions',
+ u'file-position', u'file-position*', u'file-size',
+ u'file-stream-buffer-mode', u'file-stream-port?', u'file-truncate',
+ u'filename-extension', u'filesystem-change-evt',
+ u'filesystem-change-evt-cancel', u'filesystem-change-evt?',
+ u'filesystem-root-list', u'filter', u'filter-map', u'filter-not',
+ u'filter-read-input-port', u'find-executable-path', u'find-files',
+ u'find-library-collection-links', u'find-library-collection-paths',
+ u'find-relative-path', u'find-system-path', u'findf', u'first',
+ u'first-or/c', u'fixnum?', u'flat-contract', u'flat-contract-predicate',
+ u'flat-contract-property?', u'flat-contract?', u'flat-named-contract',
+ u'flatten', u'floating-point-bytes->real', u'flonum?', u'floor',
+ u'flush-output', u'fold-files', u'foldl', u'foldr', u'for-each', u'force',
+ u'format', u'fourth', u'fprintf', u'free-identifier=?',
+ u'free-label-identifier=?', u'free-template-identifier=?',
+ u'free-transformer-identifier=?', u'fsemaphore-count', u'fsemaphore-post',
+ u'fsemaphore-try-wait?', u'fsemaphore-wait', u'fsemaphore?', u'future',
+ u'future?', u'futures-enabled?', u'gcd', u'generate-member-key',
+ u'generate-temporaries', u'generic-set?', u'generic?', u'gensym',
+ u'get-output-bytes', u'get-output-string', u'get-preference',
+ u'get/build-late-neg-projection', u'get/build-val-first-projection',
+ u'getenv', u'global-port-print-handler', u'group-by', u'group-execute-bit',
+ u'group-read-bit', u'group-write-bit', u'guard-evt', u'handle-evt',
+ u'handle-evt?', u'has-blame?', u'has-contract?', u'hash', u'hash->list',
+ u'hash-clear', u'hash-clear!', u'hash-copy', u'hash-copy-clear',
+ u'hash-count', u'hash-empty?', u'hash-eq?', u'hash-equal?', u'hash-eqv?',
+ u'hash-for-each', u'hash-has-key?', u'hash-iterate-first',
+ u'hash-iterate-key', u'hash-iterate-key+value', u'hash-iterate-next',
+ u'hash-iterate-pair', u'hash-iterate-value', u'hash-keys', u'hash-map',
+ u'hash-placeholder?', u'hash-ref', u'hash-ref!', u'hash-remove',
+ u'hash-remove!', u'hash-set', u'hash-set!', u'hash-set*', u'hash-set*!',
+ u'hash-update', u'hash-update!', u'hash-values', u'hash-weak?', u'hash/c',
+ u'hash?', u'hasheq', u'hasheqv', u'identifier-binding',
+ u'identifier-binding-symbol', u'identifier-label-binding',
+ u'identifier-prune-lexical-context',
+ u'identifier-prune-to-source-module',
+ u'identifier-remove-from-definition-context',
+ u'identifier-template-binding', u'identifier-transformer-binding',
+ u'identifier?', u'identity', u'if/c', u'imag-part', u'immutable?',
+ u'impersonate-box', u'impersonate-channel',
+ u'impersonate-continuation-mark-key', u'impersonate-hash',
+ u'impersonate-hash-set', u'impersonate-procedure',
+ u'impersonate-procedure*', u'impersonate-prompt-tag',
+ u'impersonate-struct', u'impersonate-vector', u'impersonator-contract?',
+ u'impersonator-ephemeron', u'impersonator-of?',
+ u'impersonator-prop:application-mark', u'impersonator-prop:blame',
+ u'impersonator-prop:contracted',
+ u'impersonator-property-accessor-procedure?', u'impersonator-property?',
+ u'impersonator?', u'implementation?', u'implementation?/c', u'in-bytes',
+ u'in-bytes-lines', u'in-combinations', u'in-cycle', u'in-dict',
+ u'in-dict-keys', u'in-dict-pairs', u'in-dict-values', u'in-directory',
+ u'in-hash', u'in-hash-keys', u'in-hash-pairs', u'in-hash-values',
+ u'in-immutable-hash', u'in-immutable-hash-keys',
+ u'in-immutable-hash-pairs', u'in-immutable-hash-values',
+ u'in-immutable-set', u'in-indexed', u'in-input-port-bytes',
+ u'in-input-port-chars', u'in-lines', u'in-list', u'in-mlist',
+ u'in-mutable-hash', u'in-mutable-hash-keys', u'in-mutable-hash-pairs',
+ u'in-mutable-hash-values', u'in-mutable-set', u'in-naturals',
+ u'in-parallel', u'in-permutations', u'in-port', u'in-producer', u'in-range',
+ u'in-sequences', u'in-set', u'in-slice', u'in-stream', u'in-string',
+ u'in-syntax', u'in-value', u'in-values*-sequence', u'in-values-sequence',
+ u'in-vector', u'in-weak-hash', u'in-weak-hash-keys', u'in-weak-hash-pairs',
+ u'in-weak-hash-values', u'in-weak-set', u'inexact->exact',
+ u'inexact-real?', u'inexact?', u'infinite?', u'input-port-append',
+ u'input-port?', u'inspector?', u'instanceof/c', u'integer->char',
+ u'integer->integer-bytes', u'integer-bytes->integer', u'integer-in',
+ u'integer-length', u'integer-sqrt', u'integer-sqrt/remainder', u'integer?',
+ u'interface->method-names', u'interface-extension?', u'interface?',
+ u'internal-definition-context-binding-identifiers',
+ u'internal-definition-context-introduce',
+ u'internal-definition-context-seal', u'internal-definition-context?',
+ u'is-a?', u'is-a?/c', u'keyword->string', u'keyword-apply', u'keyword<?',
+ u'keyword?', u'keywords-match', u'kill-thread', u'last', u'last-pair',
+ u'lcm', u'length', u'liberal-define-context?', u'link-exists?', u'list',
+ u'list*', u'list*of', u'list->bytes', u'list->mutable-set',
+ u'list->mutable-seteq', u'list->mutable-seteqv', u'list->set',
+ u'list->seteq', u'list->seteqv', u'list->string', u'list->vector',
+ u'list->weak-set', u'list->weak-seteq', u'list->weak-seteqv',
+ u'list-contract?', u'list-prefix?', u'list-ref', u'list-set', u'list-tail',
+ u'list-update', u'list/c', u'list?', u'listen-port-number?', u'listof',
+ u'load', u'load-extension', u'load-on-demand-enabled', u'load-relative',
+ u'load-relative-extension', u'load/cd', u'load/use-compiled',
+ u'local-expand', u'local-expand/capture-lifts',
+ u'local-transformer-expand', u'local-transformer-expand/capture-lifts',
+ u'locale-string-encoding', u'log', u'log-all-levels', u'log-level-evt',
+ u'log-level?', u'log-max-level', u'log-message', u'log-receiver?',
+ u'logger-name', u'logger?', u'magnitude', u'make-arity-at-least',
+ u'make-base-empty-namespace', u'make-base-namespace', u'make-bytes',
+ u'make-channel', u'make-chaperone-contract',
+ u'make-continuation-mark-key', u'make-continuation-prompt-tag',
+ u'make-contract', u'make-custodian', u'make-custodian-box',
+ u'make-custom-hash', u'make-custom-hash-types', u'make-custom-set',
+ u'make-custom-set-types', u'make-date', u'make-date*',
+ u'make-derived-parameter', u'make-directory', u'make-directory*',
+ u'make-do-sequence', u'make-empty-namespace',
+ u'make-environment-variables', u'make-ephemeron', u'make-exn',
+ u'make-exn:break', u'make-exn:break:hang-up', u'make-exn:break:terminate',
+ u'make-exn:fail', u'make-exn:fail:contract',
+ u'make-exn:fail:contract:arity', u'make-exn:fail:contract:blame',
+ u'make-exn:fail:contract:continuation',
+ u'make-exn:fail:contract:divide-by-zero',
+ u'make-exn:fail:contract:non-fixnum-result',
+ u'make-exn:fail:contract:variable', u'make-exn:fail:filesystem',
+ u'make-exn:fail:filesystem:errno', u'make-exn:fail:filesystem:exists',
+ u'make-exn:fail:filesystem:missing-module',
+ u'make-exn:fail:filesystem:version', u'make-exn:fail:network',
+ u'make-exn:fail:network:errno', u'make-exn:fail:object',
+ u'make-exn:fail:out-of-memory', u'make-exn:fail:read',
+ u'make-exn:fail:read:eof', u'make-exn:fail:read:non-char',
+ u'make-exn:fail:syntax', u'make-exn:fail:syntax:missing-module',
+ u'make-exn:fail:syntax:unbound', u'make-exn:fail:unsupported',
+ u'make-exn:fail:user', u'make-file-or-directory-link',
+ u'make-flat-contract', u'make-fsemaphore', u'make-generic',
+ u'make-handle-get-preference-locked', u'make-hash',
+ u'make-hash-placeholder', u'make-hasheq', u'make-hasheq-placeholder',
+ u'make-hasheqv', u'make-hasheqv-placeholder',
+ u'make-immutable-custom-hash', u'make-immutable-hash',
+ u'make-immutable-hasheq', u'make-immutable-hasheqv',
+ u'make-impersonator-property', u'make-input-port',
+ u'make-input-port/read-to-peek', u'make-inspector',
+ u'make-keyword-procedure', u'make-known-char-range-list',
+ u'make-limited-input-port', u'make-list', u'make-lock-file-name',
+ u'make-log-receiver', u'make-logger', u'make-mixin-contract',
+ u'make-mutable-custom-set', u'make-none/c', u'make-object',
+ u'make-output-port', u'make-parameter', u'make-parent-directory*',
+ u'make-phantom-bytes', u'make-pipe', u'make-pipe-with-specials',
+ u'make-placeholder', u'make-plumber', u'make-polar', u'make-prefab-struct',
+ u'make-primitive-class', u'make-proj-contract',
+ u'make-pseudo-random-generator', u'make-reader-graph', u'make-readtable',
+ u'make-rectangular', u'make-rename-transformer',
+ u'make-resolved-module-path', u'make-security-guard', u'make-semaphore',
+ u'make-set!-transformer', u'make-shared-bytes', u'make-sibling-inspector',
+ u'make-special-comment', u'make-srcloc', u'make-string',
+ u'make-struct-field-accessor', u'make-struct-field-mutator',
+ u'make-struct-type', u'make-struct-type-property',
+ u'make-syntax-delta-introducer', u'make-syntax-introducer',
+ u'make-temporary-file', u'make-tentative-pretty-print-output-port',
+ u'make-thread-cell', u'make-thread-group', u'make-vector',
+ u'make-weak-box', u'make-weak-custom-hash', u'make-weak-custom-set',
+ u'make-weak-hash', u'make-weak-hasheq', u'make-weak-hasheqv',
+ u'make-will-executor', u'map', u'match-equality-test',
+ u'matches-arity-exactly?', u'max', u'mcar', u'mcdr', u'mcons', u'member',
+ u'member-name-key-hash-code', u'member-name-key=?', u'member-name-key?',
+ u'memf', u'memq', u'memv', u'merge-input', u'method-in-interface?', u'min',
+ u'mixin-contract', u'module->exports', u'module->imports',
+ u'module->language-info', u'module->namespace',
+ u'module-compiled-cross-phase-persistent?', u'module-compiled-exports',
+ u'module-compiled-imports', u'module-compiled-language-info',
+ u'module-compiled-name', u'module-compiled-submodules',
+ u'module-declared?', u'module-path-index-join',
+ u'module-path-index-resolve', u'module-path-index-split',
+ u'module-path-index-submodule', u'module-path-index?', u'module-path?',
+ u'module-predefined?', u'module-provide-protected?', u'modulo', u'mpair?',
+ u'mutable-set', u'mutable-seteq', u'mutable-seteqv', u'n->th',
+ u'nack-guard-evt', u'namespace-anchor->empty-namespace',
+ u'namespace-anchor->namespace', u'namespace-anchor?',
+ u'namespace-attach-module', u'namespace-attach-module-declaration',
+ u'namespace-base-phase', u'namespace-mapped-symbols',
+ u'namespace-module-identifier', u'namespace-module-registry',
+ u'namespace-require', u'namespace-require/constant',
+ u'namespace-require/copy', u'namespace-require/expansion-time',
+ u'namespace-set-variable-value!', u'namespace-symbol->identifier',
+ u'namespace-syntax-introduce', u'namespace-undefine-variable!',
+ u'namespace-unprotect-module', u'namespace-variable-value', u'namespace?',
+ u'nan?', u'natural-number/c', u'negate', u'negative?', u'never-evt',
+ u'new-∀/c', u'new-∃/c', u'newline', u'ninth', u'non-empty-listof',
+ u'non-empty-string?', u'none/c', u'normal-case-path', u'normalize-arity',
+ u'normalize-path', u'normalized-arity?', u'not', u'not/c', u'null', u'null?',
+ u'number->string', u'number?', u'numerator', u'object%', u'object->vector',
+ u'object-info', u'object-interface', u'object-method-arity-includes?',
+ u'object-name', u'object-or-false=?', u'object=?', u'object?', u'odd?',
+ u'one-of/c', u'open-input-bytes', u'open-input-file',
+ u'open-input-output-file', u'open-input-string', u'open-output-bytes',
+ u'open-output-file', u'open-output-nowhere', u'open-output-string',
+ u'or/c', u'order-of-magnitude', u'ormap', u'other-execute-bit',
+ u'other-read-bit', u'other-write-bit', u'output-port?', u'pair?',
+ u'parameter-procedure=?', u'parameter/c', u'parameter?',
+ u'parameterization?', u'parse-command-line', u'partition', u'path->bytes',
+ u'path->complete-path', u'path->directory-path', u'path->string',
+ u'path-add-suffix', u'path-convention-type', u'path-element->bytes',
+ u'path-element->string', u'path-element?', u'path-for-some-system?',
+ u'path-list-string->path-list', u'path-only', u'path-replace-suffix',
+ u'path-string?', u'path<?', u'path?', u'pathlist-closure', u'peek-byte',
+ u'peek-byte-or-special', u'peek-bytes', u'peek-bytes!', u'peek-bytes!-evt',
+ u'peek-bytes-avail!', u'peek-bytes-avail!*', u'peek-bytes-avail!-evt',
+ u'peek-bytes-avail!/enable-break', u'peek-bytes-evt', u'peek-char',
+ u'peek-char-or-special', u'peek-string', u'peek-string!',
+ u'peek-string!-evt', u'peek-string-evt', u'peeking-input-port',
+ u'permutations', u'phantom-bytes?', u'pi', u'pi.f', u'pipe-content-length',
+ u'place-break', u'place-channel', u'place-channel-get',
+ u'place-channel-put', u'place-channel-put/get', u'place-channel?',
+ u'place-dead-evt', u'place-enabled?', u'place-kill', u'place-location?',
+ u'place-message-allowed?', u'place-sleep', u'place-wait', u'place?',
+ u'placeholder-get', u'placeholder-set!', u'placeholder?',
+ u'plumber-add-flush!', u'plumber-flush-all',
+ u'plumber-flush-handle-remove!', u'plumber-flush-handle?', u'plumber?',
+ u'poll-guard-evt', u'port->bytes', u'port->bytes-lines', u'port->lines',
+ u'port->list', u'port->string', u'port-closed-evt', u'port-closed?',
+ u'port-commit-peeked', u'port-count-lines!', u'port-count-lines-enabled',
+ u'port-counts-lines?', u'port-display-handler', u'port-file-identity',
+ u'port-file-unlock', u'port-next-location', u'port-number?',
+ u'port-print-handler', u'port-progress-evt',
+ u'port-provides-progress-evts?', u'port-read-handler',
+ u'port-try-file-lock?', u'port-write-handler', u'port-writes-atomic?',
+ u'port-writes-special?', u'port?', u'positive?', u'predicate/c',
+ u'prefab-key->struct-type', u'prefab-key?', u'prefab-struct-key',
+ u'preferences-lock-file-mode', u'pregexp', u'pregexp?', u'pretty-display',
+ u'pretty-format', u'pretty-print', u'pretty-print-.-symbol-without-bars',
+ u'pretty-print-abbreviate-read-macros', u'pretty-print-columns',
+ u'pretty-print-current-style-table', u'pretty-print-depth',
+ u'pretty-print-exact-as-decimal', u'pretty-print-extend-style-table',
+ u'pretty-print-handler', u'pretty-print-newline',
+ u'pretty-print-post-print-hook', u'pretty-print-pre-print-hook',
+ u'pretty-print-print-hook', u'pretty-print-print-line',
+ u'pretty-print-remap-stylable', u'pretty-print-show-inexactness',
+ u'pretty-print-size-hook', u'pretty-print-style-table?',
+ u'pretty-printing', u'pretty-write', u'primitive-closure?',
+ u'primitive-result-arity', u'primitive?', u'print', u'print-as-expression',
+ u'print-boolean-long-form', u'print-box', u'print-graph',
+ u'print-hash-table', u'print-mpair-curly-braces',
+ u'print-pair-curly-braces', u'print-reader-abbreviations',
+ u'print-struct', u'print-syntax-width', u'print-unreadable',
+ u'print-vector-length', u'printable/c', u'printable<%>', u'printf',
+ u'println', u'procedure->method', u'procedure-arity',
+ u'procedure-arity-includes/c', u'procedure-arity-includes?',
+ u'procedure-arity?', u'procedure-closure-contents-eq?',
+ u'procedure-extract-target', u'procedure-keywords',
+ u'procedure-reduce-arity', u'procedure-reduce-keyword-arity',
+ u'procedure-rename', u'procedure-result-arity', u'procedure-specialize',
+ u'procedure-struct-type?', u'procedure?', u'process', u'process*',
+ u'process*/ports', u'process/ports', u'processor-count', u'progress-evt?',
+ u'promise-forced?', u'promise-running?', u'promise/c', u'promise/name?',
+ u'promise?', u'prop:arity-string', u'prop:arrow-contract',
+ u'prop:arrow-contract-get-info', u'prop:arrow-contract?', u'prop:blame',
+ u'prop:chaperone-contract', u'prop:checked-procedure', u'prop:contract',
+ u'prop:contracted', u'prop:custom-print-quotable', u'prop:custom-write',
+ u'prop:dict', u'prop:dict/contract', u'prop:equal+hash', u'prop:evt',
+ u'prop:exn:missing-module', u'prop:exn:srclocs',
+ u'prop:expansion-contexts', u'prop:flat-contract',
+ u'prop:impersonator-of', u'prop:input-port',
+ u'prop:liberal-define-context', u'prop:object-name',
+ u'prop:opt-chaperone-contract', u'prop:opt-chaperone-contract-get-test',
+ u'prop:opt-chaperone-contract?', u'prop:orc-contract',
+ u'prop:orc-contract-get-subcontracts', u'prop:orc-contract?',
+ u'prop:output-port', u'prop:place-location', u'prop:procedure',
+ u'prop:recursive-contract', u'prop:recursive-contract-unroll',
+ u'prop:recursive-contract?', u'prop:rename-transformer', u'prop:sequence',
+ u'prop:set!-transformer', u'prop:stream', u'proper-subset?',
+ u'pseudo-random-generator->vector', u'pseudo-random-generator-vector?',
+ u'pseudo-random-generator?', u'put-preferences', u'putenv', u'quotient',
+ u'quotient/remainder', u'radians->degrees', u'raise',
+ u'raise-argument-error', u'raise-arguments-error', u'raise-arity-error',
+ u'raise-blame-error', u'raise-contract-error', u'raise-mismatch-error',
+ u'raise-not-cons-blame-error', u'raise-range-error',
+ u'raise-result-error', u'raise-syntax-error', u'raise-type-error',
+ u'raise-user-error', u'random', u'random-seed', u'range', u'rational?',
+ u'rationalize', u'read', u'read-accept-bar-quote', u'read-accept-box',
+ u'read-accept-compiled', u'read-accept-dot', u'read-accept-graph',
+ u'read-accept-infix-dot', u'read-accept-lang', u'read-accept-quasiquote',
+ u'read-accept-reader', u'read-byte', u'read-byte-or-special',
+ u'read-bytes', u'read-bytes!', u'read-bytes!-evt', u'read-bytes-avail!',
+ u'read-bytes-avail!*', u'read-bytes-avail!-evt',
+ u'read-bytes-avail!/enable-break', u'read-bytes-evt', u'read-bytes-line',
+ u'read-bytes-line-evt', u'read-case-sensitive', u'read-cdot', u'read-char',
+ u'read-char-or-special', u'read-curly-brace-as-paren',
+ u'read-curly-brace-with-tag', u'read-decimal-as-inexact',
+ u'read-eval-print-loop', u'read-language', u'read-line', u'read-line-evt',
+ u'read-on-demand-source', u'read-square-bracket-as-paren',
+ u'read-square-bracket-with-tag', u'read-string', u'read-string!',
+ u'read-string!-evt', u'read-string-evt', u'read-syntax',
+ u'read-syntax/recursive', u'read/recursive', u'readtable-mapping',
+ u'readtable?', u'real->decimal-string', u'real->double-flonum',
+ u'real->floating-point-bytes', u'real->single-flonum', u'real-in',
+ u'real-part', u'real?', u'reencode-input-port', u'reencode-output-port',
+ u'regexp', u'regexp-match', u'regexp-match*', u'regexp-match-evt',
+ u'regexp-match-exact?', u'regexp-match-peek',
+ u'regexp-match-peek-immediate', u'regexp-match-peek-positions',
+ u'regexp-match-peek-positions*',
+ u'regexp-match-peek-positions-immediate',
+ u'regexp-match-peek-positions-immediate/end',
+ u'regexp-match-peek-positions/end', u'regexp-match-positions',
+ u'regexp-match-positions*', u'regexp-match-positions/end',
+ u'regexp-match/end', u'regexp-match?', u'regexp-max-lookbehind',
+ u'regexp-quote', u'regexp-replace', u'regexp-replace*',
+ u'regexp-replace-quote', u'regexp-replaces', u'regexp-split',
+ u'regexp-try-match', u'regexp?', u'relative-path?', u'relocate-input-port',
+ u'relocate-output-port', u'remainder', u'remf', u'remf*', u'remove',
+ u'remove*', u'remove-duplicates', u'remq', u'remq*', u'remv', u'remv*',
+ u'rename-contract', u'rename-file-or-directory',
+ u'rename-transformer-target', u'rename-transformer?', u'replace-evt',
+ u'reroot-path', u'resolve-path', u'resolved-module-path-name',
+ u'resolved-module-path?', u'rest', u'reverse', u'round', u'second',
+ u'seconds->date', u'security-guard?', u'semaphore-peek-evt',
+ u'semaphore-peek-evt?', u'semaphore-post', u'semaphore-try-wait?',
+ u'semaphore-wait', u'semaphore-wait/enable-break', u'semaphore?',
+ u'sequence->list', u'sequence->stream', u'sequence-add-between',
+ u'sequence-andmap', u'sequence-append', u'sequence-count',
+ u'sequence-filter', u'sequence-fold', u'sequence-for-each',
+ u'sequence-generate', u'sequence-generate*', u'sequence-length',
+ u'sequence-map', u'sequence-ormap', u'sequence-ref', u'sequence-tail',
+ u'sequence/c', u'sequence?', u'set', u'set!-transformer-procedure',
+ u'set!-transformer?', u'set->list', u'set->stream', u'set-add', u'set-add!',
+ u'set-box!', u'set-clear', u'set-clear!', u'set-copy', u'set-copy-clear',
+ u'set-count', u'set-empty?', u'set-eq?', u'set-equal?', u'set-eqv?',
+ u'set-first', u'set-for-each', u'set-implements/c', u'set-implements?',
+ u'set-intersect', u'set-intersect!', u'set-map', u'set-mcar!', u'set-mcdr!',
+ u'set-member?', u'set-mutable?', u'set-phantom-bytes!',
+ u'set-port-next-location!', u'set-remove', u'set-remove!', u'set-rest',
+ u'set-some-basic-contracts!', u'set-subtract', u'set-subtract!',
+ u'set-symmetric-difference', u'set-symmetric-difference!', u'set-union',
+ u'set-union!', u'set-weak?', u'set/c', u'set=?', u'set?', u'seteq', u'seteqv',
+ u'seventh', u'sgn', u'shared-bytes', u'shell-execute', u'shrink-path-wrt',
+ u'shuffle', u'simple-form-path', u'simplify-path', u'sin',
+ u'single-flonum?', u'sinh', u'sixth', u'skip-projection-wrapper?', u'sleep',
+ u'some-system-path->string', u'sort', u'special-comment-value',
+ u'special-comment?', u'special-filter-input-port', u'split-at',
+ u'split-at-right', u'split-common-prefix', u'split-path', u'splitf-at',
+ u'splitf-at-right', u'sqr', u'sqrt', u'srcloc', u'srcloc->string',
+ u'srcloc-column', u'srcloc-line', u'srcloc-position', u'srcloc-source',
+ u'srcloc-span', u'srcloc?', u'stop-after', u'stop-before', u'stream->list',
+ u'stream-add-between', u'stream-andmap', u'stream-append', u'stream-count',
+ u'stream-empty?', u'stream-filter', u'stream-first', u'stream-fold',
+ u'stream-for-each', u'stream-length', u'stream-map', u'stream-ormap',
+ u'stream-ref', u'stream-rest', u'stream-tail', u'stream/c', u'stream?',
+ u'string', u'string->bytes/latin-1', u'string->bytes/locale',
+ u'string->bytes/utf-8', u'string->immutable-string', u'string->keyword',
+ u'string->list', u'string->number', u'string->path',
+ u'string->path-element', u'string->some-system-path', u'string->symbol',
+ u'string->uninterned-symbol', u'string->unreadable-symbol',
+ u'string-append', u'string-append*', u'string-ci<=?', u'string-ci<?',
+ u'string-ci=?', u'string-ci>=?', u'string-ci>?', u'string-contains?',
+ u'string-copy', u'string-copy!', u'string-downcase',
+ u'string-environment-variable-name?', u'string-fill!', u'string-foldcase',
+ u'string-join', u'string-len/c', u'string-length', u'string-locale-ci<?',
+ u'string-locale-ci=?', u'string-locale-ci>?', u'string-locale-downcase',
+ u'string-locale-upcase', u'string-locale<?', u'string-locale=?',
+ u'string-locale>?', u'string-no-nuls?', u'string-normalize-nfc',
+ u'string-normalize-nfd', u'string-normalize-nfkc',
+ u'string-normalize-nfkd', u'string-normalize-spaces', u'string-port?',
+ u'string-prefix?', u'string-ref', u'string-replace', u'string-set!',
+ u'string-split', u'string-suffix?', u'string-titlecase', u'string-trim',
+ u'string-upcase', u'string-utf-8-length', u'string<=?', u'string<?',
+ u'string=?', u'string>=?', u'string>?', u'string?', u'struct->vector',
+ u'struct-accessor-procedure?', u'struct-constructor-procedure?',
+ u'struct-info', u'struct-mutator-procedure?',
+ u'struct-predicate-procedure?', u'struct-type-info',
+ u'struct-type-make-constructor', u'struct-type-make-predicate',
+ u'struct-type-property-accessor-procedure?', u'struct-type-property/c',
+ u'struct-type-property?', u'struct-type?', u'struct:arity-at-least',
+ u'struct:arrow-contract-info', u'struct:date', u'struct:date*',
+ u'struct:exn', u'struct:exn:break', u'struct:exn:break:hang-up',
+ u'struct:exn:break:terminate', u'struct:exn:fail',
+ u'struct:exn:fail:contract', u'struct:exn:fail:contract:arity',
+ u'struct:exn:fail:contract:blame',
+ u'struct:exn:fail:contract:continuation',
+ u'struct:exn:fail:contract:divide-by-zero',
+ u'struct:exn:fail:contract:non-fixnum-result',
+ u'struct:exn:fail:contract:variable', u'struct:exn:fail:filesystem',
+ u'struct:exn:fail:filesystem:errno',
+ u'struct:exn:fail:filesystem:exists',
+ u'struct:exn:fail:filesystem:missing-module',
+ u'struct:exn:fail:filesystem:version', u'struct:exn:fail:network',
+ u'struct:exn:fail:network:errno', u'struct:exn:fail:object',
+ u'struct:exn:fail:out-of-memory', u'struct:exn:fail:read',
+ u'struct:exn:fail:read:eof', u'struct:exn:fail:read:non-char',
+ u'struct:exn:fail:syntax', u'struct:exn:fail:syntax:missing-module',
+ u'struct:exn:fail:syntax:unbound', u'struct:exn:fail:unsupported',
+ u'struct:exn:fail:user', u'struct:srcloc',
+ u'struct:wrapped-extra-arg-arrow', u'struct?', u'sub1', u'subbytes',
+ u'subclass?', u'subclass?/c', u'subprocess', u'subprocess-group-enabled',
+ u'subprocess-kill', u'subprocess-pid', u'subprocess-status',
+ u'subprocess-wait', u'subprocess?', u'subset?', u'substring', u'suggest/c',
+ u'symbol->string', u'symbol-interned?', u'symbol-unreadable?', u'symbol<?',
+ u'symbol=?', u'symbol?', u'symbols', u'sync', u'sync/enable-break',
+ u'sync/timeout', u'sync/timeout/enable-break', u'syntax->datum',
+ u'syntax->list', u'syntax-arm', u'syntax-column', u'syntax-debug-info',
+ u'syntax-disarm', u'syntax-e', u'syntax-line',
+ u'syntax-local-bind-syntaxes', u'syntax-local-certifier',
+ u'syntax-local-context', u'syntax-local-expand-expression',
+ u'syntax-local-get-shadower', u'syntax-local-identifier-as-binding',
+ u'syntax-local-introduce', u'syntax-local-lift-context',
+ u'syntax-local-lift-expression', u'syntax-local-lift-module',
+ u'syntax-local-lift-module-end-declaration',
+ u'syntax-local-lift-provide', u'syntax-local-lift-require',
+ u'syntax-local-lift-values-expression',
+ u'syntax-local-make-definition-context',
+ u'syntax-local-make-delta-introducer',
+ u'syntax-local-module-defined-identifiers',
+ u'syntax-local-module-exports',
+ u'syntax-local-module-required-identifiers', u'syntax-local-name',
+ u'syntax-local-phase-level', u'syntax-local-submodules',
+ u'syntax-local-transforming-module-provides?', u'syntax-local-value',
+ u'syntax-local-value/immediate', u'syntax-original?', u'syntax-position',
+ u'syntax-property', u'syntax-property-preserved?',
+ u'syntax-property-symbol-keys', u'syntax-protect', u'syntax-rearm',
+ u'syntax-recertify', u'syntax-shift-phase-level', u'syntax-source',
+ u'syntax-source-module', u'syntax-span', u'syntax-taint',
+ u'syntax-tainted?', u'syntax-track-origin',
+ u'syntax-transforming-module-expression?',
+ u'syntax-transforming-with-lifts?', u'syntax-transforming?', u'syntax/c',
+ u'syntax?', u'system', u'system*', u'system*/exit-code',
+ u'system-big-endian?', u'system-idle-evt', u'system-language+country',
+ u'system-library-subpath', u'system-path-convention-type', u'system-type',
+ u'system/exit-code', u'tail-marks-match?', u'take', u'take-common-prefix',
+ u'take-right', u'takef', u'takef-right', u'tan', u'tanh',
+ u'tcp-abandon-port', u'tcp-accept', u'tcp-accept-evt',
+ u'tcp-accept-ready?', u'tcp-accept/enable-break', u'tcp-addresses',
+ u'tcp-close', u'tcp-connect', u'tcp-connect/enable-break', u'tcp-listen',
+ u'tcp-listener?', u'tcp-port?', u'tentative-pretty-print-port-cancel',
+ u'tentative-pretty-print-port-transfer', u'tenth', u'terminal-port?',
+ u'the-unsupplied-arg', u'third', u'thread', u'thread-cell-ref',
+ u'thread-cell-set!', u'thread-cell-values?', u'thread-cell?',
+ u'thread-dead-evt', u'thread-dead?', u'thread-group?', u'thread-receive',
+ u'thread-receive-evt', u'thread-resume', u'thread-resume-evt',
+ u'thread-rewind-receive', u'thread-running?', u'thread-send',
+ u'thread-suspend', u'thread-suspend-evt', u'thread-try-receive',
+ u'thread-wait', u'thread/suspend-to-kill', u'thread?', u'time-apply',
+ u'touch', u'transplant-input-port', u'transplant-output-port', u'true',
+ u'truncate', u'udp-addresses', u'udp-bind!', u'udp-bound?', u'udp-close',
+ u'udp-connect!', u'udp-connected?', u'udp-multicast-interface',
+ u'udp-multicast-join-group!', u'udp-multicast-leave-group!',
+ u'udp-multicast-loopback?', u'udp-multicast-set-interface!',
+ u'udp-multicast-set-loopback!', u'udp-multicast-set-ttl!',
+ u'udp-multicast-ttl', u'udp-open-socket', u'udp-receive!',
+ u'udp-receive!*', u'udp-receive!-evt', u'udp-receive!/enable-break',
+ u'udp-receive-ready-evt', u'udp-send', u'udp-send*', u'udp-send-evt',
+ u'udp-send-ready-evt', u'udp-send-to', u'udp-send-to*', u'udp-send-to-evt',
+ u'udp-send-to/enable-break', u'udp-send/enable-break', u'udp?', u'unbox',
+ u'uncaught-exception-handler', u'unit?', u'unspecified-dom',
+ u'unsupplied-arg?', u'use-collection-link-paths',
+ u'use-compiled-file-paths', u'use-user-specific-search-paths',
+ u'user-execute-bit', u'user-read-bit', u'user-write-bit', u'value-blame',
+ u'value-contract', u'values', u'variable-reference->empty-namespace',
+ u'variable-reference->module-base-phase',
+ u'variable-reference->module-declaration-inspector',
+ u'variable-reference->module-path-index',
+ u'variable-reference->module-source', u'variable-reference->namespace',
+ u'variable-reference->phase',
+ u'variable-reference->resolved-module-path',
+ u'variable-reference-constant?', u'variable-reference?', u'vector',
+ u'vector->immutable-vector', u'vector->list',
+ u'vector->pseudo-random-generator', u'vector->pseudo-random-generator!',
+ u'vector->values', u'vector-append', u'vector-argmax', u'vector-argmin',
+ u'vector-copy', u'vector-copy!', u'vector-count', u'vector-drop',
+ u'vector-drop-right', u'vector-fill!', u'vector-filter',
+ u'vector-filter-not', u'vector-immutable', u'vector-immutable/c',
+ u'vector-immutableof', u'vector-length', u'vector-map', u'vector-map!',
+ u'vector-member', u'vector-memq', u'vector-memv', u'vector-ref',
+ u'vector-set!', u'vector-set*!', u'vector-set-performance-stats!',
+ u'vector-split-at', u'vector-split-at-right', u'vector-take',
+ u'vector-take-right', u'vector/c', u'vector?', u'vectorof', u'version',
+ u'void', u'void?', u'weak-box-value', u'weak-box?', u'weak-set',
+ u'weak-seteq', u'weak-seteqv', u'will-execute', u'will-executor?',
+ u'will-register', u'will-try-execute', u'with-input-from-bytes',
+ u'with-input-from-file', u'with-input-from-string',
+ u'with-output-to-bytes', u'with-output-to-file', u'with-output-to-string',
+ u'would-be-future', u'wrap-evt', u'wrapped-extra-arg-arrow',
+ u'wrapped-extra-arg-arrow-extra-neg-party-argument',
+ u'wrapped-extra-arg-arrow-real-func', u'wrapped-extra-arg-arrow?',
+ u'writable<%>', u'write', u'write-byte', u'write-bytes',
+ u'write-bytes-avail', u'write-bytes-avail*', u'write-bytes-avail-evt',
+ u'write-bytes-avail/enable-break', u'write-char', u'write-special',
+ u'write-special-avail*', u'write-special-evt', u'write-string',
+ u'write-to-file', u'writeln', u'xor', u'zero?', u'~.a', u'~.s', u'~.v', u'~a',
+ u'~e', u'~r', u'~s', u'~v'
)
_opening_parenthesis = r'[([{]'
@@ -1359,7 +1407,7 @@ class NewLispLexer(RegexLexer):
name = 'NewLisp'
aliases = ['newlisp']
- filenames = ['*.lsp', '*.nl']
+ filenames = ['*.lsp', '*.nl', '*.kif']
mimetypes = ['text/x-newlisp', 'application/x-newlisp']
flags = re.IGNORECASE | re.MULTILINE | re.UNICODE
@@ -2448,7 +2496,7 @@ class XtlangLexer(RegexLexer):
'write-char', 'zero?',
)
xtlang_functions = (
- 'printf', 'toString', 'afill!', 'pfill!', 'tfill!', 'tbind', 'vfill!',
+ 'toString', 'afill!', 'pfill!', 'tfill!', 'tbind', 'vfill!',
'array-fill!', 'pointer-fill!', 'tuple-fill!', 'vector-fill!', 'free',
'array', 'tuple', 'list', '~', 'cset!', 'cref', '&', 'bor',
'ang-names', '<<', '>>', 'nil', 'printf', 'sprintf', 'null', 'now',
diff --git a/pygments/lexers/make.py b/pygments/lexers/make.py
index f5eac127..b222b672 100644
--- a/pygments/lexers/make.py
+++ b/pygments/lexers/make.py
@@ -5,7 +5,7 @@
Lexers for Makefiles and similar.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -90,7 +90,7 @@ class BaseMakefileLexer(RegexLexer):
bygroups(Keyword, Text), 'export'),
(r'export\s+', Keyword),
# assignment
- (r'([\w${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
+ (r'([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
# strings
(r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double),
@@ -102,7 +102,7 @@ class BaseMakefileLexer(RegexLexer):
(r'\$\(', Keyword, 'expansion'),
],
'expansion': [
- (r'[^$a-zA-Z_)]+', Text),
+ (r'[^$a-zA-Z_()]+', Text),
(r'[a-zA-Z_]+', Name.Variable),
(r'\$', Keyword),
(r'\(', Keyword, '#push'),
diff --git a/pygments/lexers/markup.py b/pygments/lexers/markup.py
index aac8d27e..92dc9e7a 100644
--- a/pygments/lexers/markup.py
+++ b/pygments/lexers/markup.py
@@ -5,7 +5,7 @@
Lexers for non-HTML markup languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -24,7 +24,7 @@ from pygments.util import get_bool_opt, ClassNotFound
__all__ = ['BBCodeLexer', 'MoinWikiLexer', 'RstLexer', 'TexLexer', 'GroffLexer',
'MozPreprocHashLexer', 'MozPreprocPercentLexer',
'MozPreprocXulLexer', 'MozPreprocJavascriptLexer',
- 'MozPreprocCssLexer']
+ 'MozPreprocCssLexer', 'MarkdownLexer']
class BBCodeLexer(RegexLexer):
@@ -500,3 +500,96 @@ class MozPreprocCssLexer(DelegatingLexer):
super(MozPreprocCssLexer, self).__init__(
CssLexer, MozPreprocPercentLexer, **options)
+
+class MarkdownLexer(RegexLexer):
+ """
+ For `Markdown <https://help.github.com/categories/writing-on-github/>`_ markup.
+
+ .. versionadded:: 2.2
+ """
+ name = 'markdown'
+ aliases = ['md']
+ filenames = ['*.md']
+ mimetypes = ["text/x-markdown"]
+ flags = re.MULTILINE
+
+ def _handle_codeblock(self, match):
+ """
+ match args: 1:backticks, 2:lang_name, 3:newline, 4:code, 5:backticks
+ """
+ from pygments.lexers import get_lexer_by_name
+
+ # section header
+ yield match.start(1), String , match.group(1)
+ yield match.start(2), String , match.group(2)
+ yield match.start(3), Text , match.group(3)
+
+ # lookup lexer if wanted and existing
+ lexer = None
+ if self.handlecodeblocks:
+ try:
+ lexer = get_lexer_by_name( match.group(2).strip() )
+ except ClassNotFound:
+ pass
+ code = match.group(4)
+
+ # no lexer for this language. handle it like it was a code block
+ if lexer is None:
+ yield match.start(4), String, code
+ return
+
+ for item in do_insertions([], lexer.get_tokens_unprocessed(code)):
+ yield item
+
+ yield match.start(5), String , match.group(5)
+
+ tokens = {
+ 'root': [
+ # heading with pound prefix
+ (r'^(#)([^#].+\n)', bygroups(Generic.Heading, Text)),
+ (r'^(#{2,6})(.+\n)', bygroups(Generic.Subheading, Text)),
+ # task list
+ (r'^(\s*)([*-] )(\[[ xX]\])( .+\n)',
+ bygroups(Text, Keyword, Keyword, using(this, state='inline'))),
+ # bulleted lists
+ (r'^(\s*)([*-])(\s)(.+\n)',
+ bygroups(Text, Keyword, Text, using(this, state='inline'))),
+ # numbered lists
+ (r'^(\s*)([0-9]+\.)( .+\n)',
+ bygroups(Text, Keyword, using(this, state='inline'))),
+ # quote
+ (r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)),
+ # text block
+ (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),
+ # code block with language
+ (r'^(```)(\w+)(\n)([\w\W]*?)(^```$)', _handle_codeblock),
+
+ include('inline'),
+ ],
+ 'inline': [
+ # escape
+ (r'\\.', Text),
+ # italics
+ (r'(\s)([*_][^*_]+[*_])(\W|\n)', bygroups(Text, Generic.Emph, Text)),
+ # bold
+ # warning: the following rule eats internal tags. eg. **foo _bar_ baz** bar is not italics
+ (r'(\s)((\*\*|__).*\3)((?=\W|\n))', bygroups(Text, Generic.Strong, None, Text)),
+ # "proper way" (r'(\s)([*_]{2}[^*_]+[*_]{2})((?=\W|\n))', bygroups(Text, Generic.Strong, Text)),
+ # strikethrough
+ (r'(\s)(~~[^~]+~~)((?=\W|\n))', bygroups(Text, Generic.Deleted, Text)),
+ # inline code
+ (r'`[^`]+`', String.Backtick),
+ # mentions and topics (twitter and github stuff)
+ (r'[@#][\w/:]+', Name.Entity),
+ # (image?) links eg: ![Image of Yaktocat](https://octodex.github.com/images/yaktocat.png)
+ (r'(!?\[)([^]]+)(\])(\()([^)]+)(\))', bygroups(Text, Name.Tag, Text, Text, Name.Attribute, Text)),
+
+ # general text, must come last!
+ (r'[^\\\s]+', Text),
+ (r'.', Text),
+ ],
+ }
+
+ def __init__(self, **options):
+ self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
+ RegexLexer.__init__(self, **options)
diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py
index 7a92f5bb..ea0ebee2 100644
--- a/pygments/lexers/math.py
+++ b/pygments/lexers/math.py
@@ -5,7 +5,7 @@
Just export lexers that were contained in this module.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/matlab.py b/pygments/lexers/matlab.py
index ccb11a5d..56a0f6d6 100644
--- a/pygments/lexers/matlab.py
+++ b/pygments/lexers/matlab.py
@@ -5,7 +5,7 @@
Lexers for Matlab and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/ml.py b/pygments/lexers/ml.py
index 4f10edd0..f80d5bfa 100644
--- a/pygments/lexers/ml.py
+++ b/pygments/lexers/ml.py
@@ -5,7 +5,7 @@
Lexers for ML family languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/modeling.py b/pygments/lexers/modeling.py
index a6b0cb77..b354f1cf 100644
--- a/pygments/lexers/modeling.py
+++ b/pygments/lexers/modeling.py
@@ -5,7 +5,7 @@
Lexers for modeling languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/modula2.py b/pygments/lexers/modula2.py
index 01771f55..c0a69b40 100644
--- a/pygments/lexers/modula2.py
+++ b/pygments/lexers/modula2.py
@@ -5,7 +5,7 @@
Multi-Dialect Lexer for Modula-2.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/monte.py b/pygments/lexers/monte.py
new file mode 100644
index 00000000..e18560b8
--- /dev/null
+++ b/pygments/lexers/monte.py
@@ -0,0 +1,204 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.monte
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Monte programming language.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
+ Punctuation, String, Whitespace
+from pygments.lexer import RegexLexer, include, words
+
+__all__ = ['MonteLexer']
+
+
+# `var` handled separately
+# `interface` handled separately
+_declarations = ['bind', 'def', 'fn', 'object']
+_methods = ['method', 'to']
+_keywords = [
+ 'as', 'break', 'catch', 'continue', 'else', 'escape', 'exit', 'exports',
+ 'extends', 'finally', 'for', 'guards', 'if', 'implements', 'import',
+ 'in', 'match', 'meta', 'pass', 'return', 'switch', 'try', 'via', 'when',
+ 'while',
+]
+_operators = [
+ # Unary
+ '~', '!',
+ # Binary
+ '+', '-', '*', '/', '%', '**', '&', '|', '^', '<<', '>>',
+ # Binary augmented
+ '+=', '-=', '*=', '/=', '%=', '**=', '&=', '|=', '^=', '<<=', '>>=',
+ # Comparison
+ '==', '!=', '<', '<=', '>', '>=', '<=>',
+ # Patterns and assignment
+ ':=', '?', '=~', '!~', '=>',
+ # Calls and sends
+ '.', '<-', '->',
+]
+_escape_pattern = (
+ r'(?:\\x[0-9a-fA-F]{2}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
+ r'\\["\'\\bftnr])')
+# _char = _escape_chars + [('.', String.Char)]
+_identifier = '[_a-zA-Z][_0-9a-zA-Z]*'
+
+_constants = [
+ # Void constants
+ 'null',
+ # Bool constants
+ 'false', 'true',
+ # Double constants
+ 'Infinity', 'NaN',
+ # Special objects
+ 'M', 'Ref', 'throw', 'traceln',
+]
+
+_guards = [
+ 'Any', 'Binding', 'Bool', 'Bytes', 'Char', 'DeepFrozen', 'Double',
+ 'Empty', 'Int', 'List', 'Map', 'Near', 'NullOk', 'Same', 'Selfless',
+ 'Set', 'Str', 'SubrangeGuard', 'Transparent', 'Void',
+]
+
+_safeScope = [
+ '_accumulateList', '_accumulateMap', '_auditedBy', '_bind',
+ '_booleanFlow', '_comparer', '_equalizer', '_iterForever', '_loop',
+ '_makeBytes', '_makeDouble', '_makeFinalSlot', '_makeInt', '_makeList',
+ '_makeMap', '_makeMessageDesc', '_makeOrderedSpace', '_makeParamDesc',
+ '_makeProtocolDesc', '_makeSourceSpan', '_makeString', '_makeVarSlot',
+ '_makeVerbFacet', '_mapExtract', '_matchSame', '_quasiMatcher',
+ '_slotToBinding', '_splitList', '_suchThat', '_switchFailed',
+ '_validateFor', 'b__quasiParser', 'eval', 'import', 'm__quasiParser',
+ 'makeBrandPair', 'makeLazySlot', 'safeScope', 'simple__quasiParser',
+]
+
+
+class MonteLexer(RegexLexer):
+ """
+ Lexer for the `Monte <https://monte.readthedocs.io/>`_ programming language.
+
+ .. versionadded:: 2.2
+ """
+ name = 'Monte'
+ aliases = ['monte']
+ filenames = ['*.mt']
+
+ tokens = {
+ 'root': [
+ # Comments
+ (r'#[^\n]*\n', Comment),
+
+ # Docstrings
+ # Apologies for the non-greedy matcher here.
+ (r'/\*\*.*?\*/', String.Doc),
+
+ # `var` declarations
+ (r'\bvar\b', Keyword.Declaration, 'var'),
+
+ # `interface` declarations
+ (r'\binterface\b', Keyword.Declaration, 'interface'),
+
+ # method declarations
+ (words(_methods, prefix='\\b', suffix='\\b'),
+ Keyword, 'method'),
+
+ # All other declarations
+ (words(_declarations, prefix='\\b', suffix='\\b'),
+ Keyword.Declaration),
+
+ # Keywords
+ (words(_keywords, prefix='\\b', suffix='\\b'), Keyword),
+
+ # Literals
+ ('[+-]?0x[_0-9a-fA-F]+', Number.Hex),
+ (r'[+-]?[_0-9]+\.[_0-9]*([eE][+-]?[_0-9]+)?', Number.Float),
+ ('[+-]?[_0-9]+', Number.Integer),
+ ("'", String.Double, 'char'),
+ ('"', String.Double, 'string'),
+
+ # Quasiliterals
+ ('`', String.Backtick, 'ql'),
+
+ # Operators
+ (words(_operators), Operator),
+
+ # Verb operators
+ (_identifier + '=', Operator.Word),
+
+ # Safe scope constants
+ (words(_constants, prefix='\\b', suffix='\\b'),
+ Keyword.Pseudo),
+
+ # Safe scope guards
+ (words(_guards, prefix='\\b', suffix='\\b'), Keyword.Type),
+
+ # All other safe scope names
+ (words(_safeScope, prefix='\\b', suffix='\\b'),
+ Name.Builtin),
+
+ # Identifiers
+ (_identifier, Name),
+
+ # Punctuation
+ (r'\(|\)|\{|\}|\[|\]|:|,', Punctuation),
+
+ # Whitespace
+ (' +', Whitespace),
+
+ # Definite lexer errors
+ ('=', Error),
+ ],
+ 'char': [
+ # It is definitely an error to have a char of width == 0.
+ ("'", Error, 'root'),
+ (_escape_pattern, String.Escape, 'charEnd'),
+ ('.', String.Char, 'charEnd'),
+ ],
+ 'charEnd': [
+ ("'", String.Char, '#pop:2'),
+ # It is definitely an error to have a char of width > 1.
+ ('.', Error),
+ ],
+ # The state of things coming into an interface.
+ 'interface': [
+ (' +', Whitespace),
+ (_identifier, Name.Class, '#pop'),
+ include('root'),
+ ],
+ # The state of things coming into a method.
+ 'method': [
+ (' +', Whitespace),
+ (_identifier, Name.Function, '#pop'),
+ include('root'),
+ ],
+ 'string': [
+ ('"', String.Double, 'root'),
+ (_escape_pattern, String.Escape),
+ (r'\n', String.Double),
+ ('.', String.Double),
+ ],
+ 'ql': [
+ ('`', String.Backtick, 'root'),
+ (r'\$' + _escape_pattern, String.Escape),
+ (r'\$\$', String.Escape),
+ (r'@@', String.Escape),
+ (r'\$\{', String.Interpol, 'qlNest'),
+ (r'@\{', String.Interpol, 'qlNest'),
+ (r'\$' + _identifier, Name),
+ ('@' + _identifier, Name),
+ ('.', String.Backtick),
+ ],
+ 'qlNest': [
+ (r'\}', String.Interpol, '#pop'),
+ include('root'),
+ ],
+ # The state of things immediately following `var`.
+ 'var': [
+ (' +', Whitespace),
+ (_identifier, Name.Variable, '#pop'),
+ include('root'),
+ ],
+ }
diff --git a/pygments/lexers/ncl.py b/pygments/lexers/ncl.py
index 23eba786..1ba7f4a7 100644
--- a/pygments/lexers/ncl.py
+++ b/pygments/lexers/ncl.py
@@ -5,7 +5,7 @@
Lexers for NCAR Command Language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -35,7 +35,7 @@ class NCLLexer(RegexLexer):
(r';.*\n', Comment),
include('strings'),
include('core'),
- (r'[a-z][\w$]*', Name),
+ (r'[a-zA-Z_]\w*', Name),
include('nums'),
(r'[\s]+', Text),
],
@@ -43,9 +43,9 @@ class NCLLexer(RegexLexer):
# Statements
(words((
'begin', 'break', 'continue', 'create', 'defaultapp', 'do',
- 'else', 'end', 'external', 'exit', 'False', 'file', 'function',
+ 'else', 'end', 'external', 'exit', 'True', 'False', 'file', 'function',
'getvalues', 'graphic', 'group', 'if', 'list', 'load', 'local',
- 'new', '_Missing', 'Missing', 'new', 'noparent', 'procedure',
+ 'new', '_Missing', 'Missing', 'noparent', 'procedure',
'quit', 'QUIT', 'Quit', 'record', 'return', 'setvalues', 'stop',
'then', 'while'), prefix=r'\b', suffix=r'\s*\b'),
Keyword),
@@ -59,10 +59,10 @@ class NCLLexer(RegexLexer):
Keyword.Type),
# Operators
- (r'[\^*+\-/<>]', Operator),
+ (r'[\%^*+\-/<>]', Operator),
# punctuation:
- (r'[\[\]():@$.,]', Punctuation),
+ (r'[\[\]():@$!&\|.,\\{}]', Punctuation),
(r'[=:]', Punctuation),
# Intrinsics
@@ -589,149 +589,60 @@ class NCLLexer(RegexLexer):
'lgTitleFontQuality', 'lgTitleFontThicknessF', 'lgTitleFuncCode',
'lgTitleJust', 'lgTitleOffsetF', 'lgTitleOn', 'lgTitlePosition',
'lgTitleString', 'lgTopMarginF', 'mpAreaGroupCount',
- 'mpAreaGroupCount_MapPlot', 'mpAreaMaskingOn',
- 'mpAreaMaskingOn_MapPlot', 'mpAreaNames', 'mpAreaNames_MapPlot',
- 'mpAreaTypes', 'mpAreaTypes_MapPlot', 'mpBottomAngleF',
- 'mpBottomAngleF_MapTransformation', 'mpBottomMapPosF',
- 'mpBottomMapPosF_MapTransformation', 'mpBottomNDCF',
- 'mpBottomNDCF_MapTransformation', 'mpBottomNPCF',
- 'mpBottomNPCF_MapTransformation', 'mpBottomPointLatF',
- 'mpBottomPointLatF_MapTransformation', 'mpBottomPointLonF',
- 'mpBottomPointLonF_MapTransformation', 'mpBottomWindowF',
- 'mpBottomWindowF_MapTransformation', 'mpCenterLatF',
- 'mpCenterLatF_MapTransformation', 'mpCenterLonF',
- 'mpCenterLonF_MapTransformation', 'mpCenterRotF',
- 'mpCenterRotF_MapTransformation', 'mpCountyLineColor',
- 'mpCountyLineColor_MapPlot', 'mpCountyLineDashPattern',
- 'mpCountyLineDashPattern_MapPlot', 'mpCountyLineDashSegLenF',
- 'mpCountyLineDashSegLenF_MapPlot', 'mpCountyLineThicknessF',
- 'mpCountyLineThicknessF_MapPlot', 'mpDataBaseVersion',
- 'mpDataBaseVersion_MapPlot', 'mpDataResolution',
- 'mpDataResolution_MapPlot', 'mpDataSetName', 'mpDataSetName_MapPlot',
- 'mpDefaultFillColor', 'mpDefaultFillColor_MapPlot',
- 'mpDefaultFillPattern', 'mpDefaultFillPattern_MapPlot',
- 'mpDefaultFillScaleF', 'mpDefaultFillScaleF_MapPlot',
- 'mpDynamicAreaGroups', 'mpDynamicAreaGroups_MapPlot',
- 'mpEllipticalBoundary', 'mpEllipticalBoundary_MapTransformation',
- 'mpFillAreaSpecifiers', 'mpFillAreaSpecifiers_MapPlot',
- 'mpFillBoundarySets', 'mpFillBoundarySets_MapPlot', 'mpFillColor',
- 'mpFillColor_MapPlot', 'mpFillColors', 'mpFillColors_MapPlot',
- 'mpFillColors-default', 'mpFillDotSizeF', 'mpFillDotSizeF_MapPlot',
- 'mpFillDrawOrder', 'mpFillDrawOrder_MapPlot', 'mpFillOn',
- 'mpFillOn_MapPlot', 'mpFillPatternBackground',
- 'mpFillPatternBackground_MapPlot', 'mpFillPattern',
- 'mpFillPattern_MapPlot', 'mpFillPatterns', 'mpFillPatterns_MapPlot',
- 'mpFillPatterns-default', 'mpFillScaleF', 'mpFillScaleF_MapPlot',
- 'mpFillScales', 'mpFillScales_MapPlot', 'mpFillScales-default',
- 'mpFixedAreaGroups', 'mpFixedAreaGroups_MapPlot',
- 'mpGeophysicalLineColor', 'mpGeophysicalLineColor_MapPlot',
- 'mpGeophysicalLineDashPattern',
- 'mpGeophysicalLineDashPattern_MapPlot',
- 'mpGeophysicalLineDashSegLenF',
- 'mpGeophysicalLineDashSegLenF_MapPlot', 'mpGeophysicalLineThicknessF',
- 'mpGeophysicalLineThicknessF_MapPlot', 'mpGreatCircleLinesOn',
- 'mpGreatCircleLinesOn_MapTransformation', 'mpGridAndLimbDrawOrder',
- 'mpGridAndLimbDrawOrder_MapPlot', 'mpGridAndLimbOn',
- 'mpGridAndLimbOn_MapPlot', 'mpGridLatSpacingF',
- 'mpGridLatSpacingF_MapPlot', 'mpGridLineColor',
- 'mpGridLineColor_MapPlot', 'mpGridLineDashPattern',
- 'mpGridLineDashPattern_MapPlot', 'mpGridLineDashSegLenF',
- 'mpGridLineDashSegLenF_MapPlot', 'mpGridLineThicknessF',
- 'mpGridLineThicknessF_MapPlot', 'mpGridLonSpacingF',
- 'mpGridLonSpacingF_MapPlot', 'mpGridMaskMode',
- 'mpGridMaskMode_MapPlot', 'mpGridMaxLatF', 'mpGridMaxLatF_MapPlot',
- 'mpGridPolarLonSpacingF', 'mpGridPolarLonSpacingF_MapPlot',
- 'mpGridSpacingF', 'mpGridSpacingF_MapPlot', 'mpInlandWaterFillColor',
- 'mpInlandWaterFillColor_MapPlot', 'mpInlandWaterFillPattern',
- 'mpInlandWaterFillPattern_MapPlot', 'mpInlandWaterFillScaleF',
- 'mpInlandWaterFillScaleF_MapPlot', 'mpLabelDrawOrder',
- 'mpLabelDrawOrder_MapPlot', 'mpLabelFontColor',
- 'mpLabelFontColor_MapPlot', 'mpLabelFontHeightF',
- 'mpLabelFontHeightF_MapPlot', 'mpLabelsOn', 'mpLabelsOn_MapPlot',
- 'mpLambertMeridianF', 'mpLambertMeridianF_MapTransformation',
- 'mpLambertParallel1F', 'mpLambertParallel1F_MapTransformation',
- 'mpLambertParallel2F', 'mpLambertParallel2F_MapTransformation',
- 'mpLandFillColor', 'mpLandFillColor_MapPlot', 'mpLandFillPattern',
- 'mpLandFillPattern_MapPlot', 'mpLandFillScaleF',
- 'mpLandFillScaleF_MapPlot', 'mpLeftAngleF',
- 'mpLeftAngleF_MapTransformation', 'mpLeftCornerLatF',
- 'mpLeftCornerLatF_MapTransformation', 'mpLeftCornerLonF',
- 'mpLeftCornerLonF_MapTransformation', 'mpLeftMapPosF',
- 'mpLeftMapPosF_MapTransformation', 'mpLeftNDCF',
- 'mpLeftNDCF_MapTransformation', 'mpLeftNPCF',
- 'mpLeftNPCF_MapTransformation', 'mpLeftPointLatF',
- 'mpLeftPointLatF_MapTransformation', 'mpLeftPointLonF',
- 'mpLeftPointLonF_MapTransformation', 'mpLeftWindowF',
- 'mpLeftWindowF_MapTransformation', 'mpLimbLineColor',
- 'mpLimbLineColor_MapPlot', 'mpLimbLineDashPattern',
- 'mpLimbLineDashPattern_MapPlot', 'mpLimbLineDashSegLenF',
- 'mpLimbLineDashSegLenF_MapPlot', 'mpLimbLineThicknessF',
- 'mpLimbLineThicknessF_MapPlot', 'mpLimitMode',
- 'mpLimitMode_MapTransformation', 'Angle_projection_limits',
- 'mpMaskAreaSpecifiers', 'mpMaskAreaSpecifiers_MapPlot',
- 'mpMaskOutlineSpecifiers', 'mpMaskOutlineSpecifiers_MapPlot',
- 'mpMaxLatF', 'mpMaxLatF_MapTransformation', 'mpMaxLonF',
- 'mpMaxLonF_MapTransformation', 'mpMinLatF',
- 'mpMinLatF_MapTransformation', 'mpMinLonF',
- 'mpMinLonF_MapTransformation', 'mpMonoFillColor',
- 'mpMonoFillColor_MapPlot', 'mpMonoFillPattern',
- 'mpMonoFillPattern_MapPlot', 'mpMonoFillScale',
- 'mpMonoFillScale_MapPlot', 'mpNationalLineColor',
- 'mpNationalLineColor_MapPlot', 'mpNationalLineDashPattern',
- 'mpNationalLineDashPattern_MapPlot',
- 'mpNationalLineDashSegLenF_MapPlot', 'mpNationalLineThicknessF',
- 'mpNationalLineThicknessF_MapPlot', 'mpOceanFillColor',
- 'mpOceanFillColor_MapPlot', 'mpOceanFillPattern',
- 'mpOceanFillPattern_MapPlot', 'mpOceanFillScaleF',
- 'mpOceanFillScaleF_MapPlot', 'mpOutlineBoundarySets',
- 'mpOutlineBoundarySets_MapPlot', 'mpOutlineDrawOrder',
- 'mpOutlineDrawOrder_MapPlot', 'mpOutlineMaskingOn',
- 'mpOutlineMaskingOn_MapPlot', 'mpOutlineOn', 'mpOutlineOn_MapPlot',
- 'mpOutlineSpecifiers', 'mpOutlineSpecifiers_MapPlot',
- 'mpPerimDrawOrder', 'mpPerimDrawOrder_MapPlot', 'mpPerimLineColor',
- 'mpPerimLineColor_MapPlot', 'mpPerimLineDashPattern',
- 'mpPerimLineDashPattern_MapPlot', 'mpPerimLineDashSegLenF',
- 'mpPerimLineDashSegLenF_MapPlot', 'mpPerimLineThicknessF',
- 'mpPerimLineThicknessF_MapPlot', 'mpPerimOn', 'mpPerimOn_MapPlot',
- 'mpPolyMode', 'mpPolyMode_MapTransformation', 'mpProjection',
- 'mpProjection_MapTransformation', 'mpProvincialLineColor',
- 'mpProvincialLineColor_MapPlot', 'mpProvincialLineDashPattern',
- 'mpProvincialLineDashPattern_MapPlot', 'mpProvincialLineDashSegLenF',
- 'mpProvincialLineDashSegLenF_MapPlot', 'mpProvincialLineThicknessF',
- 'mpProvincialLineThicknessF_MapPlot', 'mpRelativeCenterLat',
- 'mpRelativeCenterLat_MapTransformation', 'mpRelativeCenterLon',
- 'mpRelativeCenterLon_MapTransformation', 'mpRightAngleF',
- 'mpRightAngleF_MapTransformation', 'mpRightCornerLatF',
- 'mpRightCornerLatF_MapTransformation', 'mpRightCornerLonF',
- 'mpRightCornerLonF_MapTransformation', 'mpRightMapPosF',
- 'mpRightMapPosF_MapTransformation', 'mpRightNDCF',
- 'mpRightNDCF_MapTransformation', 'mpRightNPCF',
- 'mpRightNPCF_MapTransformation', 'mpRightPointLatF',
- 'mpRightPointLatF_MapTransformation', 'mpRightPointLonF',
- 'mpRightPointLonF_MapTransformation', 'mpRightWindowF',
- 'mpRightWindowF_MapTransformation', 'mpSatelliteAngle1F',
- 'mpSatelliteAngle1F_MapTransformation', 'mpSatelliteAngle2F',
- 'mpSatelliteAngle2F_MapTransformation', 'mpSatelliteDistF',
- 'mpSatelliteDistF_MapTransformation', 'mpShapeMode',
- 'mpShapeMode_MapPlot', 'mpSpecifiedFillColors',
- 'mpSpecifiedFillColors_MapPlot', 'mpSpecifiedFillDirectIndexing',
- 'mpSpecifiedFillDirectIndexing_MapPlot', 'mpSpecifiedFillPatterns',
- 'mpSpecifiedFillPatterns_MapPlot', 'mpSpecifiedFillPriority',
- 'mpSpecifiedFillPriority_MapPlot', 'mpSpecifiedFillScales',
- 'mpSpecifiedFillScales_MapPlot', 'mpTopAngleF',
- 'mpTopAngleF_MapTransformation', 'mpTopMapPosF',
- 'mpTopMapPosF_MapTransformation', 'mpTopNDCF',
- 'mpTopNDCF_MapTransformation', 'mpTopNPCF',
- 'mpTopNPCF_MapTransformation', 'mpTopPointLatF',
- 'mpTopPointLatF_MapTransformation', 'mpTopPointLonF',
- 'mpTopPointLonF_MapTransformation', 'mpTopWindowF',
- 'mpTopWindowF_MapTransformation', 'mpUSStateLineColor',
- 'mpUSStateLineColor_MapPlot', 'mpUSStateLineDashPattern',
- 'mpUSStateLineDashPattern_MapPlot', 'mpUSStateLineDashSegLenF',
- 'mpUSStateLineDashSegLenF_MapPlot', 'mpUSStateLineThicknessF',
- 'mpUSStateLineThicknessF_MapPlot', 'pmAnnoManagers',
- 'pmAnnoViews', 'pmLabelBarDisplayMode', 'pmLabelBarHeightF',
- 'pmLabelBarKeepAspect', 'pmLabelBarOrthogonalPosF',
+ 'mpAreaMaskingOn', 'mpAreaNames', 'mpAreaTypes', 'mpBottomAngleF',
+ 'mpBottomMapPosF', 'mpBottomNDCF', 'mpBottomNPCF',
+ 'mpBottomPointLatF', 'mpBottomPointLonF', 'mpBottomWindowF',
+ 'mpCenterLatF', 'mpCenterLonF', 'mpCenterRotF', 'mpCountyLineColor',
+ 'mpCountyLineDashPattern', 'mpCountyLineDashSegLenF',
+ 'mpCountyLineThicknessF', 'mpDataBaseVersion', 'mpDataResolution',
+ 'mpDataSetName', 'mpDefaultFillColor', 'mpDefaultFillPattern',
+ 'mpDefaultFillScaleF', 'mpDynamicAreaGroups', 'mpEllipticalBoundary',
+ 'mpFillAreaSpecifiers', 'mpFillBoundarySets', 'mpFillColor',
+ 'mpFillColors', 'mpFillColors-default', 'mpFillDotSizeF',
+ 'mpFillDrawOrder', 'mpFillOn', 'mpFillPatternBackground',
+ 'mpFillPattern', 'mpFillPatterns', 'mpFillPatterns-default',
+ 'mpFillScaleF', 'mpFillScales', 'mpFillScales-default',
+ 'mpFixedAreaGroups', 'mpGeophysicalLineColor',
+ 'mpGeophysicalLineDashPattern', 'mpGeophysicalLineDashSegLenF',
+ 'mpGeophysicalLineThicknessF', 'mpGreatCircleLinesOn',
+ 'mpGridAndLimbDrawOrder', 'mpGridAndLimbOn', 'mpGridLatSpacingF',
+ 'mpGridLineColor', 'mpGridLineDashPattern', 'mpGridLineDashSegLenF',
+ 'mpGridLineThicknessF', 'mpGridLonSpacingF', 'mpGridMaskMode',
+ 'mpGridMaxLatF', 'mpGridPolarLonSpacingF', 'mpGridSpacingF',
+ 'mpInlandWaterFillColor', 'mpInlandWaterFillPattern',
+ 'mpInlandWaterFillScaleF', 'mpLabelDrawOrder', 'mpLabelFontColor',
+ 'mpLabelFontHeightF', 'mpLabelsOn', 'mpLambertMeridianF',
+ 'mpLambertParallel1F', 'mpLambertParallel2F', 'mpLandFillColor',
+ 'mpLandFillPattern', 'mpLandFillScaleF', 'mpLeftAngleF',
+ 'mpLeftCornerLatF', 'mpLeftCornerLonF', 'mpLeftMapPosF',
+ 'mpLeftNDCF', 'mpLeftNPCF', 'mpLeftPointLatF',
+ 'mpLeftPointLonF', 'mpLeftWindowF', 'mpLimbLineColor',
+ 'mpLimbLineDashPattern', 'mpLimbLineDashSegLenF',
+ 'mpLimbLineThicknessF', 'mpLimitMode', 'mpMaskAreaSpecifiers',
+ 'mpMaskOutlineSpecifiers', 'mpMaxLatF', 'mpMaxLonF',
+ 'mpMinLatF', 'mpMinLonF', 'mpMonoFillColor', 'mpMonoFillPattern',
+ 'mpMonoFillScale', 'mpNationalLineColor', 'mpNationalLineDashPattern',
+ 'mpNationalLineThicknessF', 'mpOceanFillColor', 'mpOceanFillPattern',
+ 'mpOceanFillScaleF', 'mpOutlineBoundarySets', 'mpOutlineDrawOrder',
+ 'mpOutlineMaskingOn', 'mpOutlineOn', 'mpOutlineSpecifiers',
+ 'mpPerimDrawOrder', 'mpPerimLineColor', 'mpPerimLineDashPattern',
+ 'mpPerimLineDashSegLenF', 'mpPerimLineThicknessF', 'mpPerimOn',
+ 'mpPolyMode', 'mpProjection', 'mpProvincialLineColor',
+ 'mpProvincialLineDashPattern', 'mpProvincialLineDashSegLenF',
+ 'mpProvincialLineThicknessF', 'mpRelativeCenterLat',
+ 'mpRelativeCenterLon', 'mpRightAngleF', 'mpRightCornerLatF',
+ 'mpRightCornerLonF', 'mpRightMapPosF', 'mpRightNDCF',
+ 'mpRightNPCF', 'mpRightPointLatF', 'mpRightPointLonF',
+ 'mpRightWindowF', 'mpSatelliteAngle1F', 'mpSatelliteAngle2F',
+ 'mpSatelliteDistF', 'mpShapeMode', 'mpSpecifiedFillColors',
+ 'mpSpecifiedFillDirectIndexing', 'mpSpecifiedFillPatterns',
+ 'mpSpecifiedFillPriority', 'mpSpecifiedFillScales',
+ 'mpTopAngleF', 'mpTopMapPosF', 'mpTopNDCF', 'mpTopNPCF',
+ 'mpTopPointLatF', 'mpTopPointLonF', 'mpTopWindowF',
+ 'mpUSStateLineColor', 'mpUSStateLineDashPattern',
+ 'mpUSStateLineDashSegLenF', 'mpUSStateLineThicknessF',
+ 'pmAnnoManagers', 'pmAnnoViews', 'pmLabelBarDisplayMode',
+ 'pmLabelBarHeightF', 'pmLabelBarKeepAspect', 'pmLabelBarOrthogonalPosF',
'pmLabelBarParallelPosF', 'pmLabelBarSide', 'pmLabelBarWidthF',
'pmLabelBarZone', 'pmLegendDisplayMode', 'pmLegendHeightF',
'pmLegendKeepAspect', 'pmLegendOrthogonalPosF',
@@ -739,35 +650,14 @@ class NCLLexer(RegexLexer):
'pmLegendZone', 'pmOverlaySequenceIds', 'pmTickMarkDisplayMode',
'pmTickMarkZone', 'pmTitleDisplayMode', 'pmTitleZone',
'prGraphicStyle', 'prPolyType', 'prXArray', 'prYArray',
- 'sfCopyData_MeshScalarField', 'sfCopyData', 'sfCopyData_ScalarField',
- 'sfDataArray_MeshScalarField', 'sfDataArray',
- 'sfDataArray_ScalarField', 'sfDataMaxV_MeshScalarField', 'sfDataMaxV',
- 'sfDataMaxV_ScalarField', 'sfDataMinV_MeshScalarField', 'sfDataMinV',
- 'sfDataMinV_ScalarField', 'sfElementNodes',
- 'sfElementNodes_MeshScalarField', 'sfExchangeDimensions',
- 'sfExchangeDimensions_ScalarField', 'sfFirstNodeIndex',
- 'sfFirstNodeIndex_MeshScalarField', 'sfMissingValueV_MeshScalarField',
- 'sfMissingValueV', 'sfMissingValueV_ScalarField',
- 'sfXArray_MeshScalarField', 'sfXArray', 'sfXArray_ScalarField',
- 'sfXCActualEndF_MeshScalarField', 'sfXCActualEndF',
- 'sfXCActualEndF_ScalarField', 'sfXCActualStartF_MeshScalarField',
- 'sfXCActualStartF', 'sfXCActualStartF_ScalarField', 'sfXCEndIndex',
- 'sfXCEndIndex_ScalarField', 'sfXCEndSubsetV',
- 'sfXCEndSubsetV_ScalarField', 'sfXCEndV', 'sfXCEndV_ScalarField',
- 'sfXCStartIndex', 'sfXCStartIndex_ScalarField', 'sfXCStartSubsetV',
- 'sfXCStartSubsetV_ScalarField', 'sfXCStartV',
- 'sfXCStartV_ScalarField', 'sfXCStride', 'sfXCStride_ScalarField',
- 'sfXCellBounds', 'sfXCellBounds_MeshScalarField',
- 'sfYArray_MeshScalarField', 'sfYArray', 'sfYArray_ScalarField',
- 'sfYCActualEndF_MeshScalarField', 'sfYCActualEndF',
- 'sfYCActualEndF_ScalarField', 'sfYCActualStartF_MeshScalarField',
- 'sfYCActualStartF', 'sfYCActualStartF_ScalarField', 'sfYCEndIndex',
- 'sfYCEndIndex_ScalarField', 'sfYCEndSubsetV',
- 'sfYCEndSubsetV_ScalarField', 'sfYCEndV', 'sfYCEndV_ScalarField',
- 'sfYCStartIndex', 'sfYCStartIndex_ScalarField', 'sfYCStartSubsetV',
- 'sfYCStartSubsetV_ScalarField', 'sfYCStartV',
- 'sfYCStartV_ScalarField', 'sfYCStride', 'sfYCStride_ScalarField',
- 'sfYCellBounds', 'sfYCellBounds_MeshScalarField', 'stArrowLengthF',
+ 'sfCopyData', 'sfDataArray', 'sfDataMaxV', 'sfDataMinV',
+ 'sfElementNodes', 'sfExchangeDimensions', 'sfFirstNodeIndex',
+ 'sfMissingValueV', 'sfXArray', 'sfXCActualEndF', 'sfXCActualStartF',
+ 'sfXCEndIndex', 'sfXCEndSubsetV', 'sfXCEndV', 'sfXCStartIndex',
+ 'sfXCStartSubsetV', 'sfXCStartV', 'sfXCStride', 'sfXCellBounds',
+ 'sfYArray', 'sfYCActualEndF', 'sfYCActualStartF', 'sfYCEndIndex',
+ 'sfYCEndSubsetV', 'sfYCEndV', 'sfYCStartIndex', 'sfYCStartSubsetV',
+ 'sfYCStartV', 'sfYCStride', 'sfYCellBounds', 'stArrowLengthF',
'stArrowStride', 'stCrossoverCheckCount',
'stExplicitLabelBarLabelsOn', 'stLabelBarEndLabelsOn',
'stLabelFormat', 'stLengthCheckCount', 'stLevelColors',
@@ -870,25 +760,12 @@ class NCLLexer(RegexLexer):
'tmYRMinorPerMajor', 'tmYRMinorThicknessF', 'tmYRMinorValues',
'tmYRMode', 'tmYROn', 'tmYRPrecision', 'tmYRStyle', 'tmYRTickEndF',
'tmYRTickSpacingF', 'tmYRTickStartF', 'tmYRValues', 'tmYUseLeft',
- 'trGridType', 'trGridType_Transformation', 'trLineInterpolationOn',
- 'trLineInterpolationOn_Transformation', 'trXAxisType',
- 'trXAxisType_IrregularTransformation', 'trXCoordPoints',
- 'trXCoordPoints_IrregularTransformation', 'trXInterPoints',
- 'trXInterPoints_IrregularTransformation', 'trXLog',
- 'trXLog_LogLinTransformation', 'trXMaxF', 'trXMaxF_Transformation',
- 'trXMinF', 'trXMinF_Transformation', 'trXReverse',
- 'trXReverse_Transformation', 'trXSamples',
- 'trXSamples_IrregularTransformation', 'trXTensionF',
- 'trXTensionF_IrregularTransformation', 'trYAxisType',
- 'trYAxisType_IrregularTransformation', 'trYCoordPoints',
- 'trYCoordPoints_IrregularTransformation', 'trYInterPoints',
- 'trYInterPoints_IrregularTransformation', 'trYLog',
- 'trYLog_LogLinTransformation', 'trYMaxF', 'trYMaxF_Transformation',
- 'trYMinF', 'trYMinF_Transformation', 'trYReverse',
- 'trYReverse_Transformation', 'trYSamples',
- 'trYSamples_IrregularTransformation', 'trYTensionF',
- 'trYTensionF_IrregularTransformation', 'txAngleF',
- 'txBackgroundFillColor', 'txConstantSpacingF', 'txDirection',
+ 'trGridType', 'trLineInterpolationOn',
+ 'trXAxisType', 'trXCoordPoints', 'trXInterPoints', 'trXLog',
+ 'trXMaxF', 'trXMinF', 'trXReverse', 'trXSamples', 'trXTensionF',
+ 'trYAxisType', 'trYCoordPoints', 'trYInterPoints', 'trYLog',
+ 'trYMaxF', 'trYMinF', 'trYReverse', 'trYSamples', 'trYTensionF',
+ 'txAngleF', 'txBackgroundFillColor', 'txConstantSpacingF', 'txDirection',
'txFont', 'HLU-Fonts', 'txFontAspectF', 'txFontColor',
'txFontHeightF', 'txFontOpacityF', 'txFontQuality',
'txFontThicknessF', 'txFuncCode', 'txJust', 'txPerimColor',
@@ -969,53 +846,16 @@ class NCLLexer(RegexLexer):
'vfYCEndSubsetV', 'vfYCEndV', 'vfYCStartIndex', 'vfYCStartSubsetV',
'vfYCStartV', 'vfYCStride', 'vpAnnoManagerId', 'vpClipOn',
'vpHeightF', 'vpKeepAspect', 'vpOn', 'vpUseSegments', 'vpWidthF',
- 'vpXF', 'vpYF', 'wkAntiAlias', 'wkAntiAlias_DocumentWorkstation',
- 'wkAntiAlias_ImageWorkstation', 'wkAntiAlias_XWorkstation',
- 'wkBackgroundColor', 'wkBackgroundColor_Workstation',
- 'wkBackgroundOpacityF', 'wkBackgroundOpacityF_DocumentWorkstation',
- 'wkBackgroundOpacityF_ImageWorkstation',
- 'wkBackgroundOpacityF_XWorkstation', 'wkColorMapLen',
- 'wkColorMapLen_Workstation', 'wkColorMap', 'wkColorMap_Workstation',
- 'wkColorModel', 'wkColorModel_PDFWorkstation',
- 'wkColorModel_PSWorkstation', 'wkDashTableLength',
- 'wkDashTableLength_Workstation', 'wkDefGraphicStyleId',
- 'wkDefGraphicStyleId_Workstation', 'wkDeviceLowerX',
- 'wkDeviceLowerX_DocumentWorkstation', 'wkDeviceLowerX_PDFWorkstation',
- 'wkDeviceLowerX_PSWorkstation', 'wkDeviceLowerY',
- 'wkDeviceLowerY_DocumentWorkstation', 'wkDeviceLowerY_PDFWorkstation',
- 'wkDeviceLowerY_PSWorkstation', 'wkDeviceUpperX',
- 'wkDeviceUpperX_DocumentWorkstation', 'wkDeviceUpperX_PDFWorkstation',
- 'wkDeviceUpperX_PSWorkstation', 'wkDeviceUpperY',
- 'wkDeviceUpperY_DocumentWorkstation', 'wkDeviceUpperY_PDFWorkstation',
- 'wkDeviceUpperY_PSWorkstation', 'wkFileName',
- 'wkFileName_DocumentWorkstation', 'wkFileName_ImageWorkstation',
- 'wkFillTableLength', 'wkFillTableLength_Workstation',
- 'wkForegroundColor', 'wkForegroundColor_Workstation', 'wkFormat',
- 'wkFormat_DocumentWorkstation', 'wkFormat_ImageWorkstation',
- 'wkFullBackground', 'wkFullBackground_PDFWorkstation',
- 'wkFullBackground_PSWorkstation', 'wkGksWorkId',
- 'wkGksWorkId_Workstation', 'wkHeight', 'wkHeight_ImageWorkstation',
- 'wkHeight_XWorkstation', 'wkMarkerTableLength',
- 'wkMarkerTableLength_Workstation', 'wkMetaName',
- 'wkMetaName_NcgmWorkstation', 'wkOrientation',
- 'wkOrientation_PDFWorkstation', 'wkOrientation_PSWorkstation',
- 'wkPDFFileName', 'wkPDFFileName_PDFWorkstation', 'wkPDFFormat',
- 'wkPDFFormat_PDFWorkstation', 'wkPDFResolution',
- 'wkPDFResolution_PDFWorkstation', 'wkPSFileName',
- 'wkPSFileName_PSWorkstation', 'wkPSFormat',
- 'wkPSFormat_PSWorkstation', 'wkPSResolution',
- 'wkPSResolution_PSWorkstation', 'wkPaperHeightF',
- 'wkPaperHeightF_DocumentWorkstation', 'wkPaperHeightF_PDFWorkstation',
- 'wkPaperHeightF_PSWorkstation', 'wkPaperSize',
- 'wkPaperSize_DocumentWorkstation', 'wkPaperSize_PDFWorkstation',
- 'wkPaperSize_PSWorkstation', 'wkPaperWidthF',
- 'wkPaperWidthF_DocumentWorkstation', 'wkPaperWidthF_PDFWorkstation',
- 'wkPaperWidthF_PSWorkstation', 'wkPause', 'wkPause_XWorkstation',
- 'wkTopLevelViews', 'wkTopLevelViews_Workstation', 'wkViews',
- 'wkViews_Workstation', 'wkVisualType', 'wkVisualType_PDFWorkstation',
- 'wkVisualType_PSWorkstation', 'wkWidth', 'wkWidth_ImageWorkstation',
- 'wkWidth_XWorkstation', 'wkWindowId', 'wkWindowId_XWorkstation',
- 'wkXColorMode', 'wkXColorMode_XWorkstation', 'wsCurrentSize',
+ 'vpXF', 'vpYF', 'wkAntiAlias', 'wkBackgroundColor', 'wkBackgroundOpacityF',
+ 'wkColorMapLen', 'wkColorMap', 'wkColorModel', 'wkDashTableLength',
+ 'wkDefGraphicStyleId', 'wkDeviceLowerX', 'wkDeviceLowerY',
+ 'wkDeviceUpperX', 'wkDeviceUpperY', 'wkFileName', 'wkFillTableLength',
+ 'wkForegroundColor', 'wkFormat', 'wkFullBackground', 'wkGksWorkId',
+ 'wkHeight', 'wkMarkerTableLength', 'wkMetaName', 'wkOrientation',
+ 'wkPDFFileName', 'wkPDFFormat', 'wkPDFResolution', 'wkPSFileName',
+ 'wkPSFormat', 'wkPSResolution', 'wkPaperHeightF', 'wkPaperSize',
+ 'wkPaperWidthF', 'wkPause', 'wkTopLevelViews', 'wkViews',
+ 'wkVisualType', 'wkWidth', 'wkWindowId', 'wkXColorMode', 'wsCurrentSize',
'wsMaximumSize', 'wsThresholdSize', 'xyComputeXMax',
'xyComputeXMin', 'xyComputeYMax', 'xyComputeYMin', 'xyCoordData',
'xyCoordDataSpec', 'xyCurveDrawOrder', 'xyDashPattern',
@@ -1037,7 +877,8 @@ class NCLLexer(RegexLexer):
Name.Builtin),
# Booleans
- (r'True|False', Name.Builtin), # Comparing Operators
+ (r'\.(True|False)\.', Name.Builtin),
+ # Comparing Operators
(r'\.(eq|ne|lt|le|gt|ge|not|and|or|xor)\.', Operator.Word),
],
diff --git a/pygments/lexers/nimrod.py b/pygments/lexers/nimrod.py
index 00b849a6..d438c1bf 100644
--- a/pygments/lexers/nimrod.py
+++ b/pygments/lexers/nimrod.py
@@ -3,9 +3,9 @@
pygments.lexers.nimrod
~~~~~~~~~~~~~~~~~~~~~~
- Lexer for the Nimrod language.
+ Lexer for the Nim language (formerly known as Nimrod).
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,15 +20,15 @@ __all__ = ['NimrodLexer']
class NimrodLexer(RegexLexer):
"""
- For `Nimrod <http://nimrod-code.org/>`_ source code.
+ For `Nim <http://nim-lang.org/>`_ source code.
.. versionadded:: 1.5
"""
name = 'Nimrod'
- aliases = ['nimrod', 'nim']
+ aliases = ['nim', 'nimrod']
filenames = ['*.nim', '*.nimrod']
- mimetypes = ['text/x-nimrod']
+ mimetypes = ['text/x-nim']
flags = re.MULTILINE | re.IGNORECASE | re.UNICODE
@@ -43,13 +43,13 @@ class NimrodLexer(RegexLexer):
return "|".join(newWords)
keywords = [
- 'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break',
- 'case', 'cast', 'const', 'continue', 'converter', 'discard',
- 'distinct', 'div', 'elif', 'else', 'end', 'enum', 'except', 'finally',
- 'for', 'generic', 'if', 'implies', 'in', 'yield',
- 'is', 'isnot', 'iterator', 'lambda', 'let', 'macro', 'method',
- 'mod', 'not', 'notin', 'object', 'of', 'or', 'out', 'proc',
- 'ptr', 'raise', 'ref', 'return', 'shl', 'shr', 'template', 'try',
+ 'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break', 'case',
+ 'cast', 'concept', 'const', 'continue', 'converter', 'defer', 'discard',
+ 'distinct', 'div', 'do', 'elif', 'else', 'end', 'enum', 'except',
+ 'export', 'finally', 'for', 'func', 'if', 'in', 'yield', 'interface',
+ 'is', 'isnot', 'iterator', 'let', 'macro', 'method', 'mixin', 'mod',
+ 'not', 'notin', 'object', 'of', 'or', 'out', 'proc', 'ptr', 'raise',
+ 'ref', 'return', 'shared', 'shl', 'shr', 'static', 'template', 'try',
'tuple', 'type', 'when', 'while', 'with', 'without', 'xor'
]
diff --git a/pygments/lexers/nit.py b/pygments/lexers/nit.py
index ab59c4e5..21116499 100644
--- a/pygments/lexers/nit.py
+++ b/pygments/lexers/nit.py
@@ -5,7 +5,7 @@
Lexer for the Nit language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/nix.py b/pygments/lexers/nix.py
index 57f08623..e148c919 100644
--- a/pygments/lexers/nix.py
+++ b/pygments/lexers/nix.py
@@ -5,7 +5,7 @@
Lexers for the NixOS Nix language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/oberon.py b/pygments/lexers/oberon.py
index 51dfdab6..3b5fb3e4 100644
--- a/pygments/lexers/oberon.py
+++ b/pygments/lexers/oberon.py
@@ -5,7 +5,7 @@
Lexers for Oberon family languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/objective.py b/pygments/lexers/objective.py
index fc8e5d17..7807255e 100644
--- a/pygments/lexers/objective.py
+++ b/pygments/lexers/objective.py
@@ -5,7 +5,7 @@
Lexers for Objective-C family languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -298,7 +298,7 @@ class SwiftLexer(RegexLexer):
(r'\s+', Text),
(r'//', Comment.Single, 'comment-single'),
(r'/\*', Comment.Multiline, 'comment-multi'),
- (r'#(if|elseif|else|endif)\b', Comment.Preproc, 'preproc'),
+ (r'#(if|elseif|else|endif|available)\b', Comment.Preproc, 'preproc'),
# Keywords
include('keywords'),
@@ -413,23 +413,26 @@ class SwiftLexer(RegexLexer):
],
'keywords': [
(words((
- 'break', 'case', 'continue', 'default', 'do', 'else',
- 'fallthrough', 'for', 'if', 'in', 'return', 'switch', 'where',
- 'while'), suffix=r'\b'),
+ 'as', 'break', 'case', 'catch', 'continue', 'default', 'defer',
+ 'do', 'else', 'fallthrough', 'for', 'guard', 'if', 'in', 'is',
+ 'repeat', 'return', '#selector', 'switch', 'throw', 'try',
+ 'where', 'while'), suffix=r'\b'),
Keyword),
(r'@availability\([^)]+\)', Keyword.Reserved),
(words((
'associativity', 'convenience', 'dynamic', 'didSet', 'final',
- 'get', 'infix', 'inout', 'lazy', 'left', 'mutating', 'none',
- 'nonmutating', 'optional', 'override', 'postfix', 'precedence',
- 'prefix', 'Protocol', 'required', 'right', 'set', 'Type',
- 'unowned', 'weak', 'willSet', '@availability', '@autoclosure',
- '@noreturn', '@NSApplicationMain', '@NSCopying', '@NSManaged',
- '@objc', '@UIApplicationMain', '@IBAction', '@IBDesignable',
+ 'get', 'indirect', 'infix', 'inout', 'lazy', 'left', 'mutating',
+ 'none', 'nonmutating', 'optional', 'override', 'postfix',
+ 'precedence', 'prefix', 'Protocol', 'required', 'rethrows',
+ 'right', 'set', 'throws', 'Type', 'unowned', 'weak', 'willSet',
+ '@availability', '@autoclosure', '@noreturn',
+ '@NSApplicationMain', '@NSCopying', '@NSManaged', '@objc',
+ '@UIApplicationMain', '@IBAction', '@IBDesignable',
'@IBInspectable', '@IBOutlet'), suffix=r'\b'),
Keyword.Reserved),
(r'(as|dynamicType|false|is|nil|self|Self|super|true|__COLUMN__'
- r'|__FILE__|__FUNCTION__|__LINE__|_)\b', Keyword.Constant),
+ r'|__FILE__|__FUNCTION__|__LINE__|_'
+ r'|#(?:file|line|column|function))\b', Keyword.Constant),
(r'import\b', Keyword.Declaration, 'module'),
(r'(class|enum|extension|struct|protocol)(\s+)([a-zA-Z_]\w*)',
bygroups(Keyword.Declaration, Text, Name.Class)),
diff --git a/pygments/lexers/ooc.py b/pygments/lexers/ooc.py
index b4e8c6db..957b72f1 100644
--- a/pygments/lexers/ooc.py
+++ b/pygments/lexers/ooc.py
@@ -5,7 +5,7 @@
Lexers for the Ooc language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
index afd0fda5..bfce4c3c 100644
--- a/pygments/lexers/other.py
+++ b/pygments/lexers/other.py
@@ -5,7 +5,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -36,5 +36,6 @@ from pygments.lexers.urbi import UrbiscriptLexer
from pygments.lexers.smalltalk import SmalltalkLexer, NewspeakLexer
from pygments.lexers.installers import NSISLexer, RPMSpecLexer
from pygments.lexers.textedit import AwkLexer
+from pygments.lexers.smv import NuSMVLexer
__all__ = []
diff --git a/pygments/lexers/parasail.py b/pygments/lexers/parasail.py
index 812e2923..53088023 100644
--- a/pygments/lexers/parasail.py
+++ b/pygments/lexers/parasail.py
@@ -5,7 +5,7 @@
Lexer for ParaSail.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/parsers.py b/pygments/lexers/parsers.py
index e1b74dee..1f3c9b4d 100644
--- a/pygments/lexers/parsers.py
+++ b/pygments/lexers/parsers.py
@@ -5,7 +5,7 @@
Lexers for parser generators.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/pascal.py b/pygments/lexers/pascal.py
index ce991a77..9aa1ac8f 100644
--- a/pygments/lexers/pascal.py
+++ b/pygments/lexers/pascal.py
@@ -5,7 +5,7 @@
Lexers for Pascal family languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -44,7 +44,7 @@ class DelphiLexer(Lexer):
"""
name = 'Delphi'
aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
- filenames = ['*.pas']
+ filenames = ['*.pas', '*.dpr']
mimetypes = ['text/x-pascal']
TURBO_PASCAL_KEYWORDS = (
diff --git a/pygments/lexers/pawn.py b/pygments/lexers/pawn.py
index f32fdbed..f462a883 100644
--- a/pygments/lexers/pawn.py
+++ b/pygments/lexers/pawn.py
@@ -5,7 +5,7 @@
Lexers for the Pawn languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/perl.py b/pygments/lexers/perl.py
index 8df3c810..4d5ab3b3 100644
--- a/pygments/lexers/perl.py
+++ b/pygments/lexers/perl.py
@@ -5,7 +5,7 @@
Lexers for Perl and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -52,7 +52,7 @@ class PerlLexer(RegexLexer):
(words((
'case', 'continue', 'do', 'else', 'elsif', 'for', 'foreach',
'if', 'last', 'my', 'next', 'our', 'redo', 'reset', 'then',
- 'unless', 'until', 'while', 'use', 'print', 'new', 'BEGIN',
+ 'unless', 'until', 'while', 'print', 'new', 'BEGIN',
'CHECK', 'INIT', 'END', 'return'), suffix=r'\b'),
Keyword),
(r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)',
@@ -94,10 +94,10 @@ class PerlLexer(RegexLexer):
'getservbyport', 'getservent', 'getsockname', 'getsockopt', 'glob', 'gmtime',
'goto', 'grep', 'hex', 'import', 'index', 'int', 'ioctl', 'join', 'keys', 'kill', 'last',
'lc', 'lcfirst', 'length', 'link', 'listen', 'local', 'localtime', 'log', 'lstat',
- 'map', 'mkdir', 'msgctl', 'msgget', 'msgrcv', 'msgsnd', 'my', 'next', 'no', 'oct', 'open',
- 'opendir', 'ord', 'our', 'pack', 'package', 'pipe', 'pop', 'pos', 'printf',
+ 'map', 'mkdir', 'msgctl', 'msgget', 'msgrcv', 'msgsnd', 'my', 'next', 'oct', 'open',
+ 'opendir', 'ord', 'our', 'pack', 'pipe', 'pop', 'pos', 'printf',
'prototype', 'push', 'quotemeta', 'rand', 'read', 'readdir',
- 'readline', 'readlink', 'readpipe', 'recv', 'redo', 'ref', 'rename', 'require',
+ 'readline', 'readlink', 'readpipe', 'recv', 'redo', 'ref', 'rename',
'reverse', 'rewinddir', 'rindex', 'rmdir', 'scalar', 'seek', 'seekdir',
'select', 'semctl', 'semget', 'semop', 'send', 'setgrent', 'sethostent', 'setnetent',
'setpgrp', 'setpriority', 'setprotoent', 'setpwent', 'setservent',
@@ -131,8 +131,14 @@ class PerlLexer(RegexLexer):
(r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
(r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
(r'(q|qq|qw|qr|qx)([\W_])(.|\n)*?\2', String.Other),
- (r'package\s+', Keyword, 'modulename'),
- (r'sub\s+', Keyword, 'funcname'),
+ (r'(package)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
+ bygroups(Keyword, Text, Name.Namespace)),
+ (r'(use|require|no)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
+ bygroups(Keyword, Text, Name.Namespace)),
+ (r'(sub)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (words((
+ 'no', 'package', 'require', 'use'), suffix=r'\b'),
+ Keyword),
(r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
r'!~|&&?|\|\||\.{1,3})', Operator),
(r'[-+/*%=<>&^|!\\~]=?', Operator),
@@ -152,14 +158,12 @@ class PerlLexer(RegexLexer):
(r'[\w:]+', Name.Variable, '#pop'),
],
'name': [
- (r'\w+::', Name.Namespace),
+ (r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*(::)?(?=\s*->)', Name.Namespace, '#pop'),
+ (r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*::', Name.Namespace, '#pop'),
(r'[\w:]+', Name, '#pop'),
(r'[A-Z_]+(?=\W)', Name.Constant, '#pop'),
(r'(?=\W)', Text, '#pop'),
],
- 'modulename': [
- (r'[a-zA-Z_]\w*', Name.Namespace, '#pop')
- ],
'funcname': [
(r'[a-zA-Z_]\w*[!?]?', Name.Function),
(r'\s+', Text),
diff --git a/pygments/lexers/php.py b/pygments/lexers/php.py
index 2421738f..f618b5fd 100644
--- a/pygments/lexers/php.py
+++ b/pygments/lexers/php.py
@@ -5,7 +5,7 @@
Lexers for PHP and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -224,7 +224,7 @@ class PhpLexer(RegexLexer):
String.Interpol)),
(r'(\$\{)(\S+)(\})',
bygroups(String.Interpol, Name.Variable, String.Interpol)),
- (r'[${\\]+', String.Double)
+ (r'[${\\]', String.Double)
],
}
diff --git a/pygments/lexers/praat.py b/pygments/lexers/praat.py
index 9255216d..1a38a9e8 100644
--- a/pygments/lexers/praat.py
+++ b/pygments/lexers/praat.py
@@ -5,7 +5,7 @@
Lexer for Praat
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -168,8 +168,8 @@ class PraatLexer(RegexLexer):
],
'function_call': [
(words(functions_string, suffix=r'\$(?=\s*[:(])'), Name.Function, 'function'),
- (words(functions_array, suffix=r'#(?=\s*[:(])'), Name.Function, 'function'),
- (words(functions_numeric, suffix=r'(?=\s*[:(])'), Name.Function, 'function'),
+ (words(functions_array, suffix=r'#(?=\s*[:(])'), Name.Function, 'function'),
+ (words(functions_numeric, suffix=r'(?=\s*[:(])'), Name.Function, 'function'),
],
'function': [
(r'\s+', Text),
@@ -190,6 +190,7 @@ class PraatLexer(RegexLexer):
include('operator'),
include('number'),
+ (r'[()]', Text),
(r',', Punctuation),
],
'old_arguments': [
@@ -203,6 +204,7 @@ class PraatLexer(RegexLexer):
(r'[^\n]', Text),
],
'number': [
+ (r'\n', Text, '#pop'),
(r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
],
'object_attributes': [
@@ -235,8 +237,8 @@ class PraatLexer(RegexLexer):
(r"'(?=.*')", String.Interpol, 'string_interpolated'),
],
'operator': [
- (r'([+\/*<>=!-]=?|[&*|][&*|]?|\^|<>)', Operator),
- (r'\b(and|or|not|div|mod)\b', Operator.Word),
+ (r'([+\/*<>=!-]=?|[&*|][&*|]?|\^|<>)', Operator),
+ (r'(?<![\w.])(and|or|not|div|mod)(?![\w.])', Operator.Word),
],
'string_interpolated': [
(r'\.?[_a-z][\w.]*[$#]?(?:\[[a-zA-Z0-9,]+\])?(:[0-9]+)?',
@@ -267,9 +269,6 @@ class PraatLexer(RegexLexer):
bygroups(Keyword, Text), 'number'),
(r'(option|button)([ \t]+)',
- bygroups(Keyword, Text), 'number'),
-
- (r'(option|button)([ \t]+)',
bygroups(Keyword, Text), 'string_unquoted'),
(r'(sentence|text)([ \t]+\S+)',
diff --git a/pygments/lexers/prolog.py b/pygments/lexers/prolog.py
index 7d32d7f6..90f9529c 100644
--- a/pygments/lexers/prolog.py
+++ b/pygments/lexers/prolog.py
@@ -5,7 +5,7 @@
Lexers for Prolog and Prolog-like languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py
index 7601afa8..390eafe8 100644
--- a/pygments/lexers/python.py
+++ b/pygments/lexers/python.py
@@ -5,7 +5,7 @@
Lexers for Python and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -116,7 +116,7 @@ class PythonLexer(RegexLexer):
'unichr', 'unicode', 'vars', 'xrange', 'zip'),
prefix=r'(?<!\.)', suffix=r'\b'),
Name.Builtin),
- (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True'
+ (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|cls'
r')\b', Name.Builtin.Pseudo),
(words((
'ArithmeticError', 'AssertionError', 'AttributeError',
@@ -303,7 +303,7 @@ class Python3Lexer(RegexLexer):
'sum', 'super', 'tuple', 'type', 'vars', 'zip'), prefix=r'(?<!\.)',
suffix=r'\b'),
Name.Builtin),
- (r'(?<!\.)(self|Ellipsis|NotImplemented)\b', Name.Builtin.Pseudo),
+ (r'(?<!\.)(self|Ellipsis|NotImplemented|cls)\b', Name.Builtin.Pseudo),
(words((
'ArithmeticError', 'AssertionError', 'AttributeError',
'BaseException', 'BufferError', 'BytesWarning', 'DeprecationWarning',
@@ -362,6 +362,7 @@ class Python3Lexer(RegexLexer):
]
tokens['numbers'] = [
(r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
(r'0[oO][0-7]+', Number.Oct),
(r'0[bB][01]+', Number.Bin),
(r'0[xX][a-fA-F0-9]+', Number.Hex),
diff --git a/pygments/lexers/qvt.py b/pygments/lexers/qvt.py
index f30e4887..f496d600 100644
--- a/pygments/lexers/qvt.py
+++ b/pygments/lexers/qvt.py
@@ -5,7 +5,7 @@
Lexer for QVT Operational language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/r.py b/pygments/lexers/r.py
index 1a47ca26..dce61969 100644
--- a/pygments/lexers/r.py
+++ b/pygments/lexers/r.py
@@ -5,7 +5,7 @@
Lexers for the R/S languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/rdf.py b/pygments/lexers/rdf.py
index 6dd6e8b9..d0f8778a 100644
--- a/pygments/lexers/rdf.py
+++ b/pygments/lexers/rdf.py
@@ -5,7 +5,7 @@
Lexers for semantic web and RDF query languages and markup.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/rebol.py b/pygments/lexers/rebol.py
index b844ad96..f3d00200 100644
--- a/pygments/lexers/rebol.py
+++ b/pygments/lexers/rebol.py
@@ -5,7 +5,7 @@
Lexers for the REBOL and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/resource.py b/pygments/lexers/resource.py
index 4647bef8..f7494904 100644
--- a/pygments/lexers/resource.py
+++ b/pygments/lexers/resource.py
@@ -5,7 +5,7 @@
Lexer for resource definition files.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -81,4 +81,5 @@ class ResourceLexer(RegexLexer):
}
def analyse_text(text):
- return text.startswith('root:table')
+ if text.startswith('root:table'):
+ return 1.0
diff --git a/pygments/lexers/rnc.py b/pygments/lexers/rnc.py
new file mode 100644
index 00000000..2f2aacdd
--- /dev/null
+++ b/pygments/lexers/rnc.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.rnc
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Relax-NG Compact syntax
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Punctuation
+
+__all__ = ['RNCCompactLexer']
+
+
+class RNCCompactLexer(RegexLexer):
+ """
+ For `RelaxNG-compact <http://relaxng.org>`_ syntax.
+
+ .. versionadded:: 2.2
+ """
+
+ name = 'Relax-NG Compact'
+ aliases = ['rnc', 'rng-compact']
+ filenames = ['*.rnc']
+
+ tokens = {
+ 'root': [
+ (r'namespace\b', Keyword.Namespace),
+ (r'(?:default|datatypes)\b', Keyword.Declaration),
+ (r'##.*$', Comment.Preproc),
+ (r'#.*$', Comment.Single),
+ (r'"[^"]*"', String.Double),
+ # TODO single quoted strings and escape sequences outside of
+ # double-quoted strings
+ (r'(?:element|attribute|mixed)\b', Keyword.Declaration, 'variable'),
+ (r'(text\b|xsd:[^ ]+)', Keyword.Type, 'maybe_xsdattributes'),
+ (r'[,?&*=|~]|>>', Operator),
+ (r'[(){}]', Punctuation),
+ (r'.', Text),
+ ],
+
+ # a variable has been declared using `element` or `attribute`
+ 'variable': [
+ (r'[^{]+', Name.Variable),
+ (r'\{', Punctuation, '#pop'),
+ ],
+
+ # after an xsd:<datatype> declaration there may be attributes
+ 'maybe_xsdattributes': [
+ (r'\{', Punctuation, 'xsdattributes'),
+ (r'\}', Punctuation, '#pop'),
+ (r'.', Text),
+ ],
+
+ # attributes take the form { key1 = value1 key2 = value2 ... }
+ 'xsdattributes': [
+ (r'[^ =}]', Name.Attribute),
+ (r'=', Operator),
+ (r'"[^"]*"', String.Double),
+ (r'\}', Punctuation, '#pop'),
+ (r'.', Text),
+ ],
+ }
diff --git a/pygments/lexers/roboconf.py b/pygments/lexers/roboconf.py
index 59755a68..8c7df83d 100644
--- a/pygments/lexers/roboconf.py
+++ b/pygments/lexers/roboconf.py
@@ -5,7 +5,7 @@
Lexers for Roboconf DSL.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/robotframework.py b/pygments/lexers/robotframework.py
index eab06efe..e868127b 100644
--- a/pygments/lexers/robotframework.py
+++ b/pygments/lexers/robotframework.py
@@ -5,7 +5,7 @@
Lexer for Robot Framework.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/ruby.py b/pygments/lexers/ruby.py
index f16416d3..fe750f1a 100644
--- a/pygments/lexers/ruby.py
+++ b/pygments/lexers/ruby.py
@@ -5,7 +5,7 @@
Lexers for Ruby and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/rust.py b/pygments/lexers/rust.py
index 5d1162b8..6914f54d 100644
--- a/pygments/lexers/rust.py
+++ b/pygments/lexers/rust.py
@@ -5,7 +5,7 @@
Lexers for the Rust language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,7 +18,7 @@ __all__ = ['RustLexer']
class RustLexer(RegexLexer):
"""
- Lexer for the Rust programming language (version 1.0).
+ Lexer for the Rust programming language (version 1.10).
.. versionadded:: 1.6
"""
@@ -27,6 +27,35 @@ class RustLexer(RegexLexer):
aliases = ['rust']
mimetypes = ['text/rust']
+ keyword_types = (
+ words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64',
+ 'usize', 'isize', 'f32', 'f64', 'str', 'bool'),
+ suffix=r'\b'),
+ Keyword.Type)
+
+ builtin_types = (words((
+ # Reexported core operators
+ 'Copy', 'Send', 'Sized', 'Sync',
+ 'Drop', 'Fn', 'FnMut', 'FnOnce',
+
+ # Reexported types and traits
+ 'Box',
+ 'ToOwned',
+ 'Clone',
+ 'PartialEq', 'PartialOrd', 'Eq', 'Ord',
+ 'AsRef', 'AsMut', 'Into', 'From',
+ 'Default',
+ 'Iterator', 'Extend', 'IntoIterator',
+ 'DoubleEndedIterator', 'ExactSizeIterator',
+ 'Option',
+ 'Some', 'None',
+ 'Result',
+ 'Ok', 'Err',
+ 'SliceConcatExt',
+ 'String', 'ToString',
+ 'Vec'), suffix=r'\b'),
+ Name.Builtin)
+
tokens = {
'root': [
# rust allows a file to start with a shebang, but if the first line
@@ -49,50 +78,32 @@ class RustLexer(RegexLexer):
(r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
# Keywords
(words((
- 'as', 'box', 'crate', 'do', 'else', 'enum', 'extern', # break and continue are in labels
- 'fn', 'for', 'if', 'impl', 'in', 'loop', 'match', 'mut', 'priv',
- 'proc', 'pub', 'ref', 'return', 'static', 'struct',
- 'trait', 'true', 'type', 'unsafe', 'while'), suffix=r'\b'),
+ 'as', 'box', 'const', 'crate', 'else', 'extern',
+ 'for', 'if', 'impl', 'in', 'loop', 'match', 'move',
+ 'mut', 'pub', 'ref', 'return', 'static', 'super',
+ 'trait', 'unsafe', 'use', 'where', 'while'), suffix=r'\b'),
Keyword),
- (words(('alignof', 'be', 'const', 'offsetof', 'pure', 'sizeof',
- 'typeof', 'once', 'unsized', 'yield'), suffix=r'\b'),
+ (words(('abstract', 'alignof', 'become', 'do', 'final', 'macro',
+ 'offsetof', 'override', 'priv', 'proc', 'pure', 'sizeof',
+ 'typeof', 'unsized', 'virtual', 'yield'), suffix=r'\b'),
Keyword.Reserved),
- (r'(mod|use)\b', Keyword.Namespace),
(r'(true|false)\b', Keyword.Constant),
+ (r'mod\b', Keyword, 'modname'),
(r'let\b', Keyword.Declaration),
- (words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64', 'usize',
- 'isize', 'f32', 'f64', 'str', 'bool'), suffix=r'\b'),
- Keyword.Type),
+ (r'fn\b', Keyword, 'funcname'),
+ (r'(struct|enum|type|union)\b', Keyword, 'typename'),
+ (r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Text, Keyword)),
+ keyword_types,
(r'self\b', Name.Builtin.Pseudo),
# Prelude (taken from Rust’s src/libstd/prelude.rs)
- (words((
- # Reexported core operators
- 'Copy', 'Send', 'Sized', 'Sync',
- 'Drop', 'Fn', 'FnMut', 'FnOnce',
-
- # Reexported functions
- 'drop',
-
- # Reexported types and traits
- 'Box',
- 'ToOwned',
- 'Clone',
- 'PartialEq', 'PartialOrd', 'Eq', 'Ord',
- 'AsRef', 'AsMut', 'Into', 'From',
- 'Default',
- 'Iterator', 'Extend', 'IntoIterator',
- 'DoubleEndedIterator', 'ExactSizeIterator',
- 'Option',
- 'Some', 'None',
- 'Result',
- 'Ok', 'Err',
- 'SliceConcatExt',
- 'String', 'ToString',
- 'Vec',
- ), suffix=r'\b'),
- Name.Builtin),
+ builtin_types,
+ # Path seperators, so types don't catch them.
+ (r'::\b', Text),
+ # Types in positions.
+ (r'(?::|->)', Text, 'typename'),
# Labels
- (r'(break|continue)(\s*)(\'[A-Za-z_]\w*)?', bygroups(Keyword, Text.Whitespace, Name.Label)),
+ (r'(break|continue)(\s*)(\'[A-Za-z_]\w*)?',
+ bygroups(Keyword, Text.Whitespace, Name.Label)),
# Character Literal
(r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
@@ -108,7 +119,8 @@ class RustLexer(RegexLexer):
(r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
# Decimal Literal
(r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float, 'number_lit'),
+ r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float,
+ 'number_lit'),
(r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
# String Literal
(r'b"', String, 'bytestring'),
@@ -148,6 +160,24 @@ class RustLexer(RegexLexer):
(r'\*/', String.Doc, '#pop'),
(r'[*/]', String.Doc),
],
+ 'modname': [
+ (r'\s+', Text),
+ (r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
+ default('#pop'),
+ ],
+ 'funcname': [
+ (r'\s+', Text),
+ (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
+ default('#pop'),
+ ],
+ 'typename': [
+ (r'\s+', Text),
+ (r'&', Keyword.Pseudo),
+ builtin_types,
+ keyword_types,
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ default('#pop'),
+ ],
'number_lit': [
(r'[ui](8|16|32|64|size)', Keyword, '#pop'),
(r'f(32|64)', Keyword, '#pop'),
diff --git a/pygments/lexers/sas.py b/pygments/lexers/sas.py
new file mode 100644
index 00000000..264ba51f
--- /dev/null
+++ b/pygments/lexers/sas.py
@@ -0,0 +1,228 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.sas
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for SAS.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Comment, Keyword, Name, Number, String, Text, \
+ Other, Generic
+
+__all__ = ['SASLexer']
+
+class SASLexer(RegexLexer):
+ """
+ For `SAS <http://www.sas.com/>`_ files.
+
+ .. versionadded:: 2.2
+ """
+ # Syntax from syntax/sas.vim by James Kidd <james.kidd@covance.com>
+
+ name = 'SAS'
+ aliases = ['sas']
+ filenames = ['*.SAS', '*.sas']
+ mimetypes = ['text/x-sas', 'text/sas', 'application/x-sas']
+ flags = re.IGNORECASE | re.MULTILINE
+
+ builtins_macros = (
+ "bquote", "nrbquote", "cmpres", "qcmpres", "compstor", "datatyp",
+ "display", "do", "else", "end", "eval", "global", "goto", "if",
+ "index", "input", "keydef", "label", "left", "length", "let",
+ "local", "lowcase", "macro", "mend", "nrquote",
+ "nrstr", "put", "qleft", "qlowcase", "qscan",
+ "qsubstr", "qsysfunc", "qtrim", "quote", "qupcase", "scan",
+ "str", "substr", "superq", "syscall", "sysevalf", "sysexec",
+ "sysfunc", "sysget", "syslput", "sysprod", "sysrc", "sysrput",
+ "then", "to", "trim", "unquote", "until", "upcase", "verify",
+ "while", "window"
+ )
+
+ builtins_conditionals = (
+ "do", "if", "then", "else", "end", "until", "while"
+ )
+
+ builtins_statements = (
+ "abort", "array", "attrib", "by", "call", "cards", "cards4",
+ "catname", "continue", "datalines", "datalines4", "delete", "delim",
+ "delimiter", "display", "dm", "drop", "endsas", "error", "file",
+ "filename", "footnote", "format", "goto", "in", "infile", "informat",
+ "input", "keep", "label", "leave", "length", "libname", "link",
+ "list", "lostcard", "merge", "missing", "modify", "options", "output",
+ "out", "page", "put", "redirect", "remove", "rename", "replace",
+ "retain", "return", "select", "set", "skip", "startsas", "stop",
+ "title", "update", "waitsas", "where", "window", "x", "systask"
+ )
+
+ builtins_sql = (
+ "add", "and", "alter", "as", "cascade", "check", "create",
+ "delete", "describe", "distinct", "drop", "foreign", "from",
+ "group", "having", "index", "insert", "into", "in", "key", "like",
+ "message", "modify", "msgtype", "not", "null", "on", "or",
+ "order", "primary", "references", "reset", "restrict", "select",
+ "set", "table", "unique", "update", "validate", "view", "where"
+ )
+
+ builtins_functions = (
+ "abs", "addr", "airy", "arcos", "arsin", "atan", "attrc",
+ "attrn", "band", "betainv", "blshift", "bnot", "bor",
+ "brshift", "bxor", "byte", "cdf", "ceil", "cexist", "cinv",
+ "close", "cnonct", "collate", "compbl", "compound",
+ "compress", "cos", "cosh", "css", "curobs", "cv", "daccdb",
+ "daccdbsl", "daccsl", "daccsyd", "dacctab", "dairy", "date",
+ "datejul", "datepart", "datetime", "day", "dclose", "depdb",
+ "depdbsl", "depsl", "depsyd",
+ "deptab", "dequote", "dhms", "dif", "digamma",
+ "dim", "dinfo", "dnum", "dopen", "doptname", "doptnum",
+ "dread", "dropnote", "dsname", "erf", "erfc", "exist", "exp",
+ "fappend", "fclose", "fcol", "fdelete", "fetch", "fetchobs",
+ "fexist", "fget", "fileexist", "filename", "fileref",
+ "finfo", "finv", "fipname", "fipnamel", "fipstate", "floor",
+ "fnonct", "fnote", "fopen", "foptname", "foptnum", "fpoint",
+ "fpos", "fput", "fread", "frewind", "frlen", "fsep", "fuzz",
+ "fwrite", "gaminv", "gamma", "getoption", "getvarc", "getvarn",
+ "hbound", "hms", "hosthelp", "hour", "ibessel", "index",
+ "indexc", "indexw", "input", "inputc", "inputn", "int",
+ "intck", "intnx", "intrr", "irr", "jbessel", "juldate",
+ "kurtosis", "lag", "lbound", "left", "length", "lgamma",
+ "libname", "libref", "log", "log10", "log2", "logpdf", "logpmf",
+ "logsdf", "lowcase", "max", "mdy", "mean", "min", "minute",
+ "mod", "month", "mopen", "mort", "n", "netpv", "nmiss",
+ "normal", "note", "npv", "open", "ordinal", "pathname",
+ "pdf", "peek", "peekc", "pmf", "point", "poisson", "poke",
+ "probbeta", "probbnml", "probchi", "probf", "probgam",
+ "probhypr", "probit", "probnegb", "probnorm", "probt",
+ "put", "putc", "putn", "qtr", "quote", "ranbin", "rancau",
+ "ranexp", "rangam", "range", "rank", "rannor", "ranpoi",
+ "rantbl", "rantri", "ranuni", "repeat", "resolve", "reverse",
+ "rewind", "right", "round", "saving", "scan", "sdf", "second",
+ "sign", "sin", "sinh", "skewness", "soundex", "spedis",
+ "sqrt", "std", "stderr", "stfips", "stname", "stnamel",
+ "substr", "sum", "symget", "sysget", "sysmsg", "sysprod",
+ "sysrc", "system", "tan", "tanh", "time", "timepart", "tinv",
+ "tnonct", "today", "translate", "tranwrd", "trigamma",
+ "trim", "trimn", "trunc", "uniform", "upcase", "uss", "var",
+ "varfmt", "varinfmt", "varlabel", "varlen", "varname",
+ "varnum", "varray", "varrayx", "vartype", "verify", "vformat",
+ "vformatd", "vformatdx", "vformatn", "vformatnx", "vformatw",
+ "vformatwx", "vformatx", "vinarray", "vinarrayx", "vinformat",
+ "vinformatd", "vinformatdx", "vinformatn", "vinformatnx",
+ "vinformatw", "vinformatwx", "vinformatx", "vlabel",
+ "vlabelx", "vlength", "vlengthx", "vname", "vnamex", "vtype",
+ "vtypex", "weekday", "year", "yyq", "zipfips", "zipname",
+ "zipnamel", "zipstate"
+ )
+
+ tokens = {
+ 'root': [
+ include('comments'),
+ include('proc-data'),
+ include('cards-datalines'),
+ include('logs'),
+ include('general'),
+ (r'.', Text),
+ ],
+ # SAS is multi-line regardless, but * is ended by ;
+ 'comments': [
+ (r'^\s*\*.*?;', Comment),
+ (r'/\*.*?\*/', Comment),
+ (r'^\s*\*(.|\n)*?;', Comment.Multiline),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ ],
+ # Special highlight for proc, data, quit, run
+ 'proc-data': [
+ (r'(^|;)\s*(proc [a-zA-Z0-9_]+|data|run|quit)[\s;\n]',
+ Keyword.Reserved),
+ ],
+ # Special highlight cards and datalines
+ 'cards-datalines': [
+ (r'^\s*(datalines|cards)\s*;\s*$', Keyword, 'data'),
+ ],
+ 'data': [
+ (r'(.|\n)*^\s*;\s*$', Other, '#pop'),
+ ],
+ # Special highlight for put NOTE|ERROR|WARNING (order matters)
+ 'logs': [
+ (r'\n?^\s*%?put ', Keyword, 'log-messages'),
+ ],
+ 'log-messages': [
+ (r'NOTE(:|-).*', Generic, '#pop'),
+ (r'WARNING(:|-).*', Generic.Emph, '#pop'),
+ (r'ERROR(:|-).*', Generic.Error, '#pop'),
+ (r'(?!(WARNING|NOTE|ERROR))+', Text, '#pop'),
+ include('general'),
+ ],
+ 'general': [
+ include('keywords'),
+ include('vars-strings'),
+ include('special'),
+ include('numbers'),
+ ],
+ # Keywords, statements, functions, macros
+ 'keywords': [
+ (words(builtins_statements,
+ prefix = r'\b',
+ suffix = r'\b'),
+ Keyword),
+ (words(builtins_sql,
+ prefix = r'\b',
+ suffix = r'\b'),
+ Keyword),
+ (words(builtins_conditionals,
+ prefix = r'\b',
+ suffix = r'\b'),
+ Keyword),
+ (words(builtins_macros,
+ prefix = r'%',
+ suffix = r'\b'),
+ Name.Builtin),
+ (words(builtins_functions,
+ prefix = r'\b',
+ suffix = r'\('),
+ Name.Builtin),
+ ],
+ # Strings and user-defined variables and macros (order matters)
+ 'vars-strings': [
+ (r'&[a-zA-Z_][a-zA-Z0-9_]{0,31}\.?', Name.Variable),
+ (r'%[a-zA-Z_][a-zA-Z0-9_]{0,31}', Name.Function),
+ (r'\'', String, 'string_squote'),
+ (r'"', String, 'string_dquote'),
+ ],
+ 'string_squote': [
+ ('\'', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape),
+ # AFAIK, macro variables are not evaluated in single quotes
+ # (r'&', Name.Variable, 'validvar'),
+ (r'[^$\'\\]+', String),
+ (r'[$\'\\]', String),
+ ],
+ 'string_dquote': [
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape),
+ (r'&', Name.Variable, 'validvar'),
+ (r'[^$&"\\]+', String),
+ (r'[$"\\]', String),
+ ],
+ 'validvar': [
+ (r'[a-zA-Z_][a-zA-Z0-9_]{0,31}\.?', Name.Variable, '#pop'),
+ ],
+ # SAS numbers and special variables
+ 'numbers': [
+ (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[i]?\b',
+ Number),
+ ],
+ 'special': [
+ (r'(null|missing|_all_|_automatic_|_character_|_n_|'
+ r'_infile_|_name_|_null_|_numeric_|_user_|_webout_)',
+ Keyword.Constant),
+ ],
+ # 'operators': [
+ # (r'(-|=|<=|>=|<|>|<>|&|!=|'
+ # r'\||\*|\+|\^|/|!|~|~=)', Operator)
+ # ],
+ }
diff --git a/pygments/lexers/scripting.py b/pygments/lexers/scripting.py
index ac0f7533..b3af606e 100644
--- a/pygments/lexers/scripting.py
+++ b/pygments/lexers/scripting.py
@@ -5,7 +5,7 @@
Lexer for scripting and embedded languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -50,36 +50,47 @@ class LuaLexer(RegexLexer):
filenames = ['*.lua', '*.wlua']
mimetypes = ['text/x-lua', 'application/x-lua']
+ _comment_multiline = r'(?:--\[(?P<level>=*)\[[\w\W]*?\](?P=level)\])'
+ _comment_single = r'(?:--.*$)'
+ _space = r'(?:\s+)'
+ _s = r'(?:%s|%s|%s)' % (_comment_multiline, _comment_single, _space)
+ _name = r'(?:[^\W\d]\w*)'
+
tokens = {
'root': [
- # lua allows a file to start with a shebang
- (r'#!(.*?)$', Comment.Preproc),
+ # Lua allows a file to start with a shebang.
+ (r'#!.*', Comment.Preproc),
default('base'),
],
+ 'ws': [
+ (_comment_multiline, Comment.Multiline),
+ (_comment_single, Comment.Single),
+ (_space, Text),
+ ],
'base': [
- (r'(?s)--\[(=*)\[.*?\]\1\]', Comment.Multiline),
- ('--.*$', Comment.Single),
+ include('ws'),
+ (r'(?i)0x[\da-f]*(\.[\da-f]*)?(p[+-]?\d+)?', Number.Hex),
(r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
(r'(?i)\d+e[+-]?\d+', Number.Float),
- ('(?i)0x[0-9a-f]*', Number.Hex),
(r'\d+', Number.Integer),
- (r'\n', Text),
- (r'[^\S\n]', Text),
# multiline strings
(r'(?s)\[(=*)\[.*?\]\1\]', String),
- (r'(==|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#])', Operator),
+ (r'::', Punctuation, 'label'),
+ (r'\.{3}', Punctuation),
+ (r'[=<>|~&+\-*/%#^]+|\.\.', Operator),
(r'[\[\]{}().,:;]', Punctuation),
(r'(and|or|not)\b', Operator.Word),
('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
- r'while)\b', Keyword),
+ r'while)\b', Keyword.Reserved),
+ (r'goto\b', Keyword.Reserved, 'goto'),
(r'(local)\b', Keyword.Declaration),
(r'(true|false|nil)\b', Keyword.Constant),
- (r'(function)\b', Keyword, 'funcname'),
+ (r'(function)\b', Keyword.Reserved, 'funcname'),
(r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
@@ -88,31 +99,38 @@ class LuaLexer(RegexLexer):
],
'funcname': [
- (r'\s+', Text),
- ('(?:([A-Za-z_]\w*)(\.))?([A-Za-z_]\w*)',
- bygroups(Name.Class, Punctuation, Name.Function), '#pop'),
+ include('ws'),
+ (r'[.:]', Punctuation),
+ (r'%s(?=%s*[.:])' % (_name, _s), Name.Class),
+ (_name, Name.Function, '#pop'),
# inline function
('\(', Punctuation, '#pop'),
],
- # if I understand correctly, every character is valid in a lua string,
- # so this state is only for later corrections
- 'string': [
- ('.', String)
+ 'goto': [
+ include('ws'),
+ (_name, Name.Label, '#pop'),
+ ],
+
+ 'label': [
+ include('ws'),
+ (r'::', Punctuation, '#pop'),
+ (_name, Name.Label),
],
'stringescape': [
- (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
+ (r'\\([abfnrtv\\"\']|[\r\n]{1,2}|z\s*|x[0-9a-fA-F]{2}|\d{1,3}|'
+ r'u\{[0-9a-fA-F]+\})', String.Escape),
],
'sqs': [
- ("'", String, '#pop'),
- include('string')
+ (r"'", String.Single, '#pop'),
+ (r"[^\\']+", String.Single),
],
'dqs': [
- ('"', String, '#pop'),
- include('string')
+ (r'"', String.Double, '#pop'),
+ (r'[^\\"]+', String.Double),
]
}
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
index 4145939e..7c2c3743 100644
--- a/pygments/lexers/shell.py
+++ b/pygments/lexers/shell.py
@@ -5,7 +5,7 @@
Lexers for various shells.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -27,16 +27,17 @@ line_re = re.compile('.*?\n')
class BashLexer(RegexLexer):
"""
- Lexer for (ba|k|)sh shell scripts.
+ Lexer for (ba|k|z|)sh shell scripts.
.. versionadded:: 0.6
"""
name = 'Bash'
- aliases = ['bash', 'sh', 'ksh', 'shell']
+ aliases = ['bash', 'sh', 'ksh', 'zsh', 'shell']
filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
- '*.exheres-0', '*.exlib',
- '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD']
+ '*.exheres-0', '*.exlib', '*.zsh',
+ '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc',
+ 'PKGBUILD']
mimetypes = ['application/x-sh', 'application/x-shellscript']
tokens = {
@@ -50,7 +51,7 @@ class BashLexer(RegexLexer):
(r'\$\(\(', Keyword, 'math'),
(r'\$\(', Keyword, 'paren'),
(r'\$\{#?', String.Interpol, 'curly'),
- (r'\$[a-fA-F_][a-fA-F0-9_]*', Name.Variable), # user variable
+ (r'\$[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable), # user variable
(r'\$(?:\d+|[#$?!_*@-])', Name.Variable), # builtin
(r'\$', Text),
],
@@ -68,7 +69,7 @@ class BashLexer(RegexLexer):
(r'\A#!.+\n', Comment.Hashbang),
(r'#.*\n', Comment.Single),
(r'\\[\w\W]', String.Escape),
- (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+ (r'(\b\w+)(\s*)(\+?=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]', Operator),
(r'<<<', Operator), # here-string
(r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
@@ -83,7 +84,7 @@ class BashLexer(RegexLexer):
(r'&', Punctuation),
(r'\|', Punctuation),
(r'\s+', Text),
- (r'\d+(?= |\Z)', Number),
+ (r'\d+\b', Number),
(r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
(r'<', Text),
],
@@ -137,11 +138,15 @@ class ShellSessionBaseLexer(Lexer):
pos = 0
curcode = ''
insertions = []
+ backslash_continuation = False
for match in line_re.finditer(text):
line = match.group()
m = re.match(self._ps1rgx, line)
- if m:
+ if backslash_continuation:
+ curcode += line
+ backslash_continuation = curcode.endswith('\\\n')
+ elif m:
# To support output lexers (say diff output), the output
# needs to be broken by prompts whenever the output lexer
# changes.
@@ -151,10 +156,12 @@ class ShellSessionBaseLexer(Lexer):
insertions.append((len(curcode),
[(0, Generic.Prompt, m.group(1))]))
curcode += m.group(2)
+ backslash_continuation = curcode.endswith('\\\n')
elif line.startswith(self._ps2):
insertions.append((len(curcode),
[(0, Generic.Prompt, line[:len(self._ps2)])]))
curcode += line[len(self._ps2):]
+ backslash_continuation = curcode.endswith('\\\n')
else:
if insertions:
toks = innerlexer.get_tokens_unprocessed(curcode)
@@ -214,16 +221,16 @@ class BatchLexer(RegexLexer):
(_nl, _punct, _ws, _nl))
_number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator
_opword = r'(?:equ|geq|gtr|leq|lss|neq)'
- _string = r'(?:"[^%s"]*"?)' % _nl
+ _string = r'(?:"[^%s"]*(?:"|(?=[%s])))' % (_nl, _nl)
_variable = (r'(?:(?:%%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
r'[^%%:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%%%s^]|'
r'\^[^%%%s])[^=%s]*=(?:[^%%%s^]|\^[^%%%s])*)?)?%%))|'
r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' %
(_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl))
- _core_token = r'(?:(?:(?:\^[%s]?)?[^%s%s%s])+)' % (_nl, _nl, _punct, _ws)
- _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^%s%s%s)])+)' % (_nl, _nl,
- _punct, _ws)
+ _core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s%s])+)' % (_nl, _nl, _punct, _ws)
+ _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s%s)])+)' % (_nl, _nl,
+ _punct, _ws)
_token = r'(?:[%s]+|%s)' % (_punct, _core_token)
_token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound)
_stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
@@ -452,9 +459,9 @@ class BatchLexer(RegexLexer):
bygroups(String.Double, using(this, state='string'), Text,
Punctuation)),
(r'"', String.Double, ('#pop', 'for2', 'string')),
- (r"('(?:%s|[\w\W])*?')([%s%s]*)(\))" % (_variable, _nl, _ws),
+ (r"('(?:%%%%|%s|[\w\W])*?')([%s%s]*)(\))" % (_variable, _nl, _ws),
bygroups(using(this, state='sqstring'), Text, Punctuation)),
- (r'(`(?:%s|[\w\W])*?`)([%s%s]*)(\))' % (_variable, _nl, _ws),
+ (r'(`(?:%%%%|%s|[\w\W])*?`)([%s%s]*)(\))' % (_variable, _nl, _ws),
bygroups(using(this, state='bqstring'), Text, Punctuation)),
include('for2')
],
@@ -472,13 +479,16 @@ class BatchLexer(RegexLexer):
using(this, state='variable')), '#pop'),
(r'(exist%s)(%s%s)' % (_token_terminator, _space, _stoken),
bygroups(Keyword, using(this, state='text')), '#pop'),
- (r'(%s%s?)(==)(%s?%s)' % (_stoken, _space, _space, _stoken),
- bygroups(using(this, state='text'), Operator,
- using(this, state='text')), '#pop'),
(r'(%s%s)(%s)(%s%s)' % (_number, _space, _opword, _space, _number),
bygroups(using(this, state='arithmetic'), Operator.Word,
using(this, state='arithmetic')), '#pop'),
- (r'(%s%s)(%s)(%s%s)' % (_stoken, _space, _opword, _space, _stoken),
+ (_stoken, using(this, state='text'), ('#pop', 'if2')),
+ ],
+ 'if2': [
+ (r'(%s?)(==)(%s?%s)' % (_space, _space, _stoken),
+ bygroups(using(this, state='text'), Operator,
+ using(this, state='text')), '#pop'),
+ (r'(%s)(%s)(%s%s)' % (_space, _opword, _space, _stoken),
bygroups(using(this, state='text'), Operator.Word,
using(this, state='text')), '#pop')
],
diff --git a/pygments/lexers/smalltalk.py b/pygments/lexers/smalltalk.py
index ebeb6320..79078b66 100644
--- a/pygments/lexers/smalltalk.py
+++ b/pygments/lexers/smalltalk.py
@@ -5,7 +5,7 @@
Lexers for Smalltalk and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/smv.py b/pygments/lexers/smv.py
new file mode 100644
index 00000000..529a3814
--- /dev/null
+++ b/pygments/lexers/smv.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.smv
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the SMV languages.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Comment, Generic, Keyword, Name, Number, \
+ Operator, Punctuation, Text
+
+__all__ = ['NuSMVLexer']
+
+
+class NuSMVLexer(RegexLexer):
+ """
+ Lexer for the NuSMV language.
+ """
+
+ name = 'NuSMV'
+ aliases = ['nusmv']
+ filenames = ['*.smv']
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ # Comments
+ (r'(?s)\/\-\-.*?\-\-/', Comment),
+ (r'--.*\n', Comment),
+
+ # Reserved
+ (words(('MODULE','DEFINE','MDEFINE','CONSTANTS','VAR','IVAR',
+ 'FROZENVAR','INIT','TRANS','INVAR','SPEC','CTLSPEC','LTLSPEC',
+ 'PSLSPEC','COMPUTE','NAME','INVARSPEC','FAIRNESS','JUSTICE',
+ 'COMPASSION','ISA','ASSIGN','CONSTRAINT','SIMPWFF','CTLWFF',
+ 'LTLWFF','PSLWFF','COMPWFF','IN','MIN','MAX','MIRROR','PRED',
+ 'PREDICATES'), suffix=r'(?![\w$#-])'), Keyword.Declaration),
+ (r'process(?![\w$#-])', Keyword),
+ (words(('array','of','boolean','integer','real','word'),
+ suffix=r'(?![\w$#-])'), Keyword.Type),
+ (words(('case','esac'), suffix=r'(?![\w$#-])'), Keyword),
+ (words(('word1','bool','signed','unsigned','extend','resize',
+ 'sizeof','uwconst','swconst','init','self','count','abs','max',
+ 'min'), suffix=r'(?![\w$#-])'), Name.Builtin),
+ (words(('EX','AX','EF','AF','EG','AG','E','F','O','G','H','X','Y',
+ 'Z','A','U','S','V','T','BU','EBF','ABF','EBG','ABG','next',
+ 'mod','union','in','xor','xnor'), suffix=r'(?![\w$#-])'),
+ Operator.Word),
+ (words(('TRUE','FALSE'), suffix=r'(?![\w$#-])'), Keyword.Constant),
+
+ # Names
+ (r'[a-zA-Z_][\w$#-]*', Name.Variable),
+
+ # Operators
+ (r':=', Operator),
+ (r'[&\|\+\-\*/<>!=]', Operator),
+
+ # Literals
+ (r'\-?\d+\b', Number.Integer),
+ (r'0[su][bB]\d*_[01_]+', Number.Bin),
+ (r'0[su][oO]\d*_[01234567_]+', Number.Oct),
+ (r'0[su][dD]\d*_[\d_]+', Number.Dec),
+ (r'0[su][hH]\d*_[\da-fA-F_]+', Number.Hex),
+
+ # Whitespace, punctuation and the rest
+ (r'\s+', Text.Whitespace),
+ (r'[\(\)\[\]\{\};\?:\.,]', Punctuation),
+ (r'.', Generic.Error),
+ ]
+ }
+
diff --git a/pygments/lexers/snobol.py b/pygments/lexers/snobol.py
index e4178f9c..f6e12fd2 100644
--- a/pygments/lexers/snobol.py
+++ b/pygments/lexers/snobol.py
@@ -5,7 +5,7 @@
Lexers for the SNOBOL language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/special.py b/pygments/lexers/special.py
index d3a168e7..6e076b0c 100644
--- a/pygments/lexers/special.py
+++ b/pygments/lexers/special.py
@@ -5,7 +5,7 @@
Special lexers.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -27,10 +27,13 @@ class TextLexer(Lexer):
aliases = ['text']
filenames = ['*.txt']
mimetypes = ['text/plain']
+ priority = 0.01
def get_tokens_unprocessed(self, text):
yield 0, Text, text
+ def analyse_text(text):
+ return TextLexer.priority
_ttype_cache = {}
diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py
index 7c06226b..7507c0fc 100644
--- a/pygments/lexers/sql.py
+++ b/pygments/lexers/sql.py
@@ -34,24 +34,26 @@
The ``tests/examplefiles`` contains a few test files with data to be
parsed by these lexers.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
-from pygments.token import Punctuation, \
+from pygments.token import Punctuation, Whitespace, Error, \
Text, Comment, Operator, Keyword, Name, String, Number, Generic
from pygments.lexers import get_lexer_by_name, ClassNotFound
from pygments.util import iteritems
from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
PSEUDO_TYPES, PLPGSQL_KEYWORDS
+from pygments.lexers import _tsql_builtins
__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
- 'SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer', 'RqlLexer']
+ 'SqlLexer', 'TransactSqlLexer', 'MySqlLexer',
+ 'SqliteConsoleLexer', 'RqlLexer']
line_re = re.compile('.*?\n')
@@ -151,7 +153,7 @@ class PostgresLexer(PostgresBase, RegexLexer):
tokens = {
'root': [
(r'\s+', Text),
- (r'--.*?\n', Comment.Single),
+ (r'--.*\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'(' + '|'.join(s.replace(" ", "\s+")
for s in DATATYPES + PSEUDO_TYPES)
@@ -378,7 +380,7 @@ class SqlLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Text),
- (r'--.*?\n', Comment.Single),
+ (r'--.*\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(words((
'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER', 'AGGREGATE',
@@ -479,6 +481,62 @@ class SqlLexer(RegexLexer):
}
+class TransactSqlLexer(RegexLexer):
+ """
+ Transact-SQL (T-SQL) is Microsoft's and Sybase's proprietary extension to
+ SQL.
+
+    The list of keywords includes ODBC and keywords reserved for future use.
+ """
+
+ name = 'Transact-SQL'
+ aliases = ['tsql', 't-sql']
+ filenames = ['*.sql']
+ mimetypes = ['text/x-tsql']
+
+ # Use re.UNICODE to allow non ASCII letters in names.
+ flags = re.IGNORECASE | re.UNICODE
+ tokens = {
+ 'root': [
+ (r'\s+', Whitespace),
+ (r'--(?m).*?$\n?', Comment.Single),
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (words(_tsql_builtins.OPERATORS), Operator),
+ (words(_tsql_builtins.OPERATOR_WORDS, suffix=r'\b'), Operator.Word),
+ (words(_tsql_builtins.TYPES, suffix=r'\b'), Name.Class),
+ (words(_tsql_builtins.FUNCTIONS, suffix=r'\b'), Name.Function),
+ (r'(goto)(\s+)(\w+\b)', bygroups(Keyword, Whitespace, Name.Label)),
+ (words(_tsql_builtins.KEYWORDS, suffix=r'\b'), Keyword),
+ (r'(\[)([^]]+)(\])', bygroups(Operator, Name, Operator)),
+ (r'0x[0-9a-f]+', Number.Hex),
+ # Float variant 1, for example: 1., 1.e2, 1.2e3
+ (r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float),
+ # Float variant 2, for example: .1, .1e2
+ (r'\.[0-9]+(e[+-]?[0-9]+)?', Number.Float),
+ # Float variant 3, for example: 123e45
+ (r'[0-9]+e[+-]?[0-9]+', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r"'(''|[^'])*'", String.Single),
+ (r'"(""|[^"])*"', String.Symbol),
+ (r'[;(),.]', Punctuation),
+ # Below we use \w even for the first "real" character because
+ # tokens starting with a digit have already been recognized
+ # as Number above.
+ (r'@@\w+', Name.Builtin),
+ (r'@\w+', Name.Variable),
+ (r'(\w+)(:)', bygroups(Name.Label, Punctuation)),
+ (r'#?#?\w+', Name), # names for temp tables and anything else
+ (r'\?', Name.Variable.Magic), # parameter for prepared statements
+ ],
+ 'multiline-comments': [
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[^/*]+', Comment.Multiline),
+ (r'[/*]', Comment.Multiline)
+ ]
+ }
+
+
class MySqlLexer(RegexLexer):
"""
Special lexer for MySQL.
@@ -492,7 +550,7 @@ class MySqlLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Text),
- (r'(#|--\s+).*?\n', Comment.Single),
+ (r'(#|--\s+).*\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'[0-9]+', Number.Integer),
(r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float),
diff --git a/pygments/lexers/stata.py b/pygments/lexers/stata.py
new file mode 100644
index 00000000..d3d87ed7
--- /dev/null
+++ b/pygments/lexers/stata.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.stata
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Stata
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Comment, Keyword, Name, Number, \
+ String, Text, Operator
+
+from pygments.lexers._stata_builtins import builtins_base, builtins_functions
+
+__all__ = ['StataLexer']
+
+class StataLexer(RegexLexer):
+ """
+ For `Stata <http://www.stata.com/>`_ do files.
+
+ .. versionadded:: 2.2
+ """
+ # Syntax based on
+ # - http://fmwww.bc.edu/RePEc/bocode/s/synlightlist.ado
+ # - http://github.com/isagalaev/highlight.js/blob/master/src/languages/stata.js
+ # - http://github.com/jpitblado/vim-stata/blob/master/syntax/stata.vim
+
+ name = 'Stata'
+ aliases = ['stata', 'do']
+ filenames = ['*.do', '*.ado']
+ mimetypes = ['text/x-stata', 'text/stata', 'application/x-stata']
+
+ tokens = {
+ 'root': [
+ include('comments'),
+ include('vars-strings'),
+ include('numbers'),
+ include('keywords'),
+ (r'.', Text),
+ ],
+ # Global and local macros; regular and special strings
+ 'vars-strings': [
+ (r'\$[a-zA-Z_0-9\{]', Name.Variable.Global, 'var_validglobal'),
+ (r'`[a-zA-Z_0-9]{0,31}\'', Name.Variable),
+ (r'"', String, 'string_dquote'),
+ (r'`"', String, 'string_mquote'),
+ ],
+ # For either string type, highlight macros as macros
+ 'string_dquote': [
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape),
+ (r'\$', Name.Variable.Global, 'var_validglobal'),
+ (r'`', Name.Variable, 'var_validlocal'),
+ (r'[^$\$`"\\]+', String),
+ (r'[$"\\]', String),
+ ],
+ 'string_mquote': [
+ (r'"\'', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape),
+ (r'\$', Name.Variable.Global, 'var_validglobal'),
+ (r'`', Name.Variable, 'var_validlocal'),
+ (r'[^$\$`"\\]+', String),
+ (r'[$"\\]', String),
+ ],
+ 'var_validglobal': [
+ (r'\{?[a-zA-Z0-9_]{0,32}\}?', Name.Variable.Global, '#pop'),
+ ],
+ 'var_validlocal': [
+ (r'[a-zA-Z0-9_]{0,31}\'', Name.Variable, '#pop'),
+ ],
+ # * only OK at line start, // OK anywhere
+ 'comments': [
+ (r'^\s*\*.*$', Comment),
+ (r'//.*', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ ],
+ # Built in functions and statements
+ 'keywords': [
+ (words(builtins_functions, prefix = r'\b', suffix = r'\('),
+ Name.Function),
+ (words(builtins_base, prefix = r'(^\s*|\s)', suffix = r'\b'),
+ Keyword),
+ ],
+ # http://www.stata.com/help.cgi?operators
+ 'operators': [
+ (r'-|==|<=|>=|<|>|&|!=', Operator),
+ (r'\*|\+|\^|/|!|~|==|~=', Operator)
+ ],
+ # Stata numbers
+ 'numbers': [
+ # decimal number
+ (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[i]?\b',
+ Number),
+ ],
+ # Stata formats
+ 'format': [
+ (r'%-?\d{1,2}(\.\d{1,2})?[gfe]c?', Name.Variable),
+ (r'%(21x|16H|16L|8H|8L)', Name.Variable),
+ (r'%-?(tc|tC|td|tw|tm|tq|th|ty|tg).{0,32}', Name.Variable),
+ (r'%[-~]?\d{1,4}s', Name.Variable),
+ ]
+ }
diff --git a/pygments/lexers/supercollider.py b/pygments/lexers/supercollider.py
index cef147b8..40ff0aeb 100644
--- a/pygments/lexers/supercollider.py
+++ b/pygments/lexers/supercollider.py
@@ -5,7 +5,7 @@
Lexer for SuperCollider
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -74,7 +74,7 @@ class SuperColliderLexer(RegexLexer):
(words(('true', 'false', 'nil', 'inf'), suffix=r'\b'), Keyword.Constant),
(words((
'Array', 'Boolean', 'Date', 'Error', 'Function', 'Number',
- 'Object', 'Packages', 'RegExp', 'String', 'Error',
+ 'Object', 'Packages', 'RegExp', 'String',
'isFinite', 'isNaN', 'parseFloat', 'parseInt', 'super',
'thisFunctionDef', 'thisFunction', 'thisMethod', 'thisProcess',
'thisThread', 'this'), suffix=r'\b'),
diff --git a/pygments/lexers/tcl.py b/pygments/lexers/tcl.py
index 96feb7a8..1d1be033 100644
--- a/pygments/lexers/tcl.py
+++ b/pygments/lexers/tcl.py
@@ -5,7 +5,7 @@
Lexers for Tcl and related languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py
index 3e55b6ad..2c3feaac 100644
--- a/pygments/lexers/templates.py
+++ b/pygments/lexers/templates.py
@@ -5,7 +5,7 @@
Lexers for various template engines' markup.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -44,7 +44,7 @@ __all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
- 'TwigLexer', 'TwigHtmlLexer']
+ 'TwigLexer', 'TwigHtmlLexer', 'Angular2Lexer', 'Angular2HtmlLexer']
class ErbLexer(Lexer):
@@ -1814,8 +1814,9 @@ class HandlebarsLexer(RegexLexer):
(r'\}\}', Comment.Preproc, '#pop'),
# Handlebars
- (r'([#/]*)(each|if|unless|else|with|log|in)', bygroups(Keyword,
+ (r'([#/]*)(each|if|unless|else|with|log|in(line)?)', bygroups(Keyword,
Keyword)),
+ (r'#\*inline', Keyword),
# General {{#block}}
(r'([#/])([\w-]+)', bygroups(Name.Function, Name.Function)),
@@ -1823,11 +1824,37 @@ class HandlebarsLexer(RegexLexer):
# {{opt=something}}
(r'([\w-]+)(=)', bygroups(Name.Attribute, Operator)),
+ # Partials {{> ...}}
+ (r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
+ (r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
+ (r'(>)(\s*)(\()', bygroups(Keyword, Text, Punctuation),
+ 'dynamic-partial'),
+
+ include('generic'),
+ ],
+ 'dynamic-partial': [
+ (r'\s+', Text),
+ (r'\)', Punctuation, '#pop'),
+
+ (r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
+ Name.Variable, Text)),
+ (r'(lookup)(\s+)([^\s]+)', bygroups(Keyword, Text,
+ using(this, state='variable'))),
+ (r'[\w-]+', Name.Function),
+
+ include('generic'),
+ ],
+ 'variable': [
+ (r'[a-zA-Z][\w-]*', Name.Variable),
+ (r'\.[\w-]+', Name.Variable),
+ (r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
+ ],
+ 'generic': [
+ include('variable'),
+
# borrowed from DjangoLexer
(r':?"(\\\\|\\"|[^"])*"', String.Double),
(r":?'(\\\\|\\'|[^'])*'", String.Single),
- (r'[a-zA-Z][\w-]*', Name.Variable),
- (r'\.[\w-]+', Name.Variable),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
]
@@ -2174,3 +2201,83 @@ class TwigHtmlLexer(DelegatingLexer):
def __init__(self, **options):
super(TwigHtmlLexer, self).__init__(HtmlLexer, TwigLexer, **options)
+
+
+class Angular2Lexer(RegexLexer):
+ """
+ Generic
+ `angular2 <http://victorsavkin.com/post/119943127151/angular-2-template-syntax>`_
+ template lexer.
+
+ Highlights only the Angular template tags (stuff between `{{` and `}}` and
+ special attributes: '(event)=', '[property]=', '[(twoWayBinding)]=').
+ Everything else is left for a delegating lexer.
+
+ .. versionadded:: 2.1
+ """
+
+ name = "Angular2"
+ aliases = ['ng2']
+
+ tokens = {
+ 'root': [
+ (r'[^{([*#]+', Other),
+
+ # {{meal.name}}
+ (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'ngExpression'),
+
+ # (click)="deleteOrder()"; [value]="test"; [(twoWayTest)]="foo.bar"
+ (r'([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)',
+ bygroups(Punctuation, Name.Attribute, Punctuation, Text, Operator, Text),
+ 'attr'),
+ (r'([([]+)([\w:.-]+)([\])]+)(\s*)',
+ bygroups(Punctuation, Name.Attribute, Punctuation, Text)),
+
+ # *ngIf="..."; #f="ngForm"
+ (r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
+ bygroups(Punctuation, Name.Attribute, Punctuation, Operator), 'attr'),
+ (r'([*#])([\w:.-]+)(\s*)',
+ bygroups(Punctuation, Name.Attribute, Punctuation)),
+ ],
+
+ 'ngExpression': [
+ (r'\s+(\|\s+)?', Text),
+ (r'\}\}', Comment.Preproc, '#pop'),
+
+ # Literals
+ (r':?(true|false)', String.Boolean),
+ (r':?"(\\\\|\\"|[^"])*"', String.Double),
+ (r":?'(\\\\|\\'|[^'])*'", String.Single),
+ (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+
+ # Variabletext
+ (r'[a-zA-Z][\w-]*(\(.*\))?', Name.Variable),
+ (r'\.[\w-]+(\(.*\))?', Name.Variable),
+
+ # inline If
+ (r'(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)',
+ bygroups(Operator, Text, String, Text, Operator, Text, String, Text)),
+ ],
+ 'attr': [
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
+
+
+class Angular2HtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `Angular2Lexer` that highlights unlexed data with the
+ `HtmlLexer`.
+
+    .. versionadded:: 2.1
+ """
+
+ name = "HTML + Angular2"
+ aliases = ["html+ng2"]
+ filenames = ['*.ng2']
+
+ def __init__(self, **options):
+ super(Angular2HtmlLexer, self).__init__(HtmlLexer, Angular2Lexer, **options)
diff --git a/pygments/lexers/testing.py b/pygments/lexers/testing.py
index be8b6f71..1e0795b1 100644
--- a/pygments/lexers/testing.py
+++ b/pygments/lexers/testing.py
@@ -5,7 +5,7 @@
Lexers for testing languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index 4bec5ec8..9b3b5fea 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -5,7 +5,7 @@
Lexers for non-source code file types.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/textedit.py b/pygments/lexers/textedit.py
index 89417216..e8856dbd 100644
--- a/pygments/lexers/textedit.py
+++ b/pygments/lexers/textedit.py
@@ -5,7 +5,7 @@
Lexers for languages related to text processing.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/textfmts.py b/pygments/lexers/textfmts.py
index cab9add5..bb8124ef 100644
--- a/pygments/lexers/textfmts.py
+++ b/pygments/lexers/textfmts.py
@@ -5,7 +5,7 @@
Lexers for various text formats.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/theorem.py b/pygments/lexers/theorem.py
index f8c7d0a9..e84a398b 100644
--- a/pygments/lexers/theorem.py
+++ b/pygments/lexers/theorem.py
@@ -5,7 +5,7 @@
Lexers for theorem-proving languages.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -394,7 +394,7 @@ class LeanLexer(RegexLexer):
'import', 'abbreviation', 'opaque_hint', 'tactic_hint', 'definition',
'renaming', 'inline', 'hiding', 'exposing', 'parameter', 'parameters',
'conjecture', 'hypothesis', 'lemma', 'corollary', 'variable', 'variables',
- 'print', 'theorem', 'axiom', 'inductive', 'structure', 'universe', 'alias',
+ 'theorem', 'axiom', 'inductive', 'structure', 'universe', 'alias',
'help', 'options', 'precedence', 'postfix', 'prefix', 'calc_trans',
'calc_subst', 'calc_refl', 'infix', 'infixl', 'infixr', 'notation', 'eval',
'check', 'exit', 'coercion', 'end', 'private', 'using', 'namespace',
@@ -415,15 +415,16 @@ class LeanLexer(RegexLexer):
)
operators = (
- '!=', '#', '&', '&&', '*', '+', '-', '/', '@', '!', '`',
- '-.', '->', '.', '..', '...', '::', ':>', ';', ';;', '<',
- '<-', '=', '==', '>', '_', '|', '||', '~', '=>', '<=', '>=',
- '/\\', '\\/', u'∀', u'Π', u'λ', u'↔', u'∧', u'∨', u'≠', u'≤', u'≥',
+ u'!=', u'#', u'&', u'&&', u'*', u'+', u'-', u'/', u'@', u'!', u'`',
+ u'-.', u'->', u'.', u'..', u'...', u'::', u':>', u';', u';;', u'<',
+ u'<-', u'=', u'==', u'>', u'_', u'|', u'||', u'~', u'=>', u'<=', u'>=',
+ u'/\\', u'\\/', u'∀', u'Π', u'λ', u'↔', u'∧', u'∨', u'≠', u'≤', u'≥',
u'¬', u'⁻¹', u'⬝', u'▸', u'→', u'∃', u'ℕ', u'ℤ', u'≈', u'×', u'⌞',
u'⌟', u'≡', u'⟨', u'⟩',
)
- punctuation = ('(', ')', ':', '{', '}', '[', ']', u'⦃', u'⦄', ':=', ',')
+ punctuation = (u'(', u')', u':', u'{', u'}', u'[', u']', u'⦃', u'⦄',
+ u':=', u',')
tokens = {
'root': [
diff --git a/pygments/lexers/trafficscript.py b/pygments/lexers/trafficscript.py
index 03ab6a06..42542280 100644
--- a/pygments/lexers/trafficscript.py
+++ b/pygments/lexers/trafficscript.py
@@ -5,7 +5,7 @@
Lexer for RiverBed's TrafficScript (RTS) language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/typoscript.py b/pygments/lexers/typoscript.py
index 407847ed..d9adb4ad 100644
--- a/pygments/lexers/typoscript.py
+++ b/pygments/lexers/typoscript.py
@@ -14,17 +14,15 @@
`TypoScriptHtmlDataLexer`
Lexer that highlights markers, constants and registers within html tags.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using
-from pygments.token import Keyword, Text, Comment, Name, String, Number, \
+from pygments.token import Text, Comment, Name, String, Number, \
Operator, Punctuation
-from pygments.lexer import DelegatingLexer
-from pygments.lexers.web import HtmlLexer, CssLexer
__all__ = ['TypoScriptLexer', 'TypoScriptCssDataLexer', 'TypoScriptHtmlDataLexer']
@@ -168,14 +166,14 @@ class TypoScriptLexer(RegexLexer):
'whitespace': [
(r'\s+', Text),
],
- 'html':[
+ 'html': [
(r'<\S[^\n>]*>', using(TypoScriptHtmlDataLexer)),
(r'&[^;\n]*;', String),
(r'(_CSS_DEFAULT_STYLE)(\s*)(\()(?s)(.*(?=\n\)))',
- bygroups(Name.Class, Text, String.Symbol, using(TypoScriptCssDataLexer))),
+ bygroups(Name.Class, Text, String.Symbol, using(TypoScriptCssDataLexer))),
],
'literal': [
- (r'0x[0-9A-Fa-f]+t?',Number.Hex),
+ (r'0x[0-9A-Fa-f]+t?', Number.Hex),
# (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?\s*(?:[^=])', Number.Float),
(r'[0-9]+', Number.Integer),
(r'(###\w+###)', Name.Constant),
diff --git a/pygments/lexers/urbi.py b/pygments/lexers/urbi.py
index 558a21fb..7aaba90c 100644
--- a/pygments/lexers/urbi.py
+++ b/pygments/lexers/urbi.py
@@ -5,7 +5,7 @@
Lexers for UrbiScript language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/varnish.py b/pygments/lexers/varnish.py
index e64a601b..44521422 100644
--- a/pygments/lexers/varnish.py
+++ b/pygments/lexers/varnish.py
@@ -5,7 +5,7 @@
Lexers for Varnish configuration
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -91,14 +91,14 @@ class VCLLexer(RegexLexer):
'resp.reason', 'bereq.url', 'beresp.do_esi', 'beresp.proto', 'client.ip',
'bereq.proto', 'server.hostname', 'remote.ip', 'req.backend_hint',
'server.identity', 'req_top.url', 'beresp.grace', 'beresp.was_304',
- 'server.ip', 'bereq.uncacheable', 'now'), suffix=r'\b'),
+ 'server.ip', 'bereq.uncacheable'), suffix=r'\b'),
Name.Variable),
(r'[!%&+*\-,/<.}{>=|~]+', Operator),
(r'[();]', Punctuation),
(r'[,]+', Punctuation),
- (words(('include', 'hash_data', 'regsub', 'regsuball', 'if', 'else',
- 'elsif', 'elif', 'synth', 'synthetic', 'ban', 'synth',
+ (words(('hash_data', 'regsub', 'regsuball', 'if', 'else',
+ 'elsif', 'elif', 'synth', 'synthetic', 'ban',
'return', 'set', 'unset', 'import', 'include', 'new',
'rollback', 'call'), suffix=r'\b'),
Keyword),
@@ -121,13 +121,13 @@ class VCLLexer(RegexLexer):
r'(\s*\(.*\))',
bygroups(Name.Function, Punctuation, Name.Function, using(this))),
('[a-zA-Z_]\w*', Name),
- ],
+ ],
'comment': [
(r'[^*/]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
- ],
+ ],
'comments': [
(r'#.*$', Comment),
(r'/\*', Comment.Multiline, 'comment'),
diff --git a/pygments/lexers/verification.py b/pygments/lexers/verification.py
index 4042d44e..5322e17f 100644
--- a/pygments/lexers/verification.py
+++ b/pygments/lexers/verification.py
@@ -5,7 +5,7 @@
Lexer for Intermediate Verification Languages (IVLs).
- :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -69,7 +69,7 @@ class SilverLexer(RegexLexer):
"""
name = 'Silver'
aliases = ['silver']
- filenames = ['*.sil']
+ filenames = ['*.sil', '*.vpr']
tokens = {
'root': [
@@ -88,13 +88,14 @@ class SilverLexer(RegexLexer):
'assume', 'goto', 'while', 'if', 'elseif', 'else', 'fresh',
'constraining', 'Seq', 'Set', 'Multiset', 'union', 'intersection',
'setminus', 'subset', 'unfolding', 'in', 'old', 'forall', 'exists',
- 'acc', 'wildcard', 'write', 'none', 'epsilon', 'perm', 'unique'),
+ 'acc', 'wildcard', 'write', 'none', 'epsilon', 'perm', 'unique',
+ 'apply', 'package', 'folding', 'label', 'forperm'),
suffix=r'\b'), Keyword),
(words(('Int', 'Perm', 'Bool', 'Ref'), suffix=r'\b'), Keyword.Type),
include('numbers'),
- (r'[!%&*+=|?:<>/-]', Operator),
- (r"([{}():;,.])", Punctuation),
+ (r'[!%&*+=|?:<>/\-\[\]]', Operator),
+ (r'([{}():;,.])', Punctuation),
# Identifier
(r'[\w$]\w*', Name),
],
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
index 59fbf2fc..6e9c4f92 100644
--- a/pygments/lexers/web.py
+++ b/pygments/lexers/web.py
@@ -5,7 +5,7 @@
Just export previously exported lexers.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py
index 551846c2..712c8246 100644
--- a/pygments/lexers/webmisc.py
+++ b/pygments/lexers/webmisc.py
@@ -5,7 +5,7 @@
Lexers for misc. web stuff.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -358,8 +358,10 @@ class XQueryLexer(ExtendedRegexLexer):
bygroups(Keyword, Text, Keyword), 'itemtype'),
(r'(treat)(\s+)(as)\b',
bygroups(Keyword, Text, Keyword), 'itemtype'),
- (r'(case)(\s+)(' + stringdouble + ')', bygroups(Keyword, Text, String.Double), 'itemtype'),
- (r'(case)(\s+)(' + stringsingle + ')', bygroups(Keyword, Text, String.Single), 'itemtype'),
+ (r'(case)(\s+)(' + stringdouble + ')',
+ bygroups(Keyword, Text, String.Double), 'itemtype'),
+ (r'(case)(\s+)(' + stringsingle + ')',
+ bygroups(Keyword, Text, String.Single), 'itemtype'),
(r'(case|as)\b', Keyword, 'itemtype'),
(r'(\))(\s*)(as)',
bygroups(Punctuation, Text, Keyword), 'itemtype'),
@@ -367,7 +369,8 @@ class XQueryLexer(ExtendedRegexLexer):
(r'(for|let|previous|next)(\s+)(\$)',
bygroups(Keyword, Text, Name.Variable), 'varname'),
(r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
- bygroups(Keyword, Text, Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
+ bygroups(Keyword, Text, Keyword, Text, Keyword, Text, Name.Variable),
+ 'varname'),
# (r'\)|\?|\]', Punctuation, '#push'),
(r'\)|\?|\]', Punctuation),
(r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
@@ -417,17 +420,21 @@ class XQueryLexer(ExtendedRegexLexer):
(r'preserve|no-preserve', Keyword),
(r',', Punctuation),
],
- 'annotationname':[
+ 'annotationname': [
(r'\(:', Comment, 'comment'),
(qname, Name.Decorator),
(r'(\()(' + stringdouble + ')', bygroups(Punctuation, String.Double)),
(r'(\()(' + stringsingle + ')', bygroups(Punctuation, String.Single)),
- (r'(\,)(\s+)(' + stringdouble + ')', bygroups(Punctuation, Text, String.Double)),
- (r'(\,)(\s+)(' + stringsingle + ')', bygroups(Punctuation, Text, String.Single)),
+ (r'(\,)(\s+)(' + stringdouble + ')',
+ bygroups(Punctuation, Text, String.Double)),
+ (r'(\,)(\s+)(' + stringsingle + ')',
+ bygroups(Punctuation, Text, String.Single)),
(r'\)', Punctuation),
(r'(\s+)(\%)', bygroups(Text, Name.Decorator), 'annotationname'),
- (r'(\s+)(variable)(\s+)(\$)', bygroups(Text, Keyword.Declaration, Text, Name.Variable), 'varname'),
- (r'(\s+)(function)(\s+)', bygroups(Text, Keyword.Declaration, Text), 'root')
+ (r'(\s+)(variable)(\s+)(\$)',
+ bygroups(Text, Keyword.Declaration, Text, Name.Variable), 'varname'),
+ (r'(\s+)(function)(\s+)',
+ bygroups(Text, Keyword.Declaration, Text), 'root')
],
'varname': [
(r'\(:', Comment, 'comment'),
@@ -473,8 +480,10 @@ class XQueryLexer(ExtendedRegexLexer):
bygroups(Keyword, Text, Keyword), 'singletype'),
(r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
(r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
- (r'(case)(\s+)(' + stringdouble + ')', bygroups(Keyword, Text, String.Double), 'itemtype'),
- (r'(case)(\s+)(' + stringsingle + ')', bygroups(Keyword, Text, String.Single), 'itemtype'),
+ (r'(case)(\s+)(' + stringdouble + ')',
+ bygroups(Keyword, Text, String.Double), 'itemtype'),
+ (r'(case)(\s+)(' + stringsingle + ')',
+ bygroups(Keyword, Text, String.Single), 'itemtype'),
(r'case|as', Keyword, 'itemtype'),
(r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
(ncname + r':\*', Keyword.Type, 'operator'),
diff --git a/pygments/lexers/whiley.py b/pygments/lexers/whiley.py
new file mode 100644
index 00000000..0d0e8ab8
--- /dev/null
+++ b/pygments/lexers/whiley.py
@@ -0,0 +1,116 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.whiley
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Whiley language.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
+ Punctuation, String, Text
+
+__all__ = ['WhileyLexer']
+
+
+class WhileyLexer(RegexLexer):
+ """
+ Lexer for the Whiley programming language.
+
+ .. versionadded:: 2.2
+ """
+ name = 'Whiley'
+ filenames = ['*.whiley']
+ aliases = ['whiley']
+ mimetypes = ['text/x-whiley']
+
+ # See the language specification:
+ # http://whiley.org/download/WhileyLanguageSpec.pdf
+
+ tokens = {
+ 'root': [
+ # Whitespace
+ (r'\s+', Text),
+
+ # Comments
+ (r'//.*', Comment.Single),
+ # don't parse empty comment as doc comment
+ (r'/\*\*/', Comment.Multiline),
+ (r'(?s)/\*\*.*?\*/', String.Doc),
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+
+ # Keywords
+ (words((
+ 'if', 'else', 'while', 'for', 'do', 'return',
+ 'switch', 'case', 'default', 'break', 'continue',
+ 'requires', 'ensures', 'where', 'assert', 'assume',
+ 'all', 'no', 'some', 'in', 'is', 'new',
+ 'throw', 'try', 'catch', 'debug', 'skip', 'fail',
+ 'finite', 'total'), suffix=r'\b'), Keyword.Reserved),
+ (words((
+ 'function', 'method', 'public', 'private', 'protected',
+ 'export', 'native'), suffix=r'\b'), Keyword.Declaration),
+ # "constant" & "type" are not keywords unless used in declarations
+ (r'(constant|type)(\s+)([a-zA-Z_]\w*)(\s+)(is)\b',
+ bygroups(Keyword.Declaration, Text, Name, Text, Keyword.Reserved)),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(bool|byte|int|real|any|void)\b', Keyword.Type),
+ # "from" is not a keyword unless used with import
+ (r'(import)(\s+)(\*)([^\S\n]+)(from)\b',
+ bygroups(Keyword.Namespace, Text, Punctuation, Text, Keyword.Namespace)),
+ (r'(import)(\s+)([a-zA-Z_]\w*)([^\S\n]+)(from)\b',
+ bygroups(Keyword.Namespace, Text, Name, Text, Keyword.Namespace)),
+ (r'(package|import)\b', Keyword.Namespace),
+
+ # standard library: https://github.com/Whiley/WhileyLibs/
+ (words((
+ # types defined in whiley.lang.Int
+ 'i8', 'i16', 'i32', 'i64',
+ 'u8', 'u16', 'u32', 'u64',
+ 'uint', 'nat',
+
+ # whiley.lang.Any
+ 'toString'), suffix=r'\b'), Name.Builtin),
+
+ # byte literal
+ (r'[01]+b', Number.Bin),
+
+ # decimal literal
+ (r'[0-9]+\.[0-9]+', Number.Float),
+ # match "1." but not ranges like "3..5"
+ (r'[0-9]+\.(?!\.)', Number.Float),
+
+ # integer literal
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+
+ # character literal
+ (r"""'[^\\]'""", String.Char),
+ (r"""(')(\\['"\\btnfr])(')""",
+ bygroups(String.Char, String.Escape, String.Char)),
+
+ # string literal
+ (r'"', String, 'string'),
+
+ # operators and punctuation
+ (r'[{}()\[\],.;]', Punctuation),
+ (u'[+\\-*/%&|<>^!~@=:?'
+ # unicode operators
+ u'\u2200\u2203\u2205\u2282\u2286\u2283\u2287'
+ u'\u222A\u2229\u2264\u2265\u2208\u2227\u2228'
+ u']', Operator),
+
+ # identifier
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\[btnfr]', String.Escape),
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ (r'\\.', String),
+ (r'[^\\"]+', String),
+ ],
+ }
diff --git a/pygments/lexers/x10.py b/pygments/lexers/x10.py
index ea75ab71..1c63326d 100644
--- a/pygments/lexers/x10.py
+++ b/pygments/lexers/x10.py
@@ -5,7 +5,7 @@
Lexers for the X10 programming language.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/modeline.py b/pygments/modeline.py
index 2200f1cf..9f8d5dab 100644
--- a/pygments/modeline.py
+++ b/pygments/modeline.py
@@ -5,7 +5,7 @@
A simple modeline parser (based on pymodeline).
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -35,9 +35,10 @@ def get_filetype_from_buffer(buf, max_lines=5):
ret = get_filetype_from_line(l)
if ret:
return ret
- for l in lines[max_lines:-1:-1]:
- ret = get_filetype_from_line(l)
- if ret:
- return ret
+ for i in range(max_lines, -1, -1):
+ if i < len(lines):
+ ret = get_filetype_from_line(lines[i])
+ if ret:
+ return ret
return None
diff --git a/pygments/plugin.py b/pygments/plugin.py
index f9ea0890..7987d646 100644
--- a/pygments/plugin.py
+++ b/pygments/plugin.py
@@ -32,43 +32,37 @@
yourfilter = yourfilter:YourFilter
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-try:
- import pkg_resources
-except ImportError:
- pkg_resources = None
-
LEXER_ENTRY_POINT = 'pygments.lexers'
FORMATTER_ENTRY_POINT = 'pygments.formatters'
STYLE_ENTRY_POINT = 'pygments.styles'
FILTER_ENTRY_POINT = 'pygments.filters'
+def iter_entry_points(group_name):
+ try:
+ import pkg_resources
+ except ImportError:
+ return []
+
+ return pkg_resources.iter_entry_points(group_name)
def find_plugin_lexers():
- if pkg_resources is None:
- return
- for entrypoint in pkg_resources.iter_entry_points(LEXER_ENTRY_POINT):
+ for entrypoint in iter_entry_points(LEXER_ENTRY_POINT):
yield entrypoint.load()
def find_plugin_formatters():
- if pkg_resources is None:
- return
- for entrypoint in pkg_resources.iter_entry_points(FORMATTER_ENTRY_POINT):
+ for entrypoint in iter_entry_points(FORMATTER_ENTRY_POINT):
yield entrypoint.name, entrypoint.load()
def find_plugin_styles():
- if pkg_resources is None:
- return
- for entrypoint in pkg_resources.iter_entry_points(STYLE_ENTRY_POINT):
+ for entrypoint in iter_entry_points(STYLE_ENTRY_POINT):
yield entrypoint.name, entrypoint.load()
def find_plugin_filters():
- if pkg_resources is None:
- return
- for entrypoint in pkg_resources.iter_entry_points(FILTER_ENTRY_POINT):
+ for entrypoint in iter_entry_points(FILTER_ENTRY_POINT):
yield entrypoint.name, entrypoint.load()
diff --git a/pygments/regexopt.py b/pygments/regexopt.py
index 79903684..dcfae2fd 100644
--- a/pygments/regexopt.py
+++ b/pygments/regexopt.py
@@ -6,7 +6,7 @@
An algorithm that generates optimized regexes for matching long lists of
literal strings.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -54,7 +54,7 @@ def regex_opt_inner(strings, open_paren):
return open_paren + regex_opt_inner(rest, '') + '|' \
+ make_charset(oneletter) + close_paren
# print '-> only 1-character'
- return make_charset(oneletter)
+ return open_paren + make_charset(oneletter) + close_paren
prefix = commonprefix(strings)
if prefix:
plen = len(prefix)
diff --git a/pygments/scanner.py b/pygments/scanner.py
index 3ff11e4a..3350ac8e 100644
--- a/pygments/scanner.py
+++ b/pygments/scanner.py
@@ -12,7 +12,7 @@
Have a look at the `DelphiLexer` to get an idea of how to use
this scanner.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
diff --git a/pygments/sphinxext.py b/pygments/sphinxext.py
index de8cd73b..f962f8c6 100644
--- a/pygments/sphinxext.py
+++ b/pygments/sphinxext.py
@@ -6,7 +6,7 @@
Sphinx extension to generate automatic documentation of lexers,
formatters and filters.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/style.py b/pygments/style.py
index 68ee3a19..879c4e05 100644
--- a/pygments/style.py
+++ b/pygments/style.py
@@ -5,7 +5,7 @@
Basic style object.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/__init__.py b/pygments/styles/__init__.py
index 4efd196e..839a9b78 100644
--- a/pygments/styles/__init__.py
+++ b/pygments/styles/__init__.py
@@ -5,7 +5,7 @@
Contains built-in styles.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -41,7 +41,9 @@ STYLE_MAP = {
'lovelace': 'lovelace::LovelaceStyle',
'algol': 'algol::AlgolStyle',
'algol_nu': 'algol_nu::Algol_NuStyle',
- 'arduino': 'arduino::ArduinoStyle'
+ 'arduino': 'arduino::ArduinoStyle',
+ 'rainbow_dash': 'rainbow_dash::RainbowDashStyle',
+ 'abap': 'abap::AbapStyle',
}
diff --git a/pygments/styles/abap.py b/pygments/styles/abap.py
new file mode 100644
index 00000000..91286a3a
--- /dev/null
+++ b/pygments/styles/abap.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.abap
+ ~~~~~~~~~~~~~~~~~~~~
+
+ ABAP workbench like style.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator
+
+
+class AbapStyle(Style):
+ default_style = ""
+ styles = {
+ Comment: 'italic #888',
+ Comment.Special: '#888',
+ Keyword: '#00f',
+ Operator.Word: '#00f',
+ Name: '#000',
+ Number: '#3af',
+ String: '#5a2',
+
+ Error: '#F00',
+ }
diff --git a/pygments/styles/algol.py b/pygments/styles/algol.py
index a8726009..16461e0b 100644
--- a/pygments/styles/algol.py
+++ b/pygments/styles/algol.py
@@ -26,7 +26,7 @@
[1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/algol_nu.py b/pygments/styles/algol_nu.py
index 392838f2..366ae215 100644
--- a/pygments/styles/algol_nu.py
+++ b/pygments/styles/algol_nu.py
@@ -26,7 +26,7 @@
[1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/arduino.py b/pygments/styles/arduino.py
index 1bf2103c..57e3809e 100644
--- a/pygments/styles/arduino.py
+++ b/pygments/styles/arduino.py
@@ -5,7 +5,7 @@
Arduino® Syntax highlighting style.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/autumn.py b/pygments/styles/autumn.py
index 2040659e..71b93b1e 100644
--- a/pygments/styles/autumn.py
+++ b/pygments/styles/autumn.py
@@ -5,7 +5,7 @@
A colorful style, inspired by the terminal highlighting style.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/borland.py b/pygments/styles/borland.py
index 2b1f4ca9..0d13d1aa 100644
--- a/pygments/styles/borland.py
+++ b/pygments/styles/borland.py
@@ -5,7 +5,7 @@
Style similar to the style used in the Borland IDEs.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/bw.py b/pygments/styles/bw.py
index 56d78bd6..f0a6b148 100644
--- a/pygments/styles/bw.py
+++ b/pygments/styles/bw.py
@@ -5,7 +5,7 @@
Simple black/white only style.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/colorful.py b/pygments/styles/colorful.py
index ebedc02f..bfc0b502 100644
--- a/pygments/styles/colorful.py
+++ b/pygments/styles/colorful.py
@@ -5,7 +5,7 @@
A colorful style, inspired by CodeRay.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/default.py b/pygments/styles/default.py
index df99768c..6b9bd446 100644
--- a/pygments/styles/default.py
+++ b/pygments/styles/default.py
@@ -5,7 +5,7 @@
The default highlighting style.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/emacs.py b/pygments/styles/emacs.py
index 27ae19ad..af15f30d 100644
--- a/pygments/styles/emacs.py
+++ b/pygments/styles/emacs.py
@@ -5,7 +5,7 @@
A highlighting style for Pygments, inspired by Emacs.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/friendly.py b/pygments/styles/friendly.py
index d5256a4b..b2d1c0ce 100644
--- a/pygments/styles/friendly.py
+++ b/pygments/styles/friendly.py
@@ -5,7 +5,7 @@
A modern style based on the VIM pyte theme.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/fruity.py b/pygments/styles/fruity.py
index 99bbae6f..1bbe0316 100644
--- a/pygments/styles/fruity.py
+++ b/pygments/styles/fruity.py
@@ -5,7 +5,7 @@
pygments version of my "fruity" vim theme.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/igor.py b/pygments/styles/igor.py
index 8f552709..d4620a42 100644
--- a/pygments/styles/igor.py
+++ b/pygments/styles/igor.py
@@ -5,7 +5,7 @@
Igor Pro default style.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/lovelace.py b/pygments/styles/lovelace.py
index 236dde9b..861f778d 100644
--- a/pygments/styles/lovelace.py
+++ b/pygments/styles/lovelace.py
@@ -9,7 +9,7 @@
A desaturated, somewhat subdued style created for the Lovelace interactive
learning environment.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/manni.py b/pygments/styles/manni.py
index dd09f263..f0a325af 100644
--- a/pygments/styles/manni.py
+++ b/pygments/styles/manni.py
@@ -8,7 +8,7 @@
This is a port of the style used in the `php port`_ of pygments
by Manni. The style is called 'default' there.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/monokai.py b/pygments/styles/monokai.py
index 9c2a0a87..337e2f89 100644
--- a/pygments/styles/monokai.py
+++ b/pygments/styles/monokai.py
@@ -7,7 +7,7 @@
http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/murphy.py b/pygments/styles/murphy.py
index 1f83cb26..c8270065 100644
--- a/pygments/styles/murphy.py
+++ b/pygments/styles/murphy.py
@@ -5,7 +5,7 @@
Murphy's style from CodeRay.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/native.py b/pygments/styles/native.py
index 33ea3c17..921a58d9 100644
--- a/pygments/styles/native.py
+++ b/pygments/styles/native.py
@@ -5,7 +5,7 @@
pygments version of my "native" vim theme.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/paraiso_dark.py b/pygments/styles/paraiso_dark.py
index f906f87d..5f334bb9 100644
--- a/pygments/styles/paraiso_dark.py
+++ b/pygments/styles/paraiso_dark.py
@@ -9,7 +9,7 @@
Created with Base16 Builder by Chris Kempson
(https://github.com/chriskempson/base16-builder).
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/paraiso_light.py b/pygments/styles/paraiso_light.py
index 5424d122..a8112819 100644
--- a/pygments/styles/paraiso_light.py
+++ b/pygments/styles/paraiso_light.py
@@ -9,7 +9,7 @@
Created with Base16 Builder by Chris Kempson
(https://github.com/chriskempson/base16-builder).
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/pastie.py b/pygments/styles/pastie.py
index f65940be..d6142908 100644
--- a/pygments/styles/pastie.py
+++ b/pygments/styles/pastie.py
@@ -7,7 +7,7 @@
.. _pastie: http://pastie.caboo.se/
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/perldoc.py b/pygments/styles/perldoc.py
index eae6170d..24af2df6 100644
--- a/pygments/styles/perldoc.py
+++ b/pygments/styles/perldoc.py
@@ -7,7 +7,7 @@
.. _perldoc: http://perldoc.perl.org/
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/rainbow_dash.py b/pygments/styles/rainbow_dash.py
new file mode 100644
index 00000000..7cf5c9d7
--- /dev/null
+++ b/pygments/styles/rainbow_dash.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.rainbow_dash
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    A bright and colorful syntax highlighting `theme`_.
+
+ .. _theme: http://sanssecours.github.io/Rainbow-Dash.tmbundle
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import (Comment, Error, Generic, Name, Number, Operator,
+ String, Text, Whitespace, Keyword)
+
+BLUE_LIGHT = '#0080ff'
+BLUE = '#2c5dcd'
+GREEN = '#00cc66'
+GREEN_LIGHT = '#ccffcc'
+GREEN_NEON = '#00cc00'
+GREY = '#aaaaaa'
+GREY_LIGHT = '#cbcbcb'
+GREY_DARK = '#4d4d4d'
+PURPLE = '#5918bb'
+RED = '#cc0000'
+RED_DARK = '#c5060b'
+RED_LIGHT = '#ffcccc'
+RED_BRIGHT = '#ff0000'
+WHITE = '#ffffff'
+TURQUOISE = '#318495'
+ORANGE = '#ff8000'
+
+
+class RainbowDashStyle(Style):
+ """
+ A bright and colorful syntax highlighting theme.
+ """
+
+ background_color = WHITE
+
+ styles = {
+ Comment: 'italic {}'.format(BLUE_LIGHT),
+ Comment.Preproc: 'noitalic',
+ Comment.Special: 'bold',
+
+ Error: 'bg:{} {}'.format(RED, WHITE),
+
+ Generic.Deleted: 'border:{} bg:{}'.format(RED_DARK, RED_LIGHT),
+ Generic.Emph: 'italic',
+ Generic.Error: RED_BRIGHT,
+ Generic.Heading: 'bold {}'.format(BLUE),
+ Generic.Inserted: 'border:{} bg:{}'.format(GREEN_NEON, GREEN_LIGHT),
+ Generic.Output: GREY,
+ Generic.Prompt: 'bold {}'.format(BLUE),
+ Generic.Strong: 'bold',
+ Generic.Subheading: 'bold {}'.format(BLUE),
+ Generic.Traceback: RED_DARK,
+
+ Keyword: 'bold {}'.format(BLUE),
+ Keyword.Pseudo: 'nobold',
+ Keyword.Type: PURPLE,
+
+ Name.Attribute: 'italic {}'.format(BLUE),
+ Name.Builtin: 'bold {}'.format(PURPLE),
+ Name.Class: 'underline',
+ Name.Constant: TURQUOISE,
+ Name.Decorator: 'bold {}'.format(ORANGE),
+ Name.Entity: 'bold {}'.format(PURPLE),
+ Name.Exception: 'bold {}'.format(PURPLE),
+ Name.Function: 'bold {}'.format(ORANGE),
+ Name.Tag: 'bold {}'.format(BLUE),
+
+ Number: 'bold {}'.format(PURPLE),
+
+ Operator: BLUE,
+ Operator.Word: 'bold',
+
+ String: GREEN,
+ String.Doc: 'italic',
+ String.Escape: 'bold {}'.format(RED_DARK),
+ String.Other: TURQUOISE,
+ String.Symbol: 'bold {}'.format(RED_DARK),
+
+ Text: GREY_DARK,
+
+ Whitespace: GREY_LIGHT
+ }
diff --git a/pygments/styles/rrt.py b/pygments/styles/rrt.py
index 342c9fc6..96f9490c 100644
--- a/pygments/styles/rrt.py
+++ b/pygments/styles/rrt.py
@@ -5,7 +5,7 @@
pygments "rrt" theme, based on Zap and Emacs defaults.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/sas.py b/pygments/styles/sas.py
new file mode 100644
index 00000000..78686fc2
--- /dev/null
+++ b/pygments/styles/sas.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.sas
+ ~~~~~~~~~~~~~~~~~~~
+
+    Style inspired by SAS' enhanced program editor. Note: this is not
+ meant to be a complete style. It's merely meant to mimic SAS'
+ program editor syntax highlighting.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Other, Whitespace, Generic
+
+
+class SasStyle(Style):
+ """
+    Style inspired by SAS' enhanced program editor. Note: this is not
+ meant to be a complete style. It's merely meant to mimic SAS'
+ program editor syntax highlighting.
+ """
+
+ default_style = ''
+
+ styles = {
+ Whitespace: '#bbbbbb',
+ Comment: 'italic #008800',
+ String: '#800080',
+ Number: 'bold #2e8b57',
+ Other: 'bg:#ffffe0',
+ Keyword: '#2c2cff',
+ Keyword.Reserved: 'bold #353580',
+ Keyword.Constant: 'bold',
+ Name.Builtin: '#2c2cff',
+ Name.Function: 'bold italic',
+ Name.Variable: 'bold #2c2cff',
+ Generic: '#2c2cff',
+ Generic.Emph: '#008800',
+ Generic.Error: '#d30202',
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/pygments/styles/stata.py b/pygments/styles/stata.py
new file mode 100644
index 00000000..2b5f5edd
--- /dev/null
+++ b/pygments/styles/stata.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.stata
+ ~~~~~~~~~~~~~~~~~~~~~
+
+    Style inspired by Stata's do-file editor. Note: this is not meant
+ to be a complete style. It's merely meant to mimic Stata's do file
+ editor syntax highlighting.
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Whitespace
+
+
+class StataStyle(Style):
+ """
+    Style inspired by Stata's do-file editor. Note: this is not meant
+ to be a complete style. It's merely meant to mimic Stata's do file
+ editor syntax highlighting.
+ """
+
+ default_style = ''
+
+ styles = {
+ Whitespace: '#bbbbbb',
+ Comment: 'italic #008800',
+ String: '#7a2424',
+ Number: '#2c2cff',
+ Operator: '',
+ Keyword: 'bold #353580',
+ Keyword.Constant: '',
+ Name.Function: '#2c2cff',
+ Name.Variable: 'bold #35baba',
+ Name.Variable.Global: 'bold #b5565e',
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/pygments/styles/tango.py b/pygments/styles/tango.py
index c65850bd..2abc8c61 100644
--- a/pygments/styles/tango.py
+++ b/pygments/styles/tango.py
@@ -33,7 +33,7 @@
have been chosen to have the same style. Similarly, keywords (Keyword.*),
and Operator.Word (and, or, in) have been assigned the same style.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/trac.py b/pygments/styles/trac.py
index bf36ce03..aff39fd4 100644
--- a/pygments/styles/trac.py
+++ b/pygments/styles/trac.py
@@ -5,7 +5,7 @@
Port of the default trac highlighter design.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/vim.py b/pygments/styles/vim.py
index 383fd8f0..888088b1 100644
--- a/pygments/styles/vim.py
+++ b/pygments/styles/vim.py
@@ -5,7 +5,7 @@
A highlighting style for Pygments, inspired by vim.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/vs.py b/pygments/styles/vs.py
index 78efc547..bc3ed2b5 100644
--- a/pygments/styles/vs.py
+++ b/pygments/styles/vs.py
@@ -5,7 +5,7 @@
Simple style with MS Visual Studio colors.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/xcode.py b/pygments/styles/xcode.py
index 3dc9240d..64bfcf03 100644
--- a/pygments/styles/xcode.py
+++ b/pygments/styles/xcode.py
@@ -5,7 +5,7 @@
Style similar to the `Xcode` default theme.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/token.py b/pygments/token.py
index fbd5b805..43f73c85 100644
--- a/pygments/token.py
+++ b/pygments/token.py
@@ -5,7 +5,7 @@
Basic token types and the standard tokens.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/unistring.py b/pygments/unistring.py
index 49a2819a..6096d110 100644
--- a/pygments/unistring.py
+++ b/pygments/unistring.py
@@ -8,7 +8,7 @@
Inspired by chartypes_create.py from the MoinMoin project.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/util.py b/pygments/util.py
index 07b662d0..45070063 100644
--- a/pygments/util.py
+++ b/pygments/util.py
@@ -5,7 +5,7 @@
Utility functions.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -14,7 +14,7 @@ import sys
split_path_re = re.compile(r'[/\\ ]')
-doctype_lookup_re = re.compile(r'''(?smx)
+doctype_lookup_re = re.compile(r'''
(<\?.*?\?>)?\s*
<!DOCTYPE\s+(
[a-zA-Z_][a-zA-Z0-9]*
@@ -23,8 +23,9 @@ doctype_lookup_re = re.compile(r'''(?smx)
"[^"]*")?
)
[^>]*>
-''')
-tag_re = re.compile(r'<(.+?)(\s.*?)?>.*?</.+?>(?uism)')
+''', re.DOTALL | re.MULTILINE | re.VERBOSE)
+tag_re = re.compile(r'<(.+?)(\s.*?)?>.*?</.+?>',
+ re.UNICODE | re.IGNORECASE | re.DOTALL | re.MULTILINE)
xml_decl_re = re.compile(r'\s*<\?xml[^>]*\?>', re.I)
diff --git a/scripts/check_sources.py b/scripts/check_sources.py
index 4f5926f6..db09de42 100755
--- a/scripts/check_sources.py
+++ b/scripts/check_sources.py
@@ -7,7 +7,7 @@
Make sure each Python file has a correct file header
including copyright and license information.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -36,7 +36,7 @@ def checker(*suffixes, **kwds):
name_mail_re = r'[\w ]+(<.*?>)?'
-copyright_re = re.compile(r'^ :copyright: Copyright 2006-2015 by '
+copyright_re = re.compile(r'^ :copyright: Copyright 2006-2017 by '
r'the Pygments team, see AUTHORS\.$', re.UNICODE)
copyright_2_re = re.compile(r'^ %s(, %s)*[,.]$' %
(name_mail_re, name_mail_re), re.UNICODE)
diff --git a/scripts/debug_lexer.py b/scripts/debug_lexer.py
index 4b7db41a..02bb9fef 100755
--- a/scripts/debug_lexer.py
+++ b/scripts/debug_lexer.py
@@ -8,7 +8,7 @@
the text where Error tokens are being generated, along
with some context.
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/scripts/get_vimkw.py b/scripts/get_vimkw.py
index 45652740..688a0c64 100644
--- a/scripts/get_vimkw.py
+++ b/scripts/get_vimkw.py
@@ -16,7 +16,7 @@ HEADER = '''\
This file is autogenerated by scripts/get_vimkw.py
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/setup.py b/setup.py
index 6442fd10..5ef51ab5 100755
--- a/setup.py
+++ b/setup.py
@@ -16,7 +16,7 @@
formats that PIL supports and ANSI sequences
* it is usable as a command-line tool and as a library
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/examplefiles/capdl_example.cdl b/tests/examplefiles/capdl_example.cdl
new file mode 100644
index 00000000..050e56a6
--- /dev/null
+++ b/tests/examplefiles/capdl_example.cdl
@@ -0,0 +1,64 @@
+#ifdef ARCH_ARM
+arch arm11
+#else
+arch ia32
+#endif
+
+objects {
+ my_ep = ep /* A synchronous endpoint */
+
+ /* Two thread control blocks */
+ tcb1 = tcb
+ tcb2 = tcb
+
+ /* Four frames of physical memory */
+ frame1 = frame (4k)
+ frame2 = frame (4k)
+ frame3 = frame (4k)
+ frame4 = frame (4k)
+
+ /* Two page tables */
+ pt1 = pt
+ pt2 = pt
+
+ /* Two page directories */
+ pd1 = pd
+ pd2 = pd
+
+ /* Two capability nodes */
+ cnode1 = cnode (2 bits)
+ cnode2 = cnode (3 bits)
+}
+caps {
+ cnode1 {
+ 0x1: frame1 (RW) /* read/write */
+ 0x2: my_ep (R) /* read-only */
+ }
+ cnode2 {
+ 0x1: my_ep (W) /* write-only */
+ }
+ tcb1 {
+ vspace: pd1
+ ipc_buffer_slot: frame1
+ cspace: cnode1
+ }
+ pd1 {
+ 0x10: pt1
+ }
+ pt1 {
+ 0x8: frame1 (RW)
+ 0x9: frame2 (R)
+ }
+ tcb2 {
+ vspace: pd2
+ ipc_buffer_slot: frame3
+ cspace: cnode2
+ }
+ pd2 {
+ 0x10: pt2
+ }
+ pt2 {
+ 0x10: frame3 (RW)
+ 0x12: frame4 (R)
+ }
+}
diff --git a/tests/examplefiles/demo.frt b/tests/examplefiles/demo.frt
new file mode 100644
index 00000000..1b09ebb0
--- /dev/null
+++ b/tests/examplefiles/demo.frt
@@ -0,0 +1,3 @@
+2 3 + CR .
+: F ( blah ) DUP DROP 1 + ;
+1 F CR .
diff --git a/tests/examplefiles/demo.hbs b/tests/examplefiles/demo.hbs
index 1b9ed5a7..ae80cc1b 100644
--- a/tests/examplefiles/demo.hbs
+++ b/tests/examplefiles/demo.hbs
@@ -10,3 +10,25 @@
{{else}}
<button {{action expand}}>Show More...</button>
{{/if}}
+
+{{> myPartial}}
+{{> myPartial var="value" }}
+{{> myPartial var=../value}}
+{{> (myPartial)}}
+{{> (myPartial) var="value"}}
+{{> (lookup . "myPartial")}}
+{{> ( lookup . "myPartial" ) var="value" }}
+{{> (lookup ../foo "myPartial") var="value" }}
+{{> @partial-block}}
+
+{{#>myPartial}}
+...
+{{/myPartial}}
+
+{{#*inline "myPartial"}}
+...
+{{/inline}}
+
+{{../name}}
+{{./name}}
+{{this/name}}
diff --git a/tests/examplefiles/durexmania.aheui b/tests/examplefiles/durexmania.aheui
new file mode 100644
index 00000000..89654c00
--- /dev/null
+++ b/tests/examplefiles/durexmania.aheui
@@ -0,0 +1,4 @@
+우주메이저☆듀렉스전도사♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♂♀♡먊
+삶은밥과야근밥샤주세양♡밥사밥사밥사밥사밥사땅땅땅빵☆따밦내발따밦다빵맣밥밥밥내놔밥줘밥밥밥밗땅땅땅박밝땅땅딻타밟타맣밦밣따박타맣밦밣따박타맣밦밣따박타맣박빵빵빵빵따따따따맣삶몲
+Original Source by @harunene // Run it on AheuiChem(http://yoo2001818.github.io/AheuiChem/)
+https://gist.github.com/item4/ca870a63b390da6cc6f1
diff --git a/tests/examplefiles/example.bat b/tests/examplefiles/example.bat
index bf27673c..2b45d2bc 100644
--- a/tests/examplefiles/example.bat
+++ b/tests/examplefiles/example.bat
@@ -99,6 +99,10 @@ goto fail
rem "comment comment"^
goto fail
rem comment comment^
+if "1==1" equ "1==1" goto comments4
+goto fail
+:comments4
+rem comment"comment^
set /a _passed+=1
GOTO :EOF
goto :fail
@@ -201,5 +205,7 @@ for /f "tokens=2 delims==" %%G in ( 'assoc %+;/p extension'),%'
) &>nul ver
if errorlevel 0 if not errorlevel 1 set /a _passed+=1
goto :eof
+FOR /F %%a IN ('%%c%%') DO %%a
+rem %x% %x% %x% %x% %x% %x% %x% %x% %x% %x% %x% %x% %x% %x% %x% %x%
:/?
goto :fail
diff --git a/tests/examplefiles/example.hs b/tests/examplefiles/example.hs
index f5e2b555..764cab77 100644
--- a/tests/examplefiles/example.hs
+++ b/tests/examplefiles/example.hs
@@ -29,3 +29,13 @@ data ĈrazyThings =
-- some char literals:
charl = ['"', 'a', '\ESC', '\'', ' ']
+
+-- closed type families
+type family Fam (a :: Type) = r :: Type where
+ Fam Int = True
+ Fam a = False
+
+-- type literals
+type IntChar = '[Int, Char]
+type Falsy = 'False
+type Falsy = '(10, 20, 30)
diff --git a/tests/examplefiles/example.juttle b/tests/examplefiles/example.juttle
new file mode 100644
index 00000000..ae861996
--- /dev/null
+++ b/tests/examplefiles/example.juttle
@@ -0,0 +1,110 @@
+/* Block comment */
+/*
+ Multiline block
+ comment
+*/
+
+// inline comment
+function juttleFunction(arg) {
+ if (arg == null) {
+ return null;
+ }
+ else if (arg == 0) {
+ return 'zero';
+ }
+ else if (arg == 1) {
+ return "one";
+ }
+ else {
+ return 1.1;
+ }
+}
+
+reducer juttleReducer(field) {
+ var x = 0;
+ function update() {
+ x = *field;
+ }
+
+ function result() {
+ return x;
+ }
+}
+
+sub myemit(limit) {
+ emit -limit limit
+}
+
+input test: text -default 'input';
+const object = {
+ xyz: 123,
+ name: 'something'
+};
+
+const array = [
+ :2016-01-01:,
+ :2016-01-01T01:00:00:,
+ :2016-01-01T01:00:00.000:,
+ :2016-01-01T01:00:00.000Z:,
+ :2016-01-01T01:00:00.000-0800:,
+ :2016-01-01T01:00:00.000-08:00:,
+ :00:00:01:,
+ :00:00:00.001:,
+ :now:,
+ :beginning:,
+ :end:,
+ :forever:,
+ :yesterday:,
+ :today:,
+ :tomorrow:,
+ :1:,
+ :1.1:,
+ :1s:,
+ :1 second:,
+ :1 seconds:,
+ :100ms:,
+ :100 millisecond:,
+ :100 milliseconds:,
+ :1d:,
+ :1 day:,
+ :1 days:,
+ :.2h:,
+ :1.2h:,
+ :.2 hour:,
+ :1.2 hours:,
+ :.5d:,
+ :1.5d:,
+ :.5 day:,
+ :1.5 days:,
+ :5m:,
+ :5 minutes:,
+ :10w:,
+ :10 weeks:,
+ :10M:,
+ :10 months:,
+ :100y:,
+ :100 years:,
+ :1 year and 2 months and 2 days:
+];
+
+emit
+ | batch :10 minutes:
+ | filter x=true
+ | head 1
+ | join
+ | keep x
+ | pace -every :1 minute:
+ | pass
+ | put y=false
+ | remove z
+ | sequence
+ | skip 1
+ | sort field -desc
+ | split field
+ | tail 10
+ | unbatch
+ | uniq field
+;
+
+read adapter -last :day: 'search' AND field~/pattern/ OR field == 'string'
+ | write adapter
diff --git a/tests/examplefiles/example.lua b/tests/examplefiles/example.lua
index 0289e58c..8ecd6a13 100644
--- a/tests/examplefiles/example.lua
+++ b/tests/examplefiles/example.lua
@@ -247,4 +247,28 @@ function AucAdvanced.Debug.Assert(test, message)
return DebugLib.Assert(addonName, test, message)
end
+--[==[
+Here follow further tests of Lua syntax.
+]]==]
+---[[
+local t = {
+ [ [[
+x
+]==] \]]]=1|2; a={b={c={}}},
+ 1, 1., 1.2, .2, 1e3, 1.e3, 1.2e3, .2e3, 1.2e+3, 1.2E-3;
+ 0xA, 0Xa, 0xA., 0x.F, 0xA.F, 0xA.Fp1, 0xA.FP+1, 0Xa.fp-1;
+}
+function t.f()
+ goto eof
+ os.exit()
+ :: eof ::
+end
+
+function t . a --[==[x]==] .b --[==[y]==] --
+-- () end
+ . c : d (file)
+ return '.\a.\b.\f.\n.\r.\t.\v.\\.\".\'.\
+.\z
+ .\0.\00.\000.\0000.\xFa.\u{1}.\u{1234}'
+end
diff --git a/tests/examplefiles/example.md b/tests/examplefiles/example.md
new file mode 100644
index 00000000..2befb107
--- /dev/null
+++ b/tests/examplefiles/example.md
@@ -0,0 +1,61 @@
+# this is a header
+
+## this is a 2nd level header
+
+* list item 1
+ * list item 1.1
+* list item 2
+- list item 3
+
+1. numbered list item 1
+1. numbered list item 2
+
+- [ ] todo
+- [x] done
+- [X] done
+
+The following is italic: *italic*
+The following is italic: _italic_
+
+The following is not italic: \*italic\*
+The following is not italic: \_italic\_
+
+The following is not italic: snake*case*word
+The following is not italic: snake_case_word
+
+The following is bold: **bold** **two or more words**
+The following is bold: __bold__ __two or more words__
+
+The following is not bold: snake**case**word
+The following is not bold: snake__case__word
+
+The following is strikethrough: ~~bold~~
+The following is not strikethrough: snake~~case~~word
+
+The following is bold with italics inside: **the next _word_ should have been italics**
+
+> this is a quote
+
+> this is a multiline
+> quote string thing
+
+this sentence `has monospace` in it
+
+this sentence @tweets a person about a #topic.
+
+[google](https://google.com/some/path.html)
+![Image of Yaktocat](https://octodex.github.com/images/yaktocat.png)
+
+```
+ * this is just unformated
+ __text__
+```
+
+some other text
+
+```python
+from pygments import token
+# comment
+```
+
+some more text
diff --git a/tests/examplefiles/example.ng2 b/tests/examplefiles/example.ng2
new file mode 100644
index 00000000..0f424aca
--- /dev/null
+++ b/tests/examplefiles/example.ng2
@@ -0,0 +1,11 @@
+<div>
+ <p>{{order.DueTime | date:'d. MMMM yyyy HH:mm'}}</p>
+ <p>Status: {{order.OrderState}}</p>
+ <button (click)="deleteOrder()" *ngIf="cancelable" [value]="test" [(twoWayTest)]="foo.bar">Remove</button>
+ <ul>
+ <li *ngFor="#meal of order.Positions">
+ {{meal.Name}}
+ </li>
+ </ul>
+ <p>Preis: <b>{{order.TotalPrice | currency:'EUR':true:'1.2-2'}}</b></p>
+</div> \ No newline at end of file
diff --git a/tests/examplefiles/example.praat b/tests/examplefiles/example.praat
index bf2d005f..85573919 100644
--- a/tests/examplefiles/example.praat
+++ b/tests/examplefiles/example.praat
@@ -2,17 +2,27 @@ form Highlighter test
sentence Blank
sentence My_sentence This should all be a string
text My_text This should also all be a string
- word My_word Only the first word is a string, the rest is invalid
+ word My_word Only the first word is a string, the rest is discarded
boolean Binary 1
boolean Text no
boolean Quoted "yes"
comment This should be a string
+ optionmenu Choice: 1
+ option Foo
+ option Bar
+ option 100
real left_Range -123.6
positive right_Range_max 3.3
integer Int 4
natural Nat 4
endform
+# Periods do not establish boundaries for keywords
+form.var = 10
+# Or operators
+not.an.operator$ = "Bad variable name"
+bad.or.not = 1
+
# External scripts
include /path/to/file
runScript: "/path/to/file"
@@ -51,12 +61,16 @@ endif
string$ = "Strings can be 'interpolated'"
string$ = "But don't interpolate everything!"
+string$(10)
+
+repeat
+ string$ = string$ - right$(string$)
+until !length(string$)
Text... 1 Right 0.2 Half many----hyphens
Text... 1 Right -0.4 Bottom aحبيبa
Text... 1 Right -0.6 Bottom 日本
Draw circle (mm)... 0.5 0.5 i
-x=1
rows = Object_'table'.nrow
value$ = Table_'table'$[25, "f0"]
@@ -83,17 +97,19 @@ var = if macintosh = 1 then 0 else 1 fi ; This is an inline comment
n = numberOfSelected("Sound")
for i from newStyle.local to n
name = selected$(extractWord$(selected$(), " "))
- sound'i' = selected("Sound", i)
+ sound'i' = selected("Sound", i+(a*b))
sound[i] = sound'i'
endfor
-for i from 1 to n
+i = 1
+while i < n
+ i++
# Different styles of object selection
select sound'i'
sound = selected()
sound$ = selected$("Sound")
select Sound 'sound$'
- selectObject(sound[i])
+ selectObject( sound[i])
selectObject: sound
# Pause commands
@@ -124,14 +140,16 @@ for i from 1 to n
# Multi-line command with modifier
pitch = noprogress To Pitch (ac): 0, 75, 15, "no",
...0.03, 0.45, 0.01, 0.35, 0.14, 600
+ # Formulas are strings
+ Formula: "if col = 1 then row * Object_'pitch'.dx + 'first' else self fi"
# do-style command with assignment
minimum = do("Get minimum...", 0, 0, "Hertz", "Parabolic")
# New-style multi-line command call with broken strings
table = Create Table with column names: "table", 0,
- ..."file subject speaker
- ...f0 f1 f2 f3 " +
+ ..."file subject speaker
+ ... f0 f1 f2 f" + string$(3) + " " +
..."duration response"
# Function call with trailing space
@@ -156,7 +174,7 @@ for i from 1 to n
demoWaitForInput ( )
demo Erase all
demo Text: 50, "centre", 50, "half", "Finished"
-endfor
+endwhile
switch$ = if switch == 1 then "a" else
... if switch == 2 then "b" else
@@ -207,6 +225,11 @@ assert a != b && c
assert a <> b || c
assert a < b | c
assert a > b
+
+assert (a)or (b)
+assert (a) or(b)
+assert (a)and(b)
+
assert "hello" = "he" + "llo"
assert "hello" == "hello world" - " world"
@@ -243,3 +266,15 @@ endproc
asserterror Unknown symbol:'newline$'« _
assert '_new_style.local'
+@proc: a, selected("string"), b
+# Comment
+
+for i to saveSelection.n
+ selectObject: saveSelection.id[i]
+ appendInfoLine: selected$()
+endfor
+
+@ok(if selected$("Sound") = "tone" then 1 else 0 fi,
+ ... "selected sound is tone")
+
+@ok_formula("selected$(""Sound"") = ""tone""", "selected sound is tone")
diff --git a/tests/examplefiles/example.sbl b/tests/examplefiles/example.sbl
new file mode 100644
index 00000000..94efada5
--- /dev/null
+++ b/tests/examplefiles/example.sbl
@@ -0,0 +1,109 @@
+/* Stemmer for Esperanto in UTF-8 */
+
+strings ()
+
+integers ()
+
+booleans ( foreign )
+
+routines (
+ apostrophe
+ canonical_form
+ correlative
+ interjection
+ short_word
+ standard_suffix
+ unuj
+)
+
+externals ( stem )
+
+groupings ( vowel aiou ao ou )
+
+stringdef a' decimal '225'
+stringdef e' hex 'E9'
+stringdef i' hex 'ED'
+stringdef o' hex ' f3'
+stringdef u' hex 'fa '
+
+stringdef cx hex '0109'
+stringdef gx hex '011D'
+stringdef hx hex '0125'
+stringdef jx hex '0135'
+stringdef sx hex '015D'
+stringdef ux hex '016D'
+
+define canonical_form as repeat (
+ [substring]
+ among (
+stringescapes //
+ '/a'/' (<- 'a' set foreign)
+ '/e'/' (<- 'e' set foreign)
+ '/i'/' (<- 'i' set foreign)
+ '/o'/' (<- 'o' set foreign)
+ '/u'/' (<- 'u' set foreign)
+stringescapes `'
+ 'cx' (<- '`cx'')
+ 'gx' (<- '`gx'')
+ 'hx' (<- '`hx'')
+ 'jx' (<- '`jx'')
+ 'sx' (<- '`sx'')
+ 'ux' (<- '`ux'')
+ '' (next)
+ )
+)
+
+backwardmode (
+ stringescapes { }
+
+ define apostrophe as (
+ (['un{'}'] atlimit <- 'unu') or
+ (['l{'}'] atlimit <- 'la') or
+ (['{'}'] <- 'o')
+ )
+
+ define vowel 'aeiou'
+ define aiou vowel - 'e'
+ define ao 'ao'
+ define ou 'ou'
+
+ define short_word as not (loop (maxint * 0 + 4 / 2) gopast vowel)
+
+ define interjection as (
+ among ('adia{ux}' 'aha' 'amen' 'hola' 'hura' 'mia{ux}' 'muu' 'oho')
+ atlimit
+ )
+
+ define correlative as (
+ []
+ // Ignore -al, -am, etc. since they can't be confused with suffixes.
+ test (
+ ('a' or (try 'n'] 'e') or (try 'n' try 'j'] ou))
+ 'i'
+ try ('k' or 't' or '{cx}' or 'nen')
+ atlimit
+ )
+ delete
+ )
+
+ define unuj as (
+ [try 'n' 'j'] 'unu' atlimit delete
+ )
+
+ define standard_suffix as (
+ [
+ try ((try 'n' try 'j' ao) or (try 's' aiou) or (try 'n' 'e'))
+ try '-' try 'a{ux}'
+ ] delete
+ )
+)
+
+define stem as (
+ do canonical_form
+ not foreign
+ backwards (
+ do apostrophe
+ short_word or interjection or
+ correlative or unuj or do standard_suffix
+ )
+)
diff --git a/tests/examplefiles/example.tasm b/tests/examplefiles/example.tasm
new file mode 100644
index 00000000..d7202ffb
--- /dev/null
+++ b/tests/examplefiles/example.tasm
@@ -0,0 +1,527 @@
+;----------------------------------------------------------------------------;
+; Does A* pathfinding for rockraiders and vehicles
+;
+; Copyright 2015 Ruben De Smet
+;
+; Redistribution and use in source and binary forms, with or without
+; modification, are permitted provided that the following conditions are
+; met:
+;
+; (1) Redistributions of source code must retain the above copyright
+; notice, this list of conditions and the following disclaimer.
+;
+; (2) Redistributions in binary form must reproduce the above copyright
+; notice, this list of conditions and the following disclaimer in
+; the documentation and/or other materials provided with the
+; distribution.
+;
+; (3) The name of the author may not be used to
+; endorse or promote products derived from this software without
+; specific prior written permission.
+;
+; THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+; IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+; DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+; INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+; (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+; HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+; STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+; IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+; POSSIBILITY OF SUCH DAMAGE.
+;
+;----------------------------------------------------------------------------;
+
+IDEAL
+P386
+MODEL FLAT, C
+ASSUME cs:_TEXT,ds:FLAT,es:FLAT,fs:FLAT,gs:FLAT
+
+INCLUDE "ASTAR.INC"
+INCLUDE "READLVL.INC"
+INCLUDE "DEBUG.INC"
+
+STRUC TPriorityField
+ heuristic dd ?
+ distance dd ?
+ x db ?
+ y db ?
+ fromx db ?
+ fromy db ?
+ENDS
+
+STRUC TField
+ distance dd ?
+ x db ?
+ y db ?
+ENDS
+
+CODESEG
+
+PROC getPath
+ USES ecx
+ ARG @@tgtx:dword, \
+ @@tgty:dword \
+ RETURNS eax, ebx ; eax contains x, ebx contains y
+
+ call getLevelWidth
+ imul eax, [@@tgty]
+ add eax, [@@tgtx]
+ imul eax, SIZE TField
+ add eax, offset backtraceGraph
+ mov ecx, eax
+
+ xor eax, eax
+ xor ebx, ebx
+
+ mov al, [(TField ptr ecx).x]
+ mov bl, [(TField ptr ecx).y]
+
+ ret
+ENDP getPath
+
+PROC findPath
+ ; eax will contain a 1 when a path has been found
+ ; 0 otherwise.
+ ARG @@srcx:dword, \
+ @@srcy:dword, \
+ @@tgtx:dword, \
+ @@tgty:dword, \
+ @@type:dword \
+ RETURNS eax
+
+ ; Check whether the target field is "allowed" for
+ ; the selected vehicle or rock raider
+ call getField, [@@tgtx], [@@tgty]
+ mov al, [byte ptr eax]
+ and eax, 0FFh
+
+ add eax, offset actionTable
+ mov eax, [eax]
+ and eax, [@@type] ; TODO: for now, rock raider is hard coded
+ jnz @canGoToTarget
+
+ mov eax, 0
+ ret
+@canGoToTarget:
+
+ call cleanData
+ mov eax, [@@type]
+ mov [currentType], eax
+
+ mov eax, [@@srcx]
+ mov [currentOpen.x], al
+ mov eax, [@@srcy]
+ mov [currentOpen.y], al
+
+ call distance, [@@srcx], [@@srcy], [@@tgtx], [@@tgty]
+ ; eax <- distance
+ call addOpen, [@@srcx], [@@srcy], eax, 0
+
+@openListNotEmpty:
+ call popOpen
+ cmp eax, 0
+ je @openListEmpty
+
+ call addToMap
+
+ call addClosed
+
+ mov eax, [@@tgtx]
+ cmp [currentOpen.x], al
+ jne @nextOpen
+ mov eax, [@@tgty]
+ cmp [currentOpen.y], al
+ jne @nextOpen
+
+ jmp @routeFound
+
+ @nextOpen:
+ call addNeighbours, [@@tgtx], [@@tgty]
+
+ jmp @openListNotEmpty
+
+@openListEmpty:
+ mov eax, 0
+ ret
+
+@routeFound:
+ mov eax, 1
+ ret
+ENDP findPath
+
+PROC addToMap
+ USES eax, ecx
+
+ call getLevelWidth
+ xor ecx, ecx
+ mov cl, [currentOpen.y]
+ imul eax, ecx
+ mov cl, [currentOpen.x]
+ add eax, ecx
+ imul eax, SIZE TField
+ add eax, offset backtraceGraph
+
+ mov ecx, [currentOpen.distance]
+ cmp [(TField ptr eax).distance], ecx
+ jbe @dontAdd
+
+ mov [(TField ptr eax).distance], ecx
+ mov cl, [currentOpen.fromx]
+ mov [(TField ptr eax).x], cl
+ mov cl, [currentOpen.fromy]
+ mov [(TField ptr eax).y], cl
+
+@dontAdd:
+ ret
+ENDP addToMap
+
+; Is closed checks whether the field considered is "closed" for being added to the open list.
+; So, it also checks whether we can go on the selected field.
+PROC isClosed
+ USES ebx, ecx, edx
+ ARG @@x:dword, \
+ @@y:dword RETURNS eax
+
+ ; Check bounds first:
+
+ call getLevelWidth
+ cmp [@@x], eax
+ ja notWithinBounds ; ja considers -1 > 10
+
+ call getLevelHeight
+ cmp [@@y], eax
+ ja notWithinBounds
+
+ ; Check whether this field is "allowed" for
+ ; the selected vehicle or rock raider
+ call getField, [@@x], [@@y]
+ mov al, [byte ptr eax]
+ and eax, 0FFh
+
+ add eax, offset actionTable
+ mov eax, [eax]
+ and eax, [currentType] ; TODO: for now, rock raider is hard coded
+ jnz @canGoHere
+
+
+ inc eax ; mov eax, 1
+ ret
+
+@canGoHere:
+
+ ; Getting here means the field is okay to walk/fly/whatever on
+
+ xor ecx, ecx
+ mov cx, [closedlistSize]
+ cmp cx, 0 ; If empty, return 0
+ jne @closedNotEmpty
+
+ mov eax, 0
+ ret
+
+@closedNotEmpty:
+ mov ebx, offset closedlist
+
+@loopClosed:
+ mov edx, [@@x]
+ cmp [(TField ptr ebx).x], dl
+ jne @nextClosed
+ mov edx, [@@y]
+ cmp [(TField ptr ebx).y], dl
+ jne @nextClosed
+
+ ; If reached here, yep, contained in closed list
+ mov eax, 1
+ ret
+
+ @nextClosed:
+ add ebx, SIZE TField
+ dec ecx
+ jnz @loopClosed
+
+ mov eax, 0
+ ret
+
+notWithinBounds:
+ mov eax, 1
+ ret
+ENDP isClosed
+
+PROC addNeighbours
+ USES eax, ebx, ecx, edx
+ ARG @@tgtx:dword, \
+ @@tgty:dword
+ ; Push all neighbours of currentOpen on openList
+
+ xor ebx, ebx
+ xor ecx, ecx
+
+ mov bl, [currentOpen.x]
+ mov cl, [currentOpen.y]
+ mov edx, [currentOpen.distance]
+ inc edx ; Next distance is one more.
+
+ ; Up
+ dec ecx
+ call isClosed, ebx, ecx
+ cmp eax, 0
+ jne @noUp
+ call distance, ebx, ecx, [@@tgtx], [@@tgty]
+ add eax, edx
+ call addOpen, ebx, ecx, eax, edx
+ @noUp:
+ inc ecx
+
+ ; Right
+ inc ebx
+ call isClosed, ebx, ecx
+ cmp eax, 0
+ jne @noRight
+ call distance, ebx, ecx, [@@tgtx], [@@tgty]
+ add eax, edx
+ call addOpen, ebx, ecx, eax, edx
+ @noRight:
+ dec ebx
+
+ ; Left
+ dec ebx
+ call isClosed, ebx, ecx
+ cmp eax, 0
+ jne @noLeft
+ call distance, ebx, ecx, [@@tgtx], [@@tgty]
+ add eax, edx
+ call addOpen, ebx, ecx, eax, edx
+ @noLeft:
+ inc ebx
+
+ ; Down
+ inc ecx
+ call isClosed, ebx, ecx
+ cmp eax, 0
+ jne @noDown
+ call distance, ebx, ecx, [@@tgtx], [@@tgty]
+ add eax, edx
+ call addOpen, ebx, ecx, eax, edx
+ @noDown:
+ dec ecx
+
+ ret
+ENDP addNeighbours
+
+PROC popOpen
+ ARG RETURNS eax
+ USES ebx, ecx, edx, esi, edi
+ ; eax contains the smallest current heuristic
+ ; ebx contains the index of that field
+
+ cmp [openlistSize], 0 ; If empty, return 0
+ jne @goForth
+
+ mov eax, 0
+ ret
+
+@goForth:
+
+ mov eax, 0FFFFFFFFh ; Longest distance possible in 32 bits.
+ xor ebx, ebx
+ xor ecx, ecx ; ecx contains the current index
+
+@searchFurther:
+ mov edx, ecx
+ imul edx, SIZE TPriorityField
+ cmp [(TPriorityField ptr (openlist + edx)).heuristic], eax
+ ja @notBetter
+ ; Better guess found, put right values in eax and ebx
+ mov eax, [(TPriorityField ptr (openlist + edx)).heuristic]
+ mov ebx, ecx
+
+@notBetter:
+
+ inc ecx
+ cmp cx, [openlistSize]
+ jne @searchFurther
+
+ ; By now, we have found the right item to pop from the priorityqueue.
+
+ ; Move the correct item in currentOpen
+ mov ecx, SIZE TPriorityField
+ mov esi, ebx
+ imul esi, ecx
+ add esi, offset openlist
+
+ mov edi, offset currentOpen
+ rep movsb
+
+ ; Now make the remove the thing from the vector
+
+ xor ecx, ecx
+ mov cx, [openlistSize]
+ sub ecx, ebx
+ dec ecx
+ imul ecx, SIZE TPriorityField
+ mov edi, esi
+ sub edi, SIZE TPriorityField
+ rep movsb
+
+ dec [openlistSize]
+ mov eax, 1
+ ret
+ENDP popOpen
+
+PROC addClosed
+ USES eax, ebx
+
+ xor ebx, ebx
+ xor eax, eax
+
+ mov bx, [closedlistSize]
+ imul ebx, SIZE TField
+ add ebx, offset closedlist ; ebx contains the target TField
+
+ mov al, [currentOpen.x]
+ mov [(TField ptr ebx).x], al
+ mov al, [currentOpen.y]
+ mov [(TField ptr ebx).y], al
+ mov eax, [currentOpen.distance]
+ mov [(TField ptr ebx).distance], eax
+
+ inc [closedlistSize]
+ cmp [closedlistSize], CLOSED_LIST_SIZE_MAX
+ jne @noProblemWithClosedVector
+
+ xor eax, eax
+ mov ax, [closedlistSize]
+ call crash, offset closedOutOfMemory, eax
+
+@noProblemWithClosedVector:
+ ret
+ENDP addClosed
+
+PROC addOpen
+ USES eax, ebx
+ ARG @@x:dword, \
+ @@y:dword, \
+ @@priority:dword, \
+ @@distance:dword
+
+ xor eax, eax
+ mov ax, [openlistSize]
+ imul eax, SIZE TPriorityField
+ add eax, offset openlist
+
+ mov ebx, [@@x]
+ mov [(TPriorityField ptr eax).x], bl
+ mov ebx, [@@y]
+ mov [(TPriorityField ptr eax).y], bl
+
+ mov bl, [currentOpen.x]
+ mov [(TPriorityField ptr eax).fromx], bl
+ mov bl, [currentOpen.y]
+ mov [(TPriorityField ptr eax).fromy], bl
+
+ mov ebx, [@@priority]
+ mov [(TPriorityField ptr eax).heuristic], ebx
+ mov ebx, [@@distance]
+ mov [(TPriorityField ptr eax).distance], ebx
+
+ inc [openlistSize]
+ cmp [openlistSize], OPEN_LIST_SIZE_MAX
+ jne @noProblem
+
+ xor eax, eax
+ mov ax, [openlistSize]
+ call crash, offset openOutOfMemory, eax
+
+@noProblem:
+ ret
+ENDP
+
+PROC distance
+ USES ebx
+ ARG @@srcx:dword, \
+ @@srcy:dword, \
+ @@tgtx:dword, \
+ @@tgty:dword \
+ RETURNS eax
+
+ mov eax, [@@srcx]
+ sub eax, [@@tgtx]
+
+ jns @noSignChangex
+ neg eax
+
+ @noSignChangex:
+
+ mov ebx, [@@srcy]
+ sub ebx, [@@tgty]
+
+ jns @noSignChangey
+ neg ebx
+
+ @noSignChangey:
+ add eax, ebx
+ ret
+ENDP distance
+
+PROC cleanData
+ USES eax, ecx
+ mov [openlistSize], 0
+ mov [closedlistSize], 0
+
+ mov [currentOpen.x], -1
+ mov [currentOpen.y], -1
+ mov [currentOpen.distance], 0
+
+ call getLevelWidth
+ mov ecx, eax
+ call getLevelHeight
+ imul ecx, eax
+
+ mov eax, offset backtraceGraph
+@fieldIter:
+ mov [(TField ptr eax).distance], 0ffffffffh ; Set to approximately +inf
+ mov [(TField ptr eax).x], 0
+ mov [(TField ptr eax).y], 0
+ add eax, SIZE TField
+ dec ecx
+ jnz @fieldIter
+
+ ret
+ENDP cleanData
+
+DATASEG
+
+openOutOfMemory db "Out of openlistSize memory. Hi dev: Please increase$"
+closedOutOfMemory db "Out of closedlistSize memory. Hi dev: Please increase$"
+
+; power | discover | walking | sailing | flying
+actionTable db 00001101b, \ ;EMPTY
+ 00001101b, \ ;RUBBLE
+ 00000000b, \ ;GRAVEL
+ 00000000b, \ ;LOOSE ROCK
+ 00000000b, \ ;HARD ROCK
+ 00000000b, \ ;MASSIVE ROCK
+ 00000000b, \ ;KRISTAL SOURCE
+ 00000000b, \ ;OREROCK
+ 00001011b, \ ;WATER
+ 00001001b, \ ;LAVA
+ 00001101b, \ ;SNAIL HOLE
+ 00001101b, \ ;EROSION
+ 00011101b, \ ;POWER PATH
+ 00011101b, \ ;BUILDING POWER PATH
+ 00011000b \ ;BUILDING
+
+UDATASEG
+
+currentType dd ?
+currentOpen TPriorityField ?
+
+openlist TPriorityField OPEN_LIST_SIZE_MAX dup(?)
+openlistSize dw ?
+closedlist TField CLOSED_LIST_SIZE_MAX dup(?)
+closedlistSize dw ?
+backtraceGraph TField MAX_LEVEL_SIZE dup(?)
+
+END
diff --git a/tests/examplefiles/example.whiley b/tests/examplefiles/example.whiley
new file mode 100644
index 00000000..74b39370
--- /dev/null
+++ b/tests/examplefiles/example.whiley
@@ -0,0 +1,296 @@
+/**
+ * Example Whiley program, taken from the Whiley benchmark suite.
+ * https://github.com/Whiley/WyBench/blob/master/src/101_interpreter/Main.whiley
+ */
+
+import whiley.lang.System
+import whiley.lang.Int
+import whiley.io.File
+import string from whiley.lang.ASCII
+import char from whiley.lang.ASCII
+
+// ====================================================
+// A simple calculator for expressions
+// ====================================================
+
+constant ADD is 0
+constant SUB is 1
+constant MUL is 2
+constant DIV is 3
+
+// binary operation
+type BOp is (int x) where ADD <= x && x <= DIV
+type BinOp is { BOp op, Expr lhs, Expr rhs }
+
+// variables
+type Var is { string id }
+
+// list access
+type ListAccess is {
+ Expr src,
+ Expr index
+}
+
+// expression tree
+type Expr is int | // constant
+ Var | // variable
+ BinOp | // binary operator
+ Expr[] | // array constructor
+ ListAccess // list access
+
+// values
+type Value is int | Value[]
+
+// stmts
+type Print is { Expr rhs }
+type Set is { string lhs, Expr rhs }
+type Stmt is Print | Set
+
+// ====================================================
+// Expression Evaluator
+// ====================================================
+
+type RuntimeError is { string msg }
+type Environment is [{string k, Value v}]
+
+// Evaluate an expression in a given environment reducing either to a
+// value, or a runtime error. The latter occurs if evaluation gets
+// "stuck" (e.g. expression is // not well-formed)
+function evaluate(Expr e, Environment env) -> Value | RuntimeError:
+ //
+ if e is int:
+ return e
+ else if e is Var:
+ return env[e.id]
+ else if e is BinOp:
+ Value|RuntimeError lhs = evaluate(e.lhs, env)
+ Value|RuntimeError rhs = evaluate(e.rhs, env)
+ // check if stuck
+ if !(lhs is int && rhs is int):
+ return {msg: "arithmetic attempted on non-numeric value"}
+ // switch statement would be good
+ if e.op == ADD:
+ return lhs + rhs
+ else if e.op == SUB:
+ return lhs - rhs
+ else if e.op == MUL:
+ return lhs * rhs
+ else if rhs != 0:
+ return lhs / rhs
+ return {msg: "divide-by-zero"}
+ else if e is Expr[]:
+ [Value] r = []
+ for i in e:
+ Value|RuntimeError v = evaluate(i, env)
+ if v is RuntimeError:
+ return v
+ else:
+ r = r ++ [v]
+ return r
+ else if e is ListAccess:
+ Value|RuntimeError src = evaluate(e.src, env)
+ Value|RuntimeError index = evaluate(e.index, env)
+ // santity checks
+ if src is [Value] && index is int && index >= 0 && index < |src|:
+ return src[index]
+ else:
+ return {msg: "invalid list access"}
+ else:
+ return 0 // dead-code
+
+// ====================================================
+// Expression Parser
+// ====================================================
+
+type State is { string input, int pos }
+type SyntaxError is { string msg, int start, int end }
+
+function SyntaxError(string msg, int start, int end) -> SyntaxError:
+ return { msg: msg, start: start, end: end }
+
+// Top-level parse method
+function parse(State st) -> (Stmt,State)|SyntaxError:
+ //
+ Var keyword, Var v
+ Expr e
+ int start = st.pos
+ //
+ keyword,st = parseIdentifier(st)
+ switch keyword.id:
+ case "print":
+ any r = parseAddSubExpr(st)
+ if !(r is SyntaxError):
+ e,st = r
+ return {rhs: e},st
+ else:
+ return r // error case
+ case "set":
+ st = parseWhiteSpace(st)
+ v,st = parseIdentifier(st)
+ any r = parseAddSubExpr(st)
+ if !(r is SyntaxError):
+ e,st = r
+ return {lhs: v.id, rhs: e},st
+ else:
+ return r // error case
+ default:
+ return SyntaxError("unknown statement",start,st.pos-1)
+
+function parseAddSubExpr(State st) -> (Expr, State)|SyntaxError:
+ //
+ Expr lhs, Expr rhs
+ // First, pass left-hand side
+ any r = parseMulDivExpr(st)
+ //
+ if r is SyntaxError:
+ return r
+ //
+ lhs,st = r
+ st = parseWhiteSpace(st)
+ // Second, see if there is a right-hand side
+ if st.pos < |st.input| && st.input[st.pos] == '+':
+ // add expression
+ st.pos = st.pos + 1
+ r = parseAddSubExpr(st)
+ if !(r is SyntaxError):
+ rhs,st = r
+ return {op: ADD, lhs: lhs, rhs: rhs},st
+ else:
+ return r
+ else if st.pos < |st.input| && st.input[st.pos] == '-':
+ // subtract expression
+ st.pos = st.pos + 1
+ r = parseAddSubExpr(st)
+ if !(r is SyntaxError):
+ rhs,st = r
+ return {op: SUB, lhs: lhs, rhs: rhs},st
+ else:
+ return r
+ // No right-hand side
+ return (lhs,st)
+
+function parseMulDivExpr(State st) -> (Expr, State)|SyntaxError:
+ // First, parse left-hand side
+ Expr lhs, Expr rhs
+ any r = parseTerm(st)
+ if r is SyntaxError:
+ return r
+ //
+ lhs,st = r
+ st = parseWhiteSpace(st)
+ // Second, see if there is a right-hand side
+ if st.pos < |st.input| && st.input[st.pos] == '*':
+ // add expression
+ st.pos = st.pos + 1
+ r = parseMulDivExpr(st)
+ if !(r is SyntaxError):
+ rhs,st = r
+ return {op: MUL, lhs: lhs, rhs: rhs}, st
+ else:
+ return r
+ else if st.pos < |st.input| && st.input[st.pos] == '/':
+ // subtract expression
+ st.pos = st.pos + 1
+ r = parseMulDivExpr(st)
+ if !(r is SyntaxError):
+ rhs,st = r
+ return {op: DIV, lhs: lhs, rhs: rhs}, st
+ else:
+ return r
+ // No right-hand side
+ return (lhs,st)
+
+function parseTerm(State st) -> (Expr, State)|SyntaxError:
+ //
+ st = parseWhiteSpace(st)
+ if st.pos < |st.input|:
+ if ASCII.isLetter(st.input[st.pos]):
+ return parseIdentifier(st)
+ else if ASCII.isDigit(st.input[st.pos]):
+ return parseNumber(st)
+ else if st.input[st.pos] == '[':
+ return parseList(st)
+ //
+ return SyntaxError("expecting number or variable",st.pos,st.pos)
+
+function parseIdentifier(State st) -> (Var, State):
+ //
+ string txt = ""
+ // inch forward until end of identifier reached
+ while st.pos < |st.input| && ASCII.isLetter(st.input[st.pos]):
+ txt = txt ++ [st.input[st.pos]]
+ st.pos = st.pos + 1
+ return ({id:txt}, st)
+
+function parseNumber(State st) -> (Expr, State)|SyntaxError:
+ // inch forward until end of identifier reached
+ int start = st.pos
+ while st.pos < |st.input| && ASCII.isDigit(st.input[st.pos]):
+ st.pos = st.pos + 1
+ //
+ int|null iv = Int.parse(st.input[start..st.pos])
+ if iv == null:
+ return SyntaxError("Error parsing number",start,st.pos)
+ else:
+ return iv, st
+
+function parseList(State st) -> (Expr, State)|SyntaxError:
+ //
+ st.pos = st.pos + 1 // skip '['
+ st = parseWhiteSpace(st)
+ [Expr] l = [] // initial list
+ bool firstTime = true
+ while st.pos < |st.input| && st.input[st.pos] != ']':
+ if !firstTime && st.input[st.pos] != ',':
+ return SyntaxError("expecting comma",st.pos,st.pos)
+ else if !firstTime:
+ st.pos = st.pos + 1 // skip ','
+ firstTime = false
+ any r = parseAddSubExpr(st)
+ if r is SyntaxError:
+ return r
+ else:
+ Expr e
+ e,st = r
+ // perform annoying error check
+ l = l ++ [e]
+ st = parseWhiteSpace(st)
+ st.pos = st.pos + 1
+ return l,st
+
+// Parse all whitespace upto end-of-file
+function parseWhiteSpace(State st) -> State:
+ while st.pos < |st.input| && ASCII.isWhiteSpace(st.input[st.pos]):
+ st.pos = st.pos + 1
+ return st
+
+// ====================================================
+// Main Method
+// ====================================================
+
+public method main(System.Console sys):
+ if(|sys.args| == 0):
+ sys.out.println("no parameter provided!")
+ else:
+ File.Reader file = File.Reader(sys.args[0])
+ string input = ASCII.fromBytes(file.readAll())
+
+ Environment env = Environment()
+ State st = {pos: 0, input: input}
+ while st.pos < |st.input|:
+ Stmt s
+ any r = parse(st)
+ if r is SyntaxError:
+ sys.out.println("syntax error: " ++ r.msg)
+ return
+ s,st = r
+ Value|RuntimeError v = evaluate(s.rhs,env)
+ if v is RuntimeError:
+ sys.out.println("runtime error: " ++ v.msg)
+ return
+ if s is Set:
+ env[s.lhs] = v
+ else:
+ sys.out.println(r)
+ st = parseWhiteSpace(st)
+
diff --git a/tests/examplefiles/example.yaml b/tests/examplefiles/example.yaml
index 9c0ed9d0..17544c02 100644
--- a/tests/examplefiles/example.yaml
+++ b/tests/examplefiles/example.yaml
@@ -1,3 +1,12 @@
+#
+# Regression tests
+#
+
+%TAG ! tag:example.com:foo/
+---
+test: !foo/bar {a: 'asdf'}
+test2: fred
+...
#
# Examples from the Preview section of the YAML specification
diff --git a/tests/examplefiles/fibonacci.tokigun.aheui b/tests/examplefiles/fibonacci.tokigun.aheui
new file mode 100644
index 00000000..afa2ca05
--- /dev/null
+++ b/tests/examplefiles/fibonacci.tokigun.aheui
@@ -0,0 +1,4 @@
+바싹반박나싼순
+뿌멓떠벌번멍뻐
+쌀삭쌀살다순옭
+어어선썬설썩옭
diff --git a/tests/examplefiles/guidance.smv b/tests/examplefiles/guidance.smv
new file mode 100644
index 00000000..671d1e1c
--- /dev/null
+++ b/tests/examplefiles/guidance.smv
@@ -0,0 +1,1124 @@
+--
+-- Shuttle Digital Autopilot
+-- by Sergey Berezin (berez@cs.cmu.edu)
+--
+MODULE cont_3eo_mode_select(start,smode5,vel,q_bar,apogee_alt_LT_alt_ref,
+ h_dot_LT_hdot_reg2,alpha_n_GRT_alpha_reg2,
+ delta_r_GRT_del_r_usp,v_horiz_dnrng_LT_0,
+ high_rate_sep,meco_confirmed)
+
+VAR cont_3EO_start: boolean;
+ RTLS_abort_declared: boolean;
+ region_selected : boolean;
+ m_mode: {mm102, mm103, mm601};
+ r: {reg-1, reg0, reg1, reg2, reg3, reg102};
+ step : {1,2,3,4,5,6,7,8,9,10, exit, undef};
+
+ASSIGN
+ init(cont_3EO_start) := FALSE;
+ init(m_mode) := {mm102, mm103};
+ init(region_selected) := FALSE;
+ init(RTLS_abort_declared) := FALSE;
+ init(r) := reg-1;
+ init(step) := undef;
+
+ next(step) :=
+ case
+ step = 1 & m_mode = mm102 : exit;
+ step = 1 : 2;
+ step = 2 & smode5 : 5;
+ step = 2 & vel = GRT_vi_3eo_max: exit;
+ step = 2 : 3;
+ step = 3 & vel = LEQ_vi_3eo_min : 6;
+ step = 3 : 4;
+ step = 4 & apogee_alt_LT_alt_ref: exit;
+ step = 4 : 6;
+ step = 5 : 6;
+ step = 6 & r = reg0 : exit;
+ step = 6 : 7;
+ step = 7 : 8;
+ step = 8 & q_bar = GRT_qbar_reg3 & !high_rate_sep : 10;
+ step = 8 : 9;
+ step = 9 : 10;
+ step = 10: exit;
+ next(start): 1;
+ step = exit : undef;
+ TRUE: step;
+ esac;
+
+ next(cont_3EO_start) :=
+ case
+ step = 1 & m_mode = mm102 : TRUE;
+ step = 10 & meco_confirmed : TRUE;
+ TRUE : cont_3EO_start;
+ esac;
+
+ next(r) :=
+ case
+ step = 1 & m_mode = mm102 : reg102;
+ step = 2 & !smode5 & vel = GRT_vi_3eo_max: reg0;
+ step = 4 & apogee_alt_LT_alt_ref: reg0;
+ step = 5 & v_horiz_dnrng_LT_0 & delta_r_GRT_del_r_usp : reg0;
+ step = 8 & q_bar = GRT_qbar_reg3 & !high_rate_sep : reg3;
+ step = 9: case
+ (h_dot_LT_hdot_reg2 & alpha_n_GRT_alpha_reg2 &
+ q_bar = GRT_qbar_reg1) | high_rate_sep : reg2;
+ TRUE : reg1;
+ esac;
+ next(step) = 1 : reg-1;
+ TRUE: r;
+ esac;
+
+ next(RTLS_abort_declared) :=
+ case
+ step = 10 & meco_confirmed & m_mode = mm103 : TRUE;
+ TRUE: RTLS_abort_declared;
+ esac;
+
+ next(m_mode) :=
+ case
+ step = 10 & meco_confirmed & m_mode = mm103 : mm601;
+ TRUE: m_mode;
+ esac;
+
+ next(region_selected) :=
+ case
+ next(step) = 1 : FALSE;
+ next(step) = exit : TRUE;
+ TRUE : region_selected;
+ esac;
+
+MODULE cont_3eo_guide(start,cont_3EO_start, mode_select_completed, et_sep_cmd,
+ h_dot_LT_0, q_bar_a_GRT_qbar_max_sep, m_mode, r0,
+ cont_minus_z_compl, t_nav-t_et_sep_GRT_dt_min_z_102,
+ ABS_q_orb_GRT_q_minus_z_max, ABS_r_orb_GRT_r_minus_z_max,
+ excess_OMS_propellant, q_bar_a_LT_qbar_oms_dump,
+ entry_mnvr_couter_LE_0, rcs_all_jet_inhibit,
+ alt_GRT_alt_min_102_dump, t_nav-t_gmtlo_LT_t_dmp_last,
+ pre_sep, cond_18, q_orb_LT_0, ABS_alf_err_LT_alf_sep_err,
+ cond_20b, cond_21, ABS_beta_n_GRT_beta_max, cond_24, cond_26,
+ cond_27, cond_29, mm602_OK)
+VAR
+ step: {1,a1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,
+ b20, c20, d20, 21,22,23,24,25,26,27,28,29,exit, undef};
+ call_RTLS_abort_task : boolean;
+ first3: boolean; -- indicates if it is the first pass
+ first8: boolean;
+ first27: boolean;
+ s_unconv : boolean;
+ mode_2_indicator : boolean;
+ et_sep_man_initiate : boolean;
+ emerg_sep : boolean;
+ cont_3eo_pr_delay : {minus_z_reg1, minus_z_reg2,
+ minus_z_reg3, minus_z_reg4, minus_z_reg102, 0, 5};
+ etsep_y_drift : {undef, minus_z_reg1, minus_z_reg2,
+ minus_z_reg3, minus_z_reg4, minus_z_reg102, 0};
+ fwd_rcs_dump_enable : boolean;
+ fcs_accept_icnct : boolean;
+ oms_rcs_i_c_inh_ena_cmd : boolean;
+ orbiter_dump_ena : boolean;
+ frz_3eo : boolean;
+ high_rate_sep: boolean;
+ entry_gains : boolean;
+ cont_sep_cplt : boolean;
+ pch_cmd_reg4 : boolean;
+ alpha_ok : boolean;
+ r : {reg-1, reg0, reg1, reg2, reg3, reg4, reg102};
+ early_sep : boolean;
+--------------------------------------------
+----- Additional Variables -----------------
+--------------------------------------------
+ rtls_lo_f_d_delay : {undef, 0};
+ wcb2 : {undef, reg1_0, reg2_neg4, wcb2_3eo, reg4_0,
+ reg102_undef, post_sep_0};
+ q_gcb_i : {undef, quat_reg1, quat_reg2, quat_reg3, quat_reg4,
+ quat_reg102_undef, quat_entry_M50_to_cmdbody};
+ oms_nz_lim : {undef, oms_nz_lim_3eo, oms_nz_lim_iload, oms_nz_lim_std};
+ contingency_nz_lim : {undef, contingency_nz_lim_3eo,
+ contingency_nz_lim_iload, contingency_nz_lim_std};
+
+
+
+ASSIGN
+ init(entry_gains) := FALSE;
+ init(frz_3eo) := FALSE;
+ init(cont_3eo_pr_delay) := 5;
+ init(etsep_y_drift) := undef;
+ init(r) := reg-1;
+ init(step) := undef;
+ init(call_RTLS_abort_task) := FALSE;
+ init(first3) := TRUE;
+ init(first8) := TRUE;
+ init(first27) := TRUE;
+ init(cont_sep_cplt) := FALSE;
+ init(et_sep_man_initiate) := FALSE;
+ init(alpha_ok) := FALSE;
+ init(pch_cmd_reg4) := FALSE;
+
+-- Assumed initializations:
+
+ init(rtls_lo_f_d_delay) := undef;
+ init(wcb2) := undef;
+ init(q_gcb_i) := undef;
+ init(oms_nz_lim) := undef;
+ init(contingency_nz_lim) := undef;
+ init(oms_rcs_i_c_inh_ena_cmd) := FALSE;
+ init(orbiter_dump_ena) := FALSE;
+-- init(early_sep) := FALSE;
+
+-------------
+
+ next(step) := nextstep;
+
+ next(r) :=
+ case
+ step = a1 & (cont_3EO_start | mode_select_completed) : r0;
+ step = 21 & cond_21 : reg4;
+ step = 23 & ABS_beta_n_GRT_beta_max & !high_rate_sep : reg1;
+ TRUE : r;
+ esac;
+
+ next(first3) :=
+ case
+ step = 3 & cont_3EO_start : FALSE;
+ TRUE : first3;
+ esac;
+
+ next(first8) :=
+ case
+ step = 8 & excess_OMS_propellant & cont_3EO_start : FALSE;
+ TRUE : first8;
+ esac;
+
+ next(first27) :=
+ case
+ step = 27 : FALSE;
+ TRUE: first27;
+ esac;
+
+ next(s_unconv) :=
+ case
+ step = 3 : FALSE;
+ TRUE : s_unconv;
+ esac;
+
+ next(call_RTLS_abort_task) :=
+ case
+ step = 3 : TRUE;
+ TRUE : call_RTLS_abort_task;
+ esac;
+
+ next(mode_2_indicator) :=
+ case
+ step = 4 : TRUE;
+ TRUE : mode_2_indicator;
+ esac;
+
+ next(et_sep_man_initiate) :=
+ case
+ step = 5 & h_dot_LT_0 & q_bar_a_GRT_qbar_max_sep & m_mode != mm102 : TRUE;
+ step = 14 & pre_sep : TRUE;
+ step = 19 & q_orb_LT_0 : TRUE;
+ step = d20 : TRUE;
+ step = 26 & cond_26 : TRUE;
+ step = 29 & cond_29 : TRUE;
+ TRUE : et_sep_man_initiate;
+ esac;
+
+ next(emerg_sep) :=
+ case
+ next(step) = 1 : FALSE;
+ step = 5 & h_dot_LT_0 & q_bar_a_GRT_qbar_max_sep & m_mode != mm102: TRUE;
+ TRUE : emerg_sep;
+ esac;
+
+ next(cont_3eo_pr_delay) :=
+ case
+ next(step) = 1 : 5;
+ step = 5 & h_dot_LT_0 & q_bar_a_GRT_qbar_max_sep & m_mode != mm102 :
+ minus_z_reg3;
+ step = 7 & !cont_minus_z_compl & r = reg102 &
+ t_nav-t_et_sep_GRT_dt_min_z_102 &
+ (ABS_q_orb_GRT_q_minus_z_max | ABS_r_orb_GRT_r_minus_z_max) : 0;
+ step = 14 & pre_sep : minus_z_reg102;
+ step = 19 & q_orb_LT_0 : minus_z_reg4;
+ step = d20 : minus_z_reg3;
+ step = 26 & cond_26 : minus_z_reg2;
+ step = 27 & first27 : minus_z_reg1;
+ TRUE : cont_3eo_pr_delay;
+ esac;
+
+ next(etsep_y_drift) :=
+ case
+ step = 5 & h_dot_LT_0 & q_bar_a_GRT_qbar_max_sep & m_mode != mm102 :
+ minus_z_reg3;
+ step = 7 & !cont_minus_z_compl & r = reg102 &
+ t_nav-t_et_sep_GRT_dt_min_z_102 &
+ (ABS_q_orb_GRT_q_minus_z_max | ABS_r_orb_GRT_r_minus_z_max) : 0;
+ step = 14 & pre_sep : minus_z_reg102;
+ step = 19 & q_orb_LT_0 : minus_z_reg4;
+ step = d20 : minus_z_reg3;
+ step = 26 & cond_26 : minus_z_reg2;
+ step = 27 & first27 : minus_z_reg1;
+ TRUE : etsep_y_drift;
+ esac;
+
+ next(fwd_rcs_dump_enable) :=
+ case
+ step = 8 & excess_OMS_propellant & first8 : FALSE;
+ TRUE : fwd_rcs_dump_enable;
+ esac;
+
+ next(fcs_accept_icnct) :=
+ case
+ step = 9 & q_bar_a_LT_qbar_oms_dump & r != reg102 : TRUE;
+ TRUE : fcs_accept_icnct;
+ esac;
+
+ next(oms_rcs_i_c_inh_ena_cmd) :=
+ case
+-- next(step) = 1 & oms_rcs_i_c_inh_ena_cmd : {0,1};
+ next(step) = 1 & oms_rcs_i_c_inh_ena_cmd : FALSE; -- Assumed initialization
+ step = 9 & q_bar_a_LT_qbar_oms_dump & r != reg102 : TRUE;
+ TRUE : oms_rcs_i_c_inh_ena_cmd;
+ esac;
+
+ next(orbiter_dump_ena) :=
+ case
+ next(start) = TRUE : FALSE; -- Assumed initialization
+ step = 9 & q_bar_a_LT_qbar_oms_dump & r != reg102 : TRUE;
+ step = 13 & alt_GRT_alt_min_102_dump & t_nav-t_gmtlo_LT_t_dmp_last : TRUE;
+ TRUE : orbiter_dump_ena;
+ esac;
+
+ next(frz_3eo) :=
+ case
+ next(step) = 1 : FALSE;
+ step = 10 & entry_mnvr_couter_LE_0 & !rcs_all_jet_inhibit : FALSE;
+ step = 28 & !et_sep_man_initiate : TRUE;
+ TRUE : frz_3eo;
+ esac;
+
+ next(high_rate_sep) :=
+ case
+ step = 10 & entry_mnvr_couter_LE_0 & !rcs_all_jet_inhibit : FALSE;
+ step = 25 : TRUE;
+ TRUE : high_rate_sep;
+ esac;
+
+ next(entry_gains) :=
+ case
+ next(step) = 1 : FALSE;
+ step = 10 & entry_mnvr_couter_LE_0 & !rcs_all_jet_inhibit : TRUE;
+ TRUE : entry_gains;
+ esac;
+
+ next(cont_sep_cplt) :=
+ case
+ next(step) = 1 : FALSE;
+ step = 12 & mm602_OK : TRUE;
+ TRUE : cont_sep_cplt;
+ esac;
+
+ next(pch_cmd_reg4) :=
+ case
+ next(step) = 1 : FALSE;
+ step = 18 & !pch_cmd_reg4 & cond_18 : TRUE;
+ TRUE : pch_cmd_reg4;
+ esac;
+
+ next(alpha_ok) :=
+ case
+ next(step) = 1 : FALSE;
+ step = 20 & ABS_alf_err_LT_alf_sep_err : TRUE;
+ TRUE : alpha_ok;
+ esac;
+
+ next(early_sep) :=
+ case
+ step = 27 & first27 :
+ case
+ cond_27 : TRUE;
+ TRUE : FALSE;
+ esac;
+ TRUE : early_sep;
+ esac;
+
+--------------------------------------------
+----- Additional Variables -----------------
+--------------------------------------------
+
+ next(rtls_lo_f_d_delay) :=
+ case
+ next(start) = TRUE : undef; -- Assumed initialization
+ step = 8 & first8 & excess_OMS_propellant : 0;
+ TRUE : rtls_lo_f_d_delay;
+ esac;
+
+ next(wcb2) :=
+ case
+ next(start) = TRUE : undef; -- Assumed initialization
+ step = 10 & entry_mnvr_couter_LE_0 : post_sep_0;
+ step = 12 : case
+ r = reg4 : reg4_0;
+ TRUE : wcb2_3eo;
+ esac;
+ step = 14 & pre_sep : reg102_undef;
+ step = 15 : case
+ r = reg4 : reg4_0;
+ TRUE : wcb2_3eo;
+ esac;
+ step = 25 : reg2_neg4;
+ TRUE : wcb2;
+ esac;
+
+ next(q_gcb_i) :=
+ case
+ next(start) = TRUE : undef; -- Assumed initialization
+ step = 11 : quat_entry_M50_to_cmdbody;
+ step = 14 & pre_sep : quat_reg102_undef;
+ step = 16 : case
+ r = reg4 : quat_reg4;
+ TRUE : quat_reg3;
+ esac;
+ step = 22 : quat_reg2;
+
+-- Without this step the value "quat_reg2" would remain in "reg1":
+-- step = 23 & ABS_beta_n_GRT_beta_max & !high_rate_sep : undef;
+
+ TRUE : q_gcb_i;
+ esac;
+
+ next(oms_nz_lim) :=
+ case
+ next(start) = TRUE : undef; -- Assumed initialization
+ step = 9 & q_bar_a_LT_qbar_oms_dump & r != reg102 : oms_nz_lim_3eo;
+ step = 12 & mm602_OK : oms_nz_lim_std;
+ TRUE : oms_nz_lim;
+ esac;
+
+ next(contingency_nz_lim) :=
+ case
+ next(start) = TRUE : undef; -- Assumed initialization
+ step = 9 & q_bar_a_LT_qbar_oms_dump & r != reg102 :
+ contingency_nz_lim_3eo;
+ step = 12 & mm602_OK : contingency_nz_lim_std;
+ TRUE : contingency_nz_lim;
+ esac;
+
+DEFINE
+ finished := step = exit;
+ idle := step = undef;
+
+ start_cont_3eo_mode_select :=
+ case
+ step = 1 & !cont_3EO_start : TRUE;
+ TRUE : FALSE;
+ esac;
+
+ nextstep :=
+ case
+ step = 1 : a1;
+ step = a1 : case
+ (cont_3EO_start | mode_select_completed) : 2;
+ TRUE : step;
+ esac;
+ step = 2 : case
+ !cont_3EO_start : exit;
+ first3 : 3;
+ TRUE: 4;
+ esac;
+ step = 3 : 4;
+ step = 4 : case
+ et_sep_cmd : 7;
+ TRUE : 5;
+ esac;
+ step = 5 : case
+ h_dot_LT_0 & q_bar_a_GRT_qbar_max_sep &
+ m_mode != mm102 : exit;
+ TRUE : 6;
+ esac;
+ step = 6 :
+ case
+ r = reg102 : 13;
+ r in {reg3, reg4} : 15;
+ r = reg2 : 22;
+ r = reg1 : 27;
+ TRUE : exit;
+ esac;
+ step = 7 : case
+ cont_minus_z_compl : 8;
+ TRUE : exit;
+ esac;
+ step = 8 : case
+ excess_OMS_propellant & first8 : 9;
+ TRUE : 10;
+ esac;
+ step = 9 : exit;
+ step = 10 : case
+ !entry_mnvr_couter_LE_0 | rcs_all_jet_inhibit : exit;
+ TRUE : 11;
+ esac;
+ step = 11 : 12;
+ step = 12 : exit;
+ step = 13 : 14;
+ step = 14 : exit;
+ step = 15 : 16;
+ step = 16 : 17;
+ step = 17 : case
+ r = reg4 : 18;
+ TRUE : 20;
+ esac;
+ step = 18 : case
+ pch_cmd_reg4 | cond_18 : 19;
+ TRUE : exit;
+ esac;
+ step = 19 : exit;
+ step = 20 : case
+ ABS_alf_err_LT_alf_sep_err : b20;
+ TRUE : c20;
+ esac;
+ step = b20 : case
+ cond_20b : d20;
+ TRUE : exit;
+ esac;
+ step = c20 : case
+ alpha_ok : d20;
+ TRUE : 21;
+ esac;
+ step = d20 : exit;
+ TRUE : nextstep21;
+ esac;
+
+ nextstep21 :=
+ case
+ step = 21 : case
+ cond_21 : 15;
+ TRUE : exit;
+ esac;
+ step = 22 : 23;
+ step = 23 : case
+ ABS_beta_n_GRT_beta_max & !high_rate_sep : 27;
+ TRUE : 24;
+ esac;
+ step = 24 : case
+ cond_24 | high_rate_sep : 25;
+ TRUE : exit;
+ esac;
+ step = 25 : 26;
+ step = 26 : exit;
+ step = 27 : 28;
+ step = 28 : case
+ !et_sep_man_initiate : 29;
+ TRUE : exit;
+ esac;
+ step = 29 : exit;
+ start : 1;
+ step = exit : undef;
+ TRUE : step;
+ esac;
+
+ post_sep_mode := step in {7,8,9,10,11,12};
+
+------------------------------------------------------------------
+------------------------------------------------------------------
+
+MODULE main
+VAR
+ smode5: boolean;
+ vel : {GRT_vi_3eo_max, GRT_vi_3eo_min, LEQ_vi_3eo_min};
+ q_bar: {GRT_qbar_reg3, GRT_qbar_reg1, LEQ_qbar_reg1};
+ q_bar_a_GRT_qbar_max_sep : boolean;
+ q_bar_a_LT_qbar_oms_dump : boolean;
+ apogee_alt_LT_alt_ref : boolean;
+ h_dot_LT_hdot_reg2 : boolean;
+ h_dot_LT_0 : boolean;
+ alpha_n_GRT_alpha_reg2 : boolean;
+ delta_r_GRT_del_r_usp : boolean;
+ v_horiz_dnrng_LT_0: boolean;
+ meco_confirmed: boolean;
+ et_sep_cmd : boolean;
+ cont_minus_z_compl : boolean;
+ t_nav-t_et_sep_GRT_dt_min_z_102 : boolean;
+ ABS_q_orb_GRT_q_minus_z_max : boolean;
+ ABS_r_orb_GRT_r_minus_z_max : boolean;
+ excess_OMS_propellant : boolean;
+ entry_mnvr_couter_LE_0 : boolean;
+ rcs_all_jet_inhibit : boolean;
+ alt_GRT_alt_min_102_dump : boolean;
+ t_nav-t_gmtlo_LT_t_dmp_last : boolean;
+ pre_sep : boolean;
+ cond_18 : boolean;
+ q_orb_LT_0 : boolean;
+ ABS_alf_err_LT_alf_sep_err : boolean;
+ cond_20b : boolean;
+ cond_21 : boolean;
+ ABS_beta_n_GRT_beta_max : boolean;
+ cond_24 : boolean;
+ cond_26 : boolean;
+ cond_27 : boolean;
+ cond_29 : boolean;
+ mm602_OK : boolean;
+ start_guide : boolean;
+ mated_coast_mnvr : boolean;
+
+ cs: cont_3eo_mode_select(cg.start_cont_3eo_mode_select,
+ smode5,vel,q_bar,apogee_alt_LT_alt_ref,
+ h_dot_LT_hdot_reg2,alpha_n_GRT_alpha_reg2,
+ delta_r_GRT_del_r_usp,v_horiz_dnrng_LT_0,
+ cg.high_rate_sep,meco_confirmed);
+
+ cg: cont_3eo_guide(start_guide,
+ cs.cont_3EO_start, cs.region_selected, et_sep_cmd,
+ h_dot_LT_0, q_bar_a_GRT_qbar_max_sep, cs.m_mode, cs.r,
+ cont_minus_z_compl, t_nav-t_et_sep_GRT_dt_min_z_102,
+ ABS_q_orb_GRT_q_minus_z_max, ABS_r_orb_GRT_r_minus_z_max,
+ excess_OMS_propellant, q_bar_a_LT_qbar_oms_dump,
+ entry_mnvr_couter_LE_0, rcs_all_jet_inhibit,
+ alt_GRT_alt_min_102_dump, t_nav-t_gmtlo_LT_t_dmp_last,
+ pre_sep, cond_18, q_orb_LT_0, ABS_alf_err_LT_alf_sep_err,
+ cond_20b, cond_21, ABS_beta_n_GRT_beta_max, cond_24, cond_26,
+ cond_27, cond_29, mm602_OK);
+
+ASSIGN
+ init(start_guide) := FALSE;
+ init(mated_coast_mnvr) := FALSE;
+
+ next(entry_mnvr_couter_LE_0) :=
+ case
+ !entry_mnvr_couter_LE_0 : {FALSE, TRUE};
+ TRUE : TRUE;
+ esac;
+
+---------------------------------------------------------------------
+---------------------------------------------------------------------
+ next(start_guide) :=
+ case
+ start_guide : FALSE;
+ !cg.idle : FALSE;
+ TRUE : {FALSE, TRUE};
+ esac;
+
+ next(smode5) :=
+ case
+ fixed_values : smode5;
+ cg.idle : { FALSE, TRUE };
+ TRUE : smode5;
+ esac;
+
+ next(vel) :=
+ case
+ fixed_values : vel;
+ cg.idle : {GRT_vi_3eo_max, GRT_vi_3eo_min, LEQ_vi_3eo_min};
+ TRUE : vel;
+ esac;
+
+ next(q_bar) :=
+ case
+ fixed_values : q_bar;
+ cg.idle : {GRT_qbar_reg3, GRT_qbar_reg1, LEQ_qbar_reg1};
+ TRUE : q_bar;
+ esac;
+
+ next(q_bar_a_GRT_qbar_max_sep) :=
+ case
+ fixed_values : q_bar_a_GRT_qbar_max_sep;
+ cg.idle : { FALSE, TRUE };
+ TRUE : q_bar_a_GRT_qbar_max_sep;
+ esac;
+
+ next(apogee_alt_LT_alt_ref) :=
+ case
+ fixed_values : apogee_alt_LT_alt_ref;
+ cg.idle : { FALSE, TRUE };
+ TRUE : apogee_alt_LT_alt_ref;
+ esac;
+
+ next(h_dot_LT_hdot_reg2) :=
+ case
+ fixed_values : h_dot_LT_hdot_reg2;
+ cg.idle : { FALSE, TRUE };
+ TRUE : h_dot_LT_hdot_reg2;
+ esac;
+
+ next(h_dot_LT_0) :=
+ case
+ fixed_values : h_dot_LT_0;
+ cg.idle : { FALSE, TRUE };
+ TRUE : h_dot_LT_0;
+ esac;
+
+ next(alpha_n_GRT_alpha_reg2) :=
+ case
+ fixed_values : alpha_n_GRT_alpha_reg2;
+ cg.idle : { FALSE, TRUE };
+ TRUE : alpha_n_GRT_alpha_reg2;
+ esac;
+
+ next(delta_r_GRT_del_r_usp) :=
+ case
+ fixed_values : delta_r_GRT_del_r_usp;
+ cg.idle : { FALSE, TRUE };
+ TRUE : delta_r_GRT_del_r_usp;
+ esac;
+
+ next(v_horiz_dnrng_LT_0) :=
+ case
+ fixed_values : v_horiz_dnrng_LT_0;
+ cg.idle : { FALSE, TRUE };
+ TRUE : v_horiz_dnrng_LT_0;
+ esac;
+
+ next(meco_confirmed) :=
+ case
+ fixed_values : meco_confirmed;
+ meco_confirmed : TRUE;
+ cg.idle : { FALSE, TRUE };
+ TRUE : meco_confirmed;
+ esac;
+
+ next(et_sep_cmd) :=
+ case
+ fixed_values : et_sep_cmd;
+ et_sep_cmd : TRUE;
+ cg.idle : { FALSE, TRUE };
+ TRUE : et_sep_cmd;
+ esac;
+
+ next(cont_minus_z_compl) :=
+ case
+ fixed_values : cont_minus_z_compl;
+ cg.idle : { FALSE, TRUE };
+ TRUE : cont_minus_z_compl;
+ esac;
+
+ next(t_nav-t_et_sep_GRT_dt_min_z_102) :=
+ case
+ fixed_values : t_nav-t_et_sep_GRT_dt_min_z_102;
+ cg.idle : { FALSE, TRUE };
+ TRUE : t_nav-t_et_sep_GRT_dt_min_z_102;
+ esac;
+
+ next(ABS_q_orb_GRT_q_minus_z_max) :=
+ case
+ fixed_values : ABS_q_orb_GRT_q_minus_z_max;
+ cg.idle : { FALSE, TRUE };
+ TRUE : ABS_q_orb_GRT_q_minus_z_max;
+ esac;
+
+ next(ABS_r_orb_GRT_r_minus_z_max) :=
+ case
+ fixed_values : ABS_r_orb_GRT_r_minus_z_max;
+ cg.idle : { FALSE, TRUE };
+ TRUE : ABS_r_orb_GRT_r_minus_z_max;
+ esac;
+
+ next(excess_OMS_propellant) :=
+ case
+ fixed_values : excess_OMS_propellant;
+ cg.idle & excess_OMS_propellant : { FALSE, TRUE };
+ TRUE : excess_OMS_propellant;
+ esac;
+
+ next(q_bar_a_LT_qbar_oms_dump) :=
+ case
+ fixed_values : q_bar_a_LT_qbar_oms_dump;
+ cg.idle : { FALSE, TRUE };
+ TRUE : q_bar_a_LT_qbar_oms_dump;
+ esac;
+
+ next(rcs_all_jet_inhibit) :=
+ case
+ fixed_values : rcs_all_jet_inhibit;
+ cg.idle : { FALSE, TRUE };
+ TRUE : rcs_all_jet_inhibit;
+ esac;
+
+ next(alt_GRT_alt_min_102_dump) :=
+ case
+ fixed_values : alt_GRT_alt_min_102_dump;
+ cg.idle : { FALSE, TRUE };
+ TRUE : alt_GRT_alt_min_102_dump;
+ esac;
+
+ next(t_nav-t_gmtlo_LT_t_dmp_last) :=
+ case
+ fixed_values : t_nav-t_gmtlo_LT_t_dmp_last;
+ cg.idle : { FALSE, TRUE };
+ TRUE : t_nav-t_gmtlo_LT_t_dmp_last;
+ esac;
+
+ next(pre_sep) :=
+ case
+ fixed_values : pre_sep;
+ cg.idle : { FALSE, TRUE };
+ TRUE : pre_sep;
+ esac;
+
+ next(cond_18) :=
+ case
+ fixed_values : cond_18;
+ cg.idle : { FALSE, TRUE };
+ TRUE : cond_18;
+ esac;
+
+ next(q_orb_LT_0) :=
+ case
+ fixed_values : q_orb_LT_0;
+ cg.idle : { FALSE, TRUE };
+ TRUE : q_orb_LT_0;
+ esac;
+
+ next(ABS_alf_err_LT_alf_sep_err) :=
+ case
+ fixed_values : ABS_alf_err_LT_alf_sep_err;
+ cg.idle : { FALSE, TRUE };
+ TRUE : ABS_alf_err_LT_alf_sep_err;
+ esac;
+
+ next(cond_20b) :=
+ case
+ fixed_values : cond_20b;
+ cg.idle : { FALSE, TRUE };
+ TRUE : cond_20b;
+ esac;
+
+ next(cond_21) :=
+ case
+ fixed_values : cond_21;
+ cg.idle : { FALSE, TRUE };
+ TRUE : cond_21;
+ esac;
+
+ next(ABS_beta_n_GRT_beta_max) :=
+ case
+ fixed_values : ABS_beta_n_GRT_beta_max;
+ cg.idle : { FALSE, TRUE };
+ TRUE : ABS_beta_n_GRT_beta_max;
+ esac;
+
+ next(cond_24) :=
+ case
+ fixed_values : cond_24;
+ cg.idle : { FALSE, TRUE };
+ TRUE : cond_24;
+ esac;
+
+ next(cond_26) :=
+ case
+ fixed_values : cond_26;
+ cg.idle : { FALSE, TRUE };
+ TRUE : cond_26;
+ esac;
+
+ next(cond_27) :=
+ case
+ fixed_values : cond_27;
+ cg.idle : { FALSE, TRUE };
+ TRUE : cond_27;
+ esac;
+
+ next(cond_29) :=
+ case
+ fixed_values : cond_29;
+ cg.idle : { FALSE, TRUE };
+ TRUE : cond_29;
+ esac;
+
+ next(mm602_OK) :=
+ case
+ fixed_values : mm602_OK;
+ cg.idle : { FALSE, TRUE };
+ TRUE : mm602_OK;
+ esac;
+
+ next(mated_coast_mnvr) :=
+ case
+ next(cg.step) = 1 : FALSE;
+ cg.step = 6 & cg.r in {reg1, reg2, reg3, reg4, reg102} : TRUE;
+ TRUE : mated_coast_mnvr;
+ esac;
+
+---------------------------------------------------------------------
+---------------------------------------------------------------------
+DEFINE
+ fixed_values := FALSE;
+
+ output_ok :=
+ case
+ cg.q_gcb_i = undef | cg.wcb2 = undef |
+ cg.cont_3eo_pr_delay = 5 |
+ cg.etsep_y_drift = undef :
+ case
+ !mated_coast_mnvr: 1;
+ TRUE : undef;
+ esac;
+ !mated_coast_mnvr: toint(cg.q_gcb_i = quat_entry_M50_to_cmdbody &
+ cg.wcb2 = post_sep_0);
+-- reg1 never happens?
+-- cg.r = reg1 : (cg.q_gcb_i = quat_reg1 & cg.wcb2 = reg1_0 &
+-- cg.cont_3eo_pr_delay = minus_z_reg1 &
+-- cg.etsep_y_drift = minus_z_reg1) | cg.emerg_sep;
+ cg.r = reg2 : toint((cg.q_gcb_i = quat_reg2 & cg.wcb2 = reg2_neg4 &
+ cg.cont_3eo_pr_delay = minus_z_reg2 &
+ cg.etsep_y_drift = minus_z_reg2) | cg.emerg_sep);
+
+ cg.r = reg3 : toint((cg.q_gcb_i = quat_reg3 & cg.wcb2 = wcb2_3eo &
+ cg.cont_3eo_pr_delay = minus_z_reg3 &
+ cg.etsep_y_drift = minus_z_reg3) | cg.emerg_sep);
+ cg.r = reg4 : toint((cg.q_gcb_i = quat_reg4 & cg.wcb2 = reg4_0 &
+ cg.cont_3eo_pr_delay = minus_z_reg4 &
+ cg.etsep_y_drift = minus_z_reg4) | cg.emerg_sep);
+ cg.r = reg102 : toint((cg.q_gcb_i = quat_reg102_undef &
+ cg.wcb2 = reg102_undef &
+ cg.cont_3eo_pr_delay = minus_z_reg102 &
+ cg.etsep_y_drift = minus_z_reg102) | cg.emerg_sep);
+ TRUE : 0;
+ esac;
+
+---------------------------------------------------------------------
+-------- Specifications ---------------------------------------------
+---------------------------------------------------------------------
+
+-- Contingency Guide terminates
+
+SPEC AG(!cg.idle -> AF(cg.finished))
+
+-- Contingency guide can be executed infinitely often
+
+SPEC AG( (cg.idle | cg.finished) ->
+ EF(!(cg.idle | cg.finished) & EF(cg.finished)))
+
+-- Contingency mode select task works fine
+
+SPEC AG(cs.cont_3EO_start & cs.region_selected ->
+ ((cs.m_mode = mm102 | meco_confirmed) &
+ cs.r != reg-1 & cs.r != reg0))
+
+-- Bad (initial) value never happens again once region is computed
+-- unless we restart the task
+
+--SPEC AG(cs.r != reg-1 -> !E[!cg.start_cont_3eo_mode_select U
+-- cs.r = reg-1 & !cg.start_cont_3eo_mode_select])
+
+-- Comment out each of the regions and see if this is still true
+-- (Check, if ALL of the regions can happen)
+
+--SPEC AG(cs.r in {reg-1
+-- ,reg0
+-- ,reg1
+-- ,reg2
+-- ,reg3
+-- ,reg102
+-- })
+
+-- Comment out each of the regions and see if this is still true
+-- (Check, if ALL of the regions can happen)
+
+--SPEC AG(cg.r in {reg-1
+-- ,reg0
+-- ,reg1
+-- ,reg2
+-- ,reg3
+-- ,reg4
+-- ,reg102
+-- })
+
+-- Mode_select starts at the next step after its "start" bit is set:
+
+--SPEC AG(!cg.start_cont_3eo_mode_select ->
+-- AX(cg.start_cont_3eo_mode_select & cs.step in {exit, undef} ->
+-- AX(cs.step = 1 & !cs.region_selected)))
+
+-- During major mode 103, the inertial velocity is monitored.
+-- Below an I-loaded velocity, a MECO would constitute a contingency
+-- abort. (Must NOT be in SMODE=5 (??))
+
+SPEC AG(cg.start_cont_3eo_mode_select & cs.m_mode = mm103 &
+ vel = LEQ_vi_3eo_min & meco_confirmed & !smode5 ->
+ A[!cs.region_selected U cs.region_selected & cs.cont_3EO_start])
+
+-- Above a certain inertial velocity (in mode 103), the 3E/O field
+-- is blanked, indicating that a MECO at this point would not require
+-- an OPS 6 contingency abort.
+
+SPEC AG(cs.region_selected ->
+ (cs.m_mode = mm103 & vel = GRT_vi_3eo_max -> !cs.cont_3EO_start))
+
+-- Between the two velocities, an apogee altitude - velocity curve is
+-- constructed based on the current inertial velocity. If the apogee
+-- altitude is above this curve, a contingency abort capability is
+-- still required and a 3E/O region index will be calculated.
+-- Otherwise, the 3E/O field is blanked out and no further contingency
+-- abort calculations will be performed. (Must NOT be in SMODE=5 (??))
+
+SPEC AG(cg.start_cont_3eo_mode_select & cs.m_mode = mm103 &
+ vel = GRT_vi_3eo_min & meco_confirmed & !smode5 ->
+ A[!cs.region_selected U cs.region_selected &
+ apogee_alt_LT_alt_ref = !cs.cont_3EO_start])
+
+-- For an RTLS trajectory (SMODE=5), a check is made on the downrange
+-- velocity to see if the vehicle is heading away from the landing site.
+-- If this is the case, a 3E/O region index is calculated. If the vehicle
+-- is heading back to the landing site, and the current range to the MECO
+-- R-V line is greater than an I-loaded value, a 3E/O region index is
+-- calculated. Otherwise, an intact abort is possible and the 3E/O field
+-- is blanked.
+
+SPEC AG(cg.start_cont_3eo_mode_select & smode5 & meco_confirmed &
+ (!v_horiz_dnrng_LT_0 | !delta_r_GRT_del_r_usp) ->
+ A[!cs.region_selected U cs.region_selected & cs.cont_3EO_start])
+
+-- If this task is called prior to SRB separation [mm102], the 3E/O region
+-- index is set to 102 and the 3E/O contingency flag is set.
+
+SPEC AG(cs.m_mode = mm102 & cg.start_cont_3eo_mode_select ->
+ AX (A [ !cs.region_selected U cs.region_selected &
+ cs.r = reg102 & cs.cont_3EO_start]))
+
+-- After SRB separation, on every pass that the 3E/O region index is
+-- calculated, a check is made to see if MECO confirmed has occured. If
+-- so, a check is made to see if the major mode is 103. If so, an RTLS is
+-- automatically invoked to transition to major mode 601.
+
+SPEC AG(!cs.region_selected & cs.m_mode = mm103 & meco_confirmed ->
+ A[!cs.region_selected U cs.region_selected & cs.r != reg0 ->
+ cs.m_mode = mm601 & cs.RTLS_abort_declared])
+
+-- Once the 3E/O contingency flag has been set, this task is no longer
+-- executed.
+
+SPEC AG(cs.cont_3EO_start -> AG(!cg.start_cont_3eo_mode_select))
+
+-- If MECO confirmed occurs in MM103 and an OPS 6 contingency abort
+-- procedure is still required, contingency 3E/O guidance sets the
+-- CONT_3EO_START flag ON. Contingency 3E/O guidance then switches
+-- from its display support function into an actual auto guidance
+-- steering process. [...] Contingency 3E/O guidance sets the RTLS abort
+-- declared flag and the MSC performs the transition from from major mode
+-- 103 to 601.
+
+SPEC AG(!cg.idle & !cg.finished & !cs.region_selected & cs.m_mode = mm103 ->
+ A[ !cg.finished U cg.finished & cs.region_selected &
+ (cs.cont_3EO_start -> cs.m_mode = mm601 & cs.RTLS_abort_declared) ])
+
+-- If MECO confirmed occurs in a major mode 601 and a contingency abort
+-- procedure is still required, contingency 3E/O guidance sets the
+-- CONT_3EO_START flag ON. [...] Contingency 3E/O guidance then commands
+-- 3E/O auto maneuvers in major mode 601. [What are these maneuvers??]
+
+SPEC AG(cg.finished & cs.m_mode = mm601 & !et_sep_cmd &
+ meco_confirmed & cs.cont_3EO_start ->
+ cg.q_gcb_i in {quat_reg1, quat_reg2, quat_reg3, quat_reg4, undef}
+ | cg.emerg_sep)
+
+-- If MECO confirmed occurs in a first stage (MM102) [...], contingency
+-- 3E/O guidance will command a fast ET separation during SRB tailoff in
+-- major mode 102. CONT 3E/O GUID will then command maneuver post-sep in
+-- MM601 (???). [ I'm not sure what indicates fast ET sep.: emerg_sep or
+-- early_sep, or what? ]
+
+SPEC AG(cg.finished & cs.m_mode = mm102 & meco_confirmed & pre_sep ->
+ cg.emerg_sep | et_sep_cmd
+ | cg.et_sep_man_initiate
+ | cg.early_sep
+ )
+
+---------------------------------------------
+-- Invariants from Murphi code --------------
+---------------------------------------------
+
+--SPEC AG(cg.finished -> (output_ok != 0 | (output_ok = undef &
+-- (cg.emerg_sep | !cg.cont_sep_cplt))))
+
+--SPEC AG(!cg.finished & !cg.idle -> !mated_coast_mnvr | !et_sep_cmd)
+
+-- Stronger version !!!
+
+SPEC AG(cg.finished -> output_ok != 0)
+
+-- Contingency Guidance shall command an ET separation
+-- [under certain conditions :-].
+
+SPEC AG(cs.cont_3EO_start & cg.finished &
+ (cg.r = reg1 -> cond_29) &
+ (cg.r = reg2 -> cond_24 & cond_26) &
+ (cg.r = reg3 -> cg.alpha_ok &
+ (ABS_alf_err_LT_alf_sep_err -> cond_20b)) &
+ (cg.r = reg4 -> cond_18 & q_orb_LT_0) &
+ (cg.r = reg102 -> pre_sep) ->
+ et_sep_cmd | cg.et_sep_man_initiate
+ | cg.early_sep
+ | cg.emerg_sep
+ )
+
+-- Contingency Guidance shall command at most one interconnected OMS dump.
+
+SPEC AG(cg.finished & cg.oms_rcs_i_c_inh_ena_cmd ->
+ AG(!cg.oms_rcs_i_c_inh_ena_cmd -> AG(!cg.oms_rcs_i_c_inh_ena_cmd)))
+
+-- Contingency Guidance shall command a transition to glide RTLS
+-- (flight mode 602)
+
+SPEC AG(cg.finished & cs.m_mode = mm601 ->
+ --cg.cont_sep_cplt | cg.emerg_sep |
+ cg.call_RTLS_abort_task)
+
+-- Paper, p. 28, unstated assumption 2: at step 6 the region is
+-- among 102, 1-4.
+
+SPEC AG(cg.step = 6 -> cg.r in {reg102, reg1, reg2, reg3, reg4})
+
+-- The transition to mode 602 shall not occur until the entry maneuver
+-- has been calculated
+
+SPEC !E[cg.q_gcb_i = undef U cg.cont_sep_cplt & cg.q_gcb_i = undef]
+
+-- The entry maneuver calculations shall not commence until the OMS/RCS
+-- interconnect, if any, is complete (??? What does it exactly mean???)
+-- !!!
+--SPEC AG(cg.oms_rcs_i_c_inh_ena_cmd ->
+-- !E[cg.oms_rcs_i_c_inh_ena_cmd U
+-- cg.q_gcb_i != undef & cg.oms_rcs_i_c_inh_ena_cmd])
+
+SPEC AG(cg.oms_rcs_i_c_inh_ena_cmd ->
+ !E[rcs_all_jet_inhibit U
+ cg.q_gcb_i != undef & rcs_all_jet_inhibit])
+
+-- The OMS dump shall not be considered until the -Z translation is complete.
+
+SPEC !E[!cont_minus_z_compl & cg.r != reg102 U cg.orbiter_dump_ena]
+
+-- Completion of -Z translation shall not be checked until ET separation
+-- has been commanded
+
+SPEC !E[!et_sep_cmd U cg.step = 7]
+
+-- ET separation shall be commanded if and only if an abort maneuver
+-- region is assigned [and again there are *certain conditions*].
+
+SPEC AG(cg.finished & cs.cont_3EO_start &
+ (cg.r = reg1 -> cond_29) &
+ (cg.r = reg2 -> cond_24 & cond_26) &
+ (cg.r = reg3 -> cg.alpha_ok &
+ (ABS_alf_err_LT_alf_sep_err -> cond_20b)) &
+ (cg.r = reg4 -> cond_18 & q_orb_LT_0) &
+ (cg.r = reg102 -> pre_sep) ->
+ (cg.et_sep_man_initiate | et_sep_cmd
+ <-> cg.r in {reg1, reg2, reg3, reg4, reg102}))
+
+-- The assigned region can not change arbitrarily.
+
+-- Regions 1 and 2 may interchange, but will not switch to any other region:
+
+SPEC AG(cg.finished & cs.cont_3EO_start & cg.r in {reg1,reg2} ->
+ AG(cg.finished -> cg.r in {reg1,reg2}))
+
+-- Regions 3 and 4 may interchange, but will not switch to any other region:
+
+SPEC AG(cg.finished & cs.cont_3EO_start & cg.r in {reg3,reg4} ->
+ AG(cg.finished -> cg.r in {reg3,reg4}))
+
+-- Region 102 never changes:
+
+SPEC AG(cg.finished & cg.r = reg102 -> AG(cg.finished -> cg.r = reg102))
diff --git a/tests/examplefiles/hello-world.puzzlet.aheui b/tests/examplefiles/hello-world.puzzlet.aheui
new file mode 100644
index 00000000..e7ef3a62
--- /dev/null
+++ b/tests/examplefiles/hello-world.puzzlet.aheui
@@ -0,0 +1,10 @@
+밤밣따빠밣밟따뿌
+빠맣파빨받밤뚜뭏
+돋밬탕빠맣붏두붇
+볻뫃박발뚷투뭏붖
+뫃도뫃희멓뭏뭏붘
+뫃봌토범더벌뿌뚜
+뽑뽀멓멓더벓뻐뚠
+뽀덩벐멓뻐덕더벅
+
+https://github.com/aheui/snippets/blob/master/hello-world/hello-world.puzzlet.aheui
diff --git a/tests/examplefiles/plain.bst b/tests/examplefiles/plain.bst
new file mode 100644
index 00000000..7adf4bb0
--- /dev/null
+++ b/tests/examplefiles/plain.bst
@@ -0,0 +1,1097 @@
+% BibTeX standard bibliography style `plain'
+ % Version 0.99b (8-Dec-10 release) for BibTeX versions 0.99a or later.
+ % Copyright (C) 1984, 1985, 1988, 2010 Howard Trickey and Oren Patashnik.
+ % Unlimited copying and redistribution of this file are permitted as long as
+ % it is unmodified. Modifications (and redistribution of modified versions)
+ % are also permitted, but only if the resulting file is renamed to something
+ % besides btxbst.doc, plain.bst, unsrt.bst, alpha.bst, and abbrv.bst.
+ % This restriction helps ensure that all standard styles are identical.
+ % The file btxbst.doc has the documentation for this style.
+
+ENTRY
+ { address
+ author
+ booktitle
+ chapter
+ edition
+ editor
+ howpublished
+ institution
+ journal
+ key
+ month
+ note
+ number
+ organization
+ pages
+ publisher
+ school
+ series
+ title
+ type
+ volume
+ year
+ }
+ {}
+ { label }
+
+INTEGERS { output.state before.all mid.sentence after.sentence after.block }
+
+FUNCTION {init.state.consts}
+{ #0 'before.all :=
+ #1 'mid.sentence :=
+ #2 'after.sentence :=
+ #3 'after.block :=
+}
+
+STRINGS { s t }
+
+FUNCTION {output.nonnull}
+{ 's :=
+ output.state mid.sentence =
+ { ", " * write$ }
+ { output.state after.block =
+ { add.period$ write$
+ newline$
+ "\newblock " write$
+ }
+ { output.state before.all =
+ 'write$
+ { add.period$ " " * write$ }
+ if$
+ }
+ if$
+ mid.sentence 'output.state :=
+ }
+ if$
+ s
+}
+
+FUNCTION {output}
+{ duplicate$ empty$
+ 'pop$
+ 'output.nonnull
+ if$
+}
+
+FUNCTION {output.check}
+{ 't :=
+ duplicate$ empty$
+ { pop$ "empty " t * " in " * cite$ * warning$ }
+ 'output.nonnull
+ if$
+}
+
+FUNCTION {output.bibitem}
+{ newline$
+ "\bibitem{" write$
+ cite$ write$
+ "}" write$
+ newline$
+ ""
+ before.all 'output.state :=
+}
+
+FUNCTION {fin.entry}
+{ add.period$
+ write$
+ newline$
+}
+
+FUNCTION {new.block}
+{ output.state before.all =
+ 'skip$
+ { after.block 'output.state := }
+ if$
+}
+
+FUNCTION {new.sentence}
+{ output.state after.block =
+ 'skip$
+ { output.state before.all =
+ 'skip$
+ { after.sentence 'output.state := }
+ if$
+ }
+ if$
+}
+
+FUNCTION {not}
+{ { #0 }
+ { #1 }
+ if$
+}
+
+FUNCTION {and}
+{ 'skip$
+ { pop$ #0 }
+ if$
+}
+
+FUNCTION {or}
+{ { pop$ #1 }
+ 'skip$
+ if$
+}
+
+FUNCTION {new.block.checka}
+{ empty$
+ 'skip$
+ 'new.block
+ if$
+}
+
+FUNCTION {new.block.checkb}
+{ empty$
+ swap$ empty$
+ and
+ 'skip$
+ 'new.block
+ if$
+}
+
+FUNCTION {new.sentence.checka}
+{ empty$
+ 'skip$
+ 'new.sentence
+ if$
+}
+
+FUNCTION {new.sentence.checkb}
+{ empty$
+ swap$ empty$
+ and
+ 'skip$
+ 'new.sentence
+ if$
+}
+
+FUNCTION {field.or.null}
+{ duplicate$ empty$
+ { pop$ "" }
+ 'skip$
+ if$
+}
+
+FUNCTION {emphasize}
+{ duplicate$ empty$
+ { pop$ "" }
+ { "{\em " swap$ * "}" * }
+ if$
+}
+
+INTEGERS { nameptr namesleft numnames }
+
+FUNCTION {format.names}
+{ 's :=
+ #1 'nameptr :=
+ s num.names$ 'numnames :=
+ numnames 'namesleft :=
+ { namesleft #0 > }
+ { s nameptr "{ff~}{vv~}{ll}{, jj}" format.name$ 't :=
+ nameptr #1 >
+ { namesleft #1 >
+ { ", " * t * }
+ { numnames #2 >
+ { "," * }
+ 'skip$
+ if$
+ t "others" =
+ { " et~al." * }
+ { " and " * t * }
+ if$
+ }
+ if$
+ }
+ 't
+ if$
+ nameptr #1 + 'nameptr :=
+ namesleft #1 - 'namesleft :=
+ }
+ while$
+}
+
+FUNCTION {format.authors}
+{ author empty$
+ { "" }
+ { author format.names }
+ if$
+}
+
+FUNCTION {format.editors}
+{ editor empty$
+ { "" }
+ { editor format.names
+ editor num.names$ #1 >
+ { ", editors" * }
+ { ", editor" * }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.title}
+{ title empty$
+ { "" }
+ { title "t" change.case$ }
+ if$
+}
+
+FUNCTION {n.dashify}
+{ 't :=
+ ""
+ { t empty$ not }
+ { t #1 #1 substring$ "-" =
+ { t #1 #2 substring$ "--" = not
+ { "--" *
+ t #2 global.max$ substring$ 't :=
+ }
+ { { t #1 #1 substring$ "-" = }
+ { "-" *
+ t #2 global.max$ substring$ 't :=
+ }
+ while$
+ }
+ if$
+ }
+ { t #1 #1 substring$ *
+ t #2 global.max$ substring$ 't :=
+ }
+ if$
+ }
+ while$
+}
+
+FUNCTION {format.date}
+{ year empty$
+ { month empty$
+ { "" }
+ { "there's a month but no year in " cite$ * warning$
+ month
+ }
+ if$
+ }
+ { month empty$
+ 'year
+ { month " " * year * }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.btitle}
+{ title emphasize
+}
+
+FUNCTION {tie.or.space.connect}
+{ duplicate$ text.length$ #3 <
+ { "~" }
+ { " " }
+ if$
+ swap$ * *
+}
+
+FUNCTION {either.or.check}
+{ empty$
+ 'pop$
+ { "can't use both " swap$ * " fields in " * cite$ * warning$ }
+ if$
+}
+
+FUNCTION {format.bvolume}
+{ volume empty$
+ { "" }
+ { "volume" volume tie.or.space.connect
+ series empty$
+ 'skip$
+ { " of " * series emphasize * }
+ if$
+ "volume and number" number either.or.check
+ }
+ if$
+}
+
+FUNCTION {format.number.series}
+{ volume empty$
+ { number empty$
+ { series field.or.null }
+ { output.state mid.sentence =
+ { "number" }
+ { "Number" }
+ if$
+ number tie.or.space.connect
+ series empty$
+ { "there's a number but no series in " cite$ * warning$ }
+ { " in " * series * }
+ if$
+ }
+ if$
+ }
+ { "" }
+ if$
+}
+
+FUNCTION {format.edition}
+{ edition empty$
+ { "" }
+ { output.state mid.sentence =
+ { edition "l" change.case$ " edition" * }
+ { edition "t" change.case$ " edition" * }
+ if$
+ }
+ if$
+}
+
+INTEGERS { multiresult }
+
+FUNCTION {multi.page.check}
+{ 't :=
+ #0 'multiresult :=
+ { multiresult not
+ t empty$ not
+ and
+ }
+ { t #1 #1 substring$
+ duplicate$ "-" =
+ swap$ duplicate$ "," =
+ swap$ "+" =
+ or or
+ { #1 'multiresult := }
+ { t #2 global.max$ substring$ 't := }
+ if$
+ }
+ while$
+ multiresult
+}
+
+FUNCTION {format.pages}
+{ pages empty$
+ { "" }
+ { pages multi.page.check
+ { "pages" pages n.dashify tie.or.space.connect }
+ { "page" pages tie.or.space.connect }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.vol.num.pages}
+{ volume field.or.null
+ number empty$
+ 'skip$
+ { "(" number * ")" * *
+ volume empty$
+ { "there's a number but no volume in " cite$ * warning$ }
+ 'skip$
+ if$
+ }
+ if$
+ pages empty$
+ 'skip$
+ { duplicate$ empty$
+ { pop$ format.pages }
+ { ":" * pages n.dashify * }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.chapter.pages}
+{ chapter empty$
+ 'format.pages
+ { type empty$
+ { "chapter" }
+ { type "l" change.case$ }
+ if$
+ chapter tie.or.space.connect
+ pages empty$
+ 'skip$
+ { ", " * format.pages * }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.in.ed.booktitle}
+{ booktitle empty$
+ { "" }
+ { editor empty$
+ { "In " booktitle emphasize * }
+ { "In " format.editors * ", " * booktitle emphasize * }
+ if$
+ }
+ if$
+}
+
+FUNCTION {empty.misc.check}
+{ author empty$ title empty$ howpublished empty$
+ month empty$ year empty$ note empty$
+ and and and and and
+ key empty$ not and
+ { "all relevant fields are empty in " cite$ * warning$ }
+ 'skip$
+ if$
+}
+
+FUNCTION {format.thesis.type}
+{ type empty$
+ 'skip$
+ { pop$
+ type "t" change.case$
+ }
+ if$
+}
+
+FUNCTION {format.tr.number}
+{ type empty$
+ { "Technical Report" }
+ 'type
+ if$
+ number empty$
+ { "t" change.case$ }
+ { number tie.or.space.connect }
+ if$
+}
+
+FUNCTION {format.article.crossref}
+{ key empty$
+ { journal empty$
+ { "need key or journal for " cite$ * " to crossref " * crossref *
+ warning$
+ ""
+ }
+ { "In {\em " journal * "\/}" * }
+ if$
+ }
+ { "In " key * }
+ if$
+ " \cite{" * crossref * "}" *
+}
+
+FUNCTION {format.crossref.editor}
+{ editor #1 "{vv~}{ll}" format.name$
+ editor num.names$ duplicate$
+ #2 >
+ { pop$ " et~al." * }
+ { #2 <
+ 'skip$
+ { editor #2 "{ff }{vv }{ll}{ jj}" format.name$ "others" =
+ { " et~al." * }
+ { " and " * editor #2 "{vv~}{ll}" format.name$ * }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.book.crossref}
+{ volume empty$
+ { "empty volume in " cite$ * "'s crossref of " * crossref * warning$
+ "In "
+ }
+ { "Volume" volume tie.or.space.connect
+ " of " *
+ }
+ if$
+ editor empty$
+ editor field.or.null author field.or.null =
+ or
+ { key empty$
+ { series empty$
+ { "need editor, key, or series for " cite$ * " to crossref " *
+ crossref * warning$
+ "" *
+ }
+ { "{\em " * series * "\/}" * }
+ if$
+ }
+ { key * }
+ if$
+ }
+ { format.crossref.editor * }
+ if$
+ " \cite{" * crossref * "}" *
+}
+
+FUNCTION {format.incoll.inproc.crossref}
+{ editor empty$
+ editor field.or.null author field.or.null =
+ or
+ { key empty$
+ { booktitle empty$
+ { "need editor, key, or booktitle for " cite$ * " to crossref " *
+ crossref * warning$
+ ""
+ }
+ { "In {\em " booktitle * "\/}" * }
+ if$
+ }
+ { "In " key * }
+ if$
+ }
+ { "In " format.crossref.editor * }
+ if$
+ " \cite{" * crossref * "}" *
+}
+
+FUNCTION {article}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.title "title" output.check
+ new.block
+ crossref missing$
+ { journal emphasize "journal" output.check
+ format.vol.num.pages output
+ format.date "year" output.check
+ }
+ { format.article.crossref output.nonnull
+ format.pages output
+ }
+ if$
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {book}
+{ output.bibitem
+ author empty$
+ { format.editors "author and editor" output.check }
+ { format.authors output.nonnull
+ crossref missing$
+ { "author and editor" editor either.or.check }
+ 'skip$
+ if$
+ }
+ if$
+ new.block
+ format.btitle "title" output.check
+ crossref missing$
+ { format.bvolume output
+ new.block
+ format.number.series output
+ new.sentence
+ publisher "publisher" output.check
+ address output
+ }
+ { new.block
+ format.book.crossref output.nonnull
+ }
+ if$
+ format.edition output
+ format.date "year" output.check
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {booklet}
+{ output.bibitem
+ format.authors output
+ new.block
+ format.title "title" output.check
+ howpublished address new.block.checkb
+ howpublished output
+ address output
+ format.date output
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {inbook}
+{ output.bibitem
+ author empty$
+ { format.editors "author and editor" output.check }
+ { format.authors output.nonnull
+ crossref missing$
+ { "author and editor" editor either.or.check }
+ 'skip$
+ if$
+ }
+ if$
+ new.block
+ format.btitle "title" output.check
+ crossref missing$
+ { format.bvolume output
+ format.chapter.pages "chapter and pages" output.check
+ new.block
+ format.number.series output
+ new.sentence
+ publisher "publisher" output.check
+ address output
+ }
+ { format.chapter.pages "chapter and pages" output.check
+ new.block
+ format.book.crossref output.nonnull
+ }
+ if$
+ format.edition output
+ format.date "year" output.check
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {incollection}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.title "title" output.check
+ new.block
+ crossref missing$
+ { format.in.ed.booktitle "booktitle" output.check
+ format.bvolume output
+ format.number.series output
+ format.chapter.pages output
+ new.sentence
+ publisher "publisher" output.check
+ address output
+ format.edition output
+ format.date "year" output.check
+ }
+ { format.incoll.inproc.crossref output.nonnull
+ format.chapter.pages output
+ }
+ if$
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {inproceedings}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.title "title" output.check
+ new.block
+ crossref missing$
+ { format.in.ed.booktitle "booktitle" output.check
+ format.bvolume output
+ format.number.series output
+ format.pages output
+ address empty$
+ { organization publisher new.sentence.checkb
+ organization output
+ publisher output
+ format.date "year" output.check
+ }
+ { address output.nonnull
+ format.date "year" output.check
+ new.sentence
+ organization output
+ publisher output
+ }
+ if$
+ }
+ { format.incoll.inproc.crossref output.nonnull
+ format.pages output
+ }
+ if$
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {conference} { inproceedings }
+
+FUNCTION {manual}
+{ output.bibitem
+ author empty$
+ { organization empty$
+ 'skip$
+ { organization output.nonnull
+ address output
+ }
+ if$
+ }
+ { format.authors output.nonnull }
+ if$
+ new.block
+ format.btitle "title" output.check
+ author empty$
+ { organization empty$
+ { address new.block.checka
+ address output
+ }
+ 'skip$
+ if$
+ }
+ { organization address new.block.checkb
+ organization output
+ address output
+ }
+ if$
+ format.edition output
+ format.date output
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {mastersthesis}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.title "title" output.check
+ new.block
+ "Master's thesis" format.thesis.type output.nonnull
+ school "school" output.check
+ address output
+ format.date "year" output.check
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {misc}
+{ output.bibitem
+ format.authors output
+ title howpublished new.block.checkb
+ format.title output
+ howpublished new.block.checka
+ howpublished output
+ format.date output
+ new.block
+ note output
+ fin.entry
+ empty.misc.check
+}
+
+FUNCTION {phdthesis}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.btitle "title" output.check
+ new.block
+ "PhD thesis" format.thesis.type output.nonnull
+ school "school" output.check
+ address output
+ format.date "year" output.check
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {proceedings}
+{ output.bibitem
+ editor empty$
+ { organization output }
+ { format.editors output.nonnull }
+ if$
+ new.block
+ format.btitle "title" output.check
+ format.bvolume output
+ format.number.series output
+ address empty$
+ { editor empty$
+ { publisher new.sentence.checka }
+ { organization publisher new.sentence.checkb
+ organization output
+ }
+ if$
+ publisher output
+ format.date "year" output.check
+ }
+ { address output.nonnull
+ format.date "year" output.check
+ new.sentence
+ editor empty$
+ 'skip$
+ { organization output }
+ if$
+ publisher output
+ }
+ if$
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {techreport}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.title "title" output.check
+ new.block
+ format.tr.number output.nonnull
+ institution "institution" output.check
+ address output
+ format.date "year" output.check
+ new.block
+ note output
+ fin.entry
+}
+
+FUNCTION {unpublished}
+{ output.bibitem
+ format.authors "author" output.check
+ new.block
+ format.title "title" output.check
+ new.block
+ note "note" output.check
+ format.date output
+ fin.entry
+}
+
+FUNCTION {default.type} { misc }
+
+MACRO {jan} {"January"}
+
+MACRO {feb} {"February"}
+
+MACRO {mar} {"March"}
+
+MACRO {apr} {"April"}
+
+MACRO {may} {"May"}
+
+MACRO {jun} {"June"}
+
+MACRO {jul} {"July"}
+
+MACRO {aug} {"August"}
+
+MACRO {sep} {"September"}
+
+MACRO {oct} {"October"}
+
+MACRO {nov} {"November"}
+
+MACRO {dec} {"December"}
+
+MACRO {acmcs} {"ACM Computing Surveys"}
+
+MACRO {acta} {"Acta Informatica"}
+
+MACRO {cacm} {"Communications of the ACM"}
+
+MACRO {ibmjrd} {"IBM Journal of Research and Development"}
+
+MACRO {ibmsj} {"IBM Systems Journal"}
+
+MACRO {ieeese} {"IEEE Transactions on Software Engineering"}
+
+MACRO {ieeetc} {"IEEE Transactions on Computers"}
+
+MACRO {ieeetcad}
+ {"IEEE Transactions on Computer-Aided Design of Integrated Circuits"}
+
+MACRO {ipl} {"Information Processing Letters"}
+
+MACRO {jacm} {"Journal of the ACM"}
+
+MACRO {jcss} {"Journal of Computer and System Sciences"}
+
+MACRO {scp} {"Science of Computer Programming"}
+
+MACRO {sicomp} {"SIAM Journal on Computing"}
+
+MACRO {tocs} {"ACM Transactions on Computer Systems"}
+
+MACRO {tods} {"ACM Transactions on Database Systems"}
+
+MACRO {tog} {"ACM Transactions on Graphics"}
+
+MACRO {toms} {"ACM Transactions on Mathematical Software"}
+
+MACRO {toois} {"ACM Transactions on Office Information Systems"}
+
+MACRO {toplas} {"ACM Transactions on Programming Languages and Systems"}
+
+MACRO {tcs} {"Theoretical Computer Science"}
+
+READ
+
+FUNCTION {sortify}
+{ purify$
+ "l" change.case$
+}
+
+INTEGERS { len }
+
+FUNCTION {chop.word}
+{ 's :=
+ 'len :=
+ s #1 len substring$ =
+ { s len #1 + global.max$ substring$ }
+ 's
+ if$
+}
+
+FUNCTION {sort.format.names}
+{ 's :=
+ #1 'nameptr :=
+ ""
+ s num.names$ 'numnames :=
+ numnames 'namesleft :=
+ { namesleft #0 > }
+ { nameptr #1 >
+ { " " * }
+ 'skip$
+ if$
+ s nameptr "{vv{ } }{ll{ }}{ ff{ }}{ jj{ }}" format.name$ 't :=
+ nameptr numnames = t "others" = and
+ { "et al" * }
+ { t sortify * }
+ if$
+ nameptr #1 + 'nameptr :=
+ namesleft #1 - 'namesleft :=
+ }
+ while$
+}
+
+FUNCTION {sort.format.title}
+{ 't :=
+ "A " #2
+ "An " #3
+ "The " #4 t chop.word
+ chop.word
+ chop.word
+ sortify
+ #1 global.max$ substring$
+}
+
+FUNCTION {author.sort}
+{ author empty$
+ { key empty$
+ { "to sort, need author or key in " cite$ * warning$
+ ""
+ }
+ { key sortify }
+ if$
+ }
+ { author sort.format.names }
+ if$
+}
+
+FUNCTION {author.editor.sort}
+{ author empty$
+ { editor empty$
+ { key empty$
+ { "to sort, need author, editor, or key in " cite$ * warning$
+ ""
+ }
+ { key sortify }
+ if$
+ }
+ { editor sort.format.names }
+ if$
+ }
+ { author sort.format.names }
+ if$
+}
+
+FUNCTION {author.organization.sort}
+{ author empty$
+ { organization empty$
+ { key empty$
+ { "to sort, need author, organization, or key in " cite$ * warning$
+ ""
+ }
+ { key sortify }
+ if$
+ }
+ { "The " #4 organization chop.word sortify }
+ if$
+ }
+ { author sort.format.names }
+ if$
+}
+
+FUNCTION {editor.organization.sort}
+{ editor empty$
+ { organization empty$
+ { key empty$
+ { "to sort, need editor, organization, or key in " cite$ * warning$
+ ""
+ }
+ { key sortify }
+ if$
+ }
+ { "The " #4 organization chop.word sortify }
+ if$
+ }
+ { editor sort.format.names }
+ if$
+}
+
+FUNCTION {presort}
+{ type$ "book" =
+ type$ "inbook" =
+ or
+ 'author.editor.sort
+ { type$ "proceedings" =
+ 'editor.organization.sort
+ { type$ "manual" =
+ 'author.organization.sort
+ 'author.sort
+ if$
+ }
+ if$
+ }
+ if$
+ " "
+ *
+ year field.or.null sortify
+ *
+ " "
+ *
+ title field.or.null
+ sort.format.title
+ *
+ #1 entry.max$ substring$
+ 'sort.key$ :=
+}
+
+ITERATE {presort}
+
+SORT
+
+STRINGS { longest.label }
+
+INTEGERS { number.label longest.label.width }
+
+FUNCTION {initialize.longest.label}
+{ "" 'longest.label :=
+ #1 'number.label :=
+ #0 'longest.label.width :=
+}
+
+FUNCTION {longest.label.pass}
+{ number.label int.to.str$ 'label :=
+ number.label #1 + 'number.label :=
+ label width$ longest.label.width >
+ { label 'longest.label :=
+ label width$ 'longest.label.width :=
+ }
+ 'skip$
+ if$
+}
+
+EXECUTE {initialize.longest.label}
+
+ITERATE {longest.label.pass}
+
+FUNCTION {begin.bib}
+{ preamble$ empty$
+ 'skip$
+ { preamble$ write$ newline$ }
+ if$
+ "\begin{thebibliography}{" longest.label * "}" * write$ newline$
+}
+
+EXECUTE {begin.bib}
+
+EXECUTE {init.state.consts}
+
+ITERATE {call.type$}
+
+FUNCTION {end.bib}
+{ newline$
+ "\end{thebibliography}" write$ newline$
+}
+
+EXECUTE {end.bib}
diff --git a/tests/examplefiles/rnc_example.rnc b/tests/examplefiles/rnc_example.rnc
new file mode 100644
index 00000000..a1440302
--- /dev/null
+++ b/tests/examplefiles/rnc_example.rnc
@@ -0,0 +1,33 @@
+# This is a sample RNC file from the tutorial for the 2003 Working Draft
+# http://relaxng.org/compact-tutorial-20030326.html
+
+element html {
+ element head {
+ element title { text }
+ },
+ element body {
+ element table {
+ attribute class { "addressBook" },
+ element tr {
+ attribute class { "card" },
+ element td {
+ attribute class { "name" },
+ mixed {
+ element span {
+ attribute class { "givenName" },
+ text
+ }?,
+ element span {
+ attribute class { "familyName" },
+ text
+ }?
+ }
+ },
+ element td {
+ attribute class { "email" },
+ text
+ }
+ }+
+ }
+ }
+}
diff --git a/tests/examplefiles/test.bib b/tests/examplefiles/test.bib
new file mode 100644
index 00000000..87e558d8
--- /dev/null
+++ b/tests/examplefiles/test.bib
@@ -0,0 +1,77 @@
+This is an example BibTeX file.
+This text is a comment.
+
+@preamble{"%%% example BibTeX file"}
+
+@Preamble{"\newcommand{\noopsort}[1]{} "
+ "\newcommand{\noopsort}[1]{} "}
+
+@String{SCI = "Science"}
+
+@STRING{JFernandez = "Fernandez, Julio M."}
+@StRiNg{HGaub = "Gaub, Hermann E."}
+@string{MGautel = "Gautel, Mathias"}
+@String{FOesterhelt = "Oesterhelt, Filipp"}
+@String{MRief = "Rief, Matthias"}
+
+@Article{rief97b,
+ author = MRief #" and "# MGautel #" and "# FOesterhelt
+ #" and "# JFernandez #" and "# HGaub,
+ title = "Reversible Unfolding of Individual Titin
+ Immunoglobulin Domains by {AFM}",
+ journal = SCI,
+ volume = 276,
+ number = 5315,
+ pages = "1109--1112",
+ year = 1997,
+ doi = "10.1126/science.276.5315.1109",
+ URL = "http://www.sciencemag.org/cgi/content/abstract/276/5315/1109",
+ eprint = "http://www.sciencemag.org/cgi/reprint/276/5315/1109.pdf",
+}
+
+
+Parens can be used instead of braces:
+
+@ARTICLE(ruckenstein-diffusion,
+ author = "Liu, Hongquin and Ruckenstein, Eli",
+ language = "english",
+ title = "Predicting the Diffusion Coefficient in Supercritical Fluids",
+ journal = "Ind. Eng. Chem. Res.",
+ volume = "36",
+ year = "1997",
+ pages = "888-895"
+)
+
+@book{
+ viktorov-methods,
+ author = "Викторов, Михаил Маркович",
+ publisher = "Л.: <<Химия>>",
+ title = "Методы вычисления физико-химических величин и прикладные расчёты",
+ language = "russian",
+ year = "1977",
+ isbn = "000-0000000000",
+}
+
+@comment{jackson-commented-out,
+ author = "Jackson, P\'eter",
+ publisher = "Some Publisher",
+ language = "english",
+ title = "Some Title",
+ series = "Some series",
+ booktitle = "Commented Out",
+ number = "3",
+ edition = "Second",
+ year = "1933",
+ pages = "44--59"
+}
+
+@booklet{test-booklet,
+ author = "de Last, Jr., First Middle",
+ language = "english",
+ title = "Just a booklet",
+ year = 2006,
+ month = jan,
+ address = "Moscow",
+ howpublished = "Published by Foo"
+}
+
diff --git a/tests/examplefiles/test.cr b/tests/examplefiles/test.cr
new file mode 100644
index 00000000..028ff6f3
--- /dev/null
+++ b/tests/examplefiles/test.cr
@@ -0,0 +1,2871 @@
+# Examples taken from http://crystal-lang.org/docs/
+# Copyright 2012-2016 Manas Technology Solutions.
+
+
+require "http/server"
+
+server = HTTP::Server.new(8080) do |context|
+ context.response.content_type = "text/plain"
+ context.response.print "Hello world! The time is #{Time.now}"
+end
+
+puts "Listening on http://0.0.0.0:8080"
+server.listen
+
+
+module HTTP
+ class RequestHandler
+ end
+end
+
+alias NumericValue = Float32 | Float64 | Int32 | Int64
+
+enum Time::DayOfWeek
+end
+
+
+$global_greeting = "Hello world"
+
+class Greeting
+ @@default_greeting = "Hello world"
+
+ def initialize(@custom_greeting = nil)
+ end
+
+ def print_greeting
+ greeting = @custom_greeting || @@default_greeting
+ puts greeting
+ end
+end
+
+
+LUCKY_NUMBERS = [3, 7, 11]
+DOCUMENTATION_URL = "http://crystal-lang.org/docs"
+
+
+module Scorecard
+ class Parser
+ def parse(score_text)
+ begin
+ score_text.scan(SCORE_PATTERN) do |match|
+ handle_match(match)
+ end
+ rescue err : ParseError
+ # handle error ...
+ end
+ end
+ end
+end
+
+
+module Money
+ CURRENCIES = {
+ "EUR" => 1.0,
+ "ARS" => 10.55,
+ "USD" => 1.12,
+ "JPY" => 134.15,
+ }
+
+ class Amount
+ getter :currency, :value
+
+ def initialize(@currency, @value)
+ end
+ end
+
+ class CurrencyConversion
+ def initialize(@amount, @target_currency)
+ end
+
+ def amount
+ # implement conversion ...
+ end
+ end
+end
+
+
+i = 0
+while i < 10
+ proc = ->(x : Int32) do
+ spawn do
+ puts(x)
+ end
+ end
+ proc.call(i)
+ i += 1
+end
+
+Fiber.yield
+
+
+# A buffered channel of capacity 2
+channel = Channel(Int32).new(2)
+
+spawn do
+ channel.send(1)
+ channel.send(2)
+ channel.send(3)
+end
+
+3.times do |i|
+ puts channel.receive
+end
+
+
+class MyDictionary(K, V)
+end
+
+
+MyBox.new(1) #:: MyBox(Int32)
+MyBox.new("hello") #:: MyBox(String)
+
+
+module Moo(T)
+ def t
+ T
+ end
+end
+
+class Foo(U)
+ include Moo(U)
+
+ def initialize(@value : U)
+ end
+end
+
+foo = Foo.new(1)
+foo.t # Int32
+
+
+class Parent(T)
+end
+
+class Int32Child < Parent(Int32)
+end
+
+class GenericChild(T) < Parent(T)
+end
+
+
+class Person
+end
+
+
+a = 1
+ptr = pointerof(a)
+ptr[100_000] = 2 # undefined behaviour, probably a segmentation fault
+
+
+alias Int32OrString = Int32 | String
+
+
+alias Int32OrNil = Int32?
+
+
+alias Int32OrNil_ = Int32 | ::Nil
+
+
+alias Int32Ptr = Int32*
+
+
+alias Int32Ptr_ = Pointer(Int32)
+
+
+alias Int32_8 = Int32[8]
+
+
+alias Int32_8_ = StaticArray(Int32, 8)
+
+
+alias Int32StringTuple = {Int32, String}
+
+
+alias Int32StringTuple_ = Tuple(Int32, String)
+
+
+alias Int32ToString = Int32 -> String
+
+
+alias Int32ToString_ = Proc(Int32, String)
+
+
+alias ProcThatReturnsInt32 = -> Int32
+
+
+alias Int32AndCharToString = Int32, Char -> String
+
+
+alias ComplexProc = (Int32 -> Int32) -> String
+
+
+def foo(x : Int32)
+ "instance"
+end
+
+def foo(x : Int32.class)
+ "class"
+end
+
+foo 1 # "instance"
+foo Int32 # "class"
+
+
+class Parent
+end
+
+class Child1 < Parent
+end
+
+class Child2 < Parent
+end
+
+ary = [] of Parent.class
+ary << Child1
+ary << Child2
+
+
+# Same as not specifying a restriction, not very useful
+def foo(x : _)
+end
+
+# A bit more useful: any two arguments Proc that returns an Int32:
+def foo(x : _, _ -> Int32)
+end
+
+
+#alias SameAsInt32 = typeof(2)
+#alias Int32OrString_ = typeof(1, "a")
+
+
+class Person
+ def initialize(name)
+ @name = name
+ @age = 0
+ end
+
+ def name
+ @name
+ end
+
+ def age
+ @age
+ end
+end
+
+
+john = Person.new "John"
+peter = Person.new "Peter"
+
+john.name #=> "John"
+john.age #=> 0
+
+peter.name #=> "Peter"
+
+
+class Person
+ def self.new(name)
+ instance = Person.allocate
+ instance.initialize(name)
+ instance
+ end
+ end
+
+
+if a.is_a?(String)
+ # here a is a String
+end
+
+if b.is_a?(Number)
+ # here b is a Number
+end
+
+
+a = some_condition ? 1 : "hello"
+# a : Int32 | String
+
+if a.is_a?(Number)
+ # a : Int32
+else
+ # a : String
+end
+
+
+if a.is_a?(String) && b.is_a?(Number)
+ # here a is a String and b is a Number
+end
+
+
+a.+(b)
+
+
+struct Vector2
+ getter x, y
+
+ def initialize(@x, @y)
+ end
+
+ def +(other)
+ Vector2.new(x + other.x, y + other.y)
+ end
+end
+
+v1 = Vector2.new(1, 2)
+v2 = Vector2.new(3, 4)
+v1 + v2 #=> Vector2(@x=4, @y=6)
+
+
+
+
+struct Vector2
+ def -
+ Vector2.new(-x, -y)
+ end
+end
+
+v1 = Vector2.new(1, 2)
+-v1 #=> Vector2(@x=-1, @y=-2)
+
+
+
+
+
+class MyArray
+ def [](index)
+ # ...
+ end
+
+ def [](index1, index2, index3)
+ # ...
+ end
+
+ def []=(index, value)
+ # ...
+ end
+end
+
+array = MyArray.new
+
+array[1] # invokes the first method
+array[1, 2, 3] # invokes the second method
+array[1] = 2 # invokes the third method
+
+array.[](1) # invokes the first method
+array.[](1, 2, 3) # invokes the second method
+array.[]=(1, 2) # invokes the third method
+
+
+raise "OH NO!"
+raise Exception.new("Some error")
+
+
+class MyException < Exception
+end
+
+
+begin
+ raise MyException.new("OH NO!")
+rescue ex : MyException
+ puts "Rescued MyException: #{ex.message}"
+end
+
+
+begin
+ # ...
+rescue ex : MyException | MyOtherException
+ # only MyException or MyOtherException
+rescue
+ # any other kind of exception
+ensure
+ puts "Cleanup..."
+end
+
+
+def some_method
+ something_dangerous
+rescue
+ # execute if an exception is raised
+end
+
+
+array = [1, 2, 3]
+array[4] # raises because of IndexError
+array[4]? # returns nil because of index out of bounds
+
+
+def some_proc(&block : Int32 -> Int32)
+ block
+end
+
+x = 0
+proc = ->(i : Int32) { x += i }
+proc = some_proc(&proc)
+proc.call(1) #=> 1
+proc.call(10) #=> 11
+x #=> 11
+
+
+def add(x, y)
+ x + y
+end
+
+adder = ->add(Int32, Int32)
+adder.call(1, 2) #=> 3
+
+
+module Curses
+ class Window
+ end
+end
+
+Curses::Window.new
+
+
+module ItemsSize
+ def size
+ items.size
+ end
+end
+
+class Items
+ include ItemsSize
+
+ def items
+ [1, 2, 3]
+ end
+end
+
+items = Items.new
+items.size #=> 3
+
+
+module Base64
+ extend self
+
+ def encode64(string)
+ # ...
+ end
+
+ def decode64(string)
+ # ...
+ end
+end
+
+Base64.encode64 "hello" #=> "aGVsbG8="
+
+
+if some_condition
+ a = 1
+else
+ a = "hello"
+end
+
+a_as_int = a as Int32
+a_as_int.abs # works, compiler knows that a_as_int is Int32
+
+
+ptr = Pointer(Int32).malloc(1)
+ptr as Int8* #:: Pointer(Int8)
+
+
+array = [1, 2, 3]
+
+# object_id returns the address of an object in memory,
+# so we create a pointer with that address
+ptr = Pointer(Void).new(array.object_id)
+
+# Now we cast that pointer to the same type, and
+# we should get the same value
+array2 = ptr as Array(Int32)
+array2.same?(array) #=> true
+
+
+a = 1
+b = a as Int32 | Float64
+b #:: Int32 | Float64
+
+
+ary = [1, 2, 3]
+
+# We want to create an array 1, 2, 3 of Int32 | Float64
+ary2 = ary.map { |x| x as Int32 | Float64 }
+
+ary2 #:: Array(Int32 | Float64)
+ary2 << 1.5 # OK
+
+
+class Person
+ def initialize(@name)
+ end
+
+ def name
+ @name
+ end
+end
+
+a = [] of Person
+x = a.map { |f| f.name } # Error: can't infer block return type
+
+
+a = [] of Person
+x = a.map { |f| f.name as String } # OK
+
+
+Person.new "John"
+
+a = [] of Person
+x = a.map { |f| f.name } # OK
+
+
+loop do
+ do_something
+ break if some_condition
+end
+
+
+class Point
+ def initialize(@x, @y)
+ end
+end
+
+Point.new 1, 2
+
+# 2 x Int32 = 8 bytes of data, plus the object's type-id header — hence 12 below
+instance_sizeof(Point) #=> 12
+
+
+a = 1
+while a < 5
+ a += 1
+ if a == 3
+ next
+ end
+ puts a
+end
+# The above prints the numbers 2, 4 and 5
+
+
+lib C
+ # In C: double cos(double x)
+ fun cos(value : Float64) : Float64
+
+ fun getch : Int32
+
+ fun srand(seed : UInt32)
+
+ fun exit(status : Int32) : NoReturn
+
+ fun printf(format : UInt8*, ...) : Int32
+end
+
+C.cos(1.5) #=> 0.0707372
+C.srand(1_u32)
+
+a = 1
+b = 2
+C.printf "%d + %d = %d\n", a, b, a + b
+
+
+lib LibSDL
+ fun init = SDL_Init(flags : UInt32) : Int32
+end
+
+lib LLVMIntrinsics
+ fun ceil_f32 = "llvm.ceil.f32"(value : Float32) : Float32
+end
+
+lib MyLib
+ fun my_fun(some_size : LibC::SizeT)
+end
+
+@[Link("pcre")]
+lib LibPCRE
+end
+
+
+lib C
+ ifdef x86_64
+ alias SizeT = UInt64
+ else
+ alias SizeT = UInt32
+ end
+
+ fun memcmp(p1 : Void*, p2 : Void*, size : C::SizeT) : Int32
+end
+
+
+lib X
+ enum SomeEnum
+ Ten = 10
+ Twenty = 10 * 2
+ ThirtyTwo = 1 << 5
+ end
+end
+
+
+lib X
+ enum SomeEnum
+ A = 1_u32
+ end
+end
+
+
+X::SomeEnum::Zero #=> 0_i8
+X::SomeEnum::Two #=> 2_i8
+
+
+lib X
+ fun callback(f : Int32 -> Int32)
+end
+
+
+f = ->(x : Int32) { x + 1 }
+X.callback(f)
+
+
+X.callback ->(x) { x + 1 }
+
+
+X.callback nil
+
+
+lib LibFoo
+ fun store_callback(callback : ->)
+ fun execute_callback
+end
+
+LibFoo.store_callback ->{ raise "OH NO!" }
+LibFoo.execute_callback
+
+
+lib LibFoo
+ fun store_callback(callback : ->)
+
+ @[Raises]
+ fun execute_callback
+end
+
+
+@[Link("pcre")]
+lib PCRE
+ INFO_CAPTURECOUNT = 2
+end
+
+PCRE::INFO_CAPTURECOUNT #=> 2
+
+
+lib U
+ # In C:
+ #
+ # union IntOrFloat {
+ # int some_int;
+ # double some_float;
+ # };
+ union IntOrFloat
+ some_int : Int32
+ some_float : Float64
+ end
+end
+
+
+value = U::IntOrFloat.new
+
+
+value = uninitialized U::IntOrFlaot
+value.some_int #=> some garbage value
+
+
+value = U::IntOrFloat.new
+value.some_int = 1
+value.some_int #=> 1
+value.some_float #=> 4.94066e-324
+
+
+def change_it(value)
+ value.some_int = 1
+end
+
+value = U::IntOrFloat.new
+change_it value
+value.some_int #=> 0
+
+
+lib C
+ # In C:
+ #
+ # struct TimeZone {
+ # int minutes_west;
+ # int dst_time;
+ # };
+ struct TimeZone
+ minutes_west : Int32
+ dst_time : Int32
+ end
+end
+
+
+lib C
+ # This is a forward declaration
+ struct Node
+ end
+
+ struct Node
+ node : Node*
+ end
+end
+
+
+tz = C::TimeZone.new
+
+
+tz = uninitialized C::TimeZone
+tz.minutes_west #=> some garbage value
+
+
+tz = C::TimeZone.new
+tz.minutes_west = 1
+tz.minutes_west #=> 1
+
+
+tz = C::TimeZone.new minutes_west: 1, dst_time: 2
+tz.minutes_west #=> 1
+tz.dst_time #=> 2
+
+
+def change_it(tz)
+ tz.minutes_west = 1
+end
+
+tz = C::TimeZone.new
+change_it tz
+tz.minutes_west #=> 0
+
+
+lib C
+ $errno : Int32
+end
+
+
+C.errno #=> some value
+C.errno = 0
+C.errno #=> 0
+
+
+lib C
+ @[ThreadLocal]
+ $errno : Int32
+end
+
+
+lib C
+ fun waitpid(pid : Int32, status_ptr : Int32*, options : Int32) : Int32
+end
+
+
+status_ptr = uninitialized Int32
+
+C.waitpid(pid, pointerof(status_ptr), options)
+
+
+C.waitpid(pid, out status_ptr, options)
+
+
+lib X
+ type CInt = Int32
+end
+
+
+ifdef x86_64
+ # some specific code for 64 bits platforms
+else
+ # some specific code for non-64 bits platforms
+end
+
+
+ifdef linux && x86_64
+ # some specific code for linux 64 bits
+end
+
+
+lib C
+ ifdef linux
+ struct SomeStruct
+ some_field : Int32
+ end
+ else
+ struct SomeStruct
+ some_field : Int64
+ end
+ end
+end
+
+
+# Assigns to a local variable
+local = 1
+
+# Assigns to a global variable
+$global = 4
+
+class Testing
+ # Assigns to an instance variable
+ @instance = 2
+
+ # Assigns to a class variable
+ @@class = 3
+end
+
+
+local += 1 # same as: local = local + 1
+
+# The above is valid with these operators:
+# +, -, *, /, %, |, &, ^, **, <<, >>
+
+local ||= 1 # same as: local || (local = 1)
+local &&= 1 # same as: local && (local = 1)
+
+
+# A setter
+person.name=("John")
+
+# The above can be written as:
+person.name = "John"
+
+# An indexed assignment
+objects.[]=(2, 3)
+
+# The above can be written as:
+objects[2] = 3
+
+# Not assignment-related, but also syntax sugar:
+objects.[](2, 3)
+
+# The above can be written as:
+objects[2, 3]
+
+
+person.age += 1 # same as: person.age = person.age + 1
+
+person.name ||= "John" # same as: person.name || (person.name = "John")
+person.name &&= "John" # same as: person.name && (person.name = "John")
+
+objects[1] += 2 # same as: objects[1] = objects[1] + 2
+
+objects[1] ||= 2 # same as: objects[1]? || (objects[1] = 2)
+objects[1] &&= 2 # same as: objects[1]? && (objects[1] = 2)
+
+
+alias PInt32 = Pointer(Int32)
+
+ptr = PInt32.malloc(1) # : Pointer(Int32)
+
+
+alias RecArray = Array(Int32) | Array(RecArray)
+
+ary = [] of RecArray
+ary.push [1, 2, 3]
+ary.push ary
+ary #=> [[1, 2, 3], [...]]
+
+
+module Json
+ alias Type = Nil |
+ Bool |
+ Int64 |
+ Float64 |
+ String |
+ Array(Type) |
+ Hash(String, Type)
+end
+
+
+a = 1
+if a > 0
+ a = 10
+end
+a #=> 10
+
+b = 1
+if b > 2
+ b = 10
+else
+ b = 20
+end
+b #=> 20
+
+
+if some_condition
+ do_something
+elsif some_other_condition
+ do_something_else
+else
+ do_that
+end
+
+
+a = 1
+if some_condition
+ a = "hello"
+else
+ a = true
+end
+# a : String | Bool
+
+b = 1
+if some_condition
+ b = "hello"
+end
+# b : Int32 | String
+
+if some_condition
+ c = 1
+else
+ c = "hello"
+end
+# c : Int32 | String
+
+if some_condition
+ d = 1
+end
+# d : Int32 | Nil
+
+
+a = 1
+if some_condition
+ a = "hello"
+ # a : String
+ a.size
+end
+# a : String | Int32
+
+
+if some_condition
+ e = 1
+else
+ e = "hello"
+ # e : String
+ return
+end
+# e : Int32
+
+
+enum Color : UInt8
+ Red # 0
+ Green # 1
+ Blue = 5 # overwritten to 5
+ Yellow # 6 (5 + 1)
+
+ def red?
+ self == Color::Red
+ end
+end
+
+Color::Red.value #:: UInt8
+
+
+@[Flags]
+enum IOMode
+ Read # 1
+ Write # 2
+ Async # 4
+end
+
+
+IOMode::None.value #=> 0
+IOMode::All.value #=> 7
+
+
+puts(Color::Red) # prints "Red"
+puts(IOMode::Write | IOMode::Async) # prints "Write, Async"
+
+
+puts Color.new(1) #=> prints "Green"
+
+
+puts Color.new(10) #=> prints "10"
+
+
+Color::Red.red? #=> true
+Color::Blue.red? #=> false
+
+
+def paint(color : Color)
+ case color
+ when Color::Red
+ # ...
+ else
+ # Unusual, but still can happen
+ raise "unknown color: #{color}"
+ end
+end
+
+paint Color::Red
+
+
+def paint(color : Symbol)
+ case color
+ when :red
+ # ...
+ else
+ raise "unknown color: #{color}"
+ end
+end
+
+paint :red
+
+
+name = "Crystal"
+age = 1
+
+
+flower = "Tulip"
+# At this point 'flower' is a String
+
+flower = 1
+# At this point 'flower' is an Int32
+
+
+class Foo
+ def finalize
+ # Invoked when Foo is garbage-collected
+ puts "Bye bye from #{self}!"
+ end
+end
+
+# Prints "Bye bye ...!" for ever
+loop do
+ Foo.new
+end
+
+
+# Defines a method in the program
+def add(x, y)
+ x + y
+end
+
+# Invokes the add method in the program
+add(1, 2) #=> 3
+
+
+def even?(num)
+ if num % 2 == 0
+ return true
+ end
+
+ return false
+end
+
+
+def add(x, y)
+ x + y
+end
+
+class Foo
+ def bar
+ # invokes the program's add method
+ add(1, 2)
+
+ # invokes Foo's baz method
+ baz(1, 2)
+ end
+
+ def baz(x, y)
+ x * y
+ end
+end
+
+
+def baz(x, y)
+ x + y
+end
+
+class Foo
+ def bar
+ baz(4, 2) #=> 2
+ ::baz(4, 2) #=> 6
+ end
+
+ def baz(x, y)
+ x - y
+ end
+end
+
+
+x = 1
+
+def add(y)
+ x + y # error: undefined local variable or method 'x'
+end
+
+add(2)
+
+
+add 1, 2 # same as add(1, 2)
+
+
+class Counter
+ @@instances = 0
+
+ def initialize
+ @@instances += 1
+ end
+
+ def self.instances
+ @@instances
+ end
+end
+
+Counter.instances #=> 0
+Counter.new
+Counter.new
+Counter.new
+Counter.instances #=> 3
+
+
+class Counter
+ def self.increment
+ @@instances += 1
+ end
+end
+
+Counter.increment # Error: undefined method '+' for Nil
+
+
+class Parent
+ @@counter = 0
+end
+
+class Child < Parent
+ def self.counter
+ @@counter
+ end
+end
+
+Child.counter #=> nil
+
+
+unless some_condition
+ then_expression
+else
+ else_expression
+end
+
+# Can also be written as a suffix
+close_door unless door_closed?
+
+
+a = 1
+b = typeof(a) #=> Int32
+
+
+typeof(1, "a", 'a') #=> (Int32 | String | Char)
+
+
+hash = {} of Int32 => String
+another_hash = typeof(hash).new #:: Hash(Int32, String)
+
+
+class Array
+ def self.elem_type(typ)
+ if typ.is_a?(Array)
+ elem_type(typ.first)
+ else
+ typ
+ end
+ end
+end
+
+nest = [1, ["b", [:c, ['d']]]]
+flat = Array(typeof(Array.elem_type(nest))).new
+typeof(nest) #=> Array(Int32 | Array(String | Array(Symbol | Array(Char))))
+typeof(flat) #=> Array(String | Int32 | Symbol | Char)
+
+
+a = 2 if some_condition
+
+
+x = 0
+proc = ->{ x += 1; x }
+proc.call #=> 1
+proc.call #=> 2
+x #=> 2
+
+
+def counter
+ x = 0
+ ->{ x += 1; x }
+end
+
+proc = counter
+proc.call #=> 1
+proc.call #=> 2
+
+
+def foo
+ yield
+end
+
+x = 1
+foo do
+ x = "hello"
+end
+x # : Int32 | String
+
+
+x = 1
+foo do
+ x = "hello"
+end
+x # : Int32 | String
+
+x = 'a'
+x # : Char
+
+
+def capture(&block)
+ block
+end
+
+x = 1
+capture { x = "hello" }
+
+x = 'a'
+x # : Int32 | String | Char
+
+
+def capture(&block)
+ block
+end
+
+x = 1
+->{ x = "hello" }
+
+x = 'a'
+x # : Int32 | String | Char
+
+
+abstract class Animal
+ # Makes this animal talk
+ abstract def talk
+end
+
+class Dog < Animal
+ def talk
+ "Woof!"
+ end
+end
+
+class Cat < Animal
+ def talk
+ "Miau"
+ end
+end
+
+class Person
+ getter pet
+
+ def initialize(@name, @pet)
+ end
+end
+
+john = Person.new "John", Dog.new
+peter = Person.new "Peter", Cat.new
+
+
+john.pet.talk #=> "Woof!"
+
+
+a = 1 > 2 ? 3 : 4
+
+# The above is the same as:
+a = if 1 > 2
+ 3
+ else
+ 4
+ end
+
+
+def some_method : String
+ "hello"
+end
+
+
+PI = 3.14
+
+module Earth
+ RADIUS = 6_371_000
+end
+
+PI #=> 3.14
+Earth::RADIUS #=> 6_371_000
+
+
+TEN = begin
+ a = 0
+ while a < 10
+ a += 1
+ end
+ a
+end
+
+TEN #=> 10
+
+
+class Person
+ getter name
+
+ def initialize(@name)
+ @age = 0
+ end
+end
+
+john = Person.new "John"
+john.name #=> "John"
+john.name.size #=> 4
+
+
+one = Person.new 1
+one.name #=> 1
+one.name + 2 #=> 3
+
+
+john = Person.new "John"
+one = Person.new 1
+
+
+john = Person.new "John"
+one = Person.new 1
+
+# Error: undefined method 'size' for Int32
+john.name.size
+
+# Error: no overload matches 'String#+' with types Int32
+john.name + 3
+
+
+john = Person.new "John"
+john.name.size
+one = Person.new 1
+
+
+class Person
+ getter name
+
+ def initialize(@name)
+ @age = 0
+ end
+
+ def address
+ @address
+ end
+
+ def address=(@address)
+ end
+end
+
+john = Person.new "John"
+john.address = "Argentina"
+
+
+# Error: undefined method 'size' for Nil
+john.address.size
+
+
+class Person
+ @age = 0
+
+ def initialize(@name)
+ end
+end
+
+
+class Person
+ @age : Int32
+
+ def initialize(@name)
+ @age = 0
+ end
+end
+
+
+a = if 2 > 1
+ 3
+ else
+ 4
+ end
+a #=> 3
+
+
+if 1 > 2
+else
+ 3
+end
+
+
+def twice(&block)
+ yield
+ yield
+end
+
+
+twice() do
+ puts "Hello!"
+end
+
+twice do
+ puts "Hello!"
+end
+
+twice { puts "Hello!" }
+
+
+def twice
+ yield 1
+ yield 2
+end
+
+twice do |i|
+ puts "Got #{i}"
+end
+
+
+twice { |i| puts "Got #{i}" }
+
+
+def many
+ yield 1, 2, 3
+end
+
+many do |x, y, z|
+ puts x + y + z
+end
+
+# Output: 6
+
+
+def many
+ yield 1, 2, 3
+end
+
+many do |x, y|
+ puts x + y
+end
+
+# Output: 3
+
+
+def twice
+ yield
+ yield
+end
+
+twice do |i|
+ puts i.inspect
+end
+
+
+def some
+ yield 1, 'a'
+ yield true, "hello"
+ yield 2
+end
+
+some do |first, second|
+ # first is Int32 | Bool
+ # second is Char | String | Nil
+end
+
+
+method do |argument|
+ argument.some_method
+end
+
+
+method(&.some_method)
+
+
+method &.some_method(arg1, arg2)
+
+
+method &.+(2)
+method &.[index]
+
+
+def twice
+ v1 = yield 1
+ puts v1
+
+ v2 = yield 2
+ puts v2
+end
+
+twice do |i|
+ i + 1
+end
+
+
+ary = [1, 2, 3]
+ary.map { |x| x + 1 } #=> [2, 3, 4]
+ary.select { |x| x % 2 == 1 } #=> [1, 3]
+
+
+def transform(value)
+ yield value
+end
+
+transform(1) { |x| x + 1 } #=> 2
+
+
+def thrice
+ puts "Before 1"
+ yield 1
+ puts "Before 2"
+ yield 2
+ puts "Before 3"
+ yield 3
+ puts "After 3"
+end
+
+thrice do |i|
+ if i == 2
+ break
+ end
+end
+
+
+def twice
+ yield 1
+ yield 2
+end
+
+twice { |i| i + 1 } #=> 3
+twice { |i| break "hello" } #=> "hello"
+
+
+value = twice do |i|
+ if i == 1
+ break "hello"
+ end
+ i + 1
+end
+value #:: Int32 | String
+
+
+values = twice { break 1, 2 }
+values #=> {1, 2}
+
+
+value = twice { break }
+value #=> nil
+
+
+def twice
+ yield 1
+ yield 2
+end
+
+twice do |i|
+ if i == 1
+ puts "Skipping 1"
+ next
+ end
+
+ puts "Got #{i}"
+end
+
+
+
+def twice
+ v1 = yield 1
+ puts v1
+
+ v2 = yield 2
+ puts v2
+end
+
+twice do |i|
+ if i == 1
+ next 10
+ end
+
+ i + 1
+end
+
+# Output
+# 10
+# 3
+
+
+class Foo
+ def one
+ 1
+ end
+
+ def yield_with_self
+ with self yield
+ end
+
+ def yield_normally
+ yield
+ end
+end
+
+def one
+ "one"
+end
+
+Foo.new.yield_with_self { one } # => 1
+Foo.new.yield_normally { one } # => "one"
+
+
+def twice
+ yield 1
+ yield 2
+end
+
+twice do |i|
+ puts "Got: #{i}"
+end
+
+
+i = 1
+puts "Got: #{i}"
+i = 2
+puts "Got: #{i}"
+
+
+3.times do |i|
+ puts i
+end
+
+
+struct Int
+ def times
+ i = 0
+ while i < self
+ yield i
+ i += 1
+ end
+ end
+end
+
+
+i = 0
+while i < 3
+ puts i
+ i += 1
+end
+
+
+class Person
+ def initialize(@name)
+ end
+
+ def greet
+ puts "Hi, I'm #{@name}"
+ end
+end
+
+class Employee < Person
+end
+
+employee = Employee.new "John"
+employee.greet # "Hi, I'm John"
+
+
+class Person
+ def initialize(@name)
+ end
+end
+
+class Employee < Person
+ def initialize(@name, @company_name)
+ end
+end
+
+Employee.new "John", "Acme" # OK
+Employee.new "Peter" # Error: wrong number of arguments
+ # for 'Employee:Class#new' (1 for 2)
+
+
+class Person
+ def greet(msg)
+ puts "Hi, #{msg}"
+ end
+end
+
+class Employee < Person
+ def greet(msg)
+ puts "Hello, #{msg}"
+ end
+end
+
+p = Person.new
+p.greet "everyone" # "Hi, everyone"
+
+e = Employee.new
+e.greet "everyone" # "Hello, everyone"
+
+
+class Person
+ def greet(msg)
+ puts "Hi, #{msg}"
+ end
+end
+
+class Employee < Person
+ def greet(msg : Int32)
+ puts "Hi, this is a number: #{msg}"
+ end
+end
+
+e = Employee.new
+e.greet "everyone" # "Hi, everyone"
+
+e.greet 1 # "Hi, this is a number: 1"
+
+
+class Person
+ def greet(msg)
+    puts "Hello, #{msg}"
+ end
+end
+
+class Employee < Person
+ def greet(msg)
+ super # Same as: super(msg)
+ super("another message")
+ end
+end
+
+
+def int_to_int(&block : Int32 -> Int32)
+ block
+end
+
+proc = int_to_int { |x| x + 1 }
+proc.call(1) #=> 2
+
+
+class Model
+ def on_save(&block)
+ @on_save_callback = block
+ end
+
+ def save
+ if callback = @on_save_callback
+ callback.call
+ end
+ end
+end
+
+model = Model.new
+model.on_save { puts "Saved!" }
+model.save # prints "Saved!"
+
+
+def some_proc(&block : Int32 ->)
+ block
+end
+
+proc = some_proc { |x| x + 1 }
+proc.call(1) # void
+
+
+def some_proc(&block : Int32 -> _)
+ block
+end
+
+proc = some_proc { |x| x + 1 }
+proc.call(1) # 2
+
+proc = some_proc { |x| x.to_s }
+proc.call(1) # "1"
+
+
+macro update_x
+ x = 1
+end
+
+x = 0
+update_x
+x #=> 1
+
+
+macro dont_update_x
+ %x = 1
+ puts %x
+end
+
+x = 0
+dont_update_x # outputs 1
+x #=> 0
+
+
+macro fresh_vars_sample(*names)
+ # First declare vars
+ {% for name, index in names %}
+ print "Declaring: ", "%name{index}", '\n'
+ %name{index} = {{index}}
+ {% end %}
+
+ # Then print them
+ {% for name, index in names %}
+ print "%name{index}: ", %name{index}, '\n'
+ {% end %}
+end
+
+fresh_vars_sample a, b, c
+
+# Sample output:
+# Declaring: __temp_255
+# Declaring: __temp_256
+# Declaring: __temp_257
+# __temp_255: 0
+# __temp_256: 1
+# __temp_257: 2
+
+
+class Object
+ macro def instance_vars_names : Array(String)
+ {{ @type.instance_vars.map &.name.stringify }}
+ end
+end
+
+class Person
+ def initialize(@name, @age)
+ end
+end
+
+person = Person.new "John", 30
+person.instance_vars_names #=> ["name", "age"]
+
+
+class Object
+ macro def has_instance_var?(name) : Bool
+ # We cannot access name inside the macro expansion here,
+ # instead we need to use the macro language to construct an array
+ # and do the inclusion check at runtime.
+ {{ @type.instance_vars.map &.name.stringify }}.includes? name
+ end
+end
+
+person = Person.new "John", 30
+person.has_instance_var?("name") #=> true
+person.has_instance_var?("birthday") #=> false
+
+
+class Parent
+ macro inherited
+ def {{@type.name.downcase.id}}
+ 1
+ end
+ end
+end
+
+class Child < Parent
+end
+
+Child.new.child #=> 1
+
+
+macro method_missing(name, args, block)
+ print "Got ", {{name.id.stringify}}, " with ", {{args.size}}, " arguments", '\n'
+end
+
+foo # Prints: Got foo with 0 arguments
+bar 'a', 'b' # Prints: Got bar with 2 arguments
+
+
+sizeof(Int32) #=> 4
+sizeof(Int64) #=> 8
+
+
+# On a 64 bits machine
+sizeof(Pointer(Int32)) #=> 8
+sizeof(String) #=> 8
+
+
+a = 1
+sizeof(typeof(a)) #=> 4
+
+
+class Foo
+ macro emphasize(value)
+ "***#{ {{value}} }***"
+ end
+
+ def yield_with_self
+ with self yield
+ end
+end
+
+Foo.new.yield_with_self { emphasize(10) } #=> "***10***"
+
+
+# This generates:
+#
+# def :foo
+# 1
+# end
+define_method :foo, 1
+
+
+macro define_method(name, content)
+ def {{name.id}}
+ {{content}}
+ end
+end
+
+# This correctly generates:
+#
+# def foo
+# 1
+# end
+define_method :foo, 1
+
+
+macro define_method(name, content)
+ def {{name}}
+ {% if content == 1 %}
+ "one"
+ {% else %}
+ {{content}}
+ {% end %}
+ end
+end
+
+define_method foo, 1
+define_method bar, 2
+
+foo #=> one
+bar #=> 2
+
+
+{% if env("TEST") %}
+ puts "We are in test mode"
+{% end %}
+
+
+macro define_dummy_methods(names)
+ {% for name, index in names %}
+ def {{name.id}}
+ {{index}}
+ end
+ {% end %}
+end
+
+define_dummy_methods [foo, bar, baz]
+
+foo #=> 0
+bar #=> 1
+baz #=> 2
+
+
+macro define_dummy_methods(hash)
+ {% for key, value in hash %}
+ def {{key.id}}
+ {{value}}
+ end
+ {% end %}
+end
+define_dummy_methods({foo: 10, bar: 20})
+foo #=> 10
+bar #=> 20
+
+
+{% for name, index in ["foo", "bar", "baz"] %}
+ def {{name.id}}
+ {{index}}
+ end
+{% end %}
+
+foo #=> 0
+bar #=> 1
+baz #=> 2
+
+
+macro define_dummy_methods(*names)
+ {% for name, index in names %}
+ def {{name.id}}
+ {{index}}
+ end
+ {% end %}
+end
+
+define_dummy_methods foo, bar, baz
+
+foo #=> 0
+bar #=> 1
+baz #=> 2
+
+
+macro println(*values)
+ print {{*values}}, '\n'
+end
+
+println 1, 2, 3 # outputs 123\n
+
+
+VALUES = [1, 2, 3]
+
+{% for value in VALUES %}
+ puts {{value}}
+{% end %}
+
+
+until some_condition
+ do_this
+end
+
+# The above is the same as:
+while !some_condition
+ do_this
+end
+
+
+a = some_condition ? nil : 3
+# a is Int32 or Nil
+
+if a
+ # Since the only way to get here is if a is truthy,
+ # a can't be nil. So here a is Int32.
+ a.abs
+end
+
+
+if a = some_expression
+ # here a is not nil
+end
+
+
+if a && b
+ # here both a and b are guaranteed not to be Nil
+end
+
+
+if @a
+ # here @a can be nil
+end
+
+
+# First option: assign it to a variable
+if a = @a
+ # here a can't be nil
+end
+
+# Second option: use `Object#try` found in the standard library
+@a.try do |a|
+ # here a can't be nil
+end
+
+
+if method # first call to a method that can return Int32 or Nil
+ # here we know that the first call did not return Nil
+ method # second call can still return Int32 or Nil
+end
+
+
+class Person
+ def become_older(by = 1)
+ @age += by
+ end
+end
+
+john = Person.new "John"
+john.age #=> 0
+
+john.become_older
+john.age #=> 1
+
+john.become_older 2
+john.age #=> 3
+
+
+john.become_older by: 5
+
+
+def some_method(x, y = 1, z = 2, w = 3)
+ # do something...
+end
+
+some_method 10 # x = 10, y = 1, z = 2, w = 3
+some_method 10, z: 10 # x = 10, y = 1, z = 10, w = 3
+some_method 10, w: 1, y: 2, z: 3 # x = 10, y = 2, z = 3, w = 1
+
+
+case exp
+when value1, value2
+ do_something
+when value3
+ do_something_else
+else
+ do_another_thing
+end
+
+
+case var
+when String
+ # var : String
+ do_something
+when Int32
+ # var : Int32
+ do_something_else
+else
+ # here var is neither a String nor an Int32
+ do_another_thing
+end
+
+
+case num
+when .even?
+ do_something
+when .odd?
+ do_something_else
+end
+
+
+case
+when cond1, cond2
+ do_something
+when cond3
+ do_something_else
+end
+
+
+a = 1
+a.responds_to?(:abs) #=> true
+a.responds_to?(:size) #=> false
+
+
+foo_or_bar = /foo|bar/
+heeello = /h(e+)llo/
+integer = /\d+/
+
+
+r = /foo/imx
+
+
+slash = /\//
+
+
+r = %r(regex with slash: /)
+
+
+"hello world"
+
+
+"\"" # double quote
+"\\" # backslash
+"\e" # escape
+"\f" # form feed
+"\n" # newline
+"\r" # carriage return
+"\t" # tab
+"\v" # vertical tab
+
+
+"\101" # == "A"
+"\123" # == "S"
+"\12" # == "\n"
+"\1" # string with one character with code point 1
+
+
+"\u0041" # == "A"
+
+
+"\u{41}" # == "A"
+"\u{1F52E}" # == "🔮"
+
+
+"hello
+ world" # same as "hello\n world"
+
+
+"hello " \
+"world, " \
+"no newlines" # same as "hello world, no newlines"
+
+
+"hello \
+ world, \
+ no newlines" # same as "hello world, no newlines"
+
+
+# Supports double quotes and nested parenthesis
+%(hello ("world")) # same as "hello (\"world\")"
+
+# Supports double quotes and nested brackets
+%[hello ["world"]] # same as "hello [\"world\"]"
+
+# Supports double quotes and nested curlies
+%{hello {"world"}} # same as "hello {\"world\"}"
+
+# Supports double quotes and nested angles
+%<hello <"world">> # same as "hello <\"world\">"
+
+
+<<-XML
+<parent>
+ <child />
+</parent>
+XML
+
+
+# Same as "Hello\n world"
+<<-STRING
+ Hello
+ world
+ STRING
+
+# Same as " Hello\n world"
+<<-STRING
+ Hello
+ world
+ STRING
+
+
+a = 1
+b = 2
+"sum = #{a + b}" # "sum = 3"
+
+
+1.0 # Float64
+1.0_f32 # Float32
+1_f32 # Float32
+
+1e10 # Float64
+1.5e10 # Float64
+1.5e-7 # Float64
+
++1.3 # Float64
+-0.5 # Float64
+
+
+1_000_000.111_111 # better than 1000000.111111
+
+
+'a'
+'z'
+'0'
+'_'
+'あ'
+
+
+'\'' # single quote
+'\\' # backslash
+'\e' # escape
+'\f' # form feed
+'\n' # newline
+'\r' # carriage return
+'\t' # tab
+'\v' # vertical tab
+
+
+'\101' # == 'A'
+'\123' # == 'S'
+'\12' # == '\n'
+'\1' # code point 1
+
+
+'\u0041' # == 'A'
+
+
+'\u{41}' # == 'A'
+'\u{1F52E}' # == '🔮'
+
+
+{1 => 2, 3 => 4} # Hash(Int32, Int32)
+{1 => 2, 'a' => 3} # Hash(Int32 | Char, Int32)
+
+
+{} of Int32 => Int32 # same as Hash(Int32, Int32).new
+
+
+{key1: 'a', key2: 'b'} # Hash(Symbol, Char)
+
+
+{"key1": 'a', "key2": 'b'} # Hash(String, Char)
+
+
+MyType{"foo": "bar"}
+
+
+tmp = MyType.new
+tmp["foo"] = "bar"
+tmp
+
+
+tmp = MyType(typeof("foo"), typeof("bar")).new
+tmp["foo"] = "bar"
+tmp
+
+
+MyType(String, String) {"foo": "bar"}
+
+
+:hello
+:good_bye
+
+# With spaces and symbols
+:"symbol with spaces"
+
+# Ending with question and exclamation marks
+:question?
+:exclamation!
+
+# For the operators
+:+
+:-
+:*
+:/
+:==
+:<
+:<=
+:>
+:>=
+:!
+:!=
+:=~
+:!~
+:&
+:|
+:^
+:~
+:**
+:>>
+:<<
+:%
+:[]
+:[]?
+:[]=
+:<=>
+:===
+
+
+x..y # an inclusive range, in mathematics: [x, y]
+x...y # an exclusive range, in mathematics: [x, y)
+
+
+# A proc without arguments
+->{ 1 } # Proc(Int32)
+
+# A proc with one argument
+->(x : Int32) { x.to_s } # Proc(Int32, String)
+
+# A proc with two arguments:
+->(x : Int32, y : Int32) { x + y } # Proc(Int32, Int32, Int32)
+
+
+Proc(Int32, String).new { |x| x.to_s } # Proc(Int32, String)
+
+
+proc = ->(x : Int32, y : Int32) { x + y }
+proc.call(1, 2) #=> 3
+
+
+def one
+ 1
+end
+
+proc = ->one
+proc.call #=> 1
+
+
+def plus_one(x)
+ x + 1
+end
+
+proc = ->plus_one(Int32)
+proc.call(41) #=> 42
+
+
+str = "hello"
+proc = ->str.count(Char)
+proc.call('e') #=> 1
+proc.call('l') #=> 2
+
+
+tuple = {1, "hello", 'x'} # Tuple(Int32, String, Char)
+tuple[0] #=> 1 (Int32)
+tuple[1] #=> "hello" (String)
+tuple[2] #=> 'x' (Char)
+
+
+[1, 2, 3] # Array(Int32)
+[1, "hello", 'x'] # Array(Int32 | String | Char)
+
+
+[] of Int32 # same as Array(Int32).new
+
+
+%w(one two three) # ["one", "two", "three"]
+
+
+%i(one two three) # [:one, :two, :three]
+
+
+MyType{1, 2, 3}
+
+
+tmp = MyType.new
+tmp << 1
+tmp << 2
+tmp << 3
+tmp
+
+
+tmp = MyType(typeof(1, 2, 3)).new
+tmp << 1
+tmp << 2
+tmp << 3
+tmp
+
+
+MyType(Int32 | String) {1, 2, "foo"}
+
+
+nil
+
+
+1 # Int32
+
+1_i8 # Int8
+1_i16 # Int16
+1_i32 # Int32
+1_i64 # Int64
+
+1_u8 # UInt8
+1_u16 # UInt16
+1_u32 # UInt32
+1_u64 # UInt64
+
++10 # Int32
+-20 # Int32
+
+2147483648 # Int64
+9223372036854775808 # UInt64
+
+
+1_000_000 # better than 1000000
+
+
+0b1101 # == 13
+
+
+0o123 # == 83
+
+
+0xFE012D # == 16646445
+0xfe012d # == 16646445
+
+
+true # A Bool that is true
+false # A Bool that is false
+
+
+a = 1
+
+ptr = pointerof(a)
+ptr.value = 2
+
+a #=> 2
+
+
+class Point
+ def initialize(@x, @y)
+ end
+
+ def x
+ @x
+ end
+
+ def x_ptr
+ pointerof(@x)
+ end
+end
+
+point = Point.new 1, 2
+
+ptr = point.x_ptr
+ptr.value = 10
+
+point.x #=> 10
+
+
+def add(x : Number, y : Number)
+ x + y
+end
+
+# Ok
+add 1, 2 # Ok
+
+# Error: no overload matches 'add' with types Bool, Bool
+add true, false
+
+
+def add(x, y)
+ x + y
+end
+
+add true, false
+
+
+# A class that has a + method but isn't a Number
+class Six
+ def +(other)
+ 6 + other
+ end
+end
+
+# add method without type restrictions
+def add(x, y)
+ x + y
+end
+
+# OK
+add Six.new, 10
+
+# add method with type restrictions
+def restricted_add(x : Number, y : Number)
+ x + y
+end
+
+# Error: no overload matches 'restricted_add' with types Six, Int32
+restricted_add Six.new, 10
+
+
+class Person
+ def ==(other : self)
+ other.name == name
+ end
+
+ def ==(other)
+ false
+ end
+end
+
+john = Person.new "John"
+another_john = Person.new "John"
+peter = Person.new "Peter"
+
+john == another_john #=> true
+john == peter #=> false (names differ)
+john == 1 #=> false (because 1 is not a Person)
+
+
+class Person
+ def self.compare(p1 : self, p2 : self)
+ p1.name == p2.name
+ end
+end
+
+john = Person.new "John"
+peter = Person.new "Peter"
+
+Person.compare(john, peter) # OK
+
+
+def foo(x : Int32)
+end
+
+foo 1 # OK
+foo "hello" # Error
+
+
+def foo(x : Int32.class)
+end
+
+foo Int32 # OK
+foo String # Error
+
+
+def foo(x : Int32.class)
+ puts "Got Int32"
+end
+
+def foo(x : String.class)
+ puts "Got String"
+end
+
+foo Int32 # prints "Got Int32"
+foo String # prints "Got String"
+
+
+def foo(*args : Int32)
+end
+
+def foo(*args : String)
+end
+
+foo 1, 2, 3 # OK, invokes first overload
+foo "a", "b", "c" # OK, invokes second overload
+foo 1, 2, "hello" # Error
+foo() # Error
+
+
+def foo
+ # This is the empty-tuple case
+end
+
+
+def foo(x : T)
+ T
+end
+
+foo(1) #=> Int32
+foo("hello") #=> String
+
+
+def foo(x : Array(T))
+ T
+end
+
+foo([1, 2]) #=> Int32
+foo([1, "a"]) #=> (Int32 | String)
+
+
+def foo(x : T.class)
+ Array(T)
+end
+
+foo(Int32) #=> Array(Int32)
+foo(String) #=> Array(String)
+
+
+class Person
+ # Increases age by one
+ def become_older
+ @age += 1
+ end
+
+ # Increases age by the given number of years
+ def become_older(years : Int32)
+ @age += years
+ end
+
+ # Increases age by the given number of years, as a String
+ def become_older(years : String)
+ @age += years.to_i
+ end
+
+ # Yields the current age of this person and increases
+ # its age by the value returned by the block
+ def become_older
+ @age += yield @age
+ end
+end
+
+person = Person.new "John"
+
+person.become_older
+person.age #=> 1
+
+person.become_older 5
+person.age #=> 6
+
+person.become_older "12"
+person.age #=> 18
+
+person.become_older do |current_age|
+ current_age < 20 ? 10 : 30
+end
+person.age #=> 28
+
+
+a = 1
+a.is_a?(Int32) #=> true
+a.is_a?(String) #=> false
+a.is_a?(Number) #=> true
+a.is_a?(Int32 | String) #=> true
+
+
+# One for each thread
+@[ThreadLocal]
+$values = [] of Int32
+
+
+@[AlwaysInline]
+def foo
+ 1
+end
+
+
+@[NoInline]
+def foo
+ 1
+end
+
+
+lib LibFoo
+ @[CallConvention("X86_StdCall")]
+ fun foo : Int32
+end
+
+
+def sum(*elements)
+ total = 0
+ elements.each do |value|
+ total += value
+ end
+ total
+end
+
+# elements is Tuple(Int32, Int32, Int32, Float64)
+sum 1, 2, 3, 4.5
+
+
+if a.responds_to?(:abs)
+ # here a's type will be reduced to those responding to the 'abs' method
+end
+
+
+a = some_condition ? 1 : "hello"
+# a : Int32 | String
+
+if a.responds_to?(:abs)
+ # here a will be Int32, since Int32#abs exists but String#abs doesn't
+else
+ # here a will be String
+end
+
+
+if (a = @a).responds_to?(:abs)
+ # here a is guaranteed to respond to `abs`
+end
+
+
+def capture(&block)
+ block
+end
+
+def invoke(&block)
+ block.call
+end
+
+proc = capture { puts "Hello" }
+invoke(&proc) # prints "Hello"
+
+
+
+
+def capture(&block)
+ block
+end
+
+def twice
+ yield
+ yield
+end
+
+proc = capture { puts "Hello" }
+twice &proc
+
+
+twice &->{ puts "Hello" }
+
+
+def say_hello
+ puts "Hello"
+end
+
+twice &->say_hello
+
+
+def foo
+ yield 1
+end
+
+def wrap_foo
+ puts "Before foo"
+ foo do |x|
+ yield x
+ end
+ puts "After foo"
+end
+
+wrap_foo do |i|
+ puts i
+end
+
+
+def foo
+ yield 1
+end
+
+def wrap_foo(&block : Int32 -> _)
+ puts "Before foo"
+ foo(&block)
+ puts "After foo"
+end
+
+wrap_foo do |i|
+ puts i
+end
+
+
+foo_forward do |i|
+ break # error
+end
+
+
+a = 2
+while (a += 1) < 20
+ if a == 10
+ # goes to 'puts a'
+ break
+ end
+end
+puts a #=> 10
+
+
+class Person
+ private def say(message)
+ puts message
+ end
+
+ def say_hello
+ say "hello" # OK, no receiver
+ self.say "hello" # Error, self is a receiver
+
+ other = Person.new "Other"
+ other.say "hello" # Error, other is a receiver
+ end
+end
+
+
+class Employee < Person
+ def say_bye
+ say "bye" # OK
+ end
+end
+
+
+module Namespace
+ class Foo
+ protected def foo
+ puts "Hello"
+ end
+ end
+
+ class Bar
+ def bar
+ # Works, because Foo and Bar are under Namespace
+ Foo.new.foo
+ end
+ end
+end
+
+Namespace::Bar.new.bar
+
+
+class Person
+ protected def self.say(message)
+ puts message
+ end
+
+ def say_hello
+ Person.say "hello"
+ end
+end
+
+
+buffer = uninitialized UInt8[256]
diff --git a/tests/examplefiles/test.mt b/tests/examplefiles/test.mt
new file mode 100644
index 00000000..008dc88e
--- /dev/null
+++ b/tests/examplefiles/test.mt
@@ -0,0 +1,7 @@
+exports (main)
+
+def main(=> currentProcess) :Int as DeepFrozen:
+ traceln(`Current process: $currentProcess`)
+ "A \r \n \x00 \u1234"
+ '\u1234'
+ return 0
diff --git a/tests/examplefiles/tsql_example.sql b/tests/examplefiles/tsql_example.sql
new file mode 100644
index 00000000..cbd76091
--- /dev/null
+++ b/tests/examplefiles/tsql_example.sql
@@ -0,0 +1,72 @@
+-- Example Transact-SQL file.
+
+-- Single line comment
+/* A comment
+ * spanning two lines. */
+    /* An indented comment
+     * spanning multiple
+     * lines. */
+/* A /* nested */ comment. */
+
+select
+ left(emp.firstname, 1) + '.' + [emp.surname] as "Name",
+ dep.name as [Department]
+into
+ #temp_employee
+from
+ employee as emp
+ inner join department as dep on
+ dep.ident_code = emp.department_id
+where
+ emp.date_of_birth >= '1990-01-01';
+go
+
+declare @TextToFind nvarchar(100) = N'some
+text across
+multiple lines';
+
+set @TextToFind varchar(32) = 'hello' + ' world';
+set @TextTiFind += '!';
+
+declare @Count int = 17 * (3 - 5);
+
+delete from
+ [server].[database].[schema].[table]
+where
+ [Text] = @TextToFind and author Not LIKE '%some%';
+
+goto overthere;
+overthere:
+
+select
+ 123 as "int 1",
+ +123 as "int 2",
+ -123 as "int 3",
+ 0x20 as "hex int",
+ 123.45 as "float 1",
+ -1.23e45 as "float 2"
+ +1.23E+45 as "float 3",
+ -1.23e-45 as "float 4",
+ 1. as "float 5",
+ .1 as "float 6",
+ 1.e2 as "float 7",
+ .1e2 as "float 8";
+
+Select @@Error, $PARTITion.RangePF1(10);
+
+select top 3 Ähnliches from Müll;
+
+-- Example transaction
+BEGIN TRAN
+
+BEGIN TRY
+    INSERT INTO #temp_employee(Name, Department) VALUES ('L. Miller', 'Sales')
+    iNsErT inTO #temp_employee(Name, Department) VaLuEs ('M. Webster', 'Helpdesk')
+ COMMIT TRAN
+END TRY
+BEGIN CATCH
+ print 'cannot perform transaction; rolling back';
+ ROLLBACK TRAN
+END CATCH
+
+-- Comment at end without newline. \ No newline at end of file
diff --git a/tests/run.py b/tests/run.py
index 8167b911..07665b2a 100644
--- a/tests/run.py
+++ b/tests/run.py
@@ -8,7 +8,7 @@
python run.py [testfile ...]
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/string_asserts.py b/tests/string_asserts.py
index 11f5c7f0..05e95e6a 100644
--- a/tests/string_asserts.py
+++ b/tests/string_asserts.py
@@ -3,7 +3,7 @@
Pygments string assert utility
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_basic_api.py b/tests/test_basic_api.py
index 022e6c55..ac3b4a51 100644
--- a/tests/test_basic_api.py
+++ b/tests/test_basic_api.py
@@ -3,7 +3,7 @@
Pygments basic API tests
~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -161,8 +161,8 @@ def test_formatter_public_api():
try:
inst = formatter(opt1="val1")
- except (ImportError, FontNotFound):
- raise support.SkipTest
+ except (ImportError, FontNotFound) as e:
+ raise support.SkipTest(e)
try:
inst.get_style_defs()
@@ -209,9 +209,9 @@ def test_formatter_unicode_handling():
def verify(formatter):
try:
inst = formatter(encoding=None)
- except (ImportError, FontNotFound):
+ except (ImportError, FontNotFound) as e:
# some dependency or font not installed
- raise support.SkipTest
+ raise support.SkipTest(e)
if formatter.name != 'Raw tokens':
out = format(tokens, inst)
diff --git a/tests/test_bibtex.py b/tests/test_bibtex.py
new file mode 100644
index 00000000..5ad92db4
--- /dev/null
+++ b/tests/test_bibtex.py
@@ -0,0 +1,236 @@
+# -*- coding: utf-8 -*-
+"""
+ BibTeX Test
+ ~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import textwrap
+import unittest
+
+from pygments.lexers import BibTeXLexer, BSTLexer
+from pygments.token import Token
+
+
+class BibTeXTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = BibTeXLexer()
+
+ def testPreamble(self):
+ data = u'@PREAMBLE{"% some LaTeX code here"}'
+ tokens = [
+ (Token.Name.Class, u'@PREAMBLE'),
+ (Token.Punctuation, u'{'),
+ (Token.String, u'"'),
+ (Token.String, u'% some LaTeX code here'),
+ (Token.String, u'"'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
+
+ def testString(self):
+ data = u'@STRING(SCI = "Science")'
+ tokens = [
+ (Token.Name.Class, u'@STRING'),
+ (Token.Punctuation, u'('),
+ (Token.Name.Attribute, u'SCI'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'='),
+ (Token.Text, u' '),
+ (Token.String, u'"'),
+ (Token.String, u'Science'),
+ (Token.String, u'"'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
+
+ def testEntry(self):
+ data = u"""
+ This is a comment.
+
+ @ARTICLE{ruckenstein-diffusion,
+ author = "Liu, Hongquin" # and # "Ruckenstein, Eli",
+ year = 1997,
+ month = JAN,
+ pages = "888-895"
+ }
+ """
+
+ tokens = [
+ (Token.Comment, u'This is a comment.'),
+ (Token.Text, u'\n\n'),
+ (Token.Name.Class, u'@ARTICLE'),
+ (Token.Punctuation, u'{'),
+ (Token.Name.Label, u'ruckenstein-diffusion'),
+ (Token.Punctuation, u','),
+ (Token.Text, u'\n '),
+ (Token.Name.Attribute, u'author'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'='),
+ (Token.Text, u' '),
+ (Token.String, u'"'),
+ (Token.String, u'Liu, Hongquin'),
+ (Token.String, u'"'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'#'),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u'and'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'#'),
+ (Token.Text, u' '),
+ (Token.String, u'"'),
+ (Token.String, u'Ruckenstein, Eli'),
+ (Token.String, u'"'),
+ (Token.Punctuation, u','),
+ (Token.Text, u'\n '),
+ (Token.Name.Attribute, u'year'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'='),
+ (Token.Text, u' '),
+ (Token.Number, u'1997'),
+ (Token.Punctuation, u','),
+ (Token.Text, u'\n '),
+ (Token.Name.Attribute, u'month'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'='),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u'JAN'),
+ (Token.Punctuation, u','),
+ (Token.Text, u'\n '),
+ (Token.Name.Attribute, u'pages'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'='),
+ (Token.Text, u' '),
+ (Token.String, u'"'),
+ (Token.String, u'888-895'),
+ (Token.String, u'"'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(textwrap.dedent(data))), tokens)
+
+ def testComment(self):
+ data = '@COMMENT{test}'
+ tokens = [
+ (Token.Comment, u'@COMMENT'),
+ (Token.Comment, u'{test}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
+
+ def testMissingBody(self):
+ data = '@ARTICLE xxx'
+ tokens = [
+ (Token.Name.Class, u'@ARTICLE'),
+ (Token.Text, u' '),
+ (Token.Error, u'x'),
+ (Token.Error, u'x'),
+ (Token.Error, u'x'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
+
+ def testMismatchedBrace(self):
+ data = '@PREAMBLE(""}'
+ tokens = [
+ (Token.Name.Class, u'@PREAMBLE'),
+ (Token.Punctuation, u'('),
+ (Token.String, u'"'),
+ (Token.String, u'"'),
+ (Token.Error, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
+
+
+class BSTTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = BSTLexer()
+
+ def testBasicBST(self):
+ data = """
+ % BibTeX standard bibliography style `plain'
+
+ INTEGERS { output.state before.all }
+
+ FUNCTION {sort.format.title}
+ { 't :=
+ "A " #2
+ "An " #3
+ "The " #4 t chop.word
+ chop.word
+ chop.word
+ sortify
+ #1 global.max$ substring$
+ }
+
+ ITERATE {call.type$}
+ """
+ tokens = [
+ (Token.Comment.SingleLine, "% BibTeX standard bibliography style `plain'"),
+ (Token.Text, u'\n\n'),
+ (Token.Keyword, u'INTEGERS'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'{'),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u'output.state'),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u'before.all'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n\n'),
+ (Token.Keyword, u'FUNCTION'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'{'),
+ (Token.Name.Variable, u'sort.format.title'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'{'),
+ (Token.Text, u' '),
+ (Token.Name.Function, u"'t"),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u':='),
+ (Token.Text, u'\n'),
+ (Token.Literal.String, u'"A "'),
+ (Token.Text, u' '),
+ (Token.Literal.Number, u'#2'),
+ (Token.Text, u'\n '),
+ (Token.Literal.String, u'"An "'),
+ (Token.Text, u' '),
+ (Token.Literal.Number, u'#3'),
+ (Token.Text, u'\n '),
+ (Token.Literal.String, u'"The "'),
+ (Token.Text, u' '),
+ (Token.Literal.Number, u'#4'),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u't'),
+ (Token.Text, u' '),
+ (Token.Name.Variable, u'chop.word'),
+ (Token.Text, u'\n '),
+ (Token.Name.Variable, u'chop.word'),
+ (Token.Text, u'\n'),
+ (Token.Name.Variable, u'chop.word'),
+ (Token.Text, u'\n'),
+ (Token.Name.Variable, u'sortify'),
+ (Token.Text, u'\n'),
+ (Token.Literal.Number, u'#1'),
+ (Token.Text, u' '),
+ (Token.Name.Builtin, u'global.max$'),
+ (Token.Text, u' '),
+ (Token.Name.Builtin, u'substring$'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n\n'),
+ (Token.Keyword, u'ITERATE'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'{'),
+ (Token.Name.Builtin, u'call.type$'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(list(self.lexer.get_tokens(textwrap.dedent(data))), tokens)
diff --git a/tests/test_cfm.py b/tests/test_cfm.py
index 2585489a..0ff1b167 100644
--- a/tests/test_cfm.py
+++ b/tests/test_cfm.py
@@ -3,7 +3,7 @@
Basic ColdfusionHtmlLexer Test
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_clexer.py b/tests/test_clexer.py
index fd7f58fc..5095b797 100644
--- a/tests/test_clexer.py
+++ b/tests/test_clexer.py
@@ -3,7 +3,7 @@
Basic CLexer Test
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_cmdline.py b/tests/test_cmdline.py
index 6e2c917a..1500c875 100644
--- a/tests/test_cmdline.py
+++ b/tests/test_cmdline.py
@@ -3,7 +3,7 @@
Command line test
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_cpp.py b/tests/test_cpp.py
new file mode 100644
index 00000000..ef59965c
--- /dev/null
+++ b/tests/test_cpp.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+"""
+ CPP Tests
+ ~~~~~~~~~
+
+ :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.lexers import CppLexer
+from pygments.token import Token
+
+
+class CppTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = CppLexer()
+
+ def testGoodComment(self):
+ fragment = u'/* foo */\n'
+ tokens = [
+ (Token.Comment.Multiline, u'/* foo */'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testOpenComment(self):
+ fragment = u'/* foo\n'
+ tokens = [
+ (Token.Comment.Multiline, u'/* foo\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
diff --git a/tests/test_crystal.py b/tests/test_crystal.py
new file mode 100644
index 00000000..9a1588f2
--- /dev/null
+++ b/tests/test_crystal.py
@@ -0,0 +1,308 @@
+# -*- coding: utf-8 -*-
+"""
+ Basic CrystalLexer Test
+ ~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import unicode_literals
+import unittest
+
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+from pygments.lexers import CrystalLexer
+
+
+class CrystalTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = CrystalLexer()
+ self.maxDiff = None
+
+ def testRangeSyntax1(self):
+ fragment = '1...3\n'
+ tokens = [
+ (Number.Integer, '1'),
+ (Operator, '...'),
+ (Number.Integer, '3'),
+ (Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testRangeSyntax2(self):
+ fragment = '1 .. 3\n'
+ tokens = [
+ (Number.Integer, '1'),
+ (Text, ' '),
+ (Operator, '..'),
+ (Text, ' '),
+ (Number.Integer, '3'),
+ (Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testInterpolationNestedCurly(self):
+ fragment = (
+ '"A#{ (3..5).group_by { |x| x/2}.map '
+ 'do |k,v| "#{k}" end.join }" + "Z"\n')
+ tokens = [
+ (String.Double, '"'),
+ (String.Double, 'A'),
+ (String.Interpol, '#{'),
+ (Text, ' '),
+ (Punctuation, '('),
+ (Number.Integer, '3'),
+ (Operator, '..'),
+ (Number.Integer, '5'),
+ (Punctuation, ')'),
+ (Operator, '.'),
+ (Name, 'group_by'),
+ (Text, ' '),
+ (String.Interpol, '{'),
+ (Text, ' '),
+ (Operator, '|'),
+ (Name, 'x'),
+ (Operator, '|'),
+ (Text, ' '),
+ (Name, 'x'),
+ (Operator, '/'),
+ (Number.Integer, '2'),
+ (String.Interpol, '}'),
+ (Operator, '.'),
+ (Name, 'map'),
+ (Text, ' '),
+ (Keyword, 'do'),
+ (Text, ' '),
+ (Operator, '|'),
+ (Name, 'k'),
+ (Punctuation, ','),
+ (Name, 'v'),
+ (Operator, '|'),
+ (Text, ' '),
+ (String.Double, '"'),
+ (String.Interpol, '#{'),
+ (Name, 'k'),
+ (String.Interpol, '}'),
+ (String.Double, '"'),
+ (Text, ' '),
+ (Keyword, 'end'),
+ (Operator, '.'),
+ (Name, 'join'),
+ (Text, ' '),
+ (String.Interpol, '}'),
+ (String.Double, '"'),
+ (Text, ' '),
+ (Operator, '+'),
+ (Text, ' '),
+ (String.Double, '"'),
+ (String.Double, 'Z'),
+ (String.Double, '"'),
+ (Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testOperatorMethods(self):
+ fragment = '([] of Int32).[]?(5)\n'
+ tokens = [
+ (Punctuation, '('),
+ (Operator, '['),
+ (Operator, ']'),
+ (Text, ' '),
+ (Keyword, 'of'),
+ (Text, ' '),
+ (Name.Builtin, 'Int32'),
+ (Punctuation, ')'),
+ (Operator, '.'),
+ (Name.Operator, '[]?'),
+ (Punctuation, '('),
+ (Number.Integer, '5'),
+ (Punctuation, ')'),
+ (Text, '\n')
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testArrayAccess(self):
+ fragment = '[5][5]?\n'
+ tokens = [
+ (Operator, '['),
+ (Number.Integer, '5'),
+ (Operator, ']'),
+ (Operator, '['),
+ (Number.Integer, '5'),
+ (Operator, ']?'),
+ (Text, '\n')
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testNumbers(self):
+ for kind, testset in [
+ (Number.Integer, '0 1 1_000_000 1u8 11231231231121312i64'),
+ (Number.Float, '0.0 1.0_f32 1_f32 0f64 1e+4 1e111 1_234.567_890'),
+ (Number.Bin, '0b1001_0110 0b0u8'),
+ (Number.Oct, '0o17 0o7_i32'),
+ (Number.Hex, '0xdeadBEEF'),
+ ]:
+ for fragment in testset.split():
+ self.assertEqual([(kind, fragment), (Text, '\n')],
+ list(self.lexer.get_tokens(fragment + '\n')))
+
+ for fragment in '01 0b2 0x129g2 0o12358'.split():
+ self.assertEqual(next(self.lexer.get_tokens(fragment + '\n'))[0],
+ Error)
+
+ def testChars(self):
+ for fragment in ["'a'", "'я'", "'\\u{1234}'", "'\n'"]:
+ self.assertEqual([(String.Char, fragment), (Text, '\n')],
+ list(self.lexer.get_tokens(fragment + '\n')))
+ self.assertEqual(next(self.lexer.get_tokens("'abc'"))[0], Error)
+
+ def testMacro(self):
+ fragment = (
+ 'def<=>(other : self) : Int\n'
+ '{%for field in %w(first_name middle_name last_name)%}\n'
+ 'cmp={{field.id}}<=>other.{{field.id}}\n'
+ 'return cmp if cmp!=0\n'
+ '{%end%}\n'
+ '0\n'
+ 'end\n')
+ tokens = [
+ (Keyword, 'def'),
+ (Name.Function, '<=>'),
+ (Punctuation, '('),
+ (Name, 'other'),
+ (Text, ' '),
+ (Punctuation, ':'),
+ (Text, ' '),
+ (Keyword.Pseudo, 'self'),
+ (Punctuation, ')'),
+ (Text, ' '),
+ (Punctuation, ':'),
+ (Text, ' '),
+ (Name.Builtin, 'Int'),
+ (Text, '\n'),
+ (String.Interpol, '{%'),
+ (Keyword, 'for'),
+ (Text, ' '),
+ (Name, 'field'),
+ (Text, ' '),
+ (Keyword, 'in'),
+ (Text, ' '),
+ (String.Other, '%w('),
+ (String.Other, 'first_name middle_name last_name'),
+ (String.Other, ')'),
+ (String.Interpol, '%}'),
+ (Text, '\n'),
+ (Name, 'cmp'),
+ (Operator, '='),
+ (String.Interpol, '{{'),
+ (Name, 'field'),
+ (Operator, '.'),
+ (Name, 'id'),
+ (String.Interpol, '}}'),
+ (Operator, '<=>'),
+ (Name, 'other'),
+ (Operator, '.'),
+ (String.Interpol, '{{'),
+ (Name, 'field'),
+ (Operator, '.'),
+ (Name, 'id'),
+ (String.Interpol, '}}'),
+ (Text, '\n'),
+ (Keyword, 'return'),
+ (Text, ' '),
+ (Name, 'cmp'),
+ (Text, ' '),
+ (Keyword, 'if'),
+ (Text, ' '),
+ (Name, 'cmp'),
+ (Operator, '!='),
+ (Number.Integer, '0'),
+ (Text, '\n'),
+ (String.Interpol, '{%'),
+ (Keyword, 'end'),
+ (String.Interpol, '%}'),
+ (Text, '\n'),
+ (Number.Integer, '0'),
+ (Text, '\n'),
+ (Keyword, 'end'),
+ (Text, '\n')
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testLib(self):
+ fragment = (
+ '@[Link("some")]\nlib LibSome\n'
+ '@[CallConvention("X86_StdCall")]\nfun foo="some.foo"(thing : Void*) : LibC::Int\n'
+ 'end\n')
+ tokens = [
+ (Operator, '@['),
+ (Name.Decorator, 'Link'),
+ (Punctuation, '('),
+ (String.Double, '"'),
+ (String.Double, 'some'),
+ (String.Double, '"'),
+ (Punctuation, ')'),
+ (Operator, ']'),
+ (Text, '\n'),
+ (Keyword, 'lib'),
+ (Text, ' '),
+ (Name.Namespace, 'LibSome'),
+ (Text, '\n'),
+ (Operator, '@['),
+ (Name.Decorator, 'CallConvention'),
+ (Punctuation, '('),
+ (String.Double, '"'),
+ (String.Double, 'X86_StdCall'),
+ (String.Double, '"'),
+ (Punctuation, ')'),
+ (Operator, ']'),
+ (Text, '\n'),
+ (Keyword, 'fun'),
+ (Text, ' '),
+ (Name.Function, 'foo'),
+ (Operator, '='),
+ (String.Double, '"'),
+ (String.Double, 'some.foo'),
+ (String.Double, '"'),
+ (Punctuation, '('),
+ (Name, 'thing'),
+ (Text, ' '),
+ (Punctuation, ':'),
+ (Text, ' '),
+ (Name.Builtin, 'Void'),
+ (Operator, '*'),
+ (Punctuation, ')'),
+ (Text, ' '),
+ (Punctuation, ':'),
+ (Text, ' '),
+ (Name, 'LibC'),
+ (Operator, '::'),
+ (Name.Builtin, 'Int'),
+ (Text, '\n'),
+ (Keyword, 'end'),
+ (Text, '\n')
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testEscapedBracestring(self):
+ fragment = 'str.gsub(%r{\\\\\\\\}, "/")\n'
+ tokens = [
+ (Name, 'str'),
+ (Operator, '.'),
+ (Name, 'gsub'),
+ (Punctuation, '('),
+ (String.Regex, '%r{'),
+ (String.Regex, '\\\\'),
+ (String.Regex, '\\\\'),
+ (String.Regex, '}'),
+ (Punctuation, ','),
+ (Text, ' '),
+ (String.Double, '"'),
+ (String.Double, '/'),
+ (String.Double, '"'),
+ (Punctuation, ')'),
+ (Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
diff --git a/tests/test_data.py b/tests/test_data.py
new file mode 100644
index 00000000..be371419
--- /dev/null
+++ b/tests/test_data.py
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+"""
+ Data Tests
+ ~~~~~~~~~~
+
+ :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.lexers import JsonLexer, JsonBareObjectLexer
+from pygments.token import Token
+
+
+class JsonTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = JsonLexer()
+
+ def testBasic(self):
+ fragment = u'{"foo": "bar", "foo2": [1, 2, 3]}\n'
+ tokens = [
+ (Token.Punctuation, u'{'),
+ (Token.Name.Tag, u'"foo"'),
+ (Token.Punctuation, u':'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Double, u'"bar"'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Name.Tag, u'"foo2"'),
+ (Token.Punctuation, u':'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'['),
+ (Token.Literal.Number.Integer, u'1'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Literal.Number.Integer, u'2'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Literal.Number.Integer, u'3'),
+ (Token.Punctuation, u']'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+class JsonBareObjectTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = JsonBareObjectLexer()
+
+ def testBasic(self):
+ # This is the same as testBasic for JsonLexer above, except the
+ # enclosing curly braces are removed.
+ fragment = u'"foo": "bar", "foo2": [1, 2, 3]\n'
+ tokens = [
+ (Token.Name.Tag, u'"foo"'),
+ (Token.Punctuation, u':'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Double, u'"bar"'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Name.Tag, u'"foo2"'),
+ (Token.Punctuation, u':'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'['),
+ (Token.Literal.Number.Integer, u'1'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Literal.Number.Integer, u'2'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Literal.Number.Integer, u'3'),
+ (Token.Punctuation, u']'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testClosingCurly(self):
+ # This can be an Error token, but should not be a can't-pop-from-stack
+ # exception.
+ fragment = '}"a"\n'
+ tokens = [
+ (Token.Error, '}'),
+ (Token.Name.Tag, '"a"'),
+ (Token.Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testClosingCurlyInValue(self):
+ fragment = '"": ""}\n'
+ tokens = [
+ (Token.Name.Tag, '""'),
+ (Token.Punctuation, ':'),
+ (Token.Text, ' '),
+ (Token.Literal.String.Double, '""'),
+ (Token.Error, '}'),
+ (Token.Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
diff --git a/tests/test_examplefiles.py b/tests/test_examplefiles.py
index 924e1184..2fae1125 100644
--- a/tests/test_examplefiles.py
+++ b/tests/test_examplefiles.py
@@ -3,7 +3,7 @@
Pygments tests with example files
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -46,6 +46,10 @@ def test_example_files():
if not os.path.isfile(absfn):
continue
+ extension = os.getenv('TEST_EXT')
+ if extension and not absfn.endswith(extension):
+ continue
+
print(absfn)
with open(absfn, 'rb') as f:
code = f.read()
@@ -85,7 +89,7 @@ def test_example_files():
def check_lexer(lx, fn):
if os.name == 'java' and fn in BAD_FILES_FOR_JYTHON:
- raise support.SkipTest
+ raise support.SkipTest('%s is a known bad file on Jython' % fn)
absfn = os.path.join(TESTDIR, 'examplefiles', fn)
with open(absfn, 'rb') as fp:
text = fp.read()
diff --git a/tests/test_ezhil.py b/tests/test_ezhil.py
index 23b9cb41..15cc13b1 100644
--- a/tests/test_ezhil.py
+++ b/tests/test_ezhil.py
@@ -94,7 +94,8 @@ class EzhilTest(unittest.TestCase):
முடி\n"""
tokens = [
(Token.Comment.Single,
- u'# (C) \u0bae\u0bc1\u0ba4\u0bcd\u0ba4\u0bc8\u0baf\u0bbe \u0b85\u0ba3\u0bcd\u0ba3\u0bbe\u0bae\u0bb2\u0bc8 2013, 2015\n'),
+ u'# (C) \u0bae\u0bc1\u0ba4\u0bcd\u0ba4\u0bc8\u0baf\u0bbe \u0b85'
+ u'\u0ba3\u0bcd\u0ba3\u0bbe\u0bae\u0bb2\u0bc8 2013, 2015\n'),
(Token.Keyword,u'நிரல்பாகம்'),
(Token.Text, u' '),
(Token.Name, u'gcd'),
diff --git a/tests/test_html_formatter.py b/tests/test_html_formatter.py
index 596d9fbc..79990edd 100644
--- a/tests/test_html_formatter.py
+++ b/tests/test_html_formatter.py
@@ -3,7 +3,7 @@
Pygments HTML formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_inherit.py b/tests/test_inherit.py
index 34033a08..5da57dd9 100644
--- a/tests/test_inherit.py
+++ b/tests/test_inherit.py
@@ -3,7 +3,7 @@
Tests for inheritance in RegexLexer
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_irc_formatter.py b/tests/test_irc_formatter.py
index 16a8fd30..3b34f0bc 100644
--- a/tests/test_irc_formatter.py
+++ b/tests/test_irc_formatter.py
@@ -3,7 +3,7 @@
Pygments IRC formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_java.py b/tests/test_java.py
index f4096647..6e5e8992 100644
--- a/tests/test_java.py
+++ b/tests/test_java.py
@@ -3,7 +3,7 @@
Basic JavaLexer Test
~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_javascript.py b/tests/test_javascript.py
new file mode 100644
index 00000000..21dff7c4
--- /dev/null
+++ b/tests/test_javascript.py
@@ -0,0 +1,84 @@
+# -*- coding: utf-8 -*-
+"""
+ Javascript tests
+ ~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.lexers import CoffeeScriptLexer
+from pygments.token import Token
+
+COFFEE_SLASH_GOLDEN = [
+ # input_str, slashes_are_regex_here
+ (r'/\\/', True),
+ (r'/\\/i', True),
+ (r'/\//', True),
+ (r'/(\s)/', True),
+ ('/a{2,8}/', True),
+ ('/b*c?d+/', True),
+ ('/(capture-match)/', True),
+ ('/(?:do-not-capture-match)/', True),
+ ('/this|or|that/', True),
+ ('/[char-set]/', True),
+ ('/[^neg-char_st]/', True),
+ ('/^.*$/', True),
+ (r'/\n(\f)\0\1\d\b\cm\u1234/', True),
+ (r'/^.?([^/\\\n\w]*)a\1+$/.something(or_other) # something more complex', True),
+ ("foo = (str) ->\n /'|\"/.test str", True),
+ ('a = a / b / c', False),
+ ('a = a/b/c', False),
+ ('a = a/b/ c', False),
+ ('a = a /b/c', False),
+ ('a = 1 + /d/.test(a)', True),
+]
+
+def test_coffee_slashes():
+ for input_str, slashes_are_regex_here in COFFEE_SLASH_GOLDEN:
+ yield coffee_runner, input_str, slashes_are_regex_here
+
+def coffee_runner(input_str, slashes_are_regex_here):
+ lex = CoffeeScriptLexer()
+ output = list(lex.get_tokens(input_str))
+ print(output)
+ for t, s in output:
+ if '/' in s:
+ is_regex = t is Token.String.Regex
+ assert is_regex == slashes_are_regex_here, (t, s)
+
+class CoffeeTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = CoffeeScriptLexer()
+
+ def testMixedSlashes(self):
+ fragment = u'a?/foo/:1/2;\n'
+ tokens = [
+ (Token.Name.Other, u'a'),
+ (Token.Operator, u'?'),
+ (Token.Literal.String.Regex, u'/foo/'),
+ (Token.Operator, u':'),
+ (Token.Literal.Number.Integer, u'1'),
+ (Token.Operator, u'/'),
+ (Token.Literal.Number.Integer, u'2'),
+ (Token.Punctuation, u';'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testBewareInfiniteLoop(self):
+ # This demonstrates the case that "This isn't really guarding" comment
+ # refers to.
+ fragment = '/a/x;\n'
+ tokens = [
+ (Token.Text, ''),
+ (Token.Operator, '/'),
+ (Token.Name.Other, 'a'),
+ (Token.Operator, '/'),
+ (Token.Name.Other, 'x'),
+ (Token.Punctuation, ';'),
+ (Token.Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
diff --git a/tests/test_julia.py b/tests/test_julia.py
new file mode 100644
index 00000000..ed46f27e
--- /dev/null
+++ b/tests/test_julia.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+"""
+ Julia Tests
+ ~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.lexers import JuliaLexer
+from pygments.token import Token
+
+
+class JuliaTests(unittest.TestCase):
+ def setUp(self):
+ self.lexer = JuliaLexer()
+
+ def test_unicode(self):
+ """
+ Test that unicode character, √, in an expression is recognized
+ """
+ fragment = u's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n'
+ tokens = [
+ (Token.Name, u's'),
+ (Token.Text, u' '),
+ (Token.Operator, u'='),
+ (Token.Text, u' '),
+ (Token.Operator, u'\u221a'),
+ (Token.Punctuation, u'('),
+ (Token.Punctuation, u'('),
+ (Token.Literal.Number.Integer, u'1'),
+ (Token.Operator, u'/'),
+ (Token.Name, u'n'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u' '),
+ (Token.Operator, u'*'),
+ (Token.Text, u' '),
+ (Token.Name, u'sum'),
+ (Token.Punctuation, u'('),
+ (Token.Name, u'count'),
+ (Token.Text, u' '),
+ (Token.Operator, u'.^'),
+ (Token.Text, u' '),
+ (Token.Literal.Number.Integer, u'2'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u' '),
+ (Token.Operator, u'-'),
+ (Token.Text, u' '),
+ (Token.Name, u'mu'),
+ (Token.Text, u' '),
+ (Token.Operator, u'.^'),
+ (Token.Literal.Number.Integer, u'2'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
diff --git a/tests/test_latex_formatter.py b/tests/test_latex_formatter.py
index 05a6c3ac..ebed7964 100644
--- a/tests/test_latex_formatter.py
+++ b/tests/test_latex_formatter.py
@@ -3,7 +3,7 @@
Pygments LaTeX formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -42,9 +42,9 @@ class LatexFormatterTest(unittest.TestCase):
ret = po.wait()
output = po.stdout.read()
po.stdout.close()
- except OSError:
+ except OSError as e:
# latex not available
- raise support.SkipTest
+ raise support.SkipTest(e)
else:
if ret:
print(output)
diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py
index 90d05ef8..3716fb72 100644
--- a/tests/test_lexers_other.py
+++ b/tests/test_lexers_other.py
@@ -3,7 +3,7 @@
Tests for other lexers
~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import glob
diff --git a/tests/test_modeline.py b/tests/test_modeline.py
new file mode 100644
index 00000000..efe038df
--- /dev/null
+++ b/tests/test_modeline.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+"""
+ Tests for the vim modeline feature
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+from pygments import modeline
+
+
+def test_lexer_classes():
+ def verify(buf):
+ assert modeline.get_filetype_from_buffer(buf) == 'python'
+
+ for buf in [
+ 'vi: ft=python' + '\n' * 8,
+ 'vi: ft=python' + '\n' * 8,
+ '\n\n\n\nvi=8: syntax=python' + '\n' * 8,
+ '\n' * 8 + 'ex: filetype=python',
+ '\n' * 8 + 'vim: some,other,syn=python\n\n\n\n'
+ ]:
+ yield verify, buf
diff --git a/tests/test_objectiveclexer.py b/tests/test_objectiveclexer.py
index 90bd680f..aee7db66 100644
--- a/tests/test_objectiveclexer.py
+++ b/tests/test_objectiveclexer.py
@@ -3,7 +3,7 @@
Basic CLexer Test
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_perllexer.py b/tests/test_perllexer.py
index 26b2d0a7..102f0a9f 100644
--- a/tests/test_perllexer.py
+++ b/tests/test_perllexer.py
@@ -3,14 +3,14 @@
Pygments regex lexer tests
~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import time
import unittest
-from pygments.token import String
+from pygments.token import Keyword, Name, String, Text
from pygments.lexers.perl import PerlLexer
@@ -135,3 +135,23 @@ class RunawayRegexTest(unittest.TestCase):
def test_substitution_with_parenthesis(self):
self.assert_single_token(r's(aaa)', String.Regex)
self.assert_fast_tokenization('s(' + '\\'*999)
+
+ ### Namespaces/modules
+
+ def test_package_statement(self):
+ self.assert_tokens(['package', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
+ self.assert_tokens(['package', ' ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
+
+ def test_use_statement(self):
+ self.assert_tokens(['use', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
+ self.assert_tokens(['use', ' ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
+
+ def test_no_statement(self):
+ self.assert_tokens(['no', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
+ self.assert_tokens(['no', ' ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
+
+ def test_require_statement(self):
+ self.assert_tokens(['require', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
+ self.assert_tokens(['require', ' ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
+ self.assert_tokens(['require', ' ', '"Foo/Bar.pm"'], [Keyword, Text, String])
+
diff --git a/tests/test_php.py b/tests/test_php.py
new file mode 100644
index 00000000..b4117381
--- /dev/null
+++ b/tests/test_php.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+"""
+ PHP Tests
+ ~~~~~~~~~
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.lexers import PhpLexer
+from pygments.token import Token
+
+
+class PhpTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = PhpLexer()
+
+ def testStringEscapingRun(self):
+ fragment = '<?php $x="{\\""; ?>\n'
+ tokens = [
+ (Token.Comment.Preproc, '<?php'),
+ (Token.Text, ' '),
+ (Token.Name.Variable, '$x'),
+ (Token.Operator, '='),
+ (Token.Literal.String.Double, '"'),
+ (Token.Literal.String.Double, '{'),
+ (Token.Literal.String.Escape, '\\"'),
+ (Token.Literal.String.Double, '"'),
+ (Token.Punctuation, ';'),
+ (Token.Text, ' '),
+ (Token.Comment.Preproc, '?>'),
+ (Token.Other, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
diff --git a/tests/test_praat.py b/tests/test_praat.py
new file mode 100644
index 00000000..1ca97d1e
--- /dev/null
+++ b/tests/test_praat.py
@@ -0,0 +1,130 @@
+# -*- coding: utf-8 -*-
+"""
+ Praat lexer tests
+ ~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.token import Token
+from pygments.lexers import PraatLexer
+
+class PraatTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = PraatLexer()
+ self.maxDiff = None
+
+ def testNumericAssignment(self):
+ fragment = u'var = -15e4\n'
+ tokens = [
+ (Token.Text, u'var'),
+ (Token.Text, u' '),
+ (Token.Operator, u'='),
+ (Token.Text, u' '),
+ (Token.Operator, u'-'),
+ (Token.Literal.Number, u'15e4'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testStringAssignment(self):
+ fragment = u'var$ = "foo"\n'
+ tokens = [
+ (Token.Text, u'var$'),
+ (Token.Text, u' '),
+ (Token.Operator, u'='),
+ (Token.Text, u' '),
+ (Token.Literal.String, u'"'),
+ (Token.Literal.String, u'foo'),
+ (Token.Literal.String, u'"'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testStringEscapedQuotes(self):
+ fragment = u'"it said ""foo"""\n'
+ tokens = [
+ (Token.Literal.String, u'"'),
+ (Token.Literal.String, u'it said '),
+ (Token.Literal.String, u'"'),
+ (Token.Literal.String, u'"'),
+ (Token.Literal.String, u'foo'),
+ (Token.Literal.String, u'"'),
+ (Token.Literal.String, u'"'),
+ (Token.Literal.String, u'"'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testFunctionCall(self):
+ fragment = u'selected("Sound", i+(a*b))\n'
+ tokens = [
+ (Token.Name.Function, u'selected'),
+ (Token.Punctuation, u'('),
+ (Token.Literal.String, u'"'),
+ (Token.Literal.String, u'Sound'),
+ (Token.Literal.String, u'"'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Text, u'i'),
+ (Token.Operator, u'+'),
+ (Token.Text, u'('),
+ (Token.Text, u'a'),
+ (Token.Operator, u'*'),
+ (Token.Text, u'b'),
+ (Token.Text, u')'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testBrokenUnquotedString(self):
+ fragment = u'printline string\n... \'interpolated\' string\n'
+ tokens = [
+ (Token.Keyword, u'printline'),
+ (Token.Text, u' '),
+ (Token.Literal.String, u'string'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'...'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Interpol, u"'"),
+ (Token.Literal.String.Interpol, u'interpolated'),
+ (Token.Literal.String.Interpol, u"'"),
+ (Token.Text, u' '),
+ (Token.Literal.String, u'string'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+    def testInlineIf(self):
+ fragment = u'var = if true == 1 then -1 else 0 fi'
+ tokens = [
+ (Token.Text, u'var'),
+ (Token.Text, u' '),
+ (Token.Operator, u'='),
+ (Token.Text, u' '),
+ (Token.Keyword, u'if'),
+ (Token.Text, u' '),
+ (Token.Text, u'true'),
+ (Token.Text, u' '),
+ (Token.Operator, u'=='),
+ (Token.Text, u' '),
+ (Token.Literal.Number, u'1'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'then'),
+ (Token.Text, u' '),
+ (Token.Operator, u'-'),
+ (Token.Literal.Number, u'1'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'else'),
+ (Token.Text, u' '),
+ (Token.Literal.Number, u'0'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'fi'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
diff --git a/tests/test_properties.py b/tests/test_properties.py
new file mode 100644
index 00000000..562778ba
--- /dev/null
+++ b/tests/test_properties.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+"""
+ Properties Tests
+ ~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.lexers.configs import PropertiesLexer
+from pygments.token import Token
+
+
+class PropertiesTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = PropertiesLexer()
+
+ def test_comments(self):
+ """
+ Assures lines lead by either # or ! are recognized as a comment
+ """
+ fragment = '! a comment\n# also a comment\n'
+ tokens = [
+ (Token.Comment, '! a comment'),
+ (Token.Text, '\n'),
+ (Token.Comment, '# also a comment'),
+ (Token.Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def test_leading_whitespace_comments(self):
+ fragment = ' # comment\n'
+ tokens = [
+ (Token.Text, ' '),
+ (Token.Comment, '# comment'),
+ (Token.Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def test_escaped_space_in_key(self):
+ fragment = 'key = value\n'
+ tokens = [
+ (Token.Name.Attribute, 'key'),
+ (Token.Text, ' '),
+ (Token.Operator, '='),
+ (Token.Text, ' '),
+ (Token.Literal.String, 'value'),
+ (Token.Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def test_escaped_space_in_value(self):
+ fragment = 'key = doubleword\\ value\n'
+ tokens = [
+ (Token.Name.Attribute, 'key'),
+ (Token.Text, ' '),
+ (Token.Operator, '='),
+ (Token.Text, ' '),
+ (Token.Literal.String, 'doubleword\\ value'),
+ (Token.Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def test_space_delimited_kv_pair(self):
+ fragment = 'key value\n'
+ tokens = [
+ (Token.Name.Attribute, 'key'),
+ (Token.Text, ' '),
+ (Token.Literal.String, 'value\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def test_just_key(self):
+ fragment = 'justkey\n'
+ tokens = [
+ (Token.Name.Attribute, 'justkey'),
+ (Token.Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def test_just_key_with_space(self):
+ fragment = 'just\\ key\n'
+ tokens = [
+ (Token.Name.Attribute, 'just\\ key'),
+ (Token.Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
diff --git a/tests/test_python.py b/tests/test_python.py
new file mode 100644
index 00000000..e99687a6
--- /dev/null
+++ b/tests/test_python.py
@@ -0,0 +1,113 @@
+# -*- coding: utf-8 -*-
+"""
+ Python Tests
+ ~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.lexers import PythonLexer, Python3Lexer
+from pygments.token import Token
+
+
+class PythonTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = PythonLexer()
+
+ def test_cls_builtin(self):
+ """
+ Tests that a cls token gets interpreted as a Token.Name.Builtin.Pseudo
+
+ """
+ fragment = 'class TestClass():\n @classmethod\n def hello(cls):\n pass\n'
+ tokens = [
+ (Token.Keyword, 'class'),
+ (Token.Text, ' '),
+ (Token.Name.Class, 'TestClass'),
+ (Token.Punctuation, '('),
+ (Token.Punctuation, ')'),
+ (Token.Punctuation, ':'),
+ (Token.Text, '\n'),
+ (Token.Text, ' '),
+ (Token.Name.Decorator, '@classmethod'),
+ (Token.Text, '\n'),
+ (Token.Text, ' '),
+ (Token.Keyword, 'def'),
+ (Token.Text, ' '),
+ (Token.Name.Function, 'hello'),
+ (Token.Punctuation, '('),
+ (Token.Name.Builtin.Pseudo, 'cls'),
+ (Token.Punctuation, ')'),
+ (Token.Punctuation, ':'),
+ (Token.Text, '\n'),
+ (Token.Text, ' '),
+ (Token.Keyword, 'pass'),
+ (Token.Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+
+class Python3Test(unittest.TestCase):
+ def setUp(self):
+ self.lexer = Python3Lexer()
+
+ def testNeedsName(self):
+ """
+ Tests that '@' is recognized as an Operator
+ """
+ fragment = u'S = (H @ beta - r).T @ inv(H @ V @ H.T) @ (H @ beta - r)\n'
+ tokens = [
+ (Token.Name, u'S'),
+ (Token.Text, u' '),
+ (Token.Operator, u'='),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'('),
+ (Token.Name, u'H'),
+ (Token.Text, u' '),
+ (Token.Operator, u'@'),
+ (Token.Text, u' '),
+ (Token.Name, u'beta'),
+ (Token.Text, u' '),
+ (Token.Operator, u'-'),
+ (Token.Text, u' '),
+ (Token.Name, u'r'),
+ (Token.Punctuation, u')'),
+ (Token.Operator, u'.'),
+ (Token.Name, u'T'),
+ (Token.Text, u' '),
+ (Token.Operator, u'@'),
+ (Token.Text, u' '),
+ (Token.Name, u'inv'),
+ (Token.Punctuation, u'('),
+ (Token.Name, u'H'),
+ (Token.Text, u' '),
+ (Token.Operator, u'@'),
+ (Token.Text, u' '),
+ (Token.Name, u'V'),
+ (Token.Text, u' '),
+ (Token.Operator, u'@'),
+ (Token.Text, u' '),
+ (Token.Name, u'H'),
+ (Token.Operator, u'.'),
+ (Token.Name, u'T'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u' '),
+ (Token.Operator, u'@'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'('),
+ (Token.Name, u'H'),
+ (Token.Text, u' '),
+ (Token.Operator, u'@'),
+ (Token.Text, u' '),
+ (Token.Name, u'beta'),
+ (Token.Text, u' '),
+ (Token.Operator, u'-'),
+ (Token.Text, u' '),
+ (Token.Name, u'r'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
diff --git a/tests/test_qbasiclexer.py b/tests/test_qbasiclexer.py
index 8b790cee..0ea221a1 100644
--- a/tests/test_qbasiclexer.py
+++ b/tests/test_qbasiclexer.py
@@ -3,7 +3,7 @@
Tests for QBasic
~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_regexlexer.py b/tests/test_regexlexer.py
index eb25be61..d919a950 100644
--- a/tests/test_regexlexer.py
+++ b/tests/test_regexlexer.py
@@ -3,7 +3,7 @@
Pygments regex lexer tests
~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_regexopt.py b/tests/test_regexopt.py
index dd56a446..5cfb62a3 100644
--- a/tests/test_regexopt.py
+++ b/tests/test_regexopt.py
@@ -3,7 +3,7 @@
Tests for pygments.regexopt
~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -46,6 +46,7 @@ class RegexOptTestCase(unittest.TestCase):
random.randint(1, len(kwlist) - 1))
no_match = set(kwlist) - set(to_match)
rex = re.compile(regex_opt(to_match))
+ self.assertEqual(rex.groups, 1)
for w in to_match:
self.assertTrue(rex.match(w))
for w in no_match:
@@ -74,3 +75,36 @@ class RegexOptTestCase(unittest.TestCase):
rex = re.compile(opt)
m = rex.match('abfoo')
self.assertEqual(5, m.end())
+
+ def test_different_length_grouping(self):
+ opt = regex_opt(('a', 'xyz'))
+ print(opt)
+ rex = re.compile(opt)
+ self.assertTrue(rex.match('a'))
+ self.assertTrue(rex.match('xyz'))
+ self.assertFalse(rex.match('b'))
+ self.assertEqual(1, rex.groups)
+
+ def test_same_length_grouping(self):
+ opt = regex_opt(('a', 'b'))
+ print(opt)
+ rex = re.compile(opt)
+ self.assertTrue(rex.match('a'))
+ self.assertTrue(rex.match('b'))
+ self.assertFalse(rex.match('x'))
+
+ self.assertEqual(1, rex.groups)
+ groups = rex.match('a').groups()
+ self.assertEqual(('a',), groups)
+
+ def test_same_length_suffix_grouping(self):
+ opt = regex_opt(('a', 'b'), suffix='(m)')
+ print(opt)
+ rex = re.compile(opt)
+ self.assertTrue(rex.match('am'))
+ self.assertTrue(rex.match('bm'))
+ self.assertFalse(rex.match('xm'))
+ self.assertFalse(rex.match('ax'))
+ self.assertEqual(2, rex.groups)
+ groups = rex.match('am').groups()
+ self.assertEqual(('a', 'm'), groups)
diff --git a/tests/test_rtf_formatter.py b/tests/test_rtf_formatter.py
index 25784743..756c03a9 100644
--- a/tests/test_rtf_formatter.py
+++ b/tests/test_rtf_formatter.py
@@ -3,7 +3,7 @@
Pygments RTF formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_ruby.py b/tests/test_ruby.py
index ab210bad..b7d4110a 100644
--- a/tests/test_ruby.py
+++ b/tests/test_ruby.py
@@ -3,7 +3,7 @@
Basic RubyLexer Test
~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_shell.py b/tests/test_shell.py
index 4eb5a15a..e283793e 100644
--- a/tests/test_shell.py
+++ b/tests/test_shell.py
@@ -3,14 +3,14 @@
Basic Shell Tests
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import unittest
from pygments.token import Token
-from pygments.lexers import BashLexer
+from pygments.lexers import BashLexer, BashSessionLexer
class BashTest(unittest.TestCase):
@@ -87,3 +87,56 @@ class BashTest(unittest.TestCase):
(Token.Text, u'\n'),
]
self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testArrayNums(self):
+ fragment = u'a=(1 2 3)\n'
+ tokens = [
+ (Token.Name.Variable, u'a'),
+ (Token.Operator, u'='),
+ (Token.Operator, u'('),
+ (Token.Literal.Number, u'1'),
+ (Token.Text, u' '),
+ (Token.Literal.Number, u'2'),
+ (Token.Text, u' '),
+ (Token.Literal.Number, u'3'),
+ (Token.Operator, u')'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testEndOfLineNums(self):
+ fragment = u'a=1\nb=2 # comment\n'
+ tokens = [
+ (Token.Name.Variable, u'a'),
+ (Token.Operator, u'='),
+ (Token.Literal.Number, u'1'),
+ (Token.Text, u'\n'),
+ (Token.Name.Variable, u'b'),
+ (Token.Operator, u'='),
+ (Token.Literal.Number, u'2'),
+ (Token.Text, u' '),
+ (Token.Comment.Single, u'# comment\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+class BashSessionTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = BashSessionLexer()
+ self.maxDiff = None
+
+ def testNeedsName(self):
+ fragment = u'$ echo \\\nhi\nhi\n'
+ tokens = [
+ (Token.Text, u''),
+ (Token.Generic.Prompt, u'$'),
+ (Token.Text, u' '),
+ (Token.Name.Builtin, u'echo'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Escape, u'\\\n'),
+ (Token.Text, u'hi'),
+ (Token.Text, u'\n'),
+ (Token.Generic.Output, u'hi\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
diff --git a/tests/test_smarty.py b/tests/test_smarty.py
index 450e4e6b..e1e079d9 100644
--- a/tests/test_smarty.py
+++ b/tests/test_smarty.py
@@ -3,7 +3,7 @@
Basic SmartyLexer Test
~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_sql.py b/tests/test_sql.py
new file mode 100644
index 00000000..c5f5c758
--- /dev/null
+++ b/tests/test_sql.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+"""
+ Pygments SQL lexers tests
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+import unittest
+
+from pygments.lexers.sql import TransactSqlLexer
+from pygments.token import Comment, Name, Number, Punctuation, Whitespace
+
+
+class TransactSqlLexerTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = TransactSqlLexer()
+
+ def _assertAreTokensOfType(self, examples, expected_token_type):
+ for test_number, example in enumerate(examples.split(), 1):
+ token_count = 0
+ for token_type, token_value in self.lexer.get_tokens(example):
+ if token_type != Whitespace:
+ token_count += 1
+ self.assertEqual(
+ token_type, expected_token_type,
+                    'token_type #%d for %s is %s but must be %s' %
+ (test_number, token_value, token_type, expected_token_type))
+ self.assertEqual(
+ token_count, 1,
+ '%s must yield exactly 1 token instead of %d' %
+ (example, token_count))
+
+ def _assertTokensMatch(self, text, expected_tokens_without_trailing_newline):
+ actual_tokens = tuple(self.lexer.get_tokens(text))
+ if (len(actual_tokens) >= 1) and (actual_tokens[-1] == (Whitespace, '\n')):
+ actual_tokens = tuple(actual_tokens[:-1])
+ self.assertEqual(
+ expected_tokens_without_trailing_newline, actual_tokens,
+ 'text must yield expected tokens: %s' % text)
+
+ def test_can_lex_float(self):
+ self._assertAreTokensOfType(
+ '1. 1.e1 .1 1.2 1.2e3 1.2e+3 1.2e-3 1e2', Number.Float)
+ self._assertTokensMatch(
+ '1e2.1e2',
+ ((Number.Float, '1e2'), (Number.Float, '.1e2'))
+ )
+
+ def test_can_reject_almost_float(self):
+ self._assertTokensMatch(
+ '.e1',
+ ((Punctuation, '.'), (Name, 'e1')))
+
+ def test_can_lex_integer(self):
+ self._assertAreTokensOfType(
+ '1 23 456', Number.Integer)
+
+ def test_can_lex_names(self):
+ self._assertAreTokensOfType(
+ u'thingy thingy123 _thingy _ _123 Ähnliches Müll #temp1 ##temp2', Name)
+
+ def test_can_lex_comments(self):
+ self._assertTokensMatch('--\n', ((Comment.Single, '--\n'),))
+ self._assertTokensMatch('/**/', (
+ (Comment.Multiline, '/*'), (Comment.Multiline, '*/')
+ ))
+ self._assertTokensMatch('/*/**/*/', (
+ (Comment.Multiline, '/*'),
+ (Comment.Multiline, '/*'),
+ (Comment.Multiline, '*/'),
+ (Comment.Multiline, '*/'),
+ ))
diff --git a/tests/test_string_asserts.py b/tests/test_string_asserts.py
index ba7b37fa..5e9e5617 100644
--- a/tests/test_string_asserts.py
+++ b/tests/test_string_asserts.py
@@ -3,7 +3,7 @@
Pygments string assert utility tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_terminal_formatter.py b/tests/test_terminal_formatter.py
index cb5c6c44..ee0ac380 100644
--- a/tests/test_terminal_formatter.py
+++ b/tests/test_terminal_formatter.py
@@ -3,7 +3,7 @@
Pygments terminal formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_textfmts.py b/tests/test_textfmts.py
index d355ab68..453dd61f 100644
--- a/tests/test_textfmts.py
+++ b/tests/test_textfmts.py
@@ -3,7 +3,7 @@
Basic Tests for textfmts
~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_token.py b/tests/test_token.py
index 0c6b02bf..94522373 100644
--- a/tests/test_token.py
+++ b/tests/test_token.py
@@ -3,7 +3,7 @@
Test suite for the token module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_unistring.py b/tests/test_unistring.py
index a414347c..c56b68c7 100644
--- a/tests/test_unistring.py
+++ b/tests/test_unistring.py
@@ -3,7 +3,7 @@
Test suite for the unistring module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_using_api.py b/tests/test_using_api.py
index 16d865e6..7517ce7d 100644
--- a/tests/test_using_api.py
+++ b/tests/test_using_api.py
@@ -3,7 +3,7 @@
Pygments tests for using()
~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_util.py b/tests/test_util.py
index 720b384a..cdb58b3f 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -3,7 +3,7 @@
Test suite for the util module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_whiley.py b/tests/test_whiley.py
new file mode 100644
index 00000000..f447ffec
--- /dev/null
+++ b/tests/test_whiley.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+"""
+ Whiley Test
+ ~~~~~~~~~~~
+
+    :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.lexers import WhileyLexer
+from pygments.token import Token
+
+
+class WhileyTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = WhileyLexer()
+
+ def testWhileyOperator(self):
+ fragment = u'123 \u2200 x\n'
+ tokens = [
+ (Token.Literal.Number.Integer, u'123'),
+ (Token.Text, u' '),
+ (Token.Operator, u'\u2200'),
+ (Token.Text, u' '),
+ (Token.Name, u'x'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))