-rw-r--r--  AUTHORS | 11
-rw-r--r--  CHANGES | 32
-rw-r--r--  Makefile | 2
-rw-r--r--  docs/generate.py | 2
-rw-r--r--  external/markdown-processor.py | 2
-rw-r--r--  external/moin-parser.py | 2
-rw-r--r--  external/rst-directive-old.py | 2
-rw-r--r--  external/rst-directive.py | 2
-rw-r--r--  pygments/__init__.py | 2
-rw-r--r--  pygments/cmdline.py | 2
-rw-r--r--  pygments/console.py | 2
-rw-r--r--  pygments/filter.py | 2
-rw-r--r--  pygments/filters/__init__.py | 2
-rw-r--r--  pygments/formatter.py | 2
-rw-r--r--  pygments/formatters/__init__.py | 2
-rwxr-xr-x  pygments/formatters/_mapping.py | 2
-rw-r--r--  pygments/formatters/bbcode.py | 2
-rw-r--r--  pygments/formatters/html.py | 4
-rw-r--r--  pygments/formatters/img.py | 2
-rw-r--r--  pygments/formatters/latex.py | 5
-rw-r--r--  pygments/formatters/other.py | 2
-rw-r--r--  pygments/formatters/rtf.py | 2
-rw-r--r--  pygments/formatters/svg.py | 2
-rw-r--r--  pygments/formatters/terminal.py | 2
-rw-r--r--  pygments/formatters/terminal256.py | 2
-rw-r--r--  pygments/lexer.py | 18
-rw-r--r--  pygments/lexers/__init__.py | 25
-rw-r--r--  pygments/lexers/_asybuiltins.py | 2
-rw-r--r--  pygments/lexers/_clbuiltins.py | 2
-rw-r--r--  pygments/lexers/_luabuiltins.py | 2
-rw-r--r--  pygments/lexers/_mapping.py | 60
-rw-r--r--  pygments/lexers/_phpbuiltins.py | 2
-rw-r--r--  pygments/lexers/_postgres_builtins.py | 2
-rw-r--r--  pygments/lexers/_vimbuiltins.py | 6
-rw-r--r--  pygments/lexers/agile.py | 589
-rw-r--r--  pygments/lexers/asm.py | 6
-rw-r--r--  pygments/lexers/compiled.py | 592
-rw-r--r--  pygments/lexers/dotnet.py | 70
-rw-r--r--  pygments/lexers/functional.py | 559
-rw-r--r--  pygments/lexers/hdl.py | 151
-rw-r--r--  pygments/lexers/jvm.py | 664
-rw-r--r--  pygments/lexers/math.py | 2
-rw-r--r--  pygments/lexers/other.py | 1356
-rw-r--r--  pygments/lexers/parsers.py | 2
-rw-r--r--  pygments/lexers/postgres.py | 326
-rw-r--r--  pygments/lexers/pypylog.py | 85
-rw-r--r--  pygments/lexers/shell.py | 360
-rw-r--r--  pygments/lexers/special.py | 2
-rw-r--r--  pygments/lexers/sql.py | 556
-rw-r--r--  pygments/lexers/templates.py | 2
-rw-r--r--  pygments/lexers/text.py | 337
-rw-r--r--  pygments/lexers/web.py | 156
-rw-r--r--  pygments/plugin.py | 2
-rw-r--r--  pygments/scanner.py | 2
-rw-r--r--  pygments/style.py | 2
-rw-r--r--  pygments/styles/__init__.py | 3
-rw-r--r--  pygments/styles/autumn.py | 2
-rw-r--r--  pygments/styles/borland.py | 2
-rw-r--r--  pygments/styles/bw.py | 2
-rw-r--r--  pygments/styles/colorful.py | 2
-rw-r--r--  pygments/styles/default.py | 2
-rw-r--r--  pygments/styles/emacs.py | 2
-rw-r--r--  pygments/styles/friendly.py | 2
-rw-r--r--  pygments/styles/fruity.py | 3
-rw-r--r--  pygments/styles/manni.py | 2
-rw-r--r--  pygments/styles/monokai.py | 2
-rw-r--r--  pygments/styles/murphy.py | 2
-rw-r--r--  pygments/styles/native.py | 2
-rw-r--r--  pygments/styles/pastie.py | 2
-rw-r--r--  pygments/styles/perldoc.py | 2
-rw-r--r--  pygments/styles/rrt.py | 33
-rw-r--r--  pygments/styles/tango.py | 2
-rw-r--r--  pygments/styles/trac.py | 2
-rw-r--r--  pygments/styles/vim.py | 2
-rw-r--r--  pygments/styles/vs.py | 2
-rw-r--r--  pygments/token.py | 2
-rw-r--r--  pygments/unistring.py | 2
-rw-r--r--  pygments/util.py | 4
-rwxr-xr-x  scripts/check_sources.py | 4
-rwxr-xr-x  scripts/find_codetags.py | 2
-rwxr-xr-x  scripts/find_error.py | 7
-rw-r--r--  scripts/get_vimkw.py | 13
-rwxr-xr-x  setup.py | 2
-rw-r--r--  tests/examplefiles/classes.dylan | 16
-rw-r--r--  tests/examplefiles/coq_RelationClasses | 447
-rw-r--r--  tests/examplefiles/example.cls | 15
-rw-r--r--  tests/examplefiles/example.moon | 629
-rw-r--r--  tests/examplefiles/example.p | 34
-rw-r--r--  tests/examplefiles/example.snobol | 15
-rw-r--r--  tests/examplefiles/example.u | 548
-rw-r--r--  tests/examplefiles/http_request_example | 14
-rw-r--r--  tests/examplefiles/http_response_example | 27
-rw-r--r--  tests/examplefiles/nemerle_sample.n | 6
-rw-r--r--  tests/examplefiles/newlisp-parser.lsp | 298
-rw-r--r--  tests/examplefiles/test.bro | 250
-rw-r--r--  tests/examplefiles/test.cs | 23
-rw-r--r--  tests/examplefiles/test.dart | 23
-rw-r--r--  tests/examplefiles/test.ecl | 58
-rwxr-xr-x  tests/examplefiles/test.fan | 818
-rw-r--r--  tests/examplefiles/test.ps1 | 108
-rw-r--r--  tests/old_run.py | 2
-rw-r--r--  tests/run.py | 2
-rw-r--r--  tests/test_basic_api.py | 4
-rw-r--r--  tests/test_clexer.py | 2
-rw-r--r--  tests/test_cmdline.py | 2
-rw-r--r--  tests/test_examplefiles.py | 7
-rw-r--r--  tests/test_html_formatter.py | 34
-rw-r--r--  tests/test_latex_formatter.py | 2
-rw-r--r--  tests/test_perllexer.py | 137
-rw-r--r--  tests/test_regexlexer.py | 2
-rw-r--r--  tests/test_token.py | 2
-rw-r--r--  tests/test_using_api.py | 2
-rw-r--r--  tests/test_util.py | 40
113 files changed, 7470 insertions, 2240 deletions
diff --git a/AUTHORS b/AUTHORS
index e5d0d3b3..edebe07f 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -22,6 +22,7 @@ Other contributors, listed alphabetically, are:
* Frits van Bommel -- assembler lexers
* Pierre Bourdon -- bugfixes
* Hiram Chirino -- Scaml and Jade lexers
+* Leaf Corcoran -- MoonScript lexer
* Christopher Creutzig -- MuPAD lexer
* Pete Curry -- bugfixes
* Owen Durni -- haXe lexer
@@ -36,6 +37,7 @@ Other contributors, listed alphabetically, are:
* Matt Good -- Genshi, Cheetah lexers
* Patrick Gotthardt -- PHP namespaces support
* Olivier Guibe -- Asymptote lexer
+* Martin Harriman -- SNOBOL lexer
* Matthew Harrison -- SVG formatter
* Steven Hazel -- Tcl lexer
* Aslak Hellesøy -- Gherkin lexer
@@ -44,18 +46,23 @@ Other contributors, listed alphabetically, are:
* Varun Hiremath -- Debian control lexer
* Ben Hollis -- Mason lexer
* Tim Howard -- BlitzMax lexer
+* Ivan Inozemtsev -- Fantom lexer
* Dennis Kaarsemaker -- sources.list lexer
+* Adam Koprowski -- Opa lexer
* Benjamin Kowarsch -- Modula-2 lexer
* Marek Kubica -- Scheme lexer
* Jochen Kupperschmidt -- Markdown processor
* Gerd Kurzbach -- Modelica lexer
+* Olov Lassus -- Dart lexer
* Mark Lee -- Vala lexer
* Ben Mabey -- Gherkin lexer
* Simone Margaritelli -- Hybris lexer
* Kirk McDonald -- D lexer
+* Gordon McGregor -- SystemVerilog lexer
* Stephen McKamey -- Duel/JBST lexer
* Brian McKenna -- F# lexer
* Lukas Meuser -- BBCode formatter, Lua lexer
+* Hong Minhee -- HTTP lexer
* Michael Mior -- Awk lexer
* Paulo Moura -- Logtalk lexer
* Mher Movsisyan -- DTD lexer
@@ -68,7 +75,9 @@ Other contributors, listed alphabetically, are:
* Ronny Pfannschmidt -- BBCode lexer
* Benjamin Peterson -- Test suite refactoring
* Dominik Picheta -- Nimrod lexer
+* Clément Prévost -- UrbiScript lexer
* Justin Reidy -- MXML lexer
+* Norman Richards -- JSON lexer
* Lubomir Rintel -- GoodData MAQL and CL lexers
* Andre Roberge -- Tango style
* Konrad Rudolph -- LaTeX formatter enhancements
@@ -87,7 +96,9 @@ Other contributors, listed alphabetically, are:
* Jeremy Thurgood -- Erlang, Squid config lexers
* Erick Tryzelaar -- Felix lexer
* Daniele Varrazzo -- PostgreSQL lexers
+* Abe Voelker -- OpenEdge ABL lexer
* Whitney Young -- ObjectiveC lexer
+* Matthias Vallentin -- Bro lexer
* Nathan Weizenbaum -- Haml and Sass lexers
* Dietmar Winkler -- Modelica lexer
* Nils Winter -- Smalltalk lexer
diff --git a/CHANGES b/CHANGES
index 5466a629..468b3358 100644
--- a/CHANGES
+++ b/CHANGES
@@ -6,7 +6,7 @@ http://bitbucket.org/birkenfeld/pygments-main/issues.
Version 1.5
-----------
-(codename not decided, released Jul xx, 2011)
+(codename not decided, released 2012)
- Lexers added:
@@ -20,10 +20,25 @@ Version 1.5
* Groovy (#501)
* PostgreSQL (#660)
* DTD
- * Gosu
+ * Gosu (#634)
* Octave (PR#22)
* Standard ML (PR#14)
* CFengine3 (#601)
+ * Opa (PR#37)
+ * HTTP sessions (PR#42)
+ * JSON (PR#31)
+ * SNOBOL (PR#30)
+ * MoonScript (PR#43)
+ * ECL (PR#29)
+ * Urbiscript (PR#17)
+ * OpenEdge ABL (PR#27)
+ * SystemVerilog (PR#35)
+ * Coq (#734)
+ * PowerShell (#654)
+ * Dart (#715)
+ * Fantom (PR#36)
+ * Bro (PR#5)
+ * NewLISP (PR#26)
- In the LaTeX formatter, escape special &, < and > chars (#648).
@@ -38,7 +53,7 @@ Version 1.5
- Fix Lua "class" highlighting: it does not have classes (#665).
-- Fix degenerate regex in Scala lexer (#671).
+- Fix degenerate regex in Scala lexer (#671) and highlighting bugs (#713, 708).
- Fix number pattern order in Ocaml lexer (#647).
@@ -46,6 +61,17 @@ Version 1.5
- Fixes to the Clojure lexer (PR#9).
+- Fix degenerate regex in Nemerle lexer (#706).
+
+- Fix infinite looping in CoffeeScript lexer (#729).
+
+- Fix crashes and analysis with ObjectiveC lexer (#693, #696).
+
+- Add some Fortran 2003 keywords.
+
+- Fix Boo string regexes (#679).
+
+- Add "rrt" style (#727).
Version 1.4
-----------
diff --git a/Makefile b/Makefile
index 60a41e58..c8940125 100644
--- a/Makefile
+++ b/Makefile
@@ -4,7 +4,7 @@
#
# Combines scripts for common tasks.
#
-# :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+# :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
# :license: BSD, see LICENSE for details.
#
diff --git a/docs/generate.py b/docs/generate.py
index 79095d38..4842dc6e 100644
--- a/docs/generate.py
+++ b/docs/generate.py
@@ -6,7 +6,7 @@
Generates a bunch of html files containing the documentation.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/external/markdown-processor.py b/external/markdown-processor.py
index b7ebb7f4..0d939b67 100644
--- a/external/markdown-processor.py
+++ b/external/markdown-processor.py
@@ -27,7 +27,7 @@
.. _Markdown: http://www.freewisdom.org/projects/python-markdown/
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/external/moin-parser.py b/external/moin-parser.py
index bc550b89..0bb9791c 100644
--- a/external/moin-parser.py
+++ b/external/moin-parser.py
@@ -31,7 +31,7 @@
If you do not want to do that and are willing to accept larger HTML
output, you can set the INLINESTYLES option below to True.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/external/rst-directive-old.py b/external/rst-directive-old.py
index e01e0b66..8e703faa 100644
--- a/external/rst-directive-old.py
+++ b/external/rst-directive-old.py
@@ -31,7 +31,7 @@
.. _directive documentation:
http://docutils.sourceforge.net/docs/howto/rst-directives.html
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/external/rst-directive.py b/external/rst-directive.py
index 2447d082..b54b03ef 100644
--- a/external/rst-directive.py
+++ b/external/rst-directive.py
@@ -31,7 +31,7 @@
.. _directive documentation:
http://docutils.sourceforge.net/docs/howto/rst-directives.html
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/__init__.py b/pygments/__init__.py
index 13cc2448..8a0e337e 100644
--- a/pygments/__init__.py
+++ b/pygments/__init__.py
@@ -22,7 +22,7 @@
.. _Pygments tip:
http://bitbucket.org/birkenfeld/pygments-main/get/tip.zip#egg=Pygments-dev
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/cmdline.py b/pygments/cmdline.py
index 7db761ca..4eece2f9 100644
--- a/pygments/cmdline.py
+++ b/pygments/cmdline.py
@@ -5,7 +5,7 @@
Command line interface.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
diff --git a/pygments/console.py b/pygments/console.py
index 40de59b0..3901bd18 100644
--- a/pygments/console.py
+++ b/pygments/console.py
@@ -5,7 +5,7 @@
Format colored console output.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/filter.py b/pygments/filter.py
index 14efce4d..de1bf79c 100644
--- a/pygments/filter.py
+++ b/pygments/filter.py
@@ -5,7 +5,7 @@
Module that implements the default filter.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/filters/__init__.py b/pygments/filters/__init__.py
index d469f89d..b816c58c 100644
--- a/pygments/filters/__init__.py
+++ b/pygments/filters/__init__.py
@@ -6,7 +6,7 @@
Module containing filter lookup functions and default
filters.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatter.py b/pygments/formatter.py
index 53c31b7d..73124846 100644
--- a/pygments/formatter.py
+++ b/pygments/formatter.py
@@ -5,7 +5,7 @@
Base formatter class.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/__init__.py b/pygments/formatters/__init__.py
index 33a63997..da0fd51e 100644
--- a/pygments/formatters/__init__.py
+++ b/pygments/formatters/__init__.py
@@ -5,7 +5,7 @@
Pygments formatters.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import os.path
diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py
index 8736bfd0..fd380b3a 100755
--- a/pygments/formatters/_mapping.py
+++ b/pygments/formatters/_mapping.py
@@ -9,7 +9,7 @@
Do not alter the FORMATTERS dictionary by hand.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/bbcode.py b/pygments/formatters/bbcode.py
index ed4c736f..a0d50553 100644
--- a/pygments/formatters/bbcode.py
+++ b/pygments/formatters/bbcode.py
@@ -5,7 +5,7 @@
BBcode formatter.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/html.py b/pygments/formatters/html.py
index 1f0ca680..85a16ff8 100644
--- a/pygments/formatters/html.py
+++ b/pygments/formatters/html.py
@@ -5,7 +5,7 @@
Formatter for HTML output.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -596,7 +596,7 @@ class HtmlFormatter(Formatter):
def _wrap_lineanchors(self, inner):
s = self.lineanchors
- i = 0
+ i = self.linenostart - 1 # subtract 1 since we have to increment i *before* yielding
for t, line in inner:
if t:
i += 1
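
A minimal usage sketch of what the _wrap_lineanchors change affects: with the
fix, line anchors are numbered from linenostart instead of always from 1.
(lineanchors and linenostart are existing HtmlFormatter options; the exact
anchor markup is indicative only.)

    from pygments import highlight
    from pygments.lexers.agile import PythonLexer
    from pygments.formatters import HtmlFormatter

    html = highlight('x = 1\ny = 2\n', PythonLexer(),
                     HtmlFormatter(lineanchors='line', linenostart=40))
    # the anchors in `html` are now numbered 40 and 41 rather than 1 and 2
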
diff --git a/pygments/formatters/img.py b/pygments/formatters/img.py
index ec30cbe6..e2267f5e 100644
--- a/pygments/formatters/img.py
+++ b/pygments/formatters/img.py
@@ -5,7 +5,7 @@
Formatter for Pixmap output.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/latex.py b/pygments/formatters/latex.py
index e109ebdd..3406548a 100644
--- a/pygments/formatters/latex.py
+++ b/pygments/formatters/latex.py
@@ -5,7 +5,7 @@
Formatter for LaTeX fancyvrb output.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -286,7 +286,8 @@ class LatexFormatter(Formatter):
cp = self.commandprefix
styles = []
for name, definition in self.cmd2def.iteritems():
- styles.append(r'\expandafter\def\csname %s@tok@%s\endcsname{%s}' % (cp, name, definition))
+ styles.append(r'\expandafter\def\csname %s@tok@%s\endcsname{%s}' %
+ (cp, name, definition))
return STYLE_TEMPLATE % {'cp': self.commandprefix,
'styles': '\n'.join(styles)}
diff --git a/pygments/formatters/other.py b/pygments/formatters/other.py
index 84b45ea1..a19e4ce0 100644
--- a/pygments/formatters/other.py
+++ b/pygments/formatters/other.py
@@ -5,7 +5,7 @@
Other formatters: NullFormatter, RawTokenFormatter.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/rtf.py b/pygments/formatters/rtf.py
index 2e8b77aa..5627d070 100644
--- a/pygments/formatters/rtf.py
+++ b/pygments/formatters/rtf.py
@@ -5,7 +5,7 @@
A formatter that generates RTF files.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/svg.py b/pygments/formatters/svg.py
index 773bd72f..f08e6570 100644
--- a/pygments/formatters/svg.py
+++ b/pygments/formatters/svg.py
@@ -5,7 +5,7 @@
Formatter for SVG output.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/terminal.py b/pygments/formatters/terminal.py
index a78c5a33..43d03e60 100644
--- a/pygments/formatters/terminal.py
+++ b/pygments/formatters/terminal.py
@@ -5,7 +5,7 @@
Formatter for terminal output with ANSI sequences.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/terminal256.py b/pygments/formatters/terminal256.py
index bf8ee0ff..3105a651 100644
--- a/pygments/formatters/terminal256.py
+++ b/pygments/formatters/terminal256.py
@@ -11,7 +11,7 @@
Formatter version 1.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexer.py b/pygments/lexer.py
index 53ea5ac1..46f86076 100644
--- a/pygments/lexer.py
+++ b/pygments/lexer.py
@@ -5,7 +5,7 @@
Base lexer classes.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
@@ -274,12 +274,14 @@ def bygroups(*args):
if data:
yield match.start(i + 1), action, data
else:
- if ctx:
- ctx.pos = match.start(i + 1)
- for item in action(lexer, _PseudoMatch(match.start(i + 1),
- match.group(i + 1)), ctx):
- if item:
- yield item
+ data = match.group(i + 1)
+ if data is not None:
+ if ctx:
+ ctx.pos = match.start(i + 1)
+ for item in action(lexer, _PseudoMatch(match.start(i + 1),
+ data), ctx):
+ if item:
+ yield item
if ctx:
ctx.pos = match.end()
return callback
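
A sketch (hypothetical mini-lexer) of the case the bygroups() change handles:
the second group below is optional, so match.group() returns None when it does
not participate, and the patched callback now skips the nested using() action
instead of building a _PseudoMatch around None.

    from pygments.lexer import RegexLexer, bygroups, using
    from pygments.lexers.agile import PythonLexer
    from pygments.token import Name, Text

    class TagLexer(RegexLexer):
        tokens = {
            'root': [
                # group 2 only exists when a ":" follows the tag name
                (r'(\w+)(?::(.*))?\n?', bygroups(Name.Tag, using(PythonLexer))),
                (r'\s+', Text),
            ]
        }

    # 'plain' has no second group; 'with: print(1)' hands it to PythonLexer
    for token in TagLexer().get_tokens('plain\nwith: print(1)\n'):
        print(token)
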
@@ -439,7 +441,7 @@ class RegexLexerMeta(LexerMeta):
def __call__(cls, *args, **kwds):
"""Instantiate cls after preprocessing its token definitions."""
- if not hasattr(cls, '_tokens'):
+ if '_tokens' not in cls.__dict__:
cls._all_tokens = {}
cls._tmpname = 0
if hasattr(cls, 'token_variants') and cls.token_variants:
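
A sketch (hypothetical subclass) of why the metaclass now checks the class's
own __dict__ rather than hasattr(): a subclass that overrides tokens must get
its own processed token table instead of silently reusing the one already
attached to its parent class.

    from pygments.lexers.agile import PythonLexer
    from pygments.token import Keyword

    class ExtendedPythonLexer(PythonLexer):
        # copy the parent's definitions and prepend one extra keyword rule
        tokens = dict(PythonLexer.tokens)
        tokens['root'] = [(r'\bfrobnicate\b', Keyword)] + list(tokens['root'])

    PythonLexer()          # processes and caches PythonLexer._tokens
    ExtendedPythonLexer()  # with the fix, processes its own tokens as well
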
diff --git a/pygments/lexers/__init__.py b/pygments/lexers/__init__.py
index cafbf867..eb8609d4 100644
--- a/pygments/lexers/__init__.py
+++ b/pygments/lexers/__init__.py
@@ -5,7 +5,7 @@
Pygments lexers.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -93,30 +93,32 @@ def get_lexer_for_filename(_fn, code=None, **options):
if fnmatch.fnmatch(fn, filename):
if name not in _lexer_cache:
_load_lexers(modname)
- matches.append(_lexer_cache[name])
+ matches.append((_lexer_cache[name], filename))
for cls in find_plugin_lexers():
for filename in cls.filenames:
if fnmatch.fnmatch(fn, filename):
- matches.append(cls)
+ matches.append((cls, filename))
if sys.version_info > (3,) and isinstance(code, bytes):
# decode it, since all analyse_text functions expect unicode
code = code.decode('latin1')
- def get_rating(cls):
+ def get_rating(info):
+ cls, filename = info
+ # explicit patterns get a bonus
+ bonus = '*' not in filename and 0.5 or 0
# The class _always_ defines analyse_text because it's included in
# the Lexer class. The default implementation returns None which
# gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
# to find lexers which need it overridden.
- d = cls.analyse_text(code)
- #print "Got %r from %r" % (d, cls)
- return d
+ if code:
+ return cls.analyse_text(code) + bonus
+ return bonus
- if code:
- matches.sort(key=get_rating)
if matches:
+ matches.sort(key=get_rating)
#print "Possible lexers, after sort:", matches
- return matches[-1](**options)
+ return matches[-1][0](**options)
raise ClassNotFound('no lexer for filename %r found' % _fn)
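
A small sketch of the new tie-breaking: for 'CMakeLists.txt' both the CMake
lexer (literal pattern 'CMakeLists.txt') and the plain-text lexer ('*.txt')
match, but only the literal pattern earns the 0.5 bonus, so the CMake lexer
now wins deterministically even when no code is passed for analyse_text().

    from pygments.lexers import get_lexer_for_filename

    print(get_lexer_for_filename('CMakeLists.txt').name)   # 'CMake'
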
@@ -139,7 +141,8 @@ def _iter_lexerclasses():
"""
Return an iterator over all lexer classes.
"""
- for module_name, name, _, _, _ in LEXERS.itervalues():
+ for key in sorted(LEXERS):
+ module_name, name = LEXERS[key][:2]
if name not in _lexer_cache:
_load_lexers(module_name)
yield _lexer_cache[name]
diff --git a/pygments/lexers/_asybuiltins.py b/pygments/lexers/_asybuiltins.py
index 606f4f2e..d2edace6 100644
--- a/pygments/lexers/_asybuiltins.py
+++ b/pygments/lexers/_asybuiltins.py
@@ -10,7 +10,7 @@
TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
for function and variable names.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_clbuiltins.py b/pygments/lexers/_clbuiltins.py
index 22a0133e..3a3b3dfc 100644
--- a/pygments/lexers/_clbuiltins.py
+++ b/pygments/lexers/_clbuiltins.py
@@ -5,7 +5,7 @@
ANSI Common Lisp builtins.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_luabuiltins.py b/pygments/lexers/_luabuiltins.py
index 487595f3..27b39822 100644
--- a/pygments/lexers/_luabuiltins.py
+++ b/pygments/lexers/_luabuiltins.py
@@ -9,7 +9,7 @@
Do not edit the MODULES dict by hand.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index 4772a9a0..518b12b5 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -9,7 +9,7 @@
Do not alter the LEXERS dictionary by hand.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -34,13 +34,14 @@ LEXERS = {
'AwkLexer': ('pygments.lexers.other', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
'BaseMakefileLexer': ('pygments.lexers.text', 'Base Makefile', ('basemake',), (), ()),
- 'BashLexer': ('pygments.lexers.other', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass'), ('application/x-sh', 'application/x-shellscript')),
- 'BashSessionLexer': ('pygments.lexers.other', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)),
- 'BatchLexer': ('pygments.lexers.other', 'Batchfile', ('bat',), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
+ 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass'), ('application/x-sh', 'application/x-shellscript')),
+ 'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)),
+ 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat',), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
'BefungeLexer': ('pygments.lexers.other', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
'BlitzMaxLexer': ('pygments.lexers.compiled', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
'BrainfuckLexer': ('pygments.lexers.other', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
+ 'BroLexer': ('pygments.lexers.other', 'Bro', ('bro',), ('*.bro',), ()),
'CLexer': ('pygments.lexers.compiled', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')),
'CMakeLexer': ('pygments.lexers.text', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
@@ -51,11 +52,12 @@ LEXERS = {
'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
- 'ClojureLexer': ('pygments.lexers.agile', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
+ 'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
'CoffeeScriptLexer': ('pygments.lexers.web', 'CoffeeScript', ('coffee-script', 'coffeescript'), ('*.coffee',), ('text/coffeescript',)),
'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml', '*.cfc'), ('application/x-coldfusion',)),
'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
'CommonLispLexer': ('pygments.lexers.functional', 'Common Lisp', ('common-lisp', 'cl'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)),
+ 'CoqLexer': ('pygments.lexers.functional', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
'CppLexer': ('pygments.lexers.compiled', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx'), ('text/x-c++hdr', 'text/x-c++src')),
'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
@@ -68,6 +70,7 @@ LEXERS = {
'DLexer': ('pygments.lexers.compiled', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
'DarcsPatchLexer': ('pygments.lexers.text', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
+ 'DartLexer': ('pygments.lexers.web', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
'DebianControlLexer': ('pygments.lexers.text', 'Debian Control file', ('control',), ('control',), ()),
'DelphiLexer': ('pygments.lexers.compiled', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
@@ -75,9 +78,10 @@ LEXERS = {
'DtdLexer': ('pygments.lexers.web', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
'DuelLexer': ('pygments.lexers.web', 'Duel', ('duel', 'Duel Engine', 'Duel View', 'JBST', 'jbst', 'JsonML+BST'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
'DylanLexer': ('pygments.lexers.compiled', 'Dylan', ('dylan',), ('*.dylan', '*.dyl'), ('text/x-dylan',)),
+ 'ECLLexer': ('pygments.lexers.other', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
'ECLexer': ('pygments.lexers.compiled', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
- 'ErlangLexer': ('pygments.lexers.functional', 'Erlang', ('erlang',), ('*.erl', '*.hrl'), ('text/x-erlang',)),
+ 'ErlangLexer': ('pygments.lexers.functional', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
'ErlangShellLexer': ('pygments.lexers.functional', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
@@ -85,6 +89,7 @@ LEXERS = {
'FSharpLexer': ('pygments.lexers.dotnet', 'FSharp', ('fsharp',), ('*.fs', '*.fsi'), ('text/x-fsharp',)),
'FactorLexer': ('pygments.lexers.agile', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
'FancyLexer': ('pygments.lexers.agile', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
+ 'FantomLexer': ('pygments.lexers.compiled', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
'FelixLexer': ('pygments.lexers.compiled', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
'FortranLexer': ('pygments.lexers.compiled', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)),
'GLShaderLexer': ('pygments.lexers.compiled', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
@@ -96,10 +101,10 @@ LEXERS = {
'GnuplotLexer': ('pygments.lexers.other', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
'GoLexer': ('pygments.lexers.compiled', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)),
'GoodDataCLLexer': ('pygments.lexers.other', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
- 'GosuLexer': ('pygments.lexers.compiled', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
- 'GosuTemplateLexer': ('pygments.lexers.compiled', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
+ 'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
+ 'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
'GroffLexer': ('pygments.lexers.text', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
- 'GroovyLexer': ('pygments.lexers.agile', 'Groovy', ('groovy',), ('*.groovy',), ('text/x-groovy',)),
+ 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy',), ('text/x-groovy',)),
'HamlLexer': ('pygments.lexers.web', 'Haml', ('haml', 'HAML'), ('*.haml',), ('text/x-haml',)),
'HaskellLexer': ('pygments.lexers.functional', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
'HaxeLexer': ('pygments.lexers.web', 'haXe', ('hx', 'haXe'), ('*.hx',), ('text/haxe',)),
@@ -108,17 +113,19 @@ LEXERS = {
'HtmlLexer': ('pygments.lexers.web', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
+ 'HttpLexer': ('pygments.lexers.text', 'HTTP', ('http',), (), ()),
'HybrisLexer': ('pygments.lexers.other', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg'), ('*.ini', '*.cfg'), ('text/x-ini',)),
'IoLexer': ('pygments.lexers.agile', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
- 'IokeLexer': ('pygments.lexers.agile', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
+ 'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
'IrcLogsLexer': ('pygments.lexers.text', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
+ 'JSONLexer': ('pygments.lexers.web', 'JSON', ('json',), ('*.json',), ('application/json',)),
'JadeLexer': ('pygments.lexers.web', 'Jade', ('jade', 'JADE'), ('*.jade',), ('text/x-jade',)),
- 'JavaLexer': ('pygments.lexers.compiled', 'Java', ('java',), ('*.java',), ('text/x-java',)),
+ 'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
- 'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js', '*.json'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript', 'application/json')),
+ 'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
@@ -142,9 +149,10 @@ LEXERS = {
'ModelicaLexer': ('pygments.lexers.other', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
'Modula2Lexer': ('pygments.lexers.compiled', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
'MoinWikiLexer': ('pygments.lexers.text', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
+ 'MoonScriptLexer': ('pygments.lexers.agile', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
'MuPADLexer': ('pygments.lexers.math', 'MuPAD', ('mupad',), ('*.mu',), ()),
'MxmlLexer': ('pygments.lexers.web', 'MXML', ('mxml',), ('*.mxml',), ()),
- 'MySqlLexer': ('pygments.lexers.other', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
+ 'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
@@ -152,6 +160,7 @@ LEXERS = {
'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
+ 'NewLispLexer': ('pygments.lexers.functional', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')),
'NewspeakLexer': ('pygments.lexers.other', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
'NginxConfLexer': ('pygments.lexers.text', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)),
'NimrodLexer': ('pygments.lexers.compiled', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)),
@@ -159,21 +168,23 @@ LEXERS = {
'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
'ObjectiveCLexer': ('pygments.lexers.compiled', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m',), ('text/x-objective-c',)),
'ObjectiveJLexer': ('pygments.lexers.web', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
- 'OcamlLexer': ('pygments.lexers.compiled', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
'OcamlLexer': ('pygments.lexers.functional', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
'OctaveLexer': ('pygments.lexers.math', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
'OocLexer': ('pygments.lexers.compiled', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
+ 'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
+ 'OpenEdgeLexer': ('pygments.lexers.other', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm'), ('text/x-perl', 'application/x-perl')),
'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]'), ('text/x-php',)),
- 'PlPgsqlLexer': ('pygments.lexers.postgres', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
+ 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
'PostScriptLexer': ('pygments.lexers.other', 'PostScript', ('postscript',), ('*.ps', '*.eps'), ('application/postscript',)),
- 'PostgresConsoleLexer': ('pygments.lexers.postgres', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
- 'PostgresLexer': ('pygments.lexers.postgres', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
+ 'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
+ 'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
+ 'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1'), ('*.ps1',), ('text/x-powershell',)),
'PrologLexer': ('pygments.lexers.compiled', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties',), ('*.properties',), ('text/x-java-properties',)),
'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf',), ('*.proto',), ()),
- 'PyPyLogLexer': ('pygments.lexers.pypylog', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
+ 'PyPyLogLexer': ('pygments.lexers.text', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
'Python3Lexer': ('pygments.lexers.agile', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
'Python3TracebackLexer': ('pygments.lexers.agile', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)),
'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
@@ -198,28 +209,31 @@ LEXERS = {
'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R'), ('text/S-plus', 'text/S', 'text/R')),
'SMLLexer': ('pygments.lexers.functional', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
'SassLexer': ('pygments.lexers.web', 'Sass', ('sass', 'SASS'), ('*.sass',), ('text/x-sass',)),
- 'ScalaLexer': ('pygments.lexers.compiled', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
+ 'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
'ScamlLexer': ('pygments.lexers.web', 'Scaml', ('scaml', 'SCAML'), ('*.scaml',), ('text/x-scaml',)),
'SchemeLexer': ('pygments.lexers.functional', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss', '*.rkt'), ('text/x-scheme', 'application/x-scheme')),
'ScssLexer': ('pygments.lexers.web', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak'), ('*.st',), ('text/x-smalltalk',)),
'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
+ 'SnobolLexer': ('pygments.lexers.other', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list'), ('sources.list',), ()),
- 'SqlLexer': ('pygments.lexers.other', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
- 'SqliteConsoleLexer': ('pygments.lexers.other', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
+ 'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
+ 'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
+ 'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('sv',), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
'TclLexer': ('pygments.lexers.agile', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
- 'TcshLexer': ('pygments.lexers.other', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
+ 'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
'TexLexer': ('pygments.lexers.text', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
+ 'UrbiscriptLexer': ('pygments.lexers.other', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
'ValaLexer': ('pygments.lexers.compiled', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
'VelocityLexer': ('pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
- 'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('v',), ('*.v', '*.sv'), ('text/x-verilog',)),
+ 'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('v',), ('*.v',), ('text/x-verilog',)),
'VimLexer': ('pygments.lexers.text', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc'), ('text/x-vim',)),
'XQueryLexer': ('pygments.lexers.web', 'XQuery', ('xquery', 'xqy'), ('*.xqy', '*.xquery'), ('text/xquery', 'application/xquery')),
'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
diff --git a/pygments/lexers/_phpbuiltins.py b/pygments/lexers/_phpbuiltins.py
index 405655b1..cd1608f1 100644
--- a/pygments/lexers/_phpbuiltins.py
+++ b/pygments/lexers/_phpbuiltins.py
@@ -12,7 +12,7 @@
internet connection. don't run that at home, use
a server ;-)
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_postgres_builtins.py b/pygments/lexers/_postgres_builtins.py
index 0100bc2d..0cea1c27 100644
--- a/pygments/lexers/_postgres_builtins.py
+++ b/pygments/lexers/_postgres_builtins.py
@@ -4,7 +4,7 @@
Self-updating data files for PostgreSQL lexer.
- :copyright: Copyright 2011 by Daniele Varrazzo.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_vimbuiltins.py b/pygments/lexers/_vimbuiltins.py
index 71487fb1..1ba5245a 100644
--- a/pygments/lexers/_vimbuiltins.py
+++ b/pygments/lexers/_vimbuiltins.py
@@ -1,3 +1,3 @@
-auto=[('BufAdd', 'BufAdd'), ('BufCreate', 'BufCreate'), ('BufDelete', 'BufDelete'), ('BufEnter', 'BufEnter'), ('BufFilePost', 'BufFilePost'), ('BufFilePre', 'BufFilePre'), ('BufHidden', 'BufHidden'), ('BufLeave', 'BufLeave'), ('BufNew', 'BufNew'), ('BufNewFile', 'BufNewFile'), ('BufRead', 'BufRead'), ('BufReadCmd', 'BufReadCmd'), ('BufReadPost', 'BufReadPost'), ('BufReadPre', 'BufReadPre'), ('BufUnload', 'BufUnload'), ('BufWinEnter', 'BufWinEnter'), ('BufWinLeave', 'BufWinLeave'), ('BufWipeout', 'BufWipeout'), ('BufWrite', 'BufWrite'), ('BufWriteCmd', 'BufWriteCmd'), ('BufWritePost', 'BufWritePost'), ('BufWritePre', 'BufWritePre'), ('Cmd', 'Cmd'), ('CmdwinEnter', 'CmdwinEnter'), ('CmdwinLeave', 'CmdwinLeave'), ('ColorScheme', 'ColorScheme'), ('CursorHold', 'CursorHold'), ('CursorHoldI', 'CursorHoldI'), ('CursorMoved', 'CursorMoved'), ('CursorMovedI', 'CursorMovedI'), ('EncodingChanged', 'EncodingChanged'), ('FileAppendCmd', 'FileAppendCmd'), ('FileAppendPost', 'FileAppendPost'), ('FileAppendPre', 'FileAppendPre'), ('FileChangedRO', 'FileChangedRO'), ('FileChangedShell', 'FileChangedShell'), ('FileChangedShellPost', 'FileChangedShellPost'), ('FileEncoding', 'FileEncoding'), ('FileReadCmd', 'FileReadCmd'), ('FileReadPost', 'FileReadPost'), ('FileReadPre', 'FileReadPre'), ('FileType', 'FileType'), ('FileWriteCmd', 'FileWriteCmd'), ('FileWritePost', 'FileWritePost'), ('FileWritePre', 'FileWritePre'), ('FilterReadPost', 'FilterReadPost'), ('FilterReadPre', 'FilterReadPre'), ('FilterWritePost', 'FilterWritePost'), ('FilterWritePre', 'FilterWritePre'), ('FocusGained', 'FocusGained'), ('FocusLost', 'FocusLost'), ('FuncUndefined', 'FuncUndefined'), ('GUIEnter', 'GUIEnter'), ('InsertChange', 'InsertChange'), ('InsertEnter', 'InsertEnter'), ('InsertLeave', 'InsertLeave'), ('MenuPopup', 'MenuPopup'), ('QuickFixCmdPost', 'QuickFixCmdPost'), ('QuickFixCmdPre', 'QuickFixCmdPre'), ('RemoteReply', 'RemoteReply'), ('SessionLoadPost', 'SessionLoadPost'), ('ShellCmdPost', 'ShellCmdPost'), ('ShellFilterPost', 'ShellFilterPost'), ('SourcePre', 'SourcePre'), ('SpellFileMissing', 'SpellFileMissing'), ('StdinReadPost', 'StdinReadPost'), ('StdinReadPre', 'StdinReadPre'), ('SwapExists', 'SwapExists'), ('Syntax', 'Syntax'), ('TabEnter', 'TabEnter'), ('TabLeave', 'TabLeave'), ('TermChanged', 'TermChanged'), ('TermResponse', 'TermResponse'), ('User', 'User'), ('UserGettingBored', 'UserGettingBored'), ('VimEnter', 'VimEnter'), ('VimLeave', 'VimLeave'), ('VimLeavePre', 'VimLeavePre'), ('VimResized', 'VimResized'), ('WinEnter', 'WinEnter'), ('WinLeave', 'WinLeave'), ('event', 'event')]
-command=[('DeleteFirst', 'DeleteFirst'), ('Explore', 'Explore'), ('Hexplore', 'Hexplore'), ('I', 'I'), ('N', 'Next'), ('NetrwSettings', 'NetrwSettings'), ('Nread', 'Nread'), ('Nw', 'Nw'), ('P', 'Print'), ('Sexplore', 'Sexplore'), ('Vexplore', 'Vexplore'), ('X', 'X'), ('XMLent', 'XMLent'), ('XMLns', 'XMLns'), ('ab', 'abbreviate'), ('abc', 'abclear'), ('abo', 'aboveleft'), ('al', 'all'), ('ar', 'args'), ('arga', 'argadd'), ('argd', 'argdelete'), ('argdo', 'argdo'), ('arge', 'argedit'), ('argg', 'argglobal'), ('argl', 'arglocal'), ('argu', 'argument'), ('as', 'ascii'), ('b', 'buffer'), ('bN', 'bNext'), ('ba', 'ball'), ('bad', 'badd'), ('bd', 'bdelete'), ('be', 'be'), ('bel', 'belowright'), ('bf', 'bfirst'), ('bl', 'blast'), ('bm', 'bmodified'), ('bn', 'bnext'), ('bo', 'botright'), ('bp', 'bprevious'), ('br', 'brewind'), ('brea', 'break'), ('breaka', 'breakadd'), ('breakd', 'breakdel'), ('breakl', 'breaklist'), ('bro', 'browse'), ('bufdo', 'bufdo'), ('buffers', 'buffers'), ('bun', 'bunload'), ('bw', 'bwipeout'), ('c', 'change'), ('cN', 'cNext'), ('cNf', 'cNfile'), ('ca', 'cabbrev'), ('cabc', 'cabclear'), ('cad', 'caddexpr'), ('caddb', 'caddbuffer'), ('caddf', 'caddfile'), ('cal', 'call'), ('cat', 'catch'), ('cb', 'cbuffer'), ('cc', 'cc'), ('ccl', 'cclose'), ('cd', 'cd'), ('ce', 'center'), ('cex', 'cexpr'), ('cf', 'cfile'), ('cfir', 'cfirst'), ('cg', 'cgetfile'), ('cgetb', 'cgetbuffer'), ('cgete', 'cgetexpr'), ('changes', 'changes'), ('chd', 'chdir'), ('che', 'checkpath'), ('checkt', 'checktime'), ('cl', 'clist'), ('cla', 'clast'), ('clo', 'close'), ('cmapc', 'cmapclear'), ('cn', 'cnext'), ('cnew', 'cnewer'), ('cnf', 'cnfile'), ('cnorea', 'cnoreabbrev'), ('co', 'copy'), ('col', 'colder'), ('colo', 'colorscheme'), ('comc', 'comclear'), ('comp', 'compiler'), ('con', 'continue'), ('conf', 'confirm'), ('cope', 'copen'), ('cp', 'cprevious'), ('cpf', 'cpfile'), ('cq', 'cquit'), ('cr', 'crewind'), ('cu', 'cunmap'), ('cuna', 'cunabbrev'), ('cw', 'cwindow'), ('d', 'delete'), ('debugg', 'debuggreedy'), ('delc', 'delcommand'), ('delf', 'delfunction'), ('delm', 'delmarks'), ('di', 'display'), ('diffg', 'diffget'), ('diffoff', 'diffoff'), ('diffpatch', 'diffpatch'), ('diffpu', 'diffput'), ('diffsplit', 'diffsplit'), ('diffthis', 'diffthis'), ('diffu', 'diffupdate'), ('dig', 'digraphs'), ('dj', 'djump'), ('dl', 'dlist'), ('dr', 'drop'), ('ds', 'dsearch'), ('dsp', 'dsplit'), ('e', 'edit'), ('earlier', 'earlier'), ('echoe', 'echoerr'), ('echom', 'echomsg'), ('echon', 'echon'), ('el', 'else'), ('elsei', 'elseif'), ('em', 'emenu'), ('emenu', 'emenu'), ('en', 'endif'), ('endf', 'endfunction'), ('endfo', 'endfor'), ('endt', 'endtry'), ('endw', 'endwhile'), ('ene', 'enew'), ('ex', 'ex'), ('exi', 'exit'), ('exu', 'exusage'), ('f', 'file'), ('files', 'files'), ('filetype', 'filetype'), ('fin', 'find'), ('fina', 'finally'), ('fini', 'finish'), ('fir', 'first'), ('fix', 'fixdel'), ('fo', 'fold'), ('foldc', 'foldclose'), ('foldd', 'folddoopen'), ('folddoc', 'folddoclosed'), ('foldo', 'foldopen'), ('for', 'for'), ('fu', 'function'), ('go', 'goto'), ('gr', 'grep'), ('grepa', 'grepadd'), ('h', 'help'), ('ha', 'hardcopy'), ('helpf', 'helpfind'), ('helpg', 'helpgrep'), ('helpt', 'helptags'), ('hid', 'hide'), ('his', 'history'), ('ia', 'iabbrev'), ('iabc', 'iabclear'), ('if', 'if'), ('ij', 'ijump'), ('il', 'ilist'), ('imapc', 'imapclear'), ('in', 'in'), ('inorea', 'inoreabbrev'), ('is', 'isearch'), ('isp', 'isplit'), ('iu', 'iunmap'), ('iuna', 'iunabbrev'), ('j', 'join'), ('ju', 'jumps'), ('k', 'k'), ('kee', 'keepmarks'), 
('keepalt', 'keepalt'), ('keepj', 'keepjumps'), ('l', 'list'), ('lN', 'lNext'), ('lNf', 'lNfile'), ('la', 'last'), ('lad', 'laddexpr'), ('laddb', 'laddbuffer'), ('laddf', 'laddfile'), ('lan', 'language'), ('later', 'later'), ('lb', 'lbuffer'), ('lc', 'lcd'), ('lch', 'lchdir'), ('lcl', 'lclose'), ('le', 'left'), ('lefta', 'leftabove'), ('lex', 'lexpr'), ('lf', 'lfile'), ('lfir', 'lfirst'), ('lg', 'lgetfile'), ('lgetb', 'lgetbuffer'), ('lgete', 'lgetexpr'), ('lgr', 'lgrep'), ('lgrepa', 'lgrepadd'), ('lh', 'lhelpgrep'), ('ll', 'll'), ('lla', 'llast'), ('lli', 'llist'), ('lm', 'lmap'), ('lmak', 'lmake'), ('lmapc', 'lmapclear'), ('ln', 'lnoremap'), ('lne', 'lnext'), ('lnew', 'lnewer'), ('lnf', 'lnfile'), ('lo', 'loadview'), ('loc', 'lockmarks'), ('lockv', 'lockvar'), ('lol', 'lolder'), ('lop', 'lopen'), ('lp', 'lprevious'), ('lpf', 'lpfile'), ('lr', 'lrewind'), ('ls', 'ls'), ('lt', 'ltag'), ('lu', 'lunmap'), ('lv', 'lvimgrep'), ('lvimgrepa', 'lvimgrepadd'), ('lw', 'lwindow'), ('m', 'move'), ('ma', 'mark'), ('mak', 'make'), ('marks', 'marks'), ('mat', 'match'), ('menut', 'menutranslate'), ('mk', 'mkexrc'), ('mks', 'mksession'), ('mksp', 'mkspell'), ('mkv', 'mkvimrc'), ('mkvie', 'mkview'), ('mod', 'mode'), ('mz', 'mzscheme'), ('mzf', 'mzfile'), ('n', 'next'), ('nbkey', 'nbkey'), ('new', 'new'), ('nmapc', 'nmapclear'), ('noh', 'nohlsearch'), ('norea', 'noreabbrev'), ('nu', 'number'), ('nun', 'nunmap'), ('o', 'open'), ('omapc', 'omapclear'), ('on', 'only'), ('opt', 'options'), ('ou', 'ounmap'), ('p', 'print'), ('pc', 'pclose'), ('pe', 'perl'), ('ped', 'pedit'), ('perld', 'perldo'), ('po', 'pop'), ('popu', 'popu'), ('popu', 'popup'), ('pp', 'ppop'), ('pre', 'preserve'), ('prev', 'previous'), ('prof', 'profile'), ('profd', 'profdel'), ('prompt', 'prompt'), ('promptf', 'promptfind'), ('promptr', 'promptrepl'), ('ps', 'psearch'), ('ptN', 'ptNext'), ('pta', 'ptag'), ('ptf', 'ptfirst'), ('ptj', 'ptjump'), ('ptl', 'ptlast'), ('ptn', 'ptnext'), ('ptp', 'ptprevious'), ('ptr', 'ptrewind'), ('pts', 'ptselect'), ('pu', 'put'), ('pw', 'pwd'), ('py', 'python'), ('pyf', 'pyfile'), ('q', 'quit'), ('qa', 'qall'), ('quita', 'quitall'), ('r', 'read'), ('rec', 'recover'), ('red', 'redo'), ('redi', 'redir'), ('redr', 'redraw'), ('redraws', 'redrawstatus'), ('reg', 'registers'), ('res', 'resize'), ('ret', 'retab'), ('retu', 'return'), ('rew', 'rewind'), ('ri', 'right'), ('rightb', 'rightbelow'), ('ru', 'runtime'), ('rub', 'ruby'), ('rubyd', 'rubydo'), ('rubyf', 'rubyfile'), ('rv', 'rviminfo'), ('sN', 'sNext'), ('sa', 'sargument'), ('sal', 'sall'), ('san', 'sandbox'), ('sav', 'saveas'), ('sb', 'sbuffer'), ('sbN', 'sbNext'), ('sba', 'sball'), ('sbf', 'sbfirst'), ('sbl', 'sblast'), ('sbm', 'sbmodified'), ('sbn', 'sbnext'), ('sbp', 'sbprevious'), ('sbr', 'sbrewind'), ('scrip', 'scriptnames'), ('scripte', 'scriptencoding'), ('se', 'set'), ('setf', 'setfiletype'), ('setg', 'setglobal'), ('setl', 'setlocal'), ('sf', 'sfind'), ('sfir', 'sfirst'), ('sh', 'shell'), ('sign', 'sign'), ('sil', 'silent'), ('sim', 'simalt'), ('sl', 'sleep'), ('sla', 'slast'), ('sm', 'smagic'), ('sm', 'smap'), ('smapc', 'smapclear'), ('sme', 'sme'), ('smenu', 'smenu'), ('sn', 'snext'), ('sni', 'sniff'), ('sno', 'snomagic'), ('snor', 'snoremap'), ('snoreme', 'snoreme'), ('snoremenu', 'snoremenu'), ('so', 'source'), ('sor', 'sort'), ('sp', 'split'), ('spe', 'spellgood'), ('spelld', 'spelldump'), ('spelli', 'spellinfo'), ('spellr', 'spellrepall'), ('spellu', 'spellundo'), ('spellw', 'spellwrong'), ('spr', 'sprevious'), ('sre', 'srewind'), ('st', 'stop'), 
('sta', 'stag'), ('star', 'startinsert'), ('startg', 'startgreplace'), ('startr', 'startreplace'), ('stj', 'stjump'), ('stopi', 'stopinsert'), ('sts', 'stselect'), ('sun', 'sunhide'), ('sunm', 'sunmap'), ('sus', 'suspend'), ('sv', 'sview'), ('syncbind', 'syncbind'), ('t', 't'), ('tN', 'tNext'), ('ta', 'tag'), ('tab', 'tab'), ('tabN', 'tabNext'), ('tabc', 'tabclose'), ('tabd', 'tabdo'), ('tabe', 'tabedit'), ('tabf', 'tabfind'), ('tabfir', 'tabfirst'), ('tabl', 'tablast'), ('tabmove', 'tabmove'), ('tabn', 'tabnext'), ('tabnew', 'tabnew'), ('tabo', 'tabonly'), ('tabp', 'tabprevious'), ('tabr', 'tabrewind'), ('tabs', 'tabs'), ('tags', 'tags'), ('tc', 'tcl'), ('tcld', 'tcldo'), ('tclf', 'tclfile'), ('te', 'tearoff'), ('tf', 'tfirst'), ('th', 'throw'), ('the', 'the'), ('tj', 'tjump'), ('tl', 'tlast'), ('tm', 'tm'), ('tm', 'tmenu'), ('tn', 'tnext'), ('to', 'topleft'), ('tp', 'tprevious'), ('tr', 'trewind'), ('try', 'try'), ('ts', 'tselect'), ('tu', 'tu'), ('tu', 'tunmenu'), ('u', 'undo'), ('una', 'unabbreviate'), ('undoj', 'undojoin'), ('undol', 'undolist'), ('unh', 'unhide'), ('unlo', 'unlockvar'), ('unm', 'unmap'), ('up', 'update'), ('ve', 'version'), ('verb', 'verbose'), ('vert', 'vertical'), ('vi', 'visual'), ('vie', 'view'), ('vim', 'vimgrep'), ('vimgrepa', 'vimgrepadd'), ('viu', 'viusage'), ('vmapc', 'vmapclear'), ('vne', 'vnew'), ('vs', 'vsplit'), ('vu', 'vunmap'), ('w', 'write'), ('wN', 'wNext'), ('wa', 'wall'), ('wh', 'while'), ('win', 'winsize'), ('winc', 'wincmd'), ('windo', 'windo'), ('winp', 'winpos'), ('wn', 'wnext'), ('wp', 'wprevious'), ('wq', 'wq'), ('wqa', 'wqall'), ('ws', 'wsverb'), ('wv', 'wviminfo'), ('x', 'xit'), ('xa', 'xall'), ('xm', 'xmap'), ('xmapc', 'xmapclear'), ('xme', 'xme'), ('xmenu', 'xmenu'), ('xn', 'xnoremap'), ('xnoreme', 'xnoreme'), ('xnoremenu', 'xnoremenu'), ('xu', 'xunmap'), ('y', 'yank')]
-option=[('acd', 'acd'), ('ai', 'ai'), ('akm', 'akm'), ('al', 'al'), ('aleph', 'aleph'), ('allowrevins', 'allowrevins'), ('altkeymap', 'altkeymap'), ('ambiwidth', 'ambiwidth'), ('ambw', 'ambw'), ('anti', 'anti'), ('antialias', 'antialias'), ('ar', 'ar'), ('arab', 'arab'), ('arabic', 'arabic'), ('arabicshape', 'arabicshape'), ('ari', 'ari'), ('arshape', 'arshape'), ('autochdir', 'autochdir'), ('autoindent', 'autoindent'), ('autoread', 'autoread'), ('autowrite', 'autowrite'), ('autowriteall', 'autowriteall'), ('aw', 'aw'), ('awa', 'awa'), ('background', 'background'), ('backspace', 'backspace'), ('backup', 'backup'), ('backupcopy', 'backupcopy'), ('backupdir', 'backupdir'), ('backupext', 'backupext'), ('backupskip', 'backupskip'), ('balloondelay', 'balloondelay'), ('ballooneval', 'ballooneval'), ('balloonexpr', 'balloonexpr'), ('bar', 'bar'), ('bdir', 'bdir'), ('bdlay', 'bdlay'), ('beval', 'beval'), ('bex', 'bex'), ('bexpr', 'bexpr'), ('bg', 'bg'), ('bh', 'bh'), ('bin', 'bin'), ('binary', 'binary'), ('biosk', 'biosk'), ('bioskey', 'bioskey'), ('bk', 'bk'), ('bkc', 'bkc'), ('bl', 'bl'), ('block', 'block'), ('bomb', 'bomb'), ('breakat', 'breakat'), ('brk', 'brk'), ('browsedir', 'browsedir'), ('bs', 'bs'), ('bsdir', 'bsdir'), ('bsk', 'bsk'), ('bt', 'bt'), ('bufhidden', 'bufhidden'), ('buflisted', 'buflisted'), ('buftype', 'buftype'), ('casemap', 'casemap'), ('cb', 'cb'), ('ccv', 'ccv'), ('cd', 'cd'), ('cdpath', 'cdpath'), ('cedit', 'cedit'), ('cf', 'cf'), ('cfu', 'cfu'), ('ch', 'ch'), ('charconvert', 'charconvert'), ('ci', 'ci'), ('cin', 'cin'), ('cindent', 'cindent'), ('cink', 'cink'), ('cinkeys', 'cinkeys'), ('cino', 'cino'), ('cinoptions', 'cinoptions'), ('cinw', 'cinw'), ('cinwords', 'cinwords'), ('clipboard', 'clipboard'), ('cmdheight', 'cmdheight'), ('cmdwinheight', 'cmdwinheight'), ('cmp', 'cmp'), ('cms', 'cms'), ('co', 'co'), ('columns', 'columns'), ('com', 'com'), ('comments', 'comments'), ('commentstring', 'commentstring'), ('compatible', 'compatible'), ('complete', 'complete'), ('completefunc', 'completefunc'), ('completeopt', 'completeopt'), ('confirm', 'confirm'), ('consk', 'consk'), ('conskey', 'conskey'), ('copyindent', 'copyindent'), ('cot', 'cot'), ('cp', 'cp'), ('cpo', 'cpo'), ('cpoptions', 'cpoptions'), ('cpt', 'cpt'), ('cscopepathcomp', 'cscopepathcomp'), ('cscopeprg', 'cscopeprg'), ('cscopequickfix', 'cscopequickfix'), ('cscopetag', 'cscopetag'), ('cscopetagorder', 'cscopetagorder'), ('cscopeverbose', 'cscopeverbose'), ('cspc', 'cspc'), ('csprg', 'csprg'), ('csqf', 'csqf'), ('cst', 'cst'), ('csto', 'csto'), ('csverb', 'csverb'), ('cuc', 'cuc'), ('cul', 'cul'), ('cursor', 'cursor'), ('cursor', 'cursor'), ('cursorcolumn', 'cursorcolumn'), ('cursorline', 'cursorline'), ('cwh', 'cwh'), ('debug', 'debug'), ('deco', 'deco'), ('def', 'def'), ('define', 'define'), ('delcombine', 'delcombine'), ('dex', 'dex'), ('dg', 'dg'), ('dict', 'dict'), ('dictionary', 'dictionary'), ('diff', 'diff'), ('diffexpr', 'diffexpr'), ('diffopt', 'diffopt'), ('digraph', 'digraph'), ('dip', 'dip'), ('dir', 'dir'), ('directory', 'directory'), ('display', 'display'), ('dy', 'dy'), ('ea', 'ea'), ('ead', 'ead'), ('eadirection', 'eadirection'), ('eb', 'eb'), ('ed', 'ed'), ('edcompatible', 'edcompatible'), ('ef', 'ef'), ('efm', 'efm'), ('ei', 'ei'), ('ek', 'ek'), ('enc', 'enc'), ('encoding', 'encoding'), ('end', 'end'), ('endofline', 'endofline'), ('eol', 'eol'), ('ep', 'ep'), ('equalalways', 'equalalways'), ('equalprg', 'equalprg'), ('errorbells', 'errorbells'), ('errorfile', 'errorfile'), ('errorformat', 
'errorformat'), ('esckeys', 'esckeys'), ('et', 'et'), ('eventignore', 'eventignore'), ('ex', 'ex'), ('expandtab', 'expandtab'), ('exrc', 'exrc'), ('fcl', 'fcl'), ('fcs', 'fcs'), ('fdc', 'fdc'), ('fde', 'fde'), ('fdi', 'fdi'), ('fdl', 'fdl'), ('fdls', 'fdls'), ('fdm', 'fdm'), ('fdn', 'fdn'), ('fdo', 'fdo'), ('fdt', 'fdt'), ('fen', 'fen'), ('fenc', 'fenc'), ('fencs', 'fencs'), ('fex', 'fex'), ('ff', 'ff'), ('ffs', 'ffs'), ('fileencoding', 'fileencoding'), ('fileencodings', 'fileencodings'), ('fileformat', 'fileformat'), ('fileformats', 'fileformats'), ('filetype', 'filetype'), ('fillchars', 'fillchars'), ('fk', 'fk'), ('fkmap', 'fkmap'), ('flp', 'flp'), ('fml', 'fml'), ('fmr', 'fmr'), ('fo', 'fo'), ('foldclose', 'foldclose'), ('foldcolumn', 'foldcolumn'), ('foldenable', 'foldenable'), ('foldexpr', 'foldexpr'), ('foldignore', 'foldignore'), ('foldlevel', 'foldlevel'), ('foldlevelstart', 'foldlevelstart'), ('foldmarker', 'foldmarker'), ('foldmethod', 'foldmethod'), ('foldminlines', 'foldminlines'), ('foldnestmax', 'foldnestmax'), ('foldopen', 'foldopen'), ('foldtext', 'foldtext'), ('formatexpr', 'formatexpr'), ('formatlistpat', 'formatlistpat'), ('formatoptions', 'formatoptions'), ('formatprg', 'formatprg'), ('fp', 'fp'), ('fs', 'fs'), ('fsync', 'fsync'), ('ft', 'ft'), ('gcr', 'gcr'), ('gd', 'gd'), ('gdefault', 'gdefault'), ('gfm', 'gfm'), ('gfn', 'gfn'), ('gfs', 'gfs'), ('gfw', 'gfw'), ('ghr', 'ghr'), ('go', 'go'), ('gp', 'gp'), ('grepformat', 'grepformat'), ('grepprg', 'grepprg'), ('gtl', 'gtl'), ('gtt', 'gtt'), ('guicursor', 'guicursor'), ('guifont', 'guifont'), ('guifontset', 'guifontset'), ('guifontwide', 'guifontwide'), ('guiheadroom', 'guiheadroom'), ('guioptions', 'guioptions'), ('guipty', 'guipty'), ('guitablabel', 'guitablabel'), ('guitabtooltip', 'guitabtooltip'), ('helpfile', 'helpfile'), ('helpheight', 'helpheight'), ('helplang', 'helplang'), ('hf', 'hf'), ('hh', 'hh'), ('hi', 'hi'), ('hid', 'hid'), ('hidden', 'hidden'), ('highlight', 'highlight'), ('history', 'history'), ('hk', 'hk'), ('hkmap', 'hkmap'), ('hkmapp', 'hkmapp'), ('hkp', 'hkp'), ('hl', 'hl'), ('hlg', 'hlg'), ('hls', 'hls'), ('hlsearch', 'hlsearch'), ('ic', 'ic'), ('icon', 'icon'), ('iconstring', 'iconstring'), ('ignorecase', 'ignorecase'), ('im', 'im'), ('imactivatekey', 'imactivatekey'), ('imak', 'imak'), ('imc', 'imc'), ('imcmdline', 'imcmdline'), ('imd', 'imd'), ('imdisable', 'imdisable'), ('imi', 'imi'), ('iminsert', 'iminsert'), ('ims', 'ims'), ('imsearch', 'imsearch'), ('inc', 'inc'), ('include', 'include'), ('includeexpr', 'includeexpr'), ('incsearch', 'incsearch'), ('inde', 'inde'), ('indentexpr', 'indentexpr'), ('indentkeys', 'indentkeys'), ('indk', 'indk'), ('inex', 'inex'), ('inf', 'inf'), ('infercase', 'infercase'), ('insert', 'insert'), ('insert', 'insert'), ('insertmode', 'insertmode'), ('invacd', 'invacd'), ('invai', 'invai'), ('invakm', 'invakm'), ('invallowrevins', 'invallowrevins'), ('invaltkeymap', 'invaltkeymap'), ('invanti', 'invanti'), ('invantialias', 'invantialias'), ('invar', 'invar'), ('invarab', 'invarab'), ('invarabic', 'invarabic'), ('invarabicshape', 'invarabicshape'), ('invari', 'invari'), ('invarshape', 'invarshape'), ('invautochdir', 'invautochdir'), ('invautoindent', 'invautoindent'), ('invautoread', 'invautoread'), ('invautowrite', 'invautowrite'), ('invautowriteall', 'invautowriteall'), ('invaw', 'invaw'), ('invawa', 'invawa'), ('invbackup', 'invbackup'), ('invballooneval', 'invballooneval'), ('invbeval', 'invbeval'), ('invbin', 'invbin'), ('invbinary', 'invbinary'), ('invbiosk', 
'invbiosk'), ('invbioskey', 'invbioskey'), ('invbk', 'invbk'), ('invbl', 'invbl'), ('invbomb', 'invbomb'), ('invbuflisted', 'invbuflisted'), ('invcf', 'invcf'), ('invci', 'invci'), ('invcin', 'invcin'), ('invcindent', 'invcindent'), ('invcompatible', 'invcompatible'), ('invconfirm', 'invconfirm'), ('invconsk', 'invconsk'), ('invconskey', 'invconskey'), ('invcopyindent', 'invcopyindent'), ('invcp', 'invcp'), ('invcscopetag', 'invcscopetag'), ('invcscopeverbose', 'invcscopeverbose'), ('invcst', 'invcst'), ('invcsverb', 'invcsverb'), ('invcuc', 'invcuc'), ('invcul', 'invcul'), ('invcursorcolumn', 'invcursorcolumn'), ('invcursorline', 'invcursorline'), ('invdeco', 'invdeco'), ('invdelcombine', 'invdelcombine'), ('invdg', 'invdg'), ('invdiff', 'invdiff'), ('invdigraph', 'invdigraph'), ('invdisable', 'invdisable'), ('invea', 'invea'), ('inveb', 'inveb'), ('inved', 'inved'), ('invedcompatible', 'invedcompatible'), ('invek', 'invek'), ('invendofline', 'invendofline'), ('inveol', 'inveol'), ('invequalalways', 'invequalalways'), ('inverrorbells', 'inverrorbells'), ('invesckeys', 'invesckeys'), ('invet', 'invet'), ('invex', 'invex'), ('invexpandtab', 'invexpandtab'), ('invexrc', 'invexrc'), ('invfen', 'invfen'), ('invfk', 'invfk'), ('invfkmap', 'invfkmap'), ('invfoldenable', 'invfoldenable'), ('invgd', 'invgd'), ('invgdefault', 'invgdefault'), ('invguipty', 'invguipty'), ('invhid', 'invhid'), ('invhidden', 'invhidden'), ('invhk', 'invhk'), ('invhkmap', 'invhkmap'), ('invhkmapp', 'invhkmapp'), ('invhkp', 'invhkp'), ('invhls', 'invhls'), ('invhlsearch', 'invhlsearch'), ('invic', 'invic'), ('invicon', 'invicon'), ('invignorecase', 'invignorecase'), ('invim', 'invim'), ('invimc', 'invimc'), ('invimcmdline', 'invimcmdline'), ('invimd', 'invimd'), ('invincsearch', 'invincsearch'), ('invinf', 'invinf'), ('invinfercase', 'invinfercase'), ('invinsertmode', 'invinsertmode'), ('invis', 'invis'), ('invjoinspaces', 'invjoinspaces'), ('invjs', 'invjs'), ('invlazyredraw', 'invlazyredraw'), ('invlbr', 'invlbr'), ('invlinebreak', 'invlinebreak'), ('invlisp', 'invlisp'), ('invlist', 'invlist'), ('invloadplugins', 'invloadplugins'), ('invlpl', 'invlpl'), ('invlz', 'invlz'), ('invma', 'invma'), ('invmacatsui', 'invmacatsui'), ('invmagic', 'invmagic'), ('invmh', 'invmh'), ('invml', 'invml'), ('invmod', 'invmod'), ('invmodeline', 'invmodeline'), ('invmodifiable', 'invmodifiable'), ('invmodified', 'invmodified'), ('invmore', 'invmore'), ('invmousef', 'invmousef'), ('invmousefocus', 'invmousefocus'), ('invmousehide', 'invmousehide'), ('invnu', 'invnu'), ('invnumber', 'invnumber'), ('invpaste', 'invpaste'), ('invpi', 'invpi'), ('invpreserveindent', 'invpreserveindent'), ('invpreviewwindow', 'invpreviewwindow'), ('invprompt', 'invprompt'), ('invpvw', 'invpvw'), ('invreadonly', 'invreadonly'), ('invremap', 'invremap'), ('invrestorescreen', 'invrestorescreen'), ('invrevins', 'invrevins'), ('invri', 'invri'), ('invrightleft', 'invrightleft'), ('invrightleftcmd', 'invrightleftcmd'), ('invrl', 'invrl'), ('invrlc', 'invrlc'), ('invro', 'invro'), ('invrs', 'invrs'), ('invru', 'invru'), ('invruler', 'invruler'), ('invsb', 'invsb'), ('invsc', 'invsc'), ('invscb', 'invscb'), ('invscrollbind', 'invscrollbind'), ('invscs', 'invscs'), ('invsecure', 'invsecure'), ('invsft', 'invsft'), ('invshellslash', 'invshellslash'), ('invshelltemp', 'invshelltemp'), ('invshiftround', 'invshiftround'), ('invshortname', 'invshortname'), ('invshowcmd', 'invshowcmd'), ('invshowfulltag', 'invshowfulltag'), ('invshowmatch', 'invshowmatch'), ('invshowmode', 
'invshowmode'), ('invsi', 'invsi'), ('invsm', 'invsm'), ('invsmartcase', 'invsmartcase'), ('invsmartindent', 'invsmartindent'), ('invsmarttab', 'invsmarttab'), ('invsmd', 'invsmd'), ('invsn', 'invsn'), ('invsol', 'invsol'), ('invspell', 'invspell'), ('invsplitbelow', 'invsplitbelow'), ('invsplitright', 'invsplitright'), ('invspr', 'invspr'), ('invsr', 'invsr'), ('invssl', 'invssl'), ('invsta', 'invsta'), ('invstartofline', 'invstartofline'), ('invstmp', 'invstmp'), ('invswapfile', 'invswapfile'), ('invswf', 'invswf'), ('invta', 'invta'), ('invtagbsearch', 'invtagbsearch'), ('invtagrelative', 'invtagrelative'), ('invtagstack', 'invtagstack'), ('invtbi', 'invtbi'), ('invtbidi', 'invtbidi'), ('invtbs', 'invtbs'), ('invtermbidi', 'invtermbidi'), ('invterse', 'invterse'), ('invtextauto', 'invtextauto'), ('invtextmode', 'invtextmode'), ('invtf', 'invtf'), ('invtgst', 'invtgst'), ('invtildeop', 'invtildeop'), ('invtimeout', 'invtimeout'), ('invtitle', 'invtitle'), ('invto', 'invto'), ('invtop', 'invtop'), ('invtr', 'invtr'), ('invttimeout', 'invttimeout'), ('invttybuiltin', 'invttybuiltin'), ('invttyfast', 'invttyfast'), ('invtx', 'invtx'), ('invvb', 'invvb'), ('invvisualbell', 'invvisualbell'), ('invwa', 'invwa'), ('invwarn', 'invwarn'), ('invwb', 'invwb'), ('invweirdinvert', 'invweirdinvert'), ('invwfh', 'invwfh'), ('invwfw', 'invwfw'), ('invwildmenu', 'invwildmenu'), ('invwinfixheight', 'invwinfixheight'), ('invwinfixwidth', 'invwinfixwidth'), ('invwiv', 'invwiv'), ('invwmnu', 'invwmnu'), ('invwrap', 'invwrap'), ('invwrapscan', 'invwrapscan'), ('invwrite', 'invwrite'), ('invwriteany', 'invwriteany'), ('invwritebackup', 'invwritebackup'), ('invws', 'invws'), ('is', 'is'), ('isf', 'isf'), ('isfname', 'isfname'), ('isi', 'isi'), ('isident', 'isident'), ('isk', 'isk'), ('iskeyword', 'iskeyword'), ('isp', 'isp'), ('isprint', 'isprint'), ('joinspaces', 'joinspaces'), ('js', 'js'), ('key', 'key'), ('keymap', 'keymap'), ('keymodel', 'keymodel'), ('keywordprg', 'keywordprg'), ('km', 'km'), ('kmp', 'kmp'), ('kp', 'kp'), ('langmap', 'langmap'), ('langmenu', 'langmenu'), ('laststatus', 'laststatus'), ('lazyredraw', 'lazyredraw'), ('lbr', 'lbr'), ('lcs', 'lcs'), ('linebreak', 'linebreak'), ('lines', 'lines'), ('linespace', 'linespace'), ('lisp', 'lisp'), ('lispwords', 'lispwords'), ('list', 'list'), ('listchars', 'listchars'), ('lm', 'lm'), ('lmap', 'lmap'), ('loadplugins', 'loadplugins'), ('lpl', 'lpl'), ('ls', 'ls'), ('lsp', 'lsp'), ('lw', 'lw'), ('lz', 'lz'), ('ma', 'ma'), ('macatsui', 'macatsui'), ('magic', 'magic'), ('makeef', 'makeef'), ('makeprg', 'makeprg'), ('mat', 'mat'), ('matchpairs', 'matchpairs'), ('matchtime', 'matchtime'), ('maxcombine', 'maxcombine'), ('maxfuncdepth', 'maxfuncdepth'), ('maxmapdepth', 'maxmapdepth'), ('maxmem', 'maxmem'), ('maxmempattern', 'maxmempattern'), ('maxmemtot', 'maxmemtot'), ('mco', 'mco'), ('mef', 'mef'), ('menuitems', 'menuitems'), ('mfd', 'mfd'), ('mh', 'mh'), ('mis', 'mis'), ('mkspellmem', 'mkspellmem'), ('ml', 'ml'), ('mls', 'mls'), ('mm', 'mm'), ('mmd', 'mmd'), ('mmp', 'mmp'), ('mmt', 'mmt'), ('mod', 'mod'), ('mode', 'mode'), ('mode', 'mode'), ('modeline', 'modeline'), ('modelines', 'modelines'), ('modifiable', 'modifiable'), ('modified', 'modified'), ('more', 'more'), ('mouse', 'mouse'), ('mousef', 'mousef'), ('mousefocus', 'mousefocus'), ('mousehide', 'mousehide'), ('mousem', 'mousem'), ('mousemodel', 'mousemodel'), ('mouses', 'mouses'), ('mouseshape', 'mouseshape'), ('mouset', 'mouset'), ('mousetime', 'mousetime'), ('mp', 'mp'), ('mps', 'mps'), ('msm', 
'msm'), ('mzq', 'mzq'), ('mzquantum', 'mzquantum'), ('nf', 'nf'), ('noacd', 'noacd'), ('noai', 'noai'), ('noakm', 'noakm'), ('noallowrevins', 'noallowrevins'), ('noaltkeymap', 'noaltkeymap'), ('noanti', 'noanti'), ('noantialias', 'noantialias'), ('noar', 'noar'), ('noarab', 'noarab'), ('noarabic', 'noarabic'), ('noarabicshape', 'noarabicshape'), ('noari', 'noari'), ('noarshape', 'noarshape'), ('noautochdir', 'noautochdir'), ('noautoindent', 'noautoindent'), ('noautoread', 'noautoread'), ('noautowrite', 'noautowrite'), ('noautowriteall', 'noautowriteall'), ('noaw', 'noaw'), ('noawa', 'noawa'), ('nobackup', 'nobackup'), ('noballooneval', 'noballooneval'), ('nobeval', 'nobeval'), ('nobin', 'nobin'), ('nobinary', 'nobinary'), ('nobiosk', 'nobiosk'), ('nobioskey', 'nobioskey'), ('nobk', 'nobk'), ('nobl', 'nobl'), ('nobomb', 'nobomb'), ('nobuflisted', 'nobuflisted'), ('nocf', 'nocf'), ('noci', 'noci'), ('nocin', 'nocin'), ('nocindent', 'nocindent'), ('nocompatible', 'nocompatible'), ('noconfirm', 'noconfirm'), ('noconsk', 'noconsk'), ('noconskey', 'noconskey'), ('nocopyindent', 'nocopyindent'), ('nocp', 'nocp'), ('nocscopetag', 'nocscopetag'), ('nocscopeverbose', 'nocscopeverbose'), ('nocst', 'nocst'), ('nocsverb', 'nocsverb'), ('nocuc', 'nocuc'), ('nocul', 'nocul'), ('nocursorcolumn', 'nocursorcolumn'), ('nocursorline', 'nocursorline'), ('nodeco', 'nodeco'), ('nodelcombine', 'nodelcombine'), ('nodg', 'nodg'), ('nodiff', 'nodiff'), ('nodigraph', 'nodigraph'), ('nodisable', 'nodisable'), ('noea', 'noea'), ('noeb', 'noeb'), ('noed', 'noed'), ('noedcompatible', 'noedcompatible'), ('noek', 'noek'), ('noendofline', 'noendofline'), ('noeol', 'noeol'), ('noequalalways', 'noequalalways'), ('noerrorbells', 'noerrorbells'), ('noesckeys', 'noesckeys'), ('noet', 'noet'), ('noex', 'noex'), ('noexpandtab', 'noexpandtab'), ('noexrc', 'noexrc'), ('nofen', 'nofen'), ('nofk', 'nofk'), ('nofkmap', 'nofkmap'), ('nofoldenable', 'nofoldenable'), ('nogd', 'nogd'), ('nogdefault', 'nogdefault'), ('noguipty', 'noguipty'), ('nohid', 'nohid'), ('nohidden', 'nohidden'), ('nohk', 'nohk'), ('nohkmap', 'nohkmap'), ('nohkmapp', 'nohkmapp'), ('nohkp', 'nohkp'), ('nohls', 'nohls'), ('nohlsearch', 'nohlsearch'), ('noic', 'noic'), ('noicon', 'noicon'), ('noignorecase', 'noignorecase'), ('noim', 'noim'), ('noimc', 'noimc'), ('noimcmdline', 'noimcmdline'), ('noimd', 'noimd'), ('noincsearch', 'noincsearch'), ('noinf', 'noinf'), ('noinfercase', 'noinfercase'), ('noinsertmode', 'noinsertmode'), ('nois', 'nois'), ('nojoinspaces', 'nojoinspaces'), ('nojs', 'nojs'), ('nolazyredraw', 'nolazyredraw'), ('nolbr', 'nolbr'), ('nolinebreak', 'nolinebreak'), ('nolisp', 'nolisp'), ('nolist', 'nolist'), ('noloadplugins', 'noloadplugins'), ('nolpl', 'nolpl'), ('nolz', 'nolz'), ('noma', 'noma'), ('nomacatsui', 'nomacatsui'), ('nomagic', 'nomagic'), ('nomh', 'nomh'), ('noml', 'noml'), ('nomod', 'nomod'), ('nomodeline', 'nomodeline'), ('nomodifiable', 'nomodifiable'), ('nomodified', 'nomodified'), ('nomore', 'nomore'), ('nomousef', 'nomousef'), ('nomousefocus', 'nomousefocus'), ('nomousehide', 'nomousehide'), ('nonu', 'nonu'), ('nonumber', 'nonumber'), ('nopaste', 'nopaste'), ('nopi', 'nopi'), ('nopreserveindent', 'nopreserveindent'), ('nopreviewwindow', 'nopreviewwindow'), ('noprompt', 'noprompt'), ('nopvw', 'nopvw'), ('noreadonly', 'noreadonly'), ('noremap', 'noremap'), ('norestorescreen', 'norestorescreen'), ('norevins', 'norevins'), ('nori', 'nori'), ('norightleft', 'norightleft'), ('norightleftcmd', 'norightleftcmd'), ('norl', 'norl'), ('norlc', 
'norlc'), ('noro', 'noro'), ('nors', 'nors'), ('noru', 'noru'), ('noruler', 'noruler'), ('nosb', 'nosb'), ('nosc', 'nosc'), ('noscb', 'noscb'), ('noscrollbind', 'noscrollbind'), ('noscs', 'noscs'), ('nosecure', 'nosecure'), ('nosft', 'nosft'), ('noshellslash', 'noshellslash'), ('noshelltemp', 'noshelltemp'), ('noshiftround', 'noshiftround'), ('noshortname', 'noshortname'), ('noshowcmd', 'noshowcmd'), ('noshowfulltag', 'noshowfulltag'), ('noshowmatch', 'noshowmatch'), ('noshowmode', 'noshowmode'), ('nosi', 'nosi'), ('nosm', 'nosm'), ('nosmartcase', 'nosmartcase'), ('nosmartindent', 'nosmartindent'), ('nosmarttab', 'nosmarttab'), ('nosmd', 'nosmd'), ('nosn', 'nosn'), ('nosol', 'nosol'), ('nospell', 'nospell'), ('nosplitbelow', 'nosplitbelow'), ('nosplitright', 'nosplitright'), ('nospr', 'nospr'), ('nosr', 'nosr'), ('nossl', 'nossl'), ('nosta', 'nosta'), ('nostartofline', 'nostartofline'), ('nostmp', 'nostmp'), ('noswapfile', 'noswapfile'), ('noswf', 'noswf'), ('nota', 'nota'), ('notagbsearch', 'notagbsearch'), ('notagrelative', 'notagrelative'), ('notagstack', 'notagstack'), ('notbi', 'notbi'), ('notbidi', 'notbidi'), ('notbs', 'notbs'), ('notermbidi', 'notermbidi'), ('noterse', 'noterse'), ('notextauto', 'notextauto'), ('notextmode', 'notextmode'), ('notf', 'notf'), ('notgst', 'notgst'), ('notildeop', 'notildeop'), ('notimeout', 'notimeout'), ('notitle', 'notitle'), ('noto', 'noto'), ('notop', 'notop'), ('notr', 'notr'), ('nottimeout', 'nottimeout'), ('nottybuiltin', 'nottybuiltin'), ('nottyfast', 'nottyfast'), ('notx', 'notx'), ('novb', 'novb'), ('novisualbell', 'novisualbell'), ('nowa', 'nowa'), ('nowarn', 'nowarn'), ('nowb', 'nowb'), ('noweirdinvert', 'noweirdinvert'), ('nowfh', 'nowfh'), ('nowfw', 'nowfw'), ('nowildmenu', 'nowildmenu'), ('nowinfixheight', 'nowinfixheight'), ('nowinfixwidth', 'nowinfixwidth'), ('nowiv', 'nowiv'), ('nowmnu', 'nowmnu'), ('nowrap', 'nowrap'), ('nowrapscan', 'nowrapscan'), ('nowrite', 'nowrite'), ('nowriteany', 'nowriteany'), ('nowritebackup', 'nowritebackup'), ('nows', 'nows'), ('nrformats', 'nrformats'), ('nu', 'nu'), ('number', 'number'), ('numberwidth', 'numberwidth'), ('nuw', 'nuw'), ('oft', 'oft'), ('ofu', 'ofu'), ('omnifunc', 'omnifunc'), ('operatorfunc', 'operatorfunc'), ('opfunc', 'opfunc'), ('osfiletype', 'osfiletype'), ('pa', 'pa'), ('para', 'para'), ('paragraphs', 'paragraphs'), ('paste', 'paste'), ('pastetoggle', 'pastetoggle'), ('patchexpr', 'patchexpr'), ('patchmode', 'patchmode'), ('path', 'path'), ('pdev', 'pdev'), ('penc', 'penc'), ('pex', 'pex'), ('pexpr', 'pexpr'), ('pfn', 'pfn'), ('ph', 'ph'), ('pheader', 'pheader'), ('pi', 'pi'), ('pm', 'pm'), ('pmbcs', 'pmbcs'), ('pmbfn', 'pmbfn'), ('popt', 'popt'), ('preserveindent', 'preserveindent'), ('previewheight', 'previewheight'), ('previewwindow', 'previewwindow'), ('printdevice', 'printdevice'), ('printencoding', 'printencoding'), ('printexpr', 'printexpr'), ('printfont', 'printfont'), ('printheader', 'printheader'), ('printmbcharset', 'printmbcharset'), ('printmbfont', 'printmbfont'), ('printoptions', 'printoptions'), ('prompt', 'prompt'), ('pt', 'pt'), ('pumheight', 'pumheight'), ('pvh', 'pvh'), ('pvw', 'pvw'), ('qe', 'qe'), ('quoteescape', 'quoteescape'), ('readonly', 'readonly'), ('remap', 'remap'), ('report', 'report'), ('restorescreen', 'restorescreen'), ('revins', 'revins'), ('ri', 'ri'), ('rightleft', 'rightleft'), ('rightleftcmd', 'rightleftcmd'), ('rl', 'rl'), ('rlc', 'rlc'), ('ro', 'ro'), ('rs', 'rs'), ('rtp', 'rtp'), ('ru', 'ru'), ('ruf', 'ruf'), ('ruler', 'ruler'), 
('rulerformat', 'rulerformat'), ('runtimepath', 'runtimepath'), ('sb', 'sb'), ('sbo', 'sbo'), ('sbr', 'sbr'), ('sc', 'sc'), ('scb', 'scb'), ('scr', 'scr'), ('scroll', 'scroll'), ('scrollbind', 'scrollbind'), ('scrolljump', 'scrolljump'), ('scrolloff', 'scrolloff'), ('scrollopt', 'scrollopt'), ('scs', 'scs'), ('sect', 'sect'), ('sections', 'sections'), ('secure', 'secure'), ('sel', 'sel'), ('selection', 'selection'), ('selectmode', 'selectmode'), ('sessionoptions', 'sessionoptions'), ('sft', 'sft'), ('sh', 'sh'), ('shape', 'shape'), ('shape', 'shape'), ('shcf', 'shcf'), ('shell', 'shell'), ('shellcmdflag', 'shellcmdflag'), ('shellpipe', 'shellpipe'), ('shellquote', 'shellquote'), ('shellredir', 'shellredir'), ('shellslash', 'shellslash'), ('shelltemp', 'shelltemp'), ('shelltype', 'shelltype'), ('shellxquote', 'shellxquote'), ('shiftround', 'shiftround'), ('shiftwidth', 'shiftwidth'), ('shm', 'shm'), ('shortmess', 'shortmess'), ('shortname', 'shortname'), ('showbreak', 'showbreak'), ('showcmd', 'showcmd'), ('showfulltag', 'showfulltag'), ('showmatch', 'showmatch'), ('showmode', 'showmode'), ('showtabline', 'showtabline'), ('shq', 'shq'), ('si', 'si'), ('sidescroll', 'sidescroll'), ('sidescrolloff', 'sidescrolloff'), ('siso', 'siso'), ('sj', 'sj'), ('slm', 'slm'), ('sm', 'sm'), ('smartcase', 'smartcase'), ('smartindent', 'smartindent'), ('smarttab', 'smarttab'), ('smc', 'smc'), ('smd', 'smd'), ('sn', 'sn'), ('so', 'so'), ('softtabstop', 'softtabstop'), ('sol', 'sol'), ('sp', 'sp'), ('spc', 'spc'), ('spell', 'spell'), ('spellcapcheck', 'spellcapcheck'), ('spellfile', 'spellfile'), ('spelllang', 'spelllang'), ('spellsuggest', 'spellsuggest'), ('spf', 'spf'), ('spl', 'spl'), ('splitbelow', 'splitbelow'), ('splitright', 'splitright'), ('spr', 'spr'), ('sps', 'sps'), ('sr', 'sr'), ('srr', 'srr'), ('ss', 'ss'), ('ssl', 'ssl'), ('ssop', 'ssop'), ('st', 'st'), ('sta', 'sta'), ('stal', 'stal'), ('start', 'start'), ('startofline', 'startofline'), ('statusline', 'statusline'), ('stl', 'stl'), ('stmp', 'stmp'), ('sts', 'sts'), ('su', 'su'), ('sua', 'sua'), ('suffixes', 'suffixes'), ('suffixesadd', 'suffixesadd'), ('sw', 'sw'), ('swapfile', 'swapfile'), ('swapsync', 'swapsync'), ('swb', 'swb'), ('swf', 'swf'), ('switchbuf', 'switchbuf'), ('sws', 'sws'), ('sxq', 'sxq'), ('syn', 'syn'), ('synmaxcol', 'synmaxcol'), ('syntax', 'syntax'), ('t_AB', 't_AB'), ('t_AF', 't_AF'), ('t_AL', 't_AL'), ('t_CS', 't_CS'), ('t_CV', 't_CV'), ('t_Ce', 't_Ce'), ('t_Co', 't_Co'), ('t_Cs', 't_Cs'), ('t_DL', 't_DL'), ('t_EI', 't_EI'), ('t_EI', 't_EI'), ('t_EI', 't_EI'), ('t_F1', 't_F1'), ('t_F2', 't_F2'), ('t_F3', 't_F3'), ('t_F4', 't_F4'), ('t_F5', 't_F5'), ('t_F6', 't_F6'), ('t_F7', 't_F7'), ('t_F8', 't_F8'), ('t_F9', 't_F9'), ('t_IE', 't_IE'), ('t_IS', 't_IS'), ('t_K1', 't_K1'), ('t_K3', 't_K3'), ('t_K4', 't_K4'), ('t_K5', 't_K5'), ('t_K6', 't_K6'), ('t_K7', 't_K7'), ('t_K8', 't_K8'), ('t_K9', 't_K9'), ('t_KA', 't_KA'), ('t_KB', 't_KB'), ('t_KC', 't_KC'), ('t_KD', 't_KD'), ('t_KE', 't_KE'), ('t_KF', 't_KF'), ('t_KG', 't_KG'), ('t_KH', 't_KH'), ('t_KI', 't_KI'), ('t_KJ', 't_KJ'), ('t_KK', 't_KK'), ('t_KL', 't_KL'), ('t_RI', 't_RI'), ('t_RV', 't_RV'), ('t_SI', 't_SI'), ('t_SI', 't_SI'), ('t_SI', 't_SI'), ('t_Sb', 't_Sb'), ('t_Sf', 't_Sf'), ('t_WP', 't_WP'), ('t_WS', 't_WS'), ('t_ZH', 't_ZH'), ('t_ZR', 't_ZR'), ('t_al', 't_al'), ('t_bc', 't_bc'), ('t_cd', 't_cd'), ('t_ce', 't_ce'), ('t_cl', 't_cl'), ('t_cm', 't_cm'), ('t_cs', 't_cs'), ('t_da', 't_da'), ('t_db', 't_db'), ('t_dl', 't_dl'), ('t_fs', 't_fs'), ('t_k1', 't_k1'), 
('t_k2', 't_k2'), ('t_k3', 't_k3'), ('t_k4', 't_k4'), ('t_k5', 't_k5'), ('t_k6', 't_k6'), ('t_k7', 't_k7'), ('t_k8', 't_k8'), ('t_k9', 't_k9'), ('t_kB', 't_kB'), ('t_kD', 't_kD'), ('t_kI', 't_kI'), ('t_kN', 't_kN'), ('t_kP', 't_kP'), ('t_kb', 't_kb'), ('t_kd', 't_kd'), ('t_ke', 't_ke'), ('t_kh', 't_kh'), ('t_kl', 't_kl'), ('t_kr', 't_kr'), ('t_ks', 't_ks'), ('t_ku', 't_ku'), ('t_le', 't_le'), ('t_mb', 't_mb'), ('t_md', 't_md'), ('t_me', 't_me'), ('t_mr', 't_mr'), ('t_ms', 't_ms'), ('t_nd', 't_nd'), ('t_op', 't_op'), ('t_se', 't_se'), ('t_so', 't_so'), ('t_sr', 't_sr'), ('t_te', 't_te'), ('t_ti', 't_ti'), ('t_ts', 't_ts'), ('t_ue', 't_ue'), ('t_us', 't_us'), ('t_ut', 't_ut'), ('t_vb', 't_vb'), ('t_ve', 't_ve'), ('t_vi', 't_vi'), ('t_vs', 't_vs'), ('t_xs', 't_xs'), ('ta', 'ta'), ('tabline', 'tabline'), ('tabpagemax', 'tabpagemax'), ('tabstop', 'tabstop'), ('tag', 'tag'), ('tagbsearch', 'tagbsearch'), ('taglength', 'taglength'), ('tagrelative', 'tagrelative'), ('tags', 'tags'), ('tagstack', 'tagstack'), ('tal', 'tal'), ('tb', 'tb'), ('tbi', 'tbi'), ('tbidi', 'tbidi'), ('tbis', 'tbis'), ('tbs', 'tbs'), ('tenc', 'tenc'), ('term', 'term'), ('termbidi', 'termbidi'), ('termencoding', 'termencoding'), ('terse', 'terse'), ('textauto', 'textauto'), ('textmode', 'textmode'), ('textwidth', 'textwidth'), ('tf', 'tf'), ('tgst', 'tgst'), ('thesaurus', 'thesaurus'), ('tildeop', 'tildeop'), ('timeout', 'timeout'), ('timeoutlen', 'timeoutlen'), ('title', 'title'), ('titlelen', 'titlelen'), ('titleold', 'titleold'), ('titlestring', 'titlestring'), ('tl', 'tl'), ('tm', 'tm'), ('to', 'to'), ('toolbar', 'toolbar'), ('toolbariconsize', 'toolbariconsize'), ('top', 'top'), ('tpm', 'tpm'), ('tr', 'tr'), ('ts', 'ts'), ('tsl', 'tsl'), ('tsr', 'tsr'), ('ttimeout', 'ttimeout'), ('ttimeoutlen', 'ttimeoutlen'), ('ttm', 'ttm'), ('tty', 'tty'), ('ttybuiltin', 'ttybuiltin'), ('ttyfast', 'ttyfast'), ('ttym', 'ttym'), ('ttymouse', 'ttymouse'), ('ttyscroll', 'ttyscroll'), ('ttytype', 'ttytype'), ('tw', 'tw'), ('tx', 'tx'), ('uc', 'uc'), ('ul', 'ul'), ('undolevels', 'undolevels'), ('updatecount', 'updatecount'), ('updatetime', 'updatetime'), ('ut', 'ut'), ('vb', 'vb'), ('vbs', 'vbs'), ('vdir', 'vdir'), ('ve', 've'), ('verbose', 'verbose'), ('verbosefile', 'verbosefile'), ('vfile', 'vfile'), ('vi', 'vi'), ('viewdir', 'viewdir'), ('viewoptions', 'viewoptions'), ('viminfo', 'viminfo'), ('virtualedit', 'virtualedit'), ('visualbell', 'visualbell'), ('vop', 'vop'), ('wa', 'wa'), ('wak', 'wak'), ('warn', 'warn'), ('wb', 'wb'), ('wc', 'wc'), ('wcm', 'wcm'), ('wd', 'wd'), ('weirdinvert', 'weirdinvert'), ('wfh', 'wfh'), ('wfw', 'wfw'), ('wh', 'wh'), ('whichwrap', 'whichwrap'), ('wi', 'wi'), ('wig', 'wig'), ('wildchar', 'wildchar'), ('wildcharm', 'wildcharm'), ('wildignore', 'wildignore'), ('wildmenu', 'wildmenu'), ('wildmode', 'wildmode'), ('wildoptions', 'wildoptions'), ('wim', 'wim'), ('winaltkeys', 'winaltkeys'), ('window', 'window'), ('winfixheight', 'winfixheight'), ('winfixwidth', 'winfixwidth'), ('winheight', 'winheight'), ('winminheight', 'winminheight'), ('winminwidth', 'winminwidth'), ('winwidth', 'winwidth'), ('wiv', 'wiv'), ('wiw', 'wiw'), ('wm', 'wm'), ('wmh', 'wmh'), ('wmnu', 'wmnu'), ('wmw', 'wmw'), ('wop', 'wop'), ('wrap', 'wrap'), ('wrapmargin', 'wrapmargin'), ('wrapscan', 'wrapscan'), ('write', 'write'), ('writeany', 'writeany'), ('writebackup', 'writebackup'), ('writedelay', 'writedelay'), ('ws', 'ws'), ('ww', 'ww')]
+auto=[('BufAdd','BufAdd'),('BufCreate','BufCreate'),('BufDelete','BufDelete'),('BufEnter','BufEnter'),('BufFilePost','BufFilePost'),('BufFilePre','BufFilePre'),('BufHidden','BufHidden'),('BufLeave','BufLeave'),('BufNew','BufNew'),('BufNewFile','BufNewFile'),('BufRead','BufRead'),('BufReadCmd','BufReadCmd'),('BufReadPost','BufReadPost'),('BufReadPre','BufReadPre'),('BufUnload','BufUnload'),('BufWinEnter','BufWinEnter'),('BufWinLeave','BufWinLeave'),('BufWipeout','BufWipeout'),('BufWrite','BufWrite'),('BufWriteCmd','BufWriteCmd'),('BufWritePost','BufWritePost'),('BufWritePre','BufWritePre'),('Cmd','Cmd'),('CmdwinEnter','CmdwinEnter'),('CmdwinLeave','CmdwinLeave'),('ColorScheme','ColorScheme'),('CursorHold','CursorHold'),('CursorHoldI','CursorHoldI'),('CursorMoved','CursorMoved'),('CursorMovedI','CursorMovedI'),('EncodingChanged','EncodingChanged'),('FileAppendCmd','FileAppendCmd'),('FileAppendPost','FileAppendPost'),('FileAppendPre','FileAppendPre'),('FileChangedRO','FileChangedRO'),('FileChangedShell','FileChangedShell'),('FileChangedShellPost','FileChangedShellPost'),('FileEncoding','FileEncoding'),('FileReadCmd','FileReadCmd'),('FileReadPost','FileReadPost'),('FileReadPre','FileReadPre'),('FileType','FileType'),('FileWriteCmd','FileWriteCmd'),('FileWritePost','FileWritePost'),('FileWritePre','FileWritePre'),('FilterReadPost','FilterReadPost'),('FilterReadPre','FilterReadPre'),('FilterWritePost','FilterWritePost'),('FilterWritePre','FilterWritePre'),('FocusGained','FocusGained'),('FocusLost','FocusLost'),('FuncUndefined','FuncUndefined'),('GUIEnter','GUIEnter'),('GUIFailed','GUIFailed'),('InsertChange','InsertChange'),('InsertCharPre','InsertCharPre'),('InsertEnter','InsertEnter'),('InsertLeave','InsertLeave'),('MenuPopup','MenuPopup'),('QuickFixCmdPost','QuickFixCmdPost'),('QuickFixCmdPre','QuickFixCmdPre'),('RemoteReply','RemoteReply'),('SessionLoadPost','SessionLoadPost'),('ShellCmdPost','ShellCmdPost'),('ShellFilterPost','ShellFilterPost'),('SourceCmd','SourceCmd'),('SourcePre','SourcePre'),('SpellFileMissing','SpellFileMissing'),('StdinReadPost','StdinReadPost'),('StdinReadPre','StdinReadPre'),('SwapExists','SwapExists'),('Syntax','Syntax'),('TabEnter','TabEnter'),('TabLeave','TabLeave'),('TermChanged','TermChanged'),('TermResponse','TermResponse'),('User','User'),('UserGettingBored','UserGettingBored'),('VimEnter','VimEnter'),('VimLeave','VimLeave'),('VimLeavePre','VimLeavePre'),('VimResized','VimResized'),('WinEnter','WinEnter'),('WinLeave','WinLeave'),('event','event')]
+command=[('Allargs','Allargs'),('DiffOrig','DiffOrig'),('Error','Error'),('Man','Man'),('MyCommand','MyCommand'),('Mycmd','Mycmd'),('N','N'),('N','Next'),('P','P'),('P','Print'),('Ren','Ren'),('Rena','Rena'),('Renu','Renu'),('TOhtml','TOhtml'),('X','X'),('XMLent','XMLent'),('XMLns','XMLns'),('a','a'),('ab','ab'),('abc','abclear'),('abo','aboveleft'),('al','all'),('ar','ar'),('ar','args'),('arga','argadd'),('argd','argdelete'),('argdo','argdo'),('arge','argedit'),('argg','argglobal'),('argl','arglocal'),('argu','argument'),('as','ascii'),('au','au'),('b','buffer'),('bN','bNext'),('ba','ball'),('bad','badd'),('bar','bar'),('bd','bdelete'),('bel','belowright'),('bf','bfirst'),('bl','blast'),('bm','bmodified'),('bn','bnext'),('bo','botright'),('bp','bprevious'),('br','br'),('br','brewind'),('brea','break'),('breaka','breakadd'),('breakd','breakdel'),('breakl','breaklist'),('bro','browse'),('browseset','browseset'),('bu','bu'),('buf','buf'),('bufdo','bufdo'),('buffers','buffers'),('bun','bunload'),('bw','bwipeout'),('c','c'),('c','change'),('cN','cN'),('cN','cNext'),('cNf','cNf'),('cNf','cNfile'),('cabc','cabclear'),('cad','cad'),('cad','caddexpr'),('caddb','caddbuffer'),('caddf','caddfile'),('cal','call'),('cat','catch'),('cb','cbuffer'),('cc','cc'),('ccl','cclose'),('cd','cd'),('ce','center'),('cex','cexpr'),('cf','cfile'),('cfir','cfirst'),('cg','cgetfile'),('cgetb','cgetbuffer'),('cgete','cgetexpr'),('changes','changes'),('chd','chdir'),('che','checkpath'),('checkt','checktime'),('cl','cl'),('cl','clist'),('cla','clast'),('clo','close'),('cmapc','cmapclear'),('cmdname','cmdname'),('cn','cn'),('cn','cnext'),('cnew','cnewer'),('cnf','cnf'),('cnf','cnfile'),('co','copy'),('col','colder'),('colo','colorscheme'),('com','com'),('comc','comclear'),('comment','comment'),('comp','compiler'),('con','con'),('con','continue'),('conf','confirm'),('cope','copen'),('count','count'),('cp','cprevious'),('cpf','cpfile'),('cq','cquit'),('cr','crewind'),('cs','cs'),('cscope','cscope'),('cstag','cstag'),('cuna','cunabbrev'),('cw','cwindow'),('d','d'),('d','delete'),('de','de'),('debug','debug'),('debugg','debuggreedy'),('del','del'),('delc','delcommand'),('delf','delf'),('delf','delfunction'),('delm','delmarks'),('di','di'),('di','display'),('diffg','diffget'),('diffo','diffo'),('diffoff','diffoff'),('diffp','diffp'),('diffpatch','diffpatch'),('diffpu','diffput'),('diffsplit','diffsplit'),('difft','difft'),('diffthis','diffthis'),('diffu','diffupdate'),('dig','dig'),('dig','digraphs'),('dj','djump'),('dl','dlist'),('do','do'),('doau','doau'),('dr','drop'),('ds','dsearch'),('dsp','dsplit'),('dwim','dwim'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','edit'),('ea','ea'),('earlier','earlier'),('ec','ec'),('echoe','echoerr'),('echom','echomsg'),('echon','echon'),('el','else'),('elsei','elseif'),('em','emenu'),('emenu','emenu'),('en','en'),('en','endif'),('endf','endf'),('endf','endfunction'),('endfo','endfor'),('endfun','endfun'),('endt','endtry'),('endw','endwhile'),('ene','enew'),('ex','ex'),('exi','exit'),('exu','exusage'),('f','f'),('f','file'),('filename','filename'),('files','files'),('filet','filet'),('filetype','filetype'),('fin','fin'),('fin','find'),('fina','finally'),('fini','finish'),('fir','first'),('fix','fixdel'),('fo','fold'),('foldc','foldclose'),('foldd','folddoopen'),('folddoc','folddoclosed'),('foldo','foldopen'),('for','for'),('fu','fu'),('fu','function'),('fun','fun'),('g','g'),('get','get'),('go','goto'),('gr','grep'),('grepa','grepadd'),('gs'
,'gs'),('gs','gs'),('gui','gui'),('gvim','gvim'),('h','h'),('h','h'),('h','h'),('h','h'),('h','help'),('ha','hardcopy'),('helpf','helpfind'),('helpg','helpgrep'),('helpt','helptags'),('hi','hi'),('hid','hide'),('his','history'),('i','i'),('ia','ia'),('iabc','iabclear'),('if','if'),('ij','ijump'),('il','ilist'),('imapc','imapclear'),('in','in'),('index','index'),('intro','intro'),('is','isearch'),('isp','isplit'),('iuna','iunabbrev'),('j','join'),('ju','jumps'),('k','k'),('kee','keepmarks'),('keepa','keepa'),('keepalt','keepalt'),('keepj','keepjumps'),('l','l'),('l','list'),('lN','lN'),('lN','lNext'),('lNf','lNf'),('lNf','lNfile'),('la','la'),('la','last'),('lad','lad'),('lad','laddexpr'),('laddb','laddbuffer'),('laddf','laddfile'),('lan','lan'),('lan','language'),('lat','lat'),('later','later'),('lb','lbuffer'),('lc','lcd'),('lch','lchdir'),('lcl','lclose'),('lcs','lcs'),('lcscope','lcscope'),('le','left'),('lefta','leftabove'),('let','let'),('lex','lexpr'),('lf','lfile'),('lfir','lfirst'),('lg','lgetfile'),('lgetb','lgetbuffer'),('lgete','lgetexpr'),('lgr','lgrep'),('lgrepa','lgrepadd'),('lh','lhelpgrep'),('ll','ll'),('lla','llast'),('lli','llist'),('lmak','lmake'),('lmapc','lmapclear'),('lne','lne'),('lne','lnext'),('lnew','lnewer'),('lnf','lnf'),('lnf','lnfile'),('lo','lo'),('lo','loadview'),('loadk','loadk'),('loadkeymap','loadkeymap'),('loc','lockmarks'),('locale','locale'),('lockv','lockvar'),('lol','lolder'),('lop','lopen'),('lp','lprevious'),('lpf','lpfile'),('lr','lrewind'),('ls','ls'),('lt','ltag'),('lua','lua'),('luado','luado'),('luafile','luafile'),('lv','lvimgrep'),('lvimgrepa','lvimgrepadd'),('lw','lwindow'),('m','move'),('ma','ma'),('ma','mark'),('main','main'),('main','main'),('mak','make'),('marks','marks'),('mat','match'),('menut','menut'),('menut','menutranslate'),('mes','mes'),('messages','messages'),('mk','mk'),('mk','mkexrc'),('mkdir','mkdir'),('mks','mksession'),('mksp','mkspell'),('mkv','mkv'),('mkv','mkvimrc'),('mkvie','mkview'),('mo','mo'),('mod','mode'),('mv','mv'),('mz','mz'),('mz','mzscheme'),('mzf','mzfile'),('n','n'),('n','n'),('n','next'),('nb','nbkey'),('nbc','nbclose'),('nbs','nbstart'),('ne','ne'),('new','new'),('nkf','nkf'),('nmapc','nmapclear'),('noa','noa'),('noautocmd','noautocmd'),('noh','nohlsearch'),('nu','number'),('o','o'),('o','open'),('ol','oldfiles'),('omapc','omapclear'),('on','only'),('opt','options'),('ownsyntax','ownsyntax'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','print'),('pat','pat'),('pat','pat'),('pc','pclose'),('pe','pe'),('pe','perl'),('ped','pedit'),('perld','perldo'),('po','pop'),('popu','popu'),('popu','popup'),('pp','ppop'),('pr','pr'),('pre','preserve'),('prev','previous'),('pro','pro'),('prof','profile'),('profd','profdel'),('promptf','promptfind'),('promptr','promptrepl'),('ps','psearch'),('ptN','ptN'),('ptN','ptNext'),('pta','ptag'),('ptf','ptfirst'),('ptj','ptjump'),('ptl','ptlast'),('ptn','ptn'),('ptn','ptnext'),('ptp','ptprevious'),('ptr','ptrewind'),('pts','ptselect'),('pu','put'),('pw','pwd'),('py','py'),('py','python'),('py3','py3'),('py3','py3'),('py3file','py3file'),('pyf','pyfile'),('python3','python3'),('q','q'),('q','quit'),('qa','qall'),('quita','quitall'),('quote','quote'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','read'),('re','re'),('rec','recover'),('red','red'),('red','redo'),('redi','redir'),('redr','redraw'),('redraws','redrawstatus'),('reg','registers'),('res','resize'),('ret','r
etab'),('retu','return'),('rew','rewind'),('ri','right'),('rightb','rightbelow'),('ru','ru'),('ru','runtime'),('rub','ruby'),('rubyd','rubydo'),('rubyf','rubyfile'),('rundo','rundo'),('rv','rviminfo'),('s','s'),('s','s'),('s','s'),('s','s'),('sN','sNext'),('sa','sargument'),('sal','sall'),('san','sandbox'),('sav','saveas'),('sb','sbuffer'),('sbN','sbNext'),('sba','sball'),('sbf','sbfirst'),('sbl','sblast'),('sbm','sbmodified'),('sbn','sbnext'),('sbp','sbprevious'),('sbr','sbrewind'),('scrip','scrip'),('scrip','scriptnames'),('scripte','scriptencoding'),('scs','scs'),('scscope','scscope'),('se','set'),('setf','setfiletype'),('setg','setglobal'),('setl','setlocal'),('sf','sfind'),('sfir','sfirst'),('sh','shell'),('si','si'),('sig','sig'),('sign','sign'),('sil','silent'),('sim','simalt'),('sl','sl'),('sl','sleep'),('sla','slast'),('sm','smagic'),('sm','smap'),('sme','sme'),('smenu','smenu'),('sn','snext'),('sni','sniff'),('sno','snomagic'),('snoreme','snoreme'),('snoremenu','snoremenu'),('so','so'),('so','source'),('sor','sort'),('sp','split'),('spe','spe'),('spe','spellgood'),('spelld','spelldump'),('spelli','spellinfo'),('spellr','spellrepall'),('spellu','spellundo'),('spellw','spellwrong'),('spr','sprevious'),('sre','srewind'),('st','st'),('st','stop'),('sta','stag'),('star','star'),('star','startinsert'),('start','start'),('startg','startgreplace'),('startr','startreplace'),('stj','stjump'),('stopi','stopinsert'),('sts','stselect'),('sub','sub'),('sub','sub'),('sun','sunhide'),('sunme','sunme'),('sunmenu','sunmenu'),('sus','suspend'),('sv','sview'),('sw','swapname'),('sy','sy'),('syn','syn'),('sync','sync'),('syncbind','syncbind'),('synlist','synlist'),('t','t'),('t','t'),('t','t'),('tN','tN'),('tN','tNext'),('ta','ta'),('ta','tag'),('tab','tab'),('tabN','tabN'),('tabN','tabNext'),('tabc','tabclose'),('tabd','tabdo'),('tabe','tabedit'),('tabf','tabfind'),('tabfir','tabfirst'),('tabl','tablast'),('tabm','tabmove'),('tabn','tabnext'),('tabnew','tabnew'),('tabo','tabonly'),('tabp','tabprevious'),('tabr','tabrewind'),('tabs','tabs'),('tags','tags'),('tc','tcl'),('tcld','tcldo'),('tclf','tclfile'),('te','tearoff'),('tf','tfirst'),('th','throw'),('tj','tjump'),('tl','tlast'),('tm','tm'),('tm','tmenu'),('tn','tn'),('tn','tnext'),('to','topleft'),('tp','tprevious'),('tr','tr'),('tr','trewind'),('try','try'),('ts','tselect'),('tu','tu'),('tu','tunmenu'),('u','u'),('u','undo'),('un','un'),('una','unabbreviate'),('undoj','undojoin'),('undol','undolist'),('unh','unhide'),('unl','unl'),('unlo','unlockvar'),('uns','unsilent'),('up','update'),('v','v'),('ve','ve'),('ve','version'),('verb','verbose'),('version','version'),('version','version'),('vert','vertical'),('vi','vi'),('vi','visual'),('vie','view'),('vim','vimgrep'),('vimgrepa','vimgrepadd'),('viu','viusage'),('vmapc','vmapclear'),('vne','vnew'),('vs','vsplit'),('w','w'),('w','write'),('wN','wNext'),('wa','wall'),('wh','while'),('win','win'),('win','winsize'),('winc','wincmd'),('windo','windo'),('winp','winpos'),('wn','wnext'),('wp','wprevious'),('wq','wq'),('wqa','wqall'),('ws','wsverb'),('wundo','wundo'),('wv','wviminfo'),('x','x'),('x','xit'),('xa','xall'),('xmapc','xmapclear'),('xme','xme'),('xmenu','xmenu'),('xnoreme','xnoreme'),('xnoremenu','xnoremenu'),('xterm','xterm'),('xunme','xunme'),('xunmenu','xunmenu'),('xwininfo','xwininfo'),('y','yank')]
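
Note (illustration only): each entry in these generated tables pairs a command's shortest accepted abbreviation with its full name. The sketch below shows one way such a table could be queried to decide whether a word is a valid spelling of a command; it is an assumption for explanatory purposes, not the lexer's actual code, and the helper name is_abbreviation is hypothetical.

    # Hypothetical sketch: membership test against (shortest-abbrev, full-name) pairs.
    # A word counts as a match if it is the full name truncated no shorter than
    # the minimal abbreviation, i.e. short <= word <= full as prefixes.
    command = [('bn', 'bnext'), ('bp', 'bprevious'), ('se', 'set')]  # tiny excerpt

    def is_abbreviation(word, table):
        return any(word.startswith(short) and full.startswith(word)
                   for short, full in table)

    assert is_abbreviation('bnex', command)        # 'bn' <= 'bnex' <= 'bnext'
    assert not is_abbreviation('b', command)       # shorter than the minimum 'bn'
    assert not is_abbreviation('bnexts', command)  # longer than the full name
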
+option=[('acd','acd'),('ai','ai'),('akm','akm'),('al','al'),('aleph','aleph'),('allowrevins','allowrevins'),('altkeymap','altkeymap'),('ambiwidth','ambiwidth'),('ambw','ambw'),('anti','anti'),('antialias','antialias'),('ar','ar'),('arab','arab'),('arabic','arabic'),('arabicshape','arabicshape'),('ari','ari'),('arshape','arshape'),('autochdir','autochdir'),('autoindent','autoindent'),('autoread','autoread'),('autowrite','autowrite'),('autowriteall','autowriteall'),('aw','aw'),('awa','awa'),('background','background'),('backspace','backspace'),('backup','backup'),('backupcopy','backupcopy'),('backupdir','backupdir'),('backupext','backupext'),('backupskip','backupskip'),('balloondelay','balloondelay'),('ballooneval','ballooneval'),('balloonexpr','balloonexpr'),('bdir','bdir'),('bdlay','bdlay'),('beval','beval'),('bex','bex'),('bexpr','bexpr'),('bg','bg'),('bh','bh'),('bin','bin'),('binary','binary'),('biosk','biosk'),('bioskey','bioskey'),('bk','bk'),('bkc','bkc'),('bl','bl'),('bomb','bomb'),('breakat','breakat'),('brk','brk'),('browsedir','browsedir'),('bs','bs'),('bsdir','bsdir'),('bsk','bsk'),('bt','bt'),('bufhidden','bufhidden'),('buflisted','buflisted'),('buftype','buftype'),('casemap','casemap'),('cb','cb'),('cc','cc'),('ccv','ccv'),('cd','cd'),('cdpath','cdpath'),('cedit','cedit'),('cf','cf'),('cfu','cfu'),('ch','ch'),('charconvert','charconvert'),('ci','ci'),('cin','cin'),('cindent','cindent'),('cink','cink'),('cinkeys','cinkeys'),('cino','cino'),('cinoptions','cinoptions'),('cinw','cinw'),('cinwords','cinwords'),('clipboard','clipboard'),('cmdheight','cmdheight'),('cmdwinheight','cmdwinheight'),('cmp','cmp'),('cms','cms'),('co','co'),('cocu','cocu'),('cole','cole'),('colorcolumn','colorcolumn'),('columns','columns'),('com','com'),('comments','comments'),('commentstring','commentstring'),('compatible','compatible'),('complete','complete'),('completefunc','completefunc'),('completeopt','completeopt'),('concealcursor','concealcursor'),('conceallevel','conceallevel'),('confirm','confirm'),('consk','consk'),('conskey','conskey'),('copyindent','copyindent'),('cot','cot'),('cp','cp'),('cpo','cpo'),('cpoptions','cpoptions'),('cpt','cpt'),('crb','crb'),('cryptmethod','cryptmethod'),('cscopepathcomp','cscopepathcomp'),('cscopeprg','cscopeprg'),('cscopequickfix','cscopequickfix'),('cscoperelative','cscoperelative'),('cscopetag','cscopetag'),('cscopetagorder','cscopetagorder'),('cscopeverbose','cscopeverbose'),('cspc','cspc'),('csprg','csprg'),('csqf','csqf'),('csre','csre'),('cst','cst'),('csto','csto'),('csverb','csverb'),('cuc','cuc'),('cul','cul'),('cursorbind','cursorbind'),('cursorcolumn','cursorcolumn'),('cursorline','cursorline'),('cwh','cwh'),('debug','debug'),('deco','deco'),('def','def'),('define','define'),('delcombine','delcombine'),('dex','dex'),('dg','dg'),('dict','dict'),('dictionary','dictionary'),('diff','diff'),('diffexpr','diffexpr'),('diffopt','diffopt'),('digraph','digraph'),('dip','dip'),('dir','dir'),('directory','directory'),('display','display'),('dy','dy'),('ea','ea'),('ead','ead'),('eadirection','eadirection'),('eb','eb'),('ed','ed'),('edcompatible','edcompatible'),('ef','ef'),('efm','efm'),('ei','ei'),('ek','ek'),('enc','enc'),('encoding','encoding'),('endofline','endofline'),('eol','eol'),('ep','ep'),('equalalways','equalalways'),('equalprg','equalprg'),('errorbells','errorbells'),('errorfile','errorfile'),('errorformat','errorformat'),('esckeys','esckeys'),('et','et'),('eventignore','eventignore'),('ex','ex'),('expandtab','expandtab'),('exrc','exrc'),('fcl','fcl'),
('fcs','fcs'),('fdc','fdc'),('fde','fde'),('fdi','fdi'),('fdl','fdl'),('fdls','fdls'),('fdm','fdm'),('fdn','fdn'),('fdo','fdo'),('fdt','fdt'),('fen','fen'),('fenc','fenc'),('fencs','fencs'),('fex','fex'),('ff','ff'),('ffs','ffs'),('fileencoding','fileencoding'),('fileencodings','fileencodings'),('fileformat','fileformat'),('fileformats','fileformats'),('filetype','filetype'),('fillchars','fillchars'),('fk','fk'),('fkmap','fkmap'),('flp','flp'),('fml','fml'),('fmr','fmr'),('fo','fo'),('foldclose','foldclose'),('foldcolumn','foldcolumn'),('foldenable','foldenable'),('foldexpr','foldexpr'),('foldignore','foldignore'),('foldlevel','foldlevel'),('foldlevelstart','foldlevelstart'),('foldmarker','foldmarker'),('foldmethod','foldmethod'),('foldminlines','foldminlines'),('foldnestmax','foldnestmax'),('foldopen','foldopen'),('foldtext','foldtext'),('formatexpr','formatexpr'),('formatlistpat','formatlistpat'),('formatoptions','formatoptions'),('formatprg','formatprg'),('fp','fp'),('fs','fs'),('fsync','fsync'),('ft','ft'),('gcr','gcr'),('gd','gd'),('gdefault','gdefault'),('gfm','gfm'),('gfn','gfn'),('gfs','gfs'),('gfw','gfw'),('ghr','ghr'),('go','go'),('gp','gp'),('grepformat','grepformat'),('grepprg','grepprg'),('gtl','gtl'),('gtt','gtt'),('guicursor','guicursor'),('guifont','guifont'),('guifontset','guifontset'),('guifontwide','guifontwide'),('guiheadroom','guiheadroom'),('guioptions','guioptions'),('guipty','guipty'),('guitablabel','guitablabel'),('guitabtooltip','guitabtooltip'),('helpfile','helpfile'),('helpheight','helpheight'),('helplang','helplang'),('hf','hf'),('hh','hh'),('hi','hi'),('hid','hid'),('hidden','hidden'),('highlight','highlight'),('history','history'),('hk','hk'),('hkmap','hkmap'),('hkmapp','hkmapp'),('hkp','hkp'),('hl','hl'),('hlg','hlg'),('hls','hls'),('hlsearch','hlsearch'),('ic','ic'),('icon','icon'),('iconstring','iconstring'),('ignorecase','ignorecase'),('im','im'),('imactivatekey','imactivatekey'),('imak','imak'),('imc','imc'),('imcmdline','imcmdline'),('imd','imd'),('imdisable','imdisable'),('imi','imi'),('iminsert','iminsert'),('ims','ims'),('imsearch','imsearch'),('inc','inc'),('include','include'),('includeexpr','includeexpr'),('incsearch','incsearch'),('inde','inde'),('indentexpr','indentexpr'),('indentkeys','indentkeys'),('indk','indk'),('inex','inex'),('inf','inf'),('infercase','infercase'),('inoremap','inoremap'),('insertmode','insertmode'),('invacd','invacd'),('invai','invai'),('invakm','invakm'),('invallowrevins','invallowrevins'),('invaltkeymap','invaltkeymap'),('invanti','invanti'),('invantialias','invantialias'),('invar','invar'),('invarab','invarab'),('invarabic','invarabic'),('invarabicshape','invarabicshape'),('invari','invari'),('invarshape','invarshape'),('invautochdir','invautochdir'),('invautoindent','invautoindent'),('invautoread','invautoread'),('invautowrite','invautowrite'),('invautowriteall','invautowriteall'),('invaw','invaw'),('invawa','invawa'),('invbackup','invbackup'),('invballooneval','invballooneval'),('invbeval','invbeval'),('invbin','invbin'),('invbinary','invbinary'),('invbiosk','invbiosk'),('invbioskey','invbioskey'),('invbk','invbk'),('invbl','invbl'),('invbomb','invbomb'),('invbuflisted','invbuflisted'),('invcf','invcf'),('invci','invci'),('invcin','invcin'),('invcindent','invcindent'),('invcompatible','invcompatible'),('invconfirm','invconfirm'),('invconsk','invconsk'),('invconskey','invconskey'),('invcopyindent','invcopyindent'),('invcp','invcp'),('invcrb','invcrb'),('invcscopetag','invcscopetag'),('invcscopeverbose','invcscopeverbose
'),('invcst','invcst'),('invcsverb','invcsverb'),('invcuc','invcuc'),('invcul','invcul'),('invcursorbind','invcursorbind'),('invcursorcolumn','invcursorcolumn'),('invcursorline','invcursorline'),('invdeco','invdeco'),('invdelcombine','invdelcombine'),('invdg','invdg'),('invdiff','invdiff'),('invdigraph','invdigraph'),('invea','invea'),('inveb','inveb'),('inved','inved'),('invedcompatible','invedcompatible'),('invek','invek'),('invendofline','invendofline'),('inveol','inveol'),('invequalalways','invequalalways'),('inverrorbells','inverrorbells'),('invesckeys','invesckeys'),('invet','invet'),('invex','invex'),('invexpandtab','invexpandtab'),('invexrc','invexrc'),('invfen','invfen'),('invfk','invfk'),('invfkmap','invfkmap'),('invfoldenable','invfoldenable'),('invgd','invgd'),('invgdefault','invgdefault'),('invguipty','invguipty'),('invhid','invhid'),('invhidden','invhidden'),('invhk','invhk'),('invhkmap','invhkmap'),('invhkmapp','invhkmapp'),('invhkp','invhkp'),('invhls','invhls'),('invhlsearch','invhlsearch'),('invic','invic'),('invicon','invicon'),('invignorecase','invignorecase'),('invim','invim'),('invimc','invimc'),('invimcmdline','invimcmdline'),('invimd','invimd'),('invimdisable','invimdisable'),('invincsearch','invincsearch'),('invinf','invinf'),('invinfercase','invinfercase'),('invinsertmode','invinsertmode'),('invis','invis'),('invjoinspaces','invjoinspaces'),('invjs','invjs'),('invlazyredraw','invlazyredraw'),('invlbr','invlbr'),('invlinebreak','invlinebreak'),('invlisp','invlisp'),('invlist','invlist'),('invloadplugins','invloadplugins'),('invlpl','invlpl'),('invlz','invlz'),('invma','invma'),('invmacatsui','invmacatsui'),('invmagic','invmagic'),('invmh','invmh'),('invml','invml'),('invmod','invmod'),('invmodeline','invmodeline'),('invmodifiable','invmodifiable'),('invmodified','invmodified'),('invmore','invmore'),('invmousef','invmousef'),('invmousefocus','invmousefocus'),('invmousehide','invmousehide'),('invnu','invnu'),('invnumber','invnumber'),('invodev','invodev'),('invopendevice','invopendevice'),('invpaste','invpaste'),('invpi','invpi'),('invpreserveindent','invpreserveindent'),('invpreviewwindow','invpreviewwindow'),('invprompt','invprompt'),('invpvw','invpvw'),('invreadonly','invreadonly'),('invrelativenumber','invrelativenumber'),('invremap','invremap'),('invrestorescreen','invrestorescreen'),('invrevins','invrevins'),('invri','invri'),('invrightleft','invrightleft'),('invrl','invrl'),('invrnu','invrnu'),('invro','invro'),('invrs','invrs'),('invru','invru'),('invruler','invruler'),('invsb','invsb'),('invsc','invsc'),('invscb','invscb'),('invscrollbind','invscrollbind'),('invscs','invscs'),('invsecure','invsecure'),('invsft','invsft'),('invshellslash','invshellslash'),('invshelltemp','invshelltemp'),('invshiftround','invshiftround'),('invshortname','invshortname'),('invshowcmd','invshowcmd'),('invshowfulltag','invshowfulltag'),('invshowmatch','invshowmatch'),('invshowmode','invshowmode'),('invsi','invsi'),('invsm','invsm'),('invsmartcase','invsmartcase'),('invsmartindent','invsmartindent'),('invsmarttab','invsmarttab'),('invsmd','invsmd'),('invsn','invsn'),('invsol','invsol'),('invspell','invspell'),('invsplitbelow','invsplitbelow'),('invsplitright','invsplitright'),('invspr','invspr'),('invsr','invsr'),('invssl','invssl'),('invsta','invsta'),('invstartofline','invstartofline'),('invstmp','invstmp'),('invswapfile','invswapfile'),('invswf','invswf'),('invta','invta'),('invtagbsearch','invtagbsearch'),('invtagrelative','invtagrelative'),('invtagstack','invtagstack'),('invtbi
','invtbi'),('invtbidi','invtbidi'),('invtbs','invtbs'),('invtermbidi','invtermbidi'),('invterse','invterse'),('invtextauto','invtextauto'),('invtextmode','invtextmode'),('invtf','invtf'),('invtgst','invtgst'),('invtildeop','invtildeop'),('invtimeout','invtimeout'),('invtitle','invtitle'),('invto','invto'),('invtop','invtop'),('invtr','invtr'),('invttimeout','invttimeout'),('invttybuiltin','invttybuiltin'),('invttyfast','invttyfast'),('invtx','invtx'),('invvb','invvb'),('invvisualbell','invvisualbell'),('invwa','invwa'),('invwarn','invwarn'),('invwb','invwb'),('invweirdinvert','invweirdinvert'),('invwfh','invwfh'),('invwfw','invwfw'),('invwildignorecase','invwildignorecase'),('invwildmenu','invwildmenu'),('invwinfixheight','invwinfixheight'),('invwinfixwidth','invwinfixwidth'),('invwiv','invwiv'),('invwmnu','invwmnu'),('invwrap','invwrap'),('invwrapscan','invwrapscan'),('invwrite','invwrite'),('invwriteany','invwriteany'),('invwritebackup','invwritebackup'),('invws','invws'),('is','is'),('isf','isf'),('isfname','isfname'),('isi','isi'),('isident','isident'),('isk','isk'),('iskeyword','iskeyword'),('isp','isp'),('isprint','isprint'),('joinspaces','joinspaces'),('js','js'),('key','key'),('keymap','keymap'),('keymodel','keymodel'),('keywordprg','keywordprg'),('km','km'),('kmp','kmp'),('kp','kp'),('langmap','langmap'),('langmenu','langmenu'),('laststatus','laststatus'),('lazyredraw','lazyredraw'),('lbr','lbr'),('lcs','lcs'),('linebreak','linebreak'),('lines','lines'),('linespace','linespace'),('lisp','lisp'),('lispwords','lispwords'),('list','list'),('listchars','listchars'),('lm','lm'),('lmap','lmap'),('loadplugins','loadplugins'),('lpl','lpl'),('ls','ls'),('lsp','lsp'),('lw','lw'),('lz','lz'),('ma','ma'),('macatsui','macatsui'),('magic','magic'),('makeef','makeef'),('makeprg','makeprg'),('mat','mat'),('matchpairs','matchpairs'),('matchtime','matchtime'),('maxcombine','maxcombine'),('maxfuncdepth','maxfuncdepth'),('maxmapdepth','maxmapdepth'),('maxmem','maxmem'),('maxmempattern','maxmempattern'),('maxmemtot','maxmemtot'),('mco','mco'),('mef','mef'),('menuitems','menuitems'),('mfd','mfd'),('mh','mh'),('mis','mis'),('mkspellmem','mkspellmem'),('ml','ml'),('mls','mls'),('mm','mm'),('mmd','mmd'),('mmp','mmp'),('mmt','mmt'),('mod','mod'),('modeline','modeline'),('modelines','modelines'),('modifiable','modifiable'),('modified','modified'),('more','more'),('mouse','mouse'),('mousef','mousef'),('mousefocus','mousefocus'),('mousehide','mousehide'),('mousem','mousem'),('mousemodel','mousemodel'),('mouses','mouses'),('mouseshape','mouseshape'),('mouset','mouset'),('mousetime','mousetime'),('mp','mp'),('mps','mps'),('msm','msm'),('mzq','mzq'),('mzquantum','mzquantum'),('nf','nf'),('nnoremap','nnoremap'),('noacd','noacd'),('noai','noai'),('noakm','noakm'),('noallowrevins','noallowrevins'),('noaltkeymap','noaltkeymap'),('noanti','noanti'),('noantialias','noantialias'),('noar','noar'),('noarab','noarab'),('noarabic','noarabic'),('noarabicshape','noarabicshape'),('noari','noari'),('noarshape','noarshape'),('noautochdir','noautochdir'),('noautoindent','noautoindent'),('noautoread','noautoread'),('noautowrite','noautowrite'),('noautowriteall','noautowriteall'),('noaw','noaw'),('noawa','noawa'),('nobackup','nobackup'),('noballooneval','noballooneval'),('nobeval','nobeval'),('nobin','nobin'),('nobinary','nobinary'),('nobiosk','nobiosk'),('nobioskey','nobioskey'),('nobk','nobk'),('nobl','nobl'),('nobomb','nobomb'),('nobuflisted','nobuflisted'),('nocf','nocf'),('noci','noci'),('nocin','nocin'),('nocindent','nocinde
nt'),('nocompatible','nocompatible'),('noconfirm','noconfirm'),('noconsk','noconsk'),('noconskey','noconskey'),('nocopyindent','nocopyindent'),('nocp','nocp'),('nocrb','nocrb'),('nocscopetag','nocscopetag'),('nocscopeverbose','nocscopeverbose'),('nocst','nocst'),('nocsverb','nocsverb'),('nocuc','nocuc'),('nocul','nocul'),('nocursorbind','nocursorbind'),('nocursorcolumn','nocursorcolumn'),('nocursorline','nocursorline'),('nodeco','nodeco'),('nodelcombine','nodelcombine'),('nodg','nodg'),('nodiff','nodiff'),('nodigraph','nodigraph'),('noea','noea'),('noeb','noeb'),('noed','noed'),('noedcompatible','noedcompatible'),('noek','noek'),('noendofline','noendofline'),('noeol','noeol'),('noequalalways','noequalalways'),('noerrorbells','noerrorbells'),('noesckeys','noesckeys'),('noet','noet'),('noex','noex'),('noexpandtab','noexpandtab'),('noexrc','noexrc'),('nofen','nofen'),('nofk','nofk'),('nofkmap','nofkmap'),('nofoldenable','nofoldenable'),('nogd','nogd'),('nogdefault','nogdefault'),('noguipty','noguipty'),('nohid','nohid'),('nohidden','nohidden'),('nohk','nohk'),('nohkmap','nohkmap'),('nohkmapp','nohkmapp'),('nohkp','nohkp'),('nohls','nohls'),('nohlsearch','nohlsearch'),('noic','noic'),('noicon','noicon'),('noignorecase','noignorecase'),('noim','noim'),('noimc','noimc'),('noimcmdline','noimcmdline'),('noimd','noimd'),('noimdisable','noimdisable'),('noincsearch','noincsearch'),('noinf','noinf'),('noinfercase','noinfercase'),('noinsertmode','noinsertmode'),('nois','nois'),('nojoinspaces','nojoinspaces'),('nojs','nojs'),('nolazyredraw','nolazyredraw'),('nolbr','nolbr'),('nolinebreak','nolinebreak'),('nolisp','nolisp'),('nolist','nolist'),('noloadplugins','noloadplugins'),('nolpl','nolpl'),('nolz','nolz'),('noma','noma'),('nomacatsui','nomacatsui'),('nomagic','nomagic'),('nomh','nomh'),('noml','noml'),('nomod','nomod'),('nomodeline','nomodeline'),('nomodifiable','nomodifiable'),('nomodified','nomodified'),('nomore','nomore'),('nomousef','nomousef'),('nomousefocus','nomousefocus'),('nomousehide','nomousehide'),('nonu','nonu'),('nonumber','nonumber'),('noodev','noodev'),('noopendevice','noopendevice'),('nopaste','nopaste'),('nopi','nopi'),('nopreserveindent','nopreserveindent'),('nopreviewwindow','nopreviewwindow'),('noprompt','noprompt'),('nopvw','nopvw'),('noreadonly','noreadonly'),('norelativenumber','norelativenumber'),('noremap','noremap'),('norestorescreen','norestorescreen'),('norevins','norevins'),('nori','nori'),('norightleft','norightleft'),('norl','norl'),('nornu','nornu'),('noro','noro'),('nors','nors'),('noru','noru'),('noruler','noruler'),('nosb','nosb'),('nosc','nosc'),('noscb','noscb'),('noscrollbind','noscrollbind'),('noscs','noscs'),('nosecure','nosecure'),('nosft','nosft'),('noshellslash','noshellslash'),('noshelltemp','noshelltemp'),('noshiftround','noshiftround'),('noshortname','noshortname'),('noshowcmd','noshowcmd'),('noshowfulltag','noshowfulltag'),('noshowmatch','noshowmatch'),('noshowmode','noshowmode'),('nosi','nosi'),('nosm','nosm'),('nosmartcase','nosmartcase'),('nosmartindent','nosmartindent'),('nosmarttab','nosmarttab'),('nosmd','nosmd'),('nosn','nosn'),('nosol','nosol'),('nospell','nospell'),('nosplitbelow','nosplitbelow'),('nosplitright','nosplitright'),('nospr','nospr'),('nosr','nosr'),('nossl','nossl'),('nosta','nosta'),('nostartofline','nostartofline'),('nostmp','nostmp'),('noswapfile','noswapfile'),('noswf','noswf'),('nota','nota'),('notagbsearch','notagbsearch'),('notagrelative','notagrelative'),('notagstack','notagstack'),('notbi','notbi'),('notbidi','notbidi'),('
notbs','notbs'),('notermbidi','notermbidi'),('noterse','noterse'),('notextauto','notextauto'),('notextmode','notextmode'),('notf','notf'),('notgst','notgst'),('notildeop','notildeop'),('notimeout','notimeout'),('notitle','notitle'),('noto','noto'),('notop','notop'),('notr','notr'),('nottimeout','nottimeout'),('nottybuiltin','nottybuiltin'),('nottyfast','nottyfast'),('notx','notx'),('novb','novb'),('novisualbell','novisualbell'),('nowa','nowa'),('nowarn','nowarn'),('nowb','nowb'),('noweirdinvert','noweirdinvert'),('nowfh','nowfh'),('nowfw','nowfw'),('nowildignorecase','nowildignorecase'),('nowildmenu','nowildmenu'),('nowinfixheight','nowinfixheight'),('nowinfixwidth','nowinfixwidth'),('nowiv','nowiv'),('nowmnu','nowmnu'),('nowrap','nowrap'),('nowrapscan','nowrapscan'),('nowrite','nowrite'),('nowriteany','nowriteany'),('nowritebackup','nowritebackup'),('nows','nows'),('nrformats','nrformats'),('nu','nu'),('number','number'),('numberwidth','numberwidth'),('nuw','nuw'),('odev','odev'),('oft','oft'),('ofu','ofu'),('omnifunc','omnifunc'),('opendevice','opendevice'),('operatorfunc','operatorfunc'),('opfunc','opfunc'),('osfiletype','osfiletype'),('pa','pa'),('para','para'),('paragraphs','paragraphs'),('paste','paste'),('pastetoggle','pastetoggle'),('patchexpr','patchexpr'),('patchmode','patchmode'),('path','path'),('pdev','pdev'),('penc','penc'),('pex','pex'),('pexpr','pexpr'),('pfn','pfn'),('ph','ph'),('pheader','pheader'),('pi','pi'),('pm','pm'),('pmbcs','pmbcs'),('pmbfn','pmbfn'),('popt','popt'),('preserveindent','preserveindent'),('previewheight','previewheight'),('previewwindow','previewwindow'),('printdevice','printdevice'),('printencoding','printencoding'),('printexpr','printexpr'),('printfont','printfont'),('printheader','printheader'),('printmbcharset','printmbcharset'),('printmbfont','printmbfont'),('printoptions','printoptions'),('prompt','prompt'),('pt','pt'),('pumheight','pumheight'),('pvh','pvh'),('pvw','pvw'),('qe','qe'),('quoteescape','quoteescape'),('rdt','rdt'),('readonly','readonly'),('redrawtime','redrawtime'),('relativenumber','relativenumber'),('remap','remap'),('report','report'),('restorescreen','restorescreen'),('revins','revins'),('ri','ri'),('rightleft','rightleft'),('rightleftcmd','rightleftcmd'),('rl','rl'),('rlc','rlc'),('rnu','rnu'),('ro','ro'),('rs','rs'),('rtp','rtp'),('ru','ru'),('ruf','ruf'),('ruler','ruler'),('rulerformat','rulerformat'),('runtimepath','runtimepath'),('sb','sb'),('sbo','sbo'),('sbr','sbr'),('sc','sc'),('scb','scb'),('scr','scr'),('scroll','scroll'),('scrollbind','scrollbind'),('scrolljump','scrolljump'),('scrolloff','scrolloff'),('scrollopt','scrollopt'),('scs','scs'),('sect','sect'),('sections','sections'),('secure','secure'),('sel','sel'),('selection','selection'),('selectmode','selectmode'),('sessionoptions','sessionoptions'),('sft','sft'),('sh','sh'),('shcf','shcf'),('shell','shell'),('shellcmdflag','shellcmdflag'),('shellpipe','shellpipe'),('shellquote','shellquote'),('shellredir','shellredir'),('shellslash','shellslash'),('shelltemp','shelltemp'),('shelltype','shelltype'),('shellxquote','shellxquote'),('shiftround','shiftround'),('shiftwidth','shiftwidth'),('shm','shm'),('shortmess','shortmess'),('shortname','shortname'),('showbreak','showbreak'),('showcmd','showcmd'),('showfulltag','showfulltag'),('showmatch','showmatch'),('showmode','showmode'),('showtabline','showtabline'),('shq','shq'),('si','si'),('sidescroll','sidescroll'),('sidescrolloff','sidescrolloff'),('siso','siso'),('sj','sj'),('slm','slm'),('sm','sm'),('smartcase','smartcase')
,('smartindent','smartindent'),('smarttab','smarttab'),('smc','smc'),('smd','smd'),('sn','sn'),('so','so'),('softtabstop','softtabstop'),('sol','sol'),('sp','sp'),('spc','spc'),('spell','spell'),('spellcapcheck','spellcapcheck'),('spellfile','spellfile'),('spelllang','spelllang'),('spellsuggest','spellsuggest'),('spf','spf'),('spl','spl'),('splitbelow','splitbelow'),('splitright','splitright'),('spr','spr'),('sps','sps'),('sr','sr'),('srr','srr'),('ss','ss'),('ssl','ssl'),('ssop','ssop'),('st','st'),('sta','sta'),('stal','stal'),('startofline','startofline'),('statusline','statusline'),('stl','stl'),('stmp','stmp'),('sts','sts'),('su','su'),('sua','sua'),('suffixes','suffixes'),('suffixesadd','suffixesadd'),('sw','sw'),('swapfile','swapfile'),('swapsync','swapsync'),('swb','swb'),('swf','swf'),('switchbuf','switchbuf'),('sws','sws'),('sxq','sxq'),('syn','syn'),('synmaxcol','synmaxcol'),('syntax','syntax'),('t_AB','t_AB'),('t_AF','t_AF'),('t_AL','t_AL'),('t_CS','t_CS'),('t_CV','t_CV'),('t_Ce','t_Ce'),('t_Co','t_Co'),('t_Cs','t_Cs'),('t_DL','t_DL'),('t_EI','t_EI'),('t_F1','t_F1'),('t_F2','t_F2'),('t_F3','t_F3'),('t_F4','t_F4'),('t_F5','t_F5'),('t_F6','t_F6'),('t_F7','t_F7'),('t_F8','t_F8'),('t_F9','t_F9'),('t_IE','t_IE'),('t_IS','t_IS'),('t_K1','t_K1'),('t_K3','t_K3'),('t_K4','t_K4'),('t_K5','t_K5'),('t_K6','t_K6'),('t_K7','t_K7'),('t_K8','t_K8'),('t_K9','t_K9'),('t_KA','t_KA'),('t_KB','t_KB'),('t_KC','t_KC'),('t_KD','t_KD'),('t_KE','t_KE'),('t_KF','t_KF'),('t_KG','t_KG'),('t_KH','t_KH'),('t_KI','t_KI'),('t_KJ','t_KJ'),('t_KK','t_KK'),('t_KL','t_KL'),('t_RI','t_RI'),('t_RV','t_RV'),('t_SI','t_SI'),('t_Sb','t_Sb'),('t_Sf','t_Sf'),('t_WP','t_WP'),('t_WS','t_WS'),('t_ZH','t_ZH'),('t_ZR','t_ZR'),('t_al','t_al'),('t_bc','t_bc'),('t_cd','t_cd'),('t_ce','t_ce'),('t_cl','t_cl'),('t_cm','t_cm'),('t_cs','t_cs'),('t_da','t_da'),('t_db','t_db'),('t_dl','t_dl'),('t_fs','t_fs'),('t_k1','t_k1'),('t_k2','t_k2'),('t_k3','t_k3'),('t_k4','t_k4'),('t_k5','t_k5'),('t_k6','t_k6'),('t_k7','t_k7'),('t_k8','t_k8'),('t_k9','t_k9'),('t_kB','t_kB'),('t_kD','t_kD'),('t_kI','t_kI'),('t_kN','t_kN'),('t_kP','t_kP'),('t_kb','t_kb'),('t_kd','t_kd'),('t_ke','t_ke'),('t_kh','t_kh'),('t_kl','t_kl'),('t_kr','t_kr'),('t_ks','t_ks'),('t_ku','t_ku'),('t_le','t_le'),('t_mb','t_mb'),('t_md','t_md'),('t_me','t_me'),('t_mr','t_mr'),('t_ms','t_ms'),('t_nd','t_nd'),('t_op','t_op'),('t_se','t_se'),('t_so','t_so'),('t_sr','t_sr'),('t_te','t_te'),('t_ti','t_ti'),('t_ts','t_ts'),('t_ue','t_ue'),('t_us','t_us'),('t_ut','t_ut'),('t_vb','t_vb'),('t_ve','t_ve'),('t_vi','t_vi'),('t_vs','t_vs'),('t_xs','t_xs'),('ta','ta'),('tabline','tabline'),('tabpagemax','tabpagemax'),('tabstop','tabstop'),('tag','tag'),('tagbsearch','tagbsearch'),('taglength','taglength'),('tagrelative','tagrelative'),('tags','tags'),('tagstack','tagstack'),('tal','tal'),('tb','tb'),('tbi','tbi'),('tbidi','tbidi'),('tbis','tbis'),('tbs','tbs'),('tenc','tenc'),('term','term'),('termbidi','termbidi'),('termencoding','termencoding'),('terse','terse'),('textauto','textauto'),('textmode','textmode'),('textwidth','textwidth'),('tf','tf'),('tgst','tgst'),('thesaurus','thesaurus'),('tildeop','tildeop'),('timeout','timeout'),('timeoutlen','timeoutlen'),('title','title'),('titlelen','titlelen'),('titleold','titleold'),('titlestring','titlestring'),('tl','tl'),('tm','tm'),('to','to'),('toolbar','toolbar'),('toolbariconsize','toolbariconsize'),('top','top'),('tpm','tpm'),('tr','tr'),('ts','ts'),('tsl','tsl'),('tsr','tsr'),('ttimeout','ttimeout'),('ttimeoutlen','ttimeoutlen'),('ttm','ttm'),
('tty','tty'),('ttybuiltin','ttybuiltin'),('ttyfast','ttyfast'),('ttym','ttym'),('ttymouse','ttymouse'),('ttyscroll','ttyscroll'),('ttytype','ttytype'),('tw','tw'),('tx','tx'),('uc','uc'),('udf','udf'),('udir','udir'),('ul','ul'),('undodir','undodir'),('undofile','undofile'),('undolevels','undolevels'),('undoreload','undoreload'),('updatecount','updatecount'),('updatetime','updatetime'),('ur','ur'),('ut','ut'),('vb','vb'),('vbs','vbs'),('vdir','vdir'),('ve','ve'),('verbose','verbose'),('verbosefile','verbosefile'),('vfile','vfile'),('vi','vi'),('viewdir','viewdir'),('viewoptions','viewoptions'),('viminfo','viminfo'),('virtualedit','virtualedit'),('visualbell','visualbell'),('vnoremap','vnoremap'),('vop','vop'),('wa','wa'),('wak','wak'),('warn','warn'),('wb','wb'),('wc','wc'),('wcm','wcm'),('wd','wd'),('weirdinvert','weirdinvert'),('wfh','wfh'),('wfw','wfw'),('wh','wh'),('whichwrap','whichwrap'),('wi','wi'),('wic','wic'),('wig','wig'),('wildchar','wildchar'),('wildcharm','wildcharm'),('wildignore','wildignore'),('wildignorecase','wildignorecase'),('wildmenu','wildmenu'),('wildmode','wildmode'),('wildoptions','wildoptions'),('wim','wim'),('winaltkeys','winaltkeys'),('window','window'),('winfixheight','winfixheight'),('winfixwidth','winfixwidth'),('winheight','winheight'),('winminheight','winminheight'),('winminwidth','winminwidth'),('winwidth','winwidth'),('wiv','wiv'),('wiw','wiw'),('wm','wm'),('wmh','wmh'),('wmnu','wmnu'),('wmw','wmw'),('wop','wop'),('wrap','wrap'),('wrapmargin','wrapmargin'),('wrapscan','wrapscan'),('write','write'),('writeany','writeany'),('writebackup','writebackup'),('writedelay','writedelay'),('ws','ws'),('ww','ww')]
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
index 0c0de54e..fee062cb 100644
--- a/pygments/lexers/agile.py
+++ b/pygments/lexers/agile.py
@@ -5,28 +5,28 @@
Lexers for agile languages.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, \
- LexerContext, include, combined, do_insertions, bygroups, using, this
-from pygments.token import Error, Text, Whitespace, Other, \
+ LexerContext, include, combined, do_insertions, bygroups, using
+from pygments.token import Error, Text, Other, \
Comment, Operator, Keyword, Name, String, Number, Generic, Punctuation
from pygments.util import get_bool_opt, get_list_opt, shebang_matches
from pygments import unistring as uni
__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
- 'RubyLexer', 'RubyConsoleLexer', 'PerlLexer', 'LuaLexer',
- 'MiniDLexer', 'IoLexer', 'TclLexer', 'ClojureLexer',
- 'Python3Lexer', 'Python3TracebackLexer', 'FactorLexer',
- 'IokeLexer', 'FancyLexer', 'GroovyLexer']
+ 'Python3Lexer', 'Python3TracebackLexer', 'RubyLexer',
+ 'RubyConsoleLexer', 'PerlLexer', 'LuaLexer', 'MoonScriptLexer',
+ 'MiniDLexer', 'IoLexer', 'TclLexer', 'FactorLexer', 'FancyLexer']
# b/w compatibility
from pygments.lexers.functional import SchemeLexer
+from pygments.lexers.jvm import IokeLexer, ClojureLexer
line_re = re.compile('.*?\n')
@@ -105,12 +105,12 @@ class PythonLexer(RegexLexer):
r'WindowsError|ZeroDivisionError)\b', Name.Exception),
],
'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'0[0-7]+', Number.Oct),
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
+ (r'0[0-7]+j?', Number.Oct),
(r'0[xX][a-fA-F0-9]+', Number.Hex),
(r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
+ (r'\d+j?', Number.Integer)
],
'backtick': [
('`.*?`', String.Backtick),
@@ -156,12 +156,12 @@ class PythonLexer(RegexLexer):
],
'dqs': [
(r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here again for raw strings
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
include('strings')
],
'sqs': [
(r"'", String, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here again for raw strings
+ (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
include('strings')
],
'tdqs': [
@@ -199,7 +199,7 @@ class Python3Lexer(RegexLexer):
tokens = PythonLexer.tokens.copy()
tokens['keywords'] = [
(r'(assert|break|continue|del|elif|else|except|'
- r'finally|for|global|if|lambda|pass|raise|'
+ r'finally|for|global|if|lambda|pass|raise|nonlocal|'
r'return|try|while|yield|as|with|True|False|None)\b', Keyword),
]
tokens['builtins'] = [
@@ -368,7 +368,8 @@ class PythonTracebackLexer(RegexLexer):
tokens = {
'root': [
- (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
+ (r'^Traceback \(most recent call last\):\n',
+ Generic.Traceback, 'intb'),
# SyntaxError starts with this.
(r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
(r'^.*\n', Other),
@@ -587,7 +588,7 @@ class RubyLexer(ExtendedRegexLexer):
tokens = {
'root': [
(r'#.*?$', Comment.Single),
- (r'=begin\s.*?\n=end', Comment.Multiline),
+ (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
# keywords
(r'(BEGIN|END|alias|begin|break|case|defined\?|'
r'do|else|elsif|end|ensure|for|if|in|next|redo|'
@@ -602,18 +603,19 @@ class RubyLexer(ExtendedRegexLexer):
# special methods
(r'(initialize|new|loop|include|extend|raise|attr_reader|'
r'attr_writer|attr_accessor|attr|catch|throw|private|'
- r'module_function|public|protected|true|false|nil)\b', Keyword.Pseudo),
+ r'module_function|public|protected|true|false|nil)\b',
+ Keyword.Pseudo),
(r'(not|and|or)\b', Operator.Word),
(r'(autoload|block_given|const_defined|eql|equal|frozen|include|'
r'instance_of|is_a|iterator|kind_of|method_defined|nil|'
r'private_method_defined|protected_method_defined|'
r'public_method_defined|respond_to|tainted)\?', Name.Builtin),
(r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
- (r'(?<!\.)(Array|Float|Integer|String|__id__|__send__|abort|ancestors|'
- r'at_exit|autoload|binding|callcc|caller|'
+ (r'(?<!\.)(Array|Float|Integer|String|__id__|__send__|abort|'
+ r'ancestors|at_exit|autoload|binding|callcc|caller|'
r'catch|chomp|chop|class_eval|class_variables|'
- r'clone|const_defined\?|const_get|const_missing|const_set|constants|'
- r'display|dup|eval|exec|exit|extend|fail|fork|'
+ r'clone|const_defined\?|const_get|const_missing|const_set|'
+ r'constants|display|dup|eval|exec|exit|extend|fail|fork|'
r'format|freeze|getc|gets|global_variables|gsub|'
r'hash|id|included_modules|inspect|instance_eval|'
r'instance_method|instance_methods|'
@@ -632,7 +634,8 @@ class RubyLexer(ExtendedRegexLexer):
r'warn)\b', Name.Builtin),
(r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
# normal heredocs
- (r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)', heredoc_callback),
+ (r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
+ heredoc_callback),
# empty string heredocs
(r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
(r'__END__', Comment.Preproc, 'end-part'),
@@ -737,7 +740,8 @@ class RubyLexer(ExtendedRegexLexer):
],
'string-intp-escaped': [
include('string-intp'),
- (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})', String.Escape)
+ (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
+ String.Escape)
],
'interpolated-regex': [
include('string-intp'),
@@ -824,19 +828,19 @@ class PerlLexer(RegexLexer):
mimetypes = ['text/x-perl', 'application/x-perl']
flags = re.DOTALL | re.MULTILINE
- # TODO: give this a perl guy who knows how to parse perl...
+ # TODO: give this to a perl guy who knows how to parse perl...
tokens = {
'balanced-regex': [
- (r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'),
- (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'),
+ (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'),
+ (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'),
(r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
- (r'{(\\\\|\\}|[^}])*}[egimosx]*', String.Regex, '#pop'),
- (r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'),
- (r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'),
- (r'\((\\\\|\\\)|[^\)])*\)[egimosx]*', String.Regex, '#pop'),
- (r'@(\\\\|\\\@|[^\@])*@[egimosx]*', String.Regex, '#pop'),
- (r'%(\\\\|\\\%|[^\%])*%[egimosx]*', String.Regex, '#pop'),
- (r'\$(\\\\|\\\$|[^\$])*\$[egimosx]*', String.Regex, '#pop'),
+ (r'{(\\\\|\\[^\\]|[^\\}])*}[egimosx]*', String.Regex, '#pop'),
+ (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'),
+ (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'),
+ (r'\((\\\\|\\[^\\]|[^\\\)])*\)[egimosx]*', String.Regex, '#pop'),
+ (r'@(\\\\|\\[^\\]|[^\\\@])*@[egimosx]*', String.Regex, '#pop'),
+ (r'%(\\\\|\\[^\\]|[^\\\%])*%[egimosx]*', String.Regex, '#pop'),
+ (r'\$(\\\\|\\[^\\]|[^\\\$])*\$[egimosx]*', String.Regex, '#pop'),
],
'root': [
(r'\#.*?$', Comment.Single),
@@ -848,20 +852,26 @@ class PerlLexer(RegexLexer):
bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
(r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
# common delimiters
- (r's/(\\\\|\\/|[^/])*/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex),
+ (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*',
+ String.Regex),
(r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
(r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
- (r's@(\\\\|\\@|[^@])*@(\\\\|\\@|[^@])*@[egimosx]*', String.Regex),
- (r's%(\\\\|\\%|[^%])*%(\\\\|\\%|[^%])*%[egimosx]*', String.Regex),
+ (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*',
+ String.Regex),
+ (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*',
+ String.Regex),
# balanced delimiters
- (r's{(\\\\|\\}|[^}])*}\s*', String.Regex, 'balanced-regex'),
- (r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'),
- (r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'),
- (r's\((\\\\|\\\)|[^\)])*\)\s*', String.Regex, 'balanced-regex'),
-
- (r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex),
+ (r's{(\\\\|\\[^\\]|[^\\}])*}\s*', String.Regex, 'balanced-regex'),
+ (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'),
+ (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex,
+ 'balanced-regex'),
+ (r's\((\\\\|\\[^\\]|[^\\\)])*\)\s*', String.Regex,
+ 'balanced-regex'),
+
+ (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex),
(r'm(?=[/!\\{<\[\(@%\$])', String.Regex, 'balanced-regex'),
- (r'((?<==~)|(?<=\())\s*/(\\\\|\\/|[^/])*/[gcimosx]*', String.Regex),
+ (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*',
+ String.Regex),
(r'\s+', Text),
(r'(abs|accept|alarm|atan2|bind|binmode|bless|caller|chdir|'
r'chmod|chomp|chop|chown|chr|chroot|close|closedir|connect|'
@@ -903,9 +913,9 @@ class PerlLexer(RegexLexer):
Number.Float),
(r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
(r'\d+(_\d+)*', Number.Integer),
- (r"'(\\\\|\\'|[^'])*'", String),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r'`(\\\\|\\`|[^`])*`', String.Backtick),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick),
(r'<([^\s>]+)>', String.Regex),
(r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
(r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
@@ -1047,7 +1057,7 @@ class LuaLexer(RegexLexer):
(r'(local)\b', Keyword.Declaration),
(r'(true|false|nil)\b', Keyword.Constant),
- (r'(function)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(function)\b', Keyword, 'funcname'),
(r'[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)?', Name),
@@ -1056,6 +1066,7 @@ class LuaLexer(RegexLexer):
],
'funcname': [
+ (r'\s+', Text),
('(?:([A-Za-z_][A-Za-z0-9_]*)(\.))?([A-Za-z_][A-Za-z0-9_]*)',
bygroups(Name.Class, Punctuation, Name.Function), '#pop'),
# inline function
@@ -1112,6 +1123,73 @@ class LuaLexer(RegexLexer):
yield index, token, value
+class MoonScriptLexer(LuaLexer):
+ """
+ For `MoonScript <http://moonscript.org>`_ source code.
+
+ *New in Pygments 1.5.*
+ """
+
+ name = "MoonScript"
+ aliases = ["moon", "moonscript"]
+ filenames = ["*.moon"]
+ mimetypes = ['text/x-moonscript', 'application/x-moonscript']
+
+ tokens = {
+ 'root': [
+ (r'#!(.*?)$', Comment.Preproc),
+ (r'', Text, 'base'),
+ ],
+ 'base': [
+ ('--.*$', Comment.Single),
+ (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
+ (r'(?i)\d+e[+-]?\d+', Number.Float),
+ (r'(?i)0x[0-9a-f]*', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'(?s)\[(=*)\[.*?\]\1\]', String),
+ (r'(->|=>)', Name.Function),
+ (r':[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
+ (r'(==|!=|~=|<=|>=|\.\.|\.\.\.|[=+\-*/%^<>#!.\\:])', Operator),
+ (r'[;,]', Punctuation),
+ (r'[\[\]\{\}\(\)]', Keyword.Type),
+ (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Variable),
+ (r"(class|extends|if|then|super|do|with|import|export|"
+ r"while|elseif|return|for|in|from|when|using|else|"
+ r"and|or|not|switch|break)\b", Keyword),
+ (r'(true|false|nil)\b', Keyword.Constant),
+ (r'(and|or|not)\b', Operator.Word),
+ (r'(self)\b', Name.Builtin.Pseudo),
+ (r'@@?([a-zA-Z_][a-zA-Z0-9_]*)?', Name.Variable.Class),
+ (r'[A-Z]\w*', Name.Class), # proper name
+ (r'[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)?', Name),
+ ("'", String.Single, combined('stringescape', 'sqs')),
+ ('"', String.Double, combined('stringescape', 'dqs'))
+ ],
+ 'stringescape': [
+ (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
+ ],
+ 'sqs': [
+ ("'", String.Single, '#pop'),
+ (".", String)
+ ],
+ 'dqs': [
+ ('"', String.Double, '#pop'),
+ (".", String)
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ # set . as Operator instead of Punctuation
+ for index, token, value in \
+ LuaLexer.get_tokens_unprocessed(self, text):
+ if token == Punctuation and value == ".":
+ token = Operator
+ yield index, token, value
+
+
+
class MiniDLexer(RegexLexer):
"""
For `MiniD <http://www.dsource.org/projects/minid>`_ (a D-like scripting
@@ -1354,133 +1432,6 @@ class TclLexer(RegexLexer):
return shebang_matches(text, r'(tcl)')
-class ClojureLexer(RegexLexer):
- """
- Lexer for `Clojure <http://clojure.org/>`_ source code.
-
- *New in Pygments 0.11.*
- """
- name = 'Clojure'
- aliases = ['clojure', 'clj']
- filenames = ['*.clj']
- mimetypes = ['text/x-clojure', 'application/x-clojure']
-
- keywords = [
- 'fn', 'def', 'defn', 'defmacro', 'defmethod', 'defmulti', 'defn-',
- 'defstruct', 'if', 'cond', 'let', 'for'
- ]
- builtins = [
- '.', '..',
- '*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=',
- 'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
- 'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
- 'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float',
- 'aset-int', 'aset-long', 'aset-short', 'assert', 'assoc', 'await',
- 'await-for', 'bean', 'binding', 'bit-and', 'bit-not', 'bit-or',
- 'bit-shift-left', 'bit-shift-right', 'bit-xor', 'boolean', 'branch?',
- 'butlast', 'byte', 'cast', 'char', 'children', 'class',
- 'clear-agent-errors', 'comment', 'commute', 'comp', 'comparator',
- 'complement', 'concat', 'conj', 'cons', 'constantly',
- 'construct-proxy', 'contains?', 'count', 'create-ns', 'create-struct',
- 'cycle', 'dec', 'deref', 'difference', 'disj', 'dissoc', 'distinct',
- 'doall', 'doc', 'dorun', 'doseq', 'dosync', 'dotimes', 'doto',
- 'double', 'down', 'drop', 'drop-while', 'edit', 'end?', 'ensure',
- 'eval', 'every?', 'false?', 'ffirst', 'file-seq', 'filter', 'find',
- 'find-doc', 'find-ns', 'find-var', 'first', 'float', 'flush',
- 'fnseq', 'frest', 'gensym', 'get-proxy-class', 'get',
- 'hash-map', 'hash-set', 'identical?', 'identity', 'if-let', 'import',
- 'in-ns', 'inc', 'index', 'insert-child', 'insert-left', 'insert-right',
- 'inspect-table', 'inspect-tree', 'instance?', 'int', 'interleave',
- 'intersection', 'into', 'into-array', 'iterate', 'join', 'key', 'keys',
- 'keyword', 'keyword?', 'last', 'lazy-cat', 'lazy-cons', 'left',
- 'lefts', 'line-seq', 'list*', 'list', 'load', 'load-file',
- 'locking', 'long', 'loop', 'macroexpand', 'macroexpand-1',
- 'make-array', 'make-node', 'map', 'map-invert', 'map?', 'mapcat',
- 'max', 'max-key', 'memfn', 'merge', 'merge-with', 'meta', 'min',
- 'min-key', 'name', 'namespace', 'neg?', 'new', 'newline', 'next',
- 'nil?', 'node', 'not', 'not-any?', 'not-every?', 'not=', 'ns-imports',
- 'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers',
- 'ns-resolve', 'ns-unmap', 'nth', 'nthrest', 'or', 'parse', 'partial',
- 'path', 'peek', 'pop', 'pos?', 'pr', 'pr-str', 'print', 'print-str',
- 'println', 'println-str', 'prn', 'prn-str', 'project', 'proxy',
- 'proxy-mappings', 'quot', 'rand', 'rand-int', 'range', 're-find',
- 're-groups', 're-matcher', 're-matches', 're-pattern', 're-seq',
- 'read', 'read-line', 'reduce', 'ref', 'ref-set', 'refer', 'rem',
- 'remove', 'remove-method', 'remove-ns', 'rename', 'rename-keys',
- 'repeat', 'replace', 'replicate', 'resolve', 'rest', 'resultset-seq',
- 'reverse', 'rfirst', 'right', 'rights', 'root', 'rrest', 'rseq',
- 'second', 'select', 'select-keys', 'send', 'send-off', 'seq',
- 'seq-zip', 'seq?', 'set', 'short', 'slurp', 'some', 'sort',
- 'sort-by', 'sorted-map', 'sorted-map-by', 'sorted-set',
- 'special-symbol?', 'split-at', 'split-with', 'str', 'string?',
- 'struct', 'struct-map', 'subs', 'subvec', 'symbol', 'symbol?',
- 'sync', 'take', 'take-nth', 'take-while', 'test', 'time', 'to-array',
- 'to-array-2d', 'tree-seq', 'true?', 'union', 'up', 'update-proxy',
- 'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
- 'vector?', 'when', 'when-first', 'when-let', 'when-not',
- 'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
- 'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper']
-
- # valid names for identifiers
- # well, names can only not consist fully of numbers
- # but this should be good enough for now
-
- # TODO / should divide keywords/symbols into namespace/rest
- # but that's hard, so just pretend / is part of the name
- valid_name = r'[\w!$%*+,<=>?/.-]+'
-
- def _multi_escape(entries):
- return '|'.join([re.escape(entry) + '(?![\\w-!$%*+,<=>?/.-])'
- for entry in entries])
-
- tokens = {
- 'root' : [
- # the comments - always starting with semicolon
- # and going to the end of the line
- (r';.*$', Comment.Single),
-
- # whitespaces - usually not relevant
- (r'[,\s]+', Whitespace),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- (r'0x-?[abcdef\d]+', Number.Hex),
-
- # strings, symbols and characters
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'" + valid_name, String.Symbol),
- (r"\\(.|[a-z]+)", String.Char),
-
- # keywords
- (r':' + valid_name, Name.Constant),
-
- # special operators
- (r'~@|[`\'#^~&]', Operator),
-
- # highlight the keywords
- (_multi_escape(keywords), Keyword),
-
- # highlight the builtins
- (_multi_escape(builtins), Name.Builtin),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # Clojure accepts vector notation
- (r'(\[|\])', Punctuation),
-
- # Clojure accepts map notation
- (r'(\{|\})', Punctuation),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
- ],
- }
-
-
class FactorLexer(RegexLexer):
"""
Lexer for the `Factor <http://factorcode.org>`_ language.
@@ -1643,33 +1594,38 @@ class FactorLexer(RegexLexer):
# defining words
(r'(\s*)(:|::|MACRO:|MEMO:)(\s+)(\S+)',
- bygroups(Text, Keyword, Text, Name.Function)),
+ bygroups(Text, Keyword, Text, Name.Function)),
(r'(\s*)(M:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Text, Keyword, Text, Name.Class, Text, Name.Function)),
+ bygroups(Text, Keyword, Text, Name.Class, Text, Name.Function)),
(r'(\s*)(GENERIC:)(\s+)(\S+)',
- bygroups(Text, Keyword, Text, Name.Function)),
+ bygroups(Text, Keyword, Text, Name.Function)),
(r'(\s*)(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Text, Keyword, Text, Name.Function, Text, Name.Function)),
+ bygroups(Text, Keyword, Text, Name.Function, Text, Name.Function)),
(r'(\()(\s+)', bygroups(Name.Function, Text), 'stackeffect'),
(r'\;\s', Keyword),
# imports and namespaces
- (r'(USING:)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'(USE:)(\s+)(\S+)', bygroups(Keyword.Namespace, Text, Name.Namespace)),
- (r'(UNUSE:)(\s+)(\S+)', bygroups(Keyword.Namespace, Text, Name.Namespace)),
+ (r'(USING:)((?:\s|\\\s)+)',
+ bygroups(Keyword.Namespace, Text), 'import'),
+ (r'(USE:)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace)),
+ (r'(UNUSE:)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace)),
(r'(QUALIFIED:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Namespace)),
+ bygroups(Keyword.Namespace, Text, Name.Namespace)),
(r'(QUALIFIED-WITH:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Namespace)),
+ bygroups(Keyword.Namespace, Text, Name.Namespace)),
(r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+)(=>)',
- bygroups(Keyword.Namespace, Text, Name.Namespace, Text, Text)),
- (r'(IN:)(\s+)(\S+)', bygroups(Keyword.Namespace, Text, Name.Namespace)),
+ bygroups(Keyword.Namespace, Text, Name.Namespace, Text, Text)),
+ (r'(IN:)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace)),
(r'(?:ALIAS|DEFER|FORGET|POSTPONE):', Keyword.Namespace),
# tuples and classes
(r'(TUPLE:)(\s+)(\S+)(\s+<\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class, Text, Name.Class), 'slots'),
- (r'(TUPLE:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class), 'slots'),
+ bygroups(Keyword, Text, Name.Class, Text, Name.Class), 'slots'),
+ (r'(TUPLE:)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class), 'slots'),
(r'(UNION:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
(r'(INTERSECTION:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
(r'(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)',
@@ -1687,7 +1643,8 @@ class FactorLexer(RegexLexer):
(r'ERROR:', Keyword),
(r'SYNTAX:', Keyword),
(r'(HELP:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Function)),
- (r'(MAIN:)(\s+)(\S+)', bygroups(Keyword.Namespace, Text, Name.Function)),
+ (r'(MAIN:)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Function)),
(r'(?:ALIEN|TYPEDEF|FUNCTION|STRUCT):', Keyword),
# vocab.private
@@ -1717,7 +1674,8 @@ class FactorLexer(RegexLexer):
(r'[-+/*=<>^]\s', Operator),
# keywords
- (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s', Keyword),
+ (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s',
+ Keyword),
# builtins
(builtin_kernel, Name.Builtin),
@@ -1761,196 +1719,6 @@ class FactorLexer(RegexLexer):
}
-class IokeLexer(RegexLexer):
- """
- For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
- prototype based programming language) source.
-
- *New in Pygments 1.4.*
- """
- name = 'Ioke'
- filenames = ['*.ik']
- aliases = ['ioke', 'ik']
- mimetypes = ['text/x-iokesrc']
- tokens = {
- 'interpolatableText': [
- (r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
- r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
- (r'#{', Punctuation, 'textInterpolationRoot')
- ],
-
- 'text': [
- (r'(?<!\\)"', String, '#pop'),
- include('interpolatableText'),
- (r'[^"]', String)
- ],
-
- 'documentation': [
- (r'(?<!\\)"', String.Doc, '#pop'),
- include('interpolatableText'),
- (r'[^"]', String.Doc)
- ],
-
- 'textInterpolationRoot': [
- (r'}', Punctuation, '#pop'),
- include('root')
- ],
-
- 'slashRegexp': [
- (r'(?<!\\)/[oxpniums]*', String.Regex, '#pop'),
- include('interpolatableText'),
- (r'\\/', String.Regex),
- (r'[^/]', String.Regex)
- ],
-
- 'squareRegexp': [
- (r'(?<!\\)][oxpniums]*', String.Regex, '#pop'),
- include('interpolatableText'),
- (r'\\]', String.Regex),
- (r'[^\]]', String.Regex)
- ],
-
- 'squareText': [
- (r'(?<!\\)]', String, '#pop'),
- include('interpolatableText'),
- (r'[^\]]', String)
- ],
-
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
-
- # Comments
- (r';(.*?)\n', Comment),
- (r'\A#!(.*?)\n', Comment),
-
- #Regexps
- (r'#/', String.Regex, 'slashRegexp'),
- (r'#r\[', String.Regex, 'squareRegexp'),
-
- #Symbols
- (r':[a-zA-Z0-9_!:?]+', String.Symbol),
- (r'[a-zA-Z0-9_!:?]+:(?![a-zA-Z0-9_!?])', String.Other),
- (r':"(\\\\|\\"|[^"])*"', String.Symbol),
-
- #Documentation
- (r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
- r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
- r'|(?<=dsyntax\())[\s\n\r]*"', String.Doc, 'documentation'),
-
- #Text
- (r'"', String, 'text'),
- (r'#\[', String, 'squareText'),
-
- #Mimic
- (r'[a-zA-Z0-9_][a-zA-Z0-9!?_:]+(?=\s*=.*mimic\s)', Name.Entity),
-
- #Assignment
- (r'[a-zA-Z_][a-zA-Z0-9_!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))', Name.Variable),
-
- # keywords
- (r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
- r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
- r'with)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
-
- # Origin
- (r'(eval|mimic|print|println)(?![a-zA-Z0-9!:_?])', Keyword),
-
- # Base
- (r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
- r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
- r'(?![a-zA-Z0-9!:_?])', Keyword),
-
- # Ground
- (r'(stackTraceAsText)(?![a-zA-Z0-9!:_?])', Keyword),
-
- #DefaultBehaviour Literals
- (r'(dict|list|message|set)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
-
- #DefaultBehaviour Case
- (r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
- r'case:otherwise|case:xor)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
-
- #DefaultBehaviour Reflection
- (r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
- r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
- r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
- r'(?![a-zA-Z0-9!:_?])', Keyword),
-
- #DefaultBehaviour Aspects
- (r'(after|around|before)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
-
- # DefaultBehaviour
- (r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
- r'(?![a-zA-Z0-9!:_?])', Keyword),
- (r'(use|destructuring)', Keyword.Reserved),
-
- #DefaultBehavior BaseBehavior
- (r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
- r'documentation|identity|removeCell!|undefineCell)'
- r'(?![a-zA-Z0-9!:_?])', Keyword),
-
- #DefaultBehavior Internal
- (r'(internal:compositeRegexp|internal:concatenateText|'
- r'internal:createDecimal|internal:createNumber|'
- r'internal:createRegexp|internal:createText)'
- r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
-
- #DefaultBehaviour Conditions
- (r'(availableRestarts|bind|error\!|findRestart|handle|'
- r'invokeRestart|rescue|restart|signal\!|warn\!)'
- r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
-
- # constants
- (r'(nil|false|true)(?![a-zA-Z0-9!:_?])', Name.Constant),
-
- # names
- (r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
- r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
- r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
- r'Conditions|Definitions|FlowControl|Internal|Literals|'
- r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
- r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
- r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
- r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
- r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
- r'System|Text|Tuple)(?![a-zA-Z0-9!:_?])', Name.Builtin),
-
- # functions
- (ur'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
- ur'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
- ur'(?![a-zA-Z0-9!:_?])', Name.Function),
-
- # Numbers
- (r'-?0[xX][0-9a-fA-F]+', Number.Hex),
- (r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'-?\d+', Number.Integer),
-
- (r'#\(', Punctuation),
-
- # Operators
- (ur'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
- ur'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
- ur'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
- ur'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
- ur'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
- ur'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
- ur'\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
- (r'(and|nand|or|xor|nor|return|import)(?![a-zA-Z0-9_!?])',
- Operator),
-
- # Punctuation
- (r'(\`\`|\`|\'\'|\'|\.|\,|@|@@|\[|\]|\(|\)|{|})', Punctuation),
-
- #kinds
- (r'[A-Z][a-zA-Z0-9_!:?]*', Name.Class),
-
- #default cellnames
- (r'[a-z_][a-zA-Z0-9_!:?]*', Name)
- ]
- }
-
-
class FancyLexer(RegexLexer):
"""
Pygments Lexer For `Fancy <http://www.fancy-lang.org/>`_.
@@ -2033,66 +1801,3 @@ class FancyLexer(RegexLexer):
(r'\d+', Number.Integer)
]
}
-
-
-class GroovyLexer(RegexLexer):
- """
- For `Groovy <http://groovy.codehaus.org/>`_ source code.
-
- *New in Pygments 1.5.*
- """
-
- name = 'Groovy'
- aliases = ['groovy']
- filenames = ['*.groovy']
- mimetypes = ['text/x-groovy']
-
- flags = re.MULTILINE | re.DOTALL
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
- (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
- Keyword),
- (r'(abstract|const|enum|extends|final|implements|native|private|'
- r'protected|public|static|strictfp|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Declaration),
- (r'(def|boolean|byte|char|double|float|int|long|short|void)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'\$/((?!/\$).)*/\$', String),
- (r'/(\\\\|\\"|[^/])*/', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
- (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
- (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
- (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text)
- ],
- 'class': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
- ],
- }
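The agile.py diff above adds MoonScriptLexer as a LuaLexer subclass with MoonScript-specific rules, re-tagging "." as Operator in get_tokens_unprocessed. A minimal usage sketch (illustrative only, not part of the diff; the snippet and formatter choice are assumptions):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.agile import MoonScriptLexer

    # A small MoonScript snippet; the class/with/! syntax exercises the new rules.
    code = 'class Thing\n  name: "unknown"\n\nwith Thing!\n  print .name\n'

    # highlight() runs the lexer and feeds the token stream to the formatter.
    print(highlight(code, MoonScriptLexer(), TerminalFormatter()))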
diff --git a/pygments/lexers/asm.py b/pygments/lexers/asm.py
index 82933bc8..9e59e620 100644
--- a/pygments/lexers/asm.py
+++ b/pygments/lexers/asm.py
@@ -5,7 +5,7 @@
Lexers for assembly languages.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -13,7 +13,8 @@ import re
from pygments.lexer import RegexLexer, include, bygroups, using, DelegatingLexer
from pygments.lexers.compiled import DLexer, CppLexer, CLexer
-from pygments.token import *
+from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
+ Other, Keyword, Operator
__all__ = ['GasLexer', 'ObjdumpLexer','DObjdumpLexer', 'CppObjdumpLexer',
'CObjdumpLexer', 'LlvmLexer', 'NasmLexer']
@@ -72,6 +73,7 @@ class GasLexer(RegexLexer):
('%' + identifier, Name.Variable),
# Numeric constants
('$'+number, Number.Integer),
+ (r"$'(.|\\')'", String.Char),
(r'[\r\n]+', Text, '#pop'),
(r'#.*?$', Comment, '#pop'),
include('punctuation'),
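Both agile.py (above) and compiled.py (below) now re-export the lexers that moved into pygments/lexers/jvm.py, so the old import paths keep resolving. A short sanity check of that backward compatibility (illustrative only, not part of the diff):

    from pygments.lexers.agile import ClojureLexer, IokeLexer    # re-exported
    from pygments.lexers.compiled import JavaLexer, ScalaLexer   # re-exported
    from pygments.lexers import jvm

    # The old names are the same classes now defined in the jvm module.
    assert ClojureLexer is jvm.ClojureLexer
    assert JavaLexer is jvm.JavaLexer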
diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
index 5da2b875..eeab8d57 100644
--- a/pygments/lexers/compiled.py
+++ b/pygments/lexers/compiled.py
@@ -5,29 +5,29 @@
Lexers for compiled languages.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
+from string import Template
-from pygments.scanner import Scanner
from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
- this, combined
+ this, combined
from pygments.util import get_bool_opt, get_list_opt
-from pygments.token import \
- Text, Comment, Operator, Keyword, Name, String, Number, Punctuation, \
- Error
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Literal
+from pygments.scanner import Scanner
# backwards compatibility
from pygments.lexers.functional import OcamlLexer
+from pygments.lexers.jvm import JavaLexer, ScalaLexer
-__all__ = ['CLexer', 'CppLexer', 'DLexer', 'DelphiLexer', 'ECLexer', 'JavaLexer',
- 'ScalaLexer', 'DylanLexer', 'OcamlLexer', 'ObjectiveCLexer',
- 'FortranLexer', 'GLShaderLexer', 'PrologLexer', 'CythonLexer',
- 'ValaLexer', 'OocLexer', 'GoLexer', 'FelixLexer', 'AdaLexer',
- 'Modula2Lexer', 'BlitzMaxLexer', 'NimrodLexer', 'GosuLexer',
- 'GosuTemplateLexer']
+__all__ = ['CLexer', 'CppLexer', 'DLexer', 'DelphiLexer', 'ECLexer',
+ 'DylanLexer', 'ObjectiveCLexer', 'FortranLexer', 'GLShaderLexer',
+ 'PrologLexer', 'CythonLexer', 'ValaLexer', 'OocLexer', 'GoLexer',
+ 'FelixLexer', 'AdaLexer', 'Modula2Lexer', 'BlitzMaxLexer',
+ 'NimrodLexer', 'FantomLexer']
class CLexer(RegexLexer):
@@ -48,9 +48,12 @@ class CLexer(RegexLexer):
('^#if\s+0', Comment.Preproc, 'if0'),
('^#', Comment.Preproc, 'macro'),
# or with whitespace
- ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'),
- ('^' + _ws + '#', Comment.Preproc, 'macro'),
- (r'^(\s*)([a-zA-Z_][a-zA-Z0-9_]*:(?!:))', bygroups(Text, Name.Label)),
+ ('^(' + _ws + r')(#if\s+0)',
+ bygroups(using(this), Comment.Preproc), 'if0'),
+ ('^(' + _ws + ')(#)',
+ bygroups(using(this), Comment.Preproc), 'macro'),
+ (r'^(\s*)([a-zA-Z_][a-zA-Z0-9_]*:(?!:))',
+ bygroups(Text, Name.Label)),
(r'\n', Text),
(r'\s+', Text),
(r'\\\n', Text), # line continuation
@@ -170,7 +173,8 @@ class CppLexer(RegexLexer):
"""
name = 'C++'
aliases = ['cpp', 'c++']
- filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx']
+ filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++',
+ '*.cc', '*.hh', '*.cxx', '*.hxx']
mimetypes = ['text/x-c++hdr', 'text/x-c++src']
#: optional Comment or Whitespace
@@ -182,8 +186,10 @@ class CppLexer(RegexLexer):
('^#if\s+0', Comment.Preproc, 'if0'),
('^#', Comment.Preproc, 'macro'),
# or with whitespace
- ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'),
- ('^' + _ws + '#', Comment.Preproc, 'macro'),
+ ('^(' + _ws + r')(#if\s+0)',
+ bygroups(using(this), Comment.Preproc), 'if0'),
+ ('^(' + _ws + ')(#)',
+ bygroups(using(this), Comment.Preproc), 'macro'),
(r'\n', Text),
(r'\s+', Text),
(r'\\\n', Text), # line continuation
@@ -217,7 +223,7 @@ class CppLexer(RegexLexer):
r'multiple_inheritance|m128i|m128d|m128|m64|interface|'
r'identifier|forceinline|event|assume)\b', Keyword.Reserved),
# Offload C++ extensions, http://offload.codeplay.com/
- (r'(__offload|__blockingoffload|__outer)\b', Keyword.Psuedo),
+ (r'(__offload|__blockingoffload|__outer)\b', Keyword.Pseudo),
(r'(true|false)\b', Keyword.Constant),
(r'NULL\b', Name.Builtin),
('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
@@ -1035,164 +1041,6 @@ class DelphiLexer(Lexer):
yield scanner.start_pos, token, scanner.match or ''
-class JavaLexer(RegexLexer):
- """
- For `Java <http://www.sun.com/java/>`_ source code.
- """
-
- name = 'Java'
- aliases = ['java']
- filenames = ['*.java']
- mimetypes = ['text/x-java']
-
- flags = re.MULTILINE | re.DOTALL
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
- (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
- Keyword),
- (r'(abstract|const|enum|extends|final|implements|native|private|'
- r'protected|public|static|strictfp|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Declaration),
- (r'(boolean|byte|char|double|float|int|long|short|void)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
- (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
- (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
- (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text)
- ],
- 'class': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
- ],
- }
-
-
-class ScalaLexer(RegexLexer):
- """
- For `Scala <http://www.scala-lang.org>`_ source code.
- """
-
- name = 'Scala'
- aliases = ['scala']
- filenames = ['*.scala']
- mimetypes = ['text/x-scala']
-
- flags = re.MULTILINE | re.DOTALL
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
- # don't use raw unicode strings!
- op = u'[-~\\^\\*!%&\\\\<>\\|+=:/?@\u00a6-\u00a7\u00a9\u00ac\u00ae\u00b0-\u00b1\u00b6\u00d7\u00f7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+'
-
- letter = u'[a-zA-Z\\$_\u00aa\u00b5\u00ba\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u02af\u0370-\u0373\u0376-\u0377\u037b-\u037d\u0386\u0388-\u03f5\u03f7-\u0481\u048a-\u0556\u0561-\u0587\u05d0-\u05f2\u0621-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u097f\u0985-\u09b9\u09bd\u09ce\u09dc-\u09e1\u09f0-\u09f1\u0a05-\u0a39\u0a59-\u0a5e\u0a72-\u0a74\u0a85-\u0ab9\u0abd\u0ad0-\u0ae1\u0b05-\u0b39\u0b3d\u0b5c-\u0b61\u0b71\u0b83-\u0bb9\u0bd0\u0c05-\u0c3d\u0c58-\u0c61\u0c85-\u0cb9\u0cbd\u0cde-\u0ce1\u0d05-\u0d3d\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0eb0\u0eb2-\u0eb3\u0ebd-\u0ec4\u0edc-\u0f00\u0f40-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10fa\u1100-\u135a\u1380-\u138f\u13a0-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u16ee-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u18a8\u18aa-\u191c\u1950-\u19a9\u19c1-\u19c7\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1d00-\u1d2b\u1d62-\u1d77\u1d79-\u1d9a\u1e00-\u1fbc\u1fbe\u1fc2-\u1fcc\u1fd0-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ffc\u2071\u207f\u2102\u2107\u210a-\u2113\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c7c\u2c80-\u2ce4\u2d00-\u2d65\u2d80-\u2dde\u3006-\u3007\u3021-\u3029\u3038-\u303a\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff-\u318e\u31a0-\u31b7\u31f0-\u31ff\u3400-\u4db5\u4e00-\ua014\ua016-\ua48c\ua500-\ua60b\ua610-\ua61f\ua62a-\ua66e\ua680-\ua697\ua722-\ua76f\ua771-\ua787\ua78b-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uac00-\ud7a3\uf900-\ufb1d\ufb1f-\ufb28\ufb2a-\ufd3d\ufd50-\ufdfb\ufe70-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uff6f\uff71-\uff9d\uffa0-\uffdc]'
-
- upper = u'[A-Z\\$_\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\
ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]'
-
- idrest = ur'%s(?:%s|[0-9])*(?:(?<=_)%s)?' % (letter, letter, op)
-
- tokens = {
- 'root': [
- # method names
- (r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
- (ur"'%s" % idrest, Text.Symbol),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- (ur'@%s' % idrest, Name.Decorator),
- (ur'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|'
- ur'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|'
- ur'lazy|match|new|override|pr(?:ivate|otected)'
- ur'|re(?:quires|turn)|s(?:ealed|uper)|'
- ur't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\b|'
- u'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])(\b|(?=\\s)|$)', Keyword),
- (ur':(?!%s)' % op, Keyword, 'type'),
- (ur'%s%s\b' % (upper, idrest), Name.Class),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(import|package)(\s+)', bygroups(Keyword, Text), 'import'),
- (r'(type)(\s+)', bygroups(Keyword, Text), 'type'),
- (r'""".*?"""', String),
- (r'"(\\\\|\\"|[^"])*"', String),
- (ur"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
-# (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
-# Name.Attribute)),
- (idrest, Name),
- (r'`[^`]+`', Name),
- (r'\[', Operator, 'typeparam'),
- (r'[\(\)\{\};,.#]', Operator),
- (op, Operator),
- (ur'([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?',
- Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text)
- ],
- 'class': [
- (ur'(%s|%s|`[^`]+`)(\s*)(\[)' % (idrest, op),
- bygroups(Name.Class, Text, Operator), 'typeparam'),
- (r'[\s\n]+', Text),
- (r'{', Operator, '#pop'),
- (r'\(', Operator, '#pop'),
- (ur'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'),
- ],
- 'type': [
- (r'\s+', Text),
- (u'<[%:]|>:|[#_\u21D2]|forSome|type', Keyword),
- (r'([,\);}]|=>|=)([\s\n]*)', bygroups(Operator, Text), '#pop'),
- (r'[\(\{]', Operator, '#push'),
- (ur'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)(\[)' %
- (idrest, op, idrest, op),
- bygroups(Keyword.Type, Text, Operator), ('#pop', 'typeparam')),
- (ur'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)$' %
- (idrest, op, idrest, op),
- bygroups(Keyword.Type, Text), '#pop'),
- (ur'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
- ],
- 'typeparam': [
- (r'[\s\n,]+', Text),
- (u'<[%:]|=>|>:|[#_\u21D2]|forSome|type', Keyword),
- (r'([\]\)\}])', Operator, '#pop'),
- (r'[\(\[\{]', Operator, '#push'),
- (ur'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
- ],
- 'comment': [
- (r'[^/\*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'import': [
- (ur'(%s|\.)+' % idrest, Name.Namespace, '#pop')
- ],
- }
-
-
class DylanLexer(RegexLexer):
"""
For the `Dylan <http://www.opendylan.org/>`_ language.
@@ -1232,8 +1080,10 @@ class DylanLexer(RegexLexer):
(r'\$[a-zA-Z0-9-]+', Name.Constant),
(r'[!$%&*/:<>=?~^.+\[\]{}-]+', Operator),
(r'\s+', Text),
+ (r'#"[a-zA-Z0-9-]+"', Keyword),
(r'#[a-zA-Z0-9-]+', Keyword),
- (r'[a-zA-Z0-9-]+', Name.Variable),
+ (r'#(\(|\[)', Punctuation),
+ (r'[a-zA-Z0-9-_]+', Name.Variable),
],
'string': [
(r'"', String, '#pop'),
@@ -1392,18 +1242,22 @@ class ObjectiveCLexer(RegexLexer):
}
def analyse_text(text):
- if '@"' in text: # strings
+ if '@import' in text or '@interface' in text or \
+ '@implementation' in text:
+ return True
+ elif '@"' in text: # strings
return True
- if re.match(r'\[[a-zA-Z0-9.]:', text): # message
+ elif re.match(r'\[[a-zA-Z0-9.]:', text): # message
return True
return False
+
class FortranLexer(RegexLexer):
- '''
+ """
Lexer for FORTRAN 90 code.
*New in Pygments 0.10.*
- '''
+ """
name = 'Fortran'
aliases = ['fortran']
filenames = ['*.f', '*.f90', '*.F', '*.F90']
@@ -1428,20 +1282,28 @@ class FortranLexer(RegexLexer):
],
'core': [
# Statements
- (r'\b(ACCEPT|ALLOCATABLE|ALLOCATE|ARRAY|ASSIGN|BACKSPACE|BLOCK DATA|'
- r'BYTE|CALL|CASE|CLOSE|COMMON|CONTAINS|CONTINUE|CYCLE|DATA|'
- r'DEALLOCATE|DECODE|DIMENSION|DO|ENCODE|END FILE|ENDIF|END|ENTRY|'
- r'EQUIVALENCE|EXIT|EXTERNAL|EXTRINSIC|FORALL|FORMAT|FUNCTION|GOTO|'
- r'IF|IMPLICIT|INCLUDE|INQUIRE|INTENT|INTERFACE|INTRINSIC|MODULE|'
- r'NAMELIST|NULLIFY|NONE|OPEN|OPTIONAL|OPTIONS|PARAMETER|PAUSE|'
- r'POINTER|PRINT|PRIVATE|PROGRAM|PUBLIC|PURE|READ|RECURSIVE|RETURN|'
- r'REWIND|SAVE|SELECT|SEQUENCE|STOP|SUBROUTINE|TARGET|TYPE|USE|'
- r'VOLATILE|WHERE|WRITE|WHILE|THEN|ELSE|ENDIF)\s*\b',
+ (r'\b(ACCEPT|ALLOCATABLE|ALLOCATE|ARRAY|ASSIGN|ASYNCHRONOUS|'
+ r'BACKSPACE|BIND|BLOCK DATA|BYTE|CALL|CASE|CLOSE|COMMON|CONTAINS|'
+ r'CONTINUE|CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|DIMENSION|DO|'
+ r'ELSE|ENCODE|END FILE|ENDIF|END|ENTRY|ENUMERATOR|EQUIVALENCE|'
+ r'EXIT|EXTERNAL|EXTRINSIC|FINAL|FORALL|FORMAT|FUNCTION|GENERIC|'
+ r'GOTO|IF|IMPLICIT|IMPORT|INCLUDE|INQUIRE|INTENT|INTERFACE|'
+ r'INTRINSIC|MODULE|NAMELIST|NULLIFY|NONE|NON_INTRINSIC|'
+ r'NON_OVERRIDABLE|NOPASS|OPEN|OPTIONAL|OPTIONS|PARAMETER|PASS|'
+ r'PAUSE|POINTER|PRINT|PRIVATE|PROGRAM|PROTECTED|PUBLIC|PURE|READ|'
+ r'RECURSIVE|RETURN|REWIND|SAVE|SELECT|SEQUENCE|STOP|SUBROUTINE|'
+ r'TARGET|THEN|TYPE|USE|VALUE|VOLATILE|WHERE|WRITE|WHILE)\s*\b',
Keyword),
# Data Types
(r'\b(CHARACTER|COMPLEX|DOUBLE PRECISION|DOUBLE COMPLEX|INTEGER|'
- r'LOGICAL|REAL)\s*\b',
+ r'LOGICAL|REAL|C_INT|C_SHORT|C_LONG|C_LONG_LONG|C_SIGNED_CHAR|'
+ r'C_SIZE_T|C_INT8_T|C_INT16_T|C_INT32_T|C_INT64_T|C_INT_LEAST8_T|'
+ r'C_INT_LEAST16_T|C_INT_LEAST32_T|C_INT_LEAST64_T|C_INT_FAST8_T|'
+ r'C_INT_FAST16_T|C_INT_FAST32_T|C_INT_FAST64_T|C_INTMAX_T|'
+ r'C_INTPTR_T|C_FLOAT|C_DOUBLE|C_LONG_DOUBLE|C_FLOAT_COMPLEX|'
+ r'C_DOUBLE_COMPLEX|C_LONG_DOUBLE_COMPLEX|C_BOOL|C_CHAR|C_PTR|'
+ r'C_FUNPTR)\s*\b',
Keyword.Type),
# Operators
@@ -1453,31 +1315,37 @@ class FortranLexer(RegexLexer):
# Intrinsics
(r'\b(Abort|Abs|Access|AChar|ACos|AdjustL|AdjustR|AImag|AInt|Alarm|'
- r'All|Allocated|ALog|AMax|AMin|AMod|And|ANInt|Any|'
- r'ASin|Associated|ATan|BesJ|BesJN|BesY|BesYN|'
- r'Bit_Size|BTest|CAbs|CCos|Ceiling|CExp|Char|ChDir|ChMod|CLog|'
- r'Cmplx|Complex|Conjg|Cos|CosH|Count|CPU_Time|CShift|CSin|CSqRt|'
- r'CTime|DAbs|DACos|DASin|DATan|Date_and_Time|DbesJ|'
+ r'All|Allocated|ALog|AMax|AMin|AMod|And|ANInt|Any|ASin|Associated|'
+ r'ATan|BesJ|BesJN|BesY|BesYN|Bit_Size|BTest|CAbs|CCos|Ceiling|'
+ r'CExp|Char|ChDir|ChMod|CLog|Cmplx|Command_Argument_Count|Complex|'
+ r'Conjg|Cos|CosH|Count|CPU_Time|CShift|CSin|CSqRt|CTime|C_Funloc|'
+ r'C_Loc|C_Associated|C_Null_Ptr|C_Null_Funptr|C_F_Pointer|'
+ r'C_Null_Char|C_Alert|C_Backspace|C_Form_Feed|C_New_Line|'
+ r'C_Carriage_Return|C_Horizontal_Tab|C_Vertical_Tab|'
+ r'DAbs|DACos|DASin|DATan|Date_and_Time|DbesJ|'
r'DbesJ|DbesJN|DbesY|DbesY|DbesYN|Dble|DCos|DCosH|DDiM|DErF|DErFC|'
r'DExp|Digits|DiM|DInt|DLog|DLog|DMax|DMin|DMod|DNInt|Dot_Product|'
r'DProd|DSign|DSinH|DSin|DSqRt|DTanH|DTan|DTime|EOShift|Epsilon|'
- r'ErF|ErFC|ETime|Exit|Exp|Exponent|FDate|FGet|FGetC|Float|'
- r'Floor|Flush|FNum|FPutC|FPut|Fraction|FSeek|FStat|FTell|'
- r'GError|GetArg|GetCWD|GetEnv|GetGId|GetLog|GetPId|GetUId|'
- r'GMTime|HostNm|Huge|IAbs|IAChar|IAnd|IArgC|IBClr|IBits|'
+ r'ErF|ErFC|ETime|Exit|Exp|Exponent|Extends_Type_Of|FDate|FGet|'
+ r'FGetC|Float|Floor|Flush|FNum|FPutC|FPut|Fraction|FSeek|FStat|'
+ r'FTell|GError|GetArg|Get_Command|Get_Command_Argument|'
+ r'Get_Environment_Variable|GetCWD|GetEnv|GetGId|GetLog|GetPId|'
+ r'GetUId|GMTime|HostNm|Huge|IAbs|IAChar|IAnd|IArgC|IBClr|IBits|'
r'IBSet|IChar|IDate|IDiM|IDInt|IDNInt|IEOr|IErrNo|IFix|Imag|'
r'ImagPart|Index|Int|IOr|IRand|IsaTty|IShft|IShftC|ISign|'
- r'ITime|Kill|Kind|LBound|Len|Len_Trim|LGe|LGt|Link|LLe|LLt|LnBlnk|'
- r'Loc|Log|Log|Logical|Long|LShift|LStat|LTime|MatMul|Max|'
- r'MaxExponent|MaxLoc|MaxVal|MClock|Merge|Min|MinExponent|MinLoc|'
- r'MinVal|Mod|Modulo|MvBits|Nearest|NInt|Not|Or|Pack|PError|'
+ r'Iso_C_Binding|Is_Iostat_End|Is_Iostat_Eor|ITime|Kill|Kind|'
+ r'LBound|Len|Len_Trim|LGe|LGt|Link|LLe|LLt|LnBlnk|Loc|Log|'
+ r'Logical|Long|LShift|LStat|LTime|MatMul|Max|MaxExponent|MaxLoc|'
+ r'MaxVal|MClock|Merge|Move_Alloc|Min|MinExponent|MinLoc|MinVal|'
+ r'Mod|Modulo|MvBits|Nearest|New_Line|NInt|Not|Or|Pack|PError|'
r'Precision|Present|Product|Radix|Rand|Random_Number|Random_Seed|'
- r'Range|Real|RealPart|Rename|Repeat|Reshape|RRSpacing|RShift|Scale|'
- r'Scan|Second|Selected_Int_Kind|Selected_Real_Kind|Set_Exponent|'
- r'Shape|Short|Sign|Signal|SinH|Sin|Sleep|Sngl|Spacing|Spread|SqRt|'
- r'SRand|Stat|Sum|SymLnk|System|System_Clock|Tan|TanH|Time|'
- r'Tiny|Transfer|Transpose|Trim|TtyNam|UBound|UMask|Unlink|Unpack|'
- r'Verify|XOr|ZAbs|ZCos|ZExp|ZLog|ZSin|ZSqRt)\s*\b',
+ r'Range|Real|RealPart|Rename|Repeat|Reshape|RRSpacing|RShift|'
+ r'Same_Type_As|Scale|Scan|Second|Selected_Int_Kind|'
+ r'Selected_Real_Kind|Set_Exponent|Shape|Short|Sign|Signal|SinH|'
+ r'Sin|Sleep|Sngl|Spacing|Spread|SqRt|SRand|Stat|Sum|SymLnk|'
+ r'System|System_Clock|Tan|TanH|Time|Tiny|Transfer|Transpose|Trim|'
+ r'TtyNam|UBound|UMask|Unlink|Unpack|Verify|XOr|ZAbs|ZCos|ZExp|'
+ r'ZLog|ZSin|ZSqRt)\s*\b',
Name.Builtin),
# Booleans
@@ -2296,7 +2164,7 @@ class AdaLexer(RegexLexer):
include('numbers'),
(r"'[^']'", String.Character),
(r'([a-z0-9_]+)(\s*|[(,])', bygroups(Name, using(this))),
- (r"(<>|=>|:=|[\(\)\|:;,.'])", Punctuation),
+ (r"(<>|=>|:=|[()|:;,.'])", Punctuation),
(r'[*<>+=/&-]', Operator),
(r'\n+', Text),
],
@@ -2317,7 +2185,7 @@ class AdaLexer(RegexLexer):
],
'end' : [
('(if|case|record|loop|select)', Keyword.Reserved),
- ('"[^"]+"|[a-zA-Z0-9_]+', Name.Function),
+ ('"[^"]+"|[a-zA-Z0-9_.]+', Name.Function),
('[\n\s]+', Text),
(';', Punctuation, '#pop'),
],
@@ -2337,11 +2205,12 @@ class AdaLexer(RegexLexer):
],
'import': [
(r'[a-z0-9_.]+', Name.Namespace, '#pop'),
+ (r'', Text, '#pop'),
],
'formal_part' : [
(r'\)', Punctuation, '#pop'),
- (r'([a-z0-9_]+)(\s*)(,|:[^=])', bygroups(Name.Variable,
- Text, Punctuation)),
+ (r'[a-z0-9_]+', Name.Variable),
+ (r',|:[^=]', Punctuation),
(r'(in|not|null|out|access)\b', Keyword.Reserved),
include('root'),
],
@@ -2790,98 +2659,233 @@ class NimrodLexer(RegexLexer):
}
-class GosuLexer(RegexLexer):
+class FantomLexer(RegexLexer):
"""
- For Gosu source code.
+ For Fantom source code.
*New in Pygments 1.5.*
"""
+ name = 'Fantom'
+ aliases = ['fan']
+ filenames = ['*.fan']
+ mimetypes = ['application/x-fantom']
+
+ # often used regexes
+ def s(str):
+ return Template(str).substitute(
+ dict (
+ pod = r'[\"\w\.]+',
+ eos = r'\n|;',
+ id = r'[a-zA-Z_][a-zA-Z0-9_]*',
+                # all chars which can be part of a type definition; starts with
+                # either a letter, [ (maps), or | (funcs)
+ type = r'(?:\[|[a-zA-Z_]|\|)[:\w_\[\]\|\->\?]*?',
+ )
+ )
- name = 'Gosu'
- aliases = ['gosu']
- filenames = ['*.gs', '*.gsx', '*.gsp', '*.vark']
- mimetypes = ['text/x-gosu']
-
- flags = re.MULTILINE | re.DOTALL
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # modifiers etc.
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
- (r'(in|as|typeof|statictypeof|typeis|typeas|if|else|foreach|'
- r'for|index|while|do|continue|break|return|try|catch|finally|this|throw|'
- r'new|switch|case|default|eval|super|outer|classpath|using)\b',
+ 'comments': [
+ (r'(?s)/\*.*?\*/', Comment.Multiline), #Multiline
+ (r'//.*?\n', Comment.Single), #Single line
+ #todo: highlight references in fandocs
+ (r'\*\*.*?\n', Comment.Special), #Fandoc
+ (r'#.*\n', Comment.Single) #Shell-style
+ ],
+ 'literals': [
+ (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), #Duration
+ (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number),
+ #Duration with dot
+ (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), #Float/Decimal
+ (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), #Hex
+ (r'\b-?[\d_]+', Number.Integer), #Int
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), #Char
+ (r'"', Punctuation, 'insideStr'), #Opening quote
+ (r'`', Punctuation, 'insideUri'), #Opening accent
+ (r'\b(true|false|null)\b', Keyword.Constant), #Bool & null
+ (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', #DSL
+ bygroups(Name.Namespace, Punctuation, Name.Class,
+ Punctuation, String, Punctuation)),
+ (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', #Type/slot literal
+ bygroups(Name.Namespace, Punctuation, Name.Class,
+ Punctuation, Name.Function)),
+ (r'\[,\]', Literal), # Empty list
+ (s(r'($type)(\[,\])'), # Typed empty list
+ bygroups(using(this, state = 'inType'), Literal)),
+ (r'\[:\]', Literal), # Empty Map
+ (s(r'($type)(\[:\])'),
+ bygroups(using(this, state = 'inType'), Literal)),
+ ],
+ 'insideStr': [
+ (r'\\\\', String.Escape), #Escaped backslash
+ (r'\\"', String.Escape), #Escaped "
+ (r'\\`', String.Escape), #Escaped `
+ (r'\$\w+', String.Interpol), #Subst var
+ (r'\${.*?}', String.Interpol), #Subst expr
+ (r'"', Punctuation, '#pop'), #Closing quot
+ (r'.', String) #String content
+ ],
+ 'insideUri': [ #TODO: remove copy/paste str/uri
+ (r'\\\\', String.Escape), #Escaped backslash
+ (r'\\"', String.Escape), #Escaped "
+ (r'\\`', String.Escape), #Escaped `
+ (r'\$\w+', String.Interpol), #Subst var
+ (r'\${.*?}', String.Interpol), #Subst expr
+ (r'`', Punctuation, '#pop'), #Closing tick
+ (r'.', String.Backtick) #URI content
+ ],
+ 'protectionKeywords': [
+ (r'\b(public|protected|private|internal)\b', Keyword),
+ ],
+ 'typeKeywords': [
+ (r'\b(abstract|final|const|native|facet|enum)\b', Keyword),
+ ],
+ 'methodKeywords': [
+ (r'\b(abstract|native|once|override|static|virtual|final)\b',
Keyword),
- (r'(var|delegate|construct|function|private|internal|protected|public|'
- r'abstract|override|final|static|extends|transient|implements|represents|'
- r'readonly)\b', Keyword.Declaration),
- (r'(property\s+)(get|set|)', Keyword.Declaration),
- (r'(boolean|byte|char|double|float|int|long|short|void|block)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
- (r'(true|false|null|NaN|Infinity)\b', Keyword.Constant),
- (r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Keyword.Declaration, Text, Name.Class)),
- (r'(uses)(\s+)([a-zA-Z0-9_.]+\*?)', bygroups(Keyword.Namespace, Text, Name.Namespace)),
- (r'"', String, 'string'),
- (r'(\??[\.#])([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
- (r'(:)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
- (r'and|or|not|[\\~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\n', Text)
],
- 'templateText': [
- (r'(\\<)|(\\\$)', String),
- (r'(<%@\s+)(extends|params)', bygroups(Operator, Name.Decorator), 'stringTemplate'),
- (r'<%!--.*?--%>', Comment.Multiline),
- (r'(<%)|(<%=)', Operator, 'stringTemplate'),
- (r'\$\{', Operator, 'stringTemplateShorthand'),
- (r'.', String)
- ],
- 'string': [
- (r'"', String, '#pop'),
- include('templateText')
+ 'fieldKeywords': [
+ (r'\b(abstract|const|final|native|override|static|virtual|'
+ r'readonly)\b', Keyword)
],
- 'stringTemplate': [
- (r'"', String, 'string'),
- (r'%>', Operator, '#pop'),
- include('root')
+ 'otherKeywords': [
+ (r'\b(try|catch|throw|finally|for|if|else|while|as|is|isnot|'
+ r'switch|case|default|continue|break|do|return|get|set)\b',
+ Keyword),
+ (r'\b(it|this|super)\b', Name.Builtin.Pseudo),
],
- 'stringTemplateShorthand': [
- (r'"', String, 'string'),
- (r'\{', Operator, 'stringTemplateShorthand'),
- (r'\}', Operator, '#pop'),
- include('root')
+ 'operators': [
+ (r'\+\+|\-\-|\+|\-|\*|/|\|\||&&|<=>|<=|<|>=|>|=|!|\[|\]', Operator)
],
- }
-
-
-class GosuTemplateLexer(Lexer):
- """
- For Gosu templates.
+ 'inType': [
+ (r'[\[\]\|\->:\?]', Punctuation),
+ (s(r'$id'), Name.Class),
+ (r'', Text, '#pop'),
- *New in Pygments 1.5.*
- """
+ ],
+ 'root': [
+ include('comments'),
+ include('protectionKeywords'),
+ include('typeKeywords'),
+ include('methodKeywords'),
+ include('fieldKeywords'),
+ include('literals'),
+ include('otherKeywords'),
+ include('operators'),
+ (r'using\b', Keyword.Namespace, 'using'), # Using stmt
+ (r'@\w+', Name.Decorator, 'facet'), # Symbol
+ (r'(class|mixin)(\s+)(\w+)', bygroups(Keyword, Text, Name.Class),
+ 'inheritance'), # Inheritance list
+
+
+ ### Type var := val
+ (s(r'($type)([ \t]+)($id)(\s*)(:=)'),
+ bygroups(using(this, state = 'inType'), Text,
+ Name.Variable, Text, Operator)),
+
+ ### var := val
+ (s(r'($id)(\s*)(:=)'),
+ bygroups(Name.Variable, Text, Operator)),
+
+ ### .someId( or ->someId( ###
+ (s(r'(\.|(?:\->))($id)(\s*)(\()'),
+ bygroups(Operator, Name.Function, Text, Punctuation),
+ 'insideParen'),
+
+ ### .someId or ->someId
+ (s(r'(\.|(?:\->))($id)'),
+ bygroups(Operator, Name.Function)),
+
+ ### new makeXXX ( ####
+ (r'(new)(\s+)(make\w*)(\s*)(\()',
+ bygroups(Keyword, Text, Name.Function, Text, Punctuation),
+ 'insideMethodDeclArgs'),
+
+ ### Type name ( ####
+ (s(r'($type)([ \t]+)' #Return type and whitespace
+ r'($id)(\s*)(\()'), #method name + open brace
+ bygroups(using(this, state = 'inType'), Text,
+ Name.Function, Text, Punctuation),
+ 'insideMethodDeclArgs'),
+
+ ### ArgType argName, #####
+ (s(r'($type)(\s+)($id)(\s*)(,)'),
+ bygroups(using(this, state= 'inType'), Text, Name.Variable,
+ Text, Punctuation)),
+
+ #### ArgType argName) ####
+ ## Covered in 'insideParen' state
+
+ ### ArgType argName -> ArgType| ###
+ (s(r'($type)(\s+)($id)(\s*)(\->)(\s*)($type)(\|)'),
+ bygroups(using(this, state= 'inType'), Text, Name.Variable,
+ Text, Punctuation, Text, using(this, state = 'inType'),
+ Punctuation)),
- name = 'Gosu Template'
- aliases = ['gst']
- filenames = ['*.gst']
- mimetypes = ['text/x-gosu-template']
- lexer = GosuLexer()
+ ### ArgType argName| ###
+ (s(r'($type)(\s+)($id)(\s*)(\|)'),
+ bygroups(using(this, state= 'inType'), Text, Name.Variable,
+ Text, Punctuation)),
- def get_tokens_unprocessed(self, text):
- stack = ['templateText']
- for item in self.lexer.get_tokens_unprocessed(text, stack):
- yield item
+ ### Type var
+ (s(r'($type)([ \t]+)($id)'),
+ bygroups(using(this, state='inType'), Text,
+ Name.Variable)),
+ (r'\(', Punctuation, 'insideParen'),
+ (r'\{', Punctuation, 'insideBrace'),
+ (r'.', Text)
+ ],
+ 'insideParen': [
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'insideMethodDeclArgs': [
+ (r'\)', Punctuation, '#pop'),
+ (s(r'($type)(\s+)($id)(\s*)(\))'),
+ bygroups(using(this, state= 'inType'), Text, Name.Variable,
+ Text, Punctuation), '#pop'),
+ include('root'),
+ ],
+ 'insideBrace': [
+ (r'\}', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'inheritance': [
+ (r'\s+', Text), #Whitespace
+ (r':|,', Punctuation),
+ (r'(?:(\w+)(::))?(\w+)',
+ bygroups(Name.Namespace, Punctuation, Name.Class)),
+ (r'{', Punctuation, '#pop')
+ ],
+ 'using': [
+ (r'[ \t]+', Text), # consume whitespaces
+ (r'(\[)(\w+)(\])',
+ bygroups(Punctuation, Comment.Special, Punctuation)), #ffi
+ (r'(\")?([\w\.]+)(\")?',
+ bygroups(Punctuation, Name.Namespace, Punctuation)), #podname
+ (r'::', Punctuation, 'usingClass'),
+ (r'', Text, '#pop')
+ ],
+ 'usingClass': [
+ (r'[ \t]+', Text), # consume whitespaces
+ (r'(as)(\s+)(\w+)',
+ bygroups(Keyword.Declaration, Text, Name.Class), '#pop:2'),
+ (r'[\w\$]+', Name.Class),
+ (r'', Text, '#pop:2') # jump out to root state
+ ],
+ 'facet': [
+ (r'\s+', Text),
+ (r'{', Punctuation, 'facetFields'),
+ (r'', Text, '#pop')
+ ],
+ 'facetFields': [
+ include('comments'),
+ include('literals'),
+ include('operators'),
+ (r'\s+', Text),
+ (r'(\s*)(\w+)(\s*)(=)', bygroups(Text, Name, Text, Operator)),
+ (r'}', Punctuation, '#pop'),
+ (r'.', Text)
+ ],
+ }
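
A brief aside before the dotnet.py diff: the s() helper at the top of FantomLexer expands named sub-patterns with string.Template before the rules are compiled. The standalone sketch below is illustrative only and not part of the patch; only the 'id' and 'type' sub-patterns are taken from the hunk above.

# Standalone sketch of the Template-substitution idiom used by FantomLexer.s().
import re
from string import Template

def s(tpl):
    # expand the $id / $type placeholders into one regex string
    return Template(tpl).substitute(
        id=r'[a-zA-Z_][a-zA-Z0-9_]*',
        type=r'(?:\[|[a-zA-Z_]|\|)[:\w_\[\]\|\->\?]*?',
    )

# the 'Type var := val' rule from the lexer, reduced to a plain regex
pattern = re.compile(s(r'($type)([ \t]+)($id)(\s*)(:=)'))
print(pattern.match('Str name :=').groups())   # ('Str', ' ', 'name', ' ', ':=')
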
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index f78df352..ae2dbc64 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -5,7 +5,7 @@
Lexers for .net languages.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
@@ -92,7 +92,7 @@ class CSharpLexer(RegexLexer):
(r'\n', Text),
(r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
(r'[{}]', Punctuation),
- (r'@"(\\\\|\\"|[^"])*"', String),
+ (r'@"(""|[^"])*"', String),
(r'"(\\\\|\\"|[^"\n])*["\n]', String),
(r"'\\.'|'[^\\]'", String.Char),
(r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
@@ -111,10 +111,12 @@ class CSharpLexer(RegexLexer):
r'ref|return|sealed|sizeof|stackalloc|static|'
r'switch|this|throw|true|try|typeof|'
r'unchecked|unsafe|virtual|void|while|'
- r'get|set|new|partial|yield|add|remove|value)\b', Keyword),
+ r'get|set|new|partial|yield|add|remove|value|alias|ascending|'
+ r'descending|from|group|into|orderby|select|where|'
+ r'join|equals)\b', Keyword),
(r'(global)(::)', bygroups(Keyword, Punctuation)),
- (r'(bool|byte|char|decimal|double|float|int|long|object|sbyte|'
- r'short|string|uint|ulong|ushort)\b\??', Keyword.Type),
+ (r'(bool|byte|char|decimal|double|dynamic|float|int|long|object|'
+ r'sbyte|short|string|uint|ulong|ushort|var)\b\??', Keyword.Type),
(r'(class|struct)(\s+)', bygroups(Keyword, Text), 'class'),
(r'(namespace|using)(\s+)', bygroups(Keyword, Text), 'namespace'),
(cs_ident, Name),
@@ -169,20 +171,20 @@ class NemerleLexer(RegexLexer):
flags = re.MULTILINE | re.DOTALL | re.UNICODE
- # for the range of allowed unicode characters in identifiers,
- # see http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
-
- levels = {
- 'none': '@?[_a-zA-Z][a-zA-Z0-9_]*',
- 'basic': ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
- '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
- uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
- 'full': ('@?(?:_|[^' +
- _escape(uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl')) + '])'
- + '[^' + _escape(uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo',
- 'Nl', 'Nd', 'Pc', 'Cf', 'Mn',
- 'Mc')) + ']*'),
- }
+ # for the range of allowed unicode characters in identifiers, see
+ # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
+
+ levels = dict(
+ none = '@?[_a-zA-Z][a-zA-Z0-9_]*',
+ basic = ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
+ '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
+ uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
+ full = ('@?(?:_|[^' + _escape(uni.allexcept('Lu', 'Ll', 'Lt', 'Lm',
+ 'Lo', 'Nl')) + '])'
+ + '[^' + _escape(uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo',
+ 'Nl', 'Nd', 'Pc', 'Cf', 'Mn',
+ 'Mc')) + ']*'),
+ )
tokens = {}
token_variants = True
@@ -199,7 +201,7 @@ class NemerleLexer(RegexLexer):
(r'[^\S\n]+', Text),
(r'\\\n', Text), # line continuation
(r'//.*?\n', Comment.Single),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'/[*].*?[*]/', Comment.Multiline),
(r'\n', Text),
(r'\$\s*"', String, 'splice-string'),
(r'\$\s*<#', String, 'splice-string2'),
@@ -209,15 +211,16 @@ class NemerleLexer(RegexLexer):
(r'\]\>', Keyword),
# quasiquotation only
- (r'\$' + cs_ident, Name),
- (r'(\$)(\()', bygroups(Name, Punctuation), 'splice-string-content'),
+ (r'\$' + cs_ident, Name),
+ (r'(\$)(\()', bygroups(Name, Punctuation),
+ 'splice-string-content'),
(r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
(r'[{}]', Punctuation),
- (r'@"(\\\\|\\"|[^"])*"', String),
+ (r'@"(""|[^"])*"', String),
(r'"(\\\\|\\"|[^"\n])*["\n]', String),
(r"'\\.'|'[^\\]'", String.Char),
- (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?", Number),
(r'#[ \t]*(if|endif|else|elif|define|undef|'
r'line|error|warning|region|endregion|pragma)\b.*?\n',
@@ -256,7 +259,7 @@ class NemerleLexer(RegexLexer):
('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
],
'splice-string': [
- (r'[^"$]', String),
+ (r'[^"$]', String),
(r'\$' + cs_ident, Name),
(r'(\$)(\()', bygroups(Name, Punctuation),
'splice-string-content'),
@@ -264,7 +267,7 @@ class NemerleLexer(RegexLexer):
(r'"', String, '#pop')
],
'splice-string2': [
- (r'[^#<>$]', String),
+ (r'[^#<>$]', String),
(r'\$' + cs_ident, Name),
(r'(\$)(\()', bygroups(Name, Punctuation),
'splice-string-content'),
@@ -278,8 +281,9 @@ class NemerleLexer(RegexLexer):
],
'splice-string-content': [
(r'if|match', Keyword),
- (r'[~!%^&*+=|\[\]:;,.<>/?-]', Punctuation),
- (cs_ident, Name),
+ (r'[~!%^&*+=|\[\]:;,.<>/?-\\"$ ]', Punctuation),
+ (cs_ident, Name),
+ (r'\d+', Number),
(r'\(', Punctuation, '#push'),
(r'\)', Punctuation, '#pop')
]
@@ -335,10 +339,10 @@ class BooLexer(RegexLexer):
r'matrix|max|min|normalArrayIndexing|print|property|range|'
r'rawArrayIndexing|required|typeof|unchecked|using|'
r'yieldAll|zip)\b', Name.Builtin),
- ('"""(\\\\|\\"|.*?)"""', String.Double),
- ('"(\\\\|\\"|[^"]*?)"', String.Double),
- ("'(\\\\|\\'|[^']*?)'", String.Single),
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ (r'"""(\\\\|\\"|.*?)"""', String.Double),
+ (r'"(\\\\|\\"|[^"]*?)"', String.Double),
+ (r"'(\\\\|\\'|[^']*?)'", String.Single),
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
(r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
(r'[0-9][0-9\.]*(m|ms|d|h|s)', Number),
(r'0\d+', Number.Oct),
@@ -581,7 +585,7 @@ class FSharpLexer(RegexLexer):
Name.Namespace, 'dotted'),
(r'\b([A-Z][A-Za-z0-9_\']*)', Name.Class),
(r'//.*?\n', Comment.Single),
- (r'\(\*', Comment, 'comment'),
+ (r'\(\*(?!\))', Comment, 'comment'),
(r'\b(%s)\b' % '|'.join(keywords), Keyword),
(r'(%s)' % '|'.join(keyopts), Operator),
(r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
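
An aside on the string-literal hunks above (CSharpLexer and NemerleLexer): C# verbatim strings escape a quote by doubling it rather than with a backslash, which is what the new @"(""|[^"])*" pattern accepts. A standalone check, illustrative only:

# Compare the old and new verbatim-string regexes from the hunks above.
import re

old = re.compile(r'@"(\\\\|\\"|[^"])*"')
new = re.compile(r'@"(""|[^"])*"')

src = '@"say ""hi"" now"'
print(old.match(src).group())   # stops early: @"say "
print(new.match(src).group())   # whole literal: @"say ""hi"" now"
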
diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py
index 3462158e..5f710837 100644
--- a/pygments/lexers/functional.py
+++ b/pygments/lexers/functional.py
@@ -5,7 +5,7 @@
Lexers for functional languages.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -15,10 +15,9 @@ from pygments.lexer import Lexer, RegexLexer, bygroups, include, do_insertions
from pygments.token import Text, Comment, Operator, Keyword, Name, \
String, Number, Punctuation, Literal, Generic, Error
-
__all__ = ['SchemeLexer', 'CommonLispLexer', 'HaskellLexer',
'LiterateHaskellLexer', 'SMLLexer', 'OcamlLexer', 'ErlangLexer',
- 'ErlangShellLexer', 'NewLispLexer']
+ 'ErlangShellLexer', 'OpaLexer', 'CoqLexer', 'NewLispLexer']
class SchemeLexer(RegexLexer):
@@ -942,7 +941,7 @@ class ErlangLexer(RegexLexer):
name = 'Erlang'
aliases = ['erlang']
- filenames = ['*.erl', '*.hrl']
+ filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
mimetypes = ['text/x-erlang']
keywords = [
@@ -1080,27 +1079,547 @@ class ErlangShellLexer(Lexer):
yield item
+class OpaLexer(RegexLexer):
+ """
+ Lexer for the Opa language (http://opalang.org).
+
+ *New in Pygments 1.5.*
+ """
+
+ name = 'Opa'
+ aliases = ['opa']
+ filenames = ['*.opa']
+ mimetypes = ['text/x-opa']
+
+ # most of these aren't strictly keywords
+ # but if you color only real keywords, you might just
+ # as well not color anything
+ keywords = [
+ 'and', 'as', 'begin', 'css', 'database', 'db', 'do', 'else', 'end',
+ 'external', 'forall', 'if', 'import', 'match', 'package', 'parser',
+ 'rec', 'server', 'then', 'type', 'val', 'with', 'xml_parser'
+ ]
+
+ # matches both stuff and `stuff`
+ ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
+
+ op_re = r'[.=\-<>,@~%/+?*&^!]'
+ punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
+ # because they are also used for inserts
+
+ tokens = {
+ # copied from the caml lexer, should be adapted
+ 'escape-sequence': [
+ (r'\\[\\\"\'ntr}]', String.Escape),
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ ],
+
+ # factorizing these rules, because they are inserted many times
+ 'comments': [
+ (r'/\*', Comment, 'nested-comment'),
+ (r'//.*?$', Comment),
+ ],
+ 'comments-and-spaces': [
+ include('comments'),
+ (r'\s+', Text),
+ ],
+
+ 'root': [
+ include('comments-and-spaces'),
+ # keywords
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ # directives
+ # we could parse the actual set of directives instead of anything
+ # starting with @, but this is troublesome
+ # because it needs to be adjusted all the time
+ # and assuming we parse only sources that compile, it is useless
+ (r'@'+ident_re+r'\b', Name.Builtin.Pseudo),
+
+ # number literals
+ (r'-?.[\d]+([eE][+\-]?\d+)', Number.Float),
+ (r'-?\d+.\d*([eE][+\-]?\d+)', Number.Float),
+ (r'-?\d+[eE][+\-]?\d+', Number.Float),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[bB][01]+', Number.Binary),
+ (r'\d+', Number.Integer),
+ # color literals
+ (r'#[\da-fA-F]{3,6}', Number.Integer),
+
+ # string literals
+ (r'"', String.Double, 'string'),
+ # char literal, should be checked because this is the regexp from
+ # the caml lexer
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
+ String.Char),
+
+ # this is meant to deal with embedded exprs in strings
+ # every time we find a '}' we pop a state so that if we were
+ # inside a string, we are back in the string state
+ # as a consequence, we must also push a state every time we find a
+ # '{' or else we will have errors when parsing {} for instance
+ (r'{', Operator, '#push'),
+ (r'}', Operator, '#pop'),
+
+ # html literals
+            # this is much stricter than the actual parser,
+ # since a<b would not be parsed as html
+ # but then again, the parser is way too lax, and we can't hope
+ # to have something as tolerant
+ (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
+
+ # db path
+ # matching the '[_]' in '/a[_]' because it is a part
+ # of the syntax of the db path definition
+            # unfortunately, I don't know how to match the ']' in
+ # /a[1], so this is somewhat inconsistent
+ (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
+ # putting the same color on <- as on db path, since
+ # it can be used only to mean Db.write
+ (r'<-(?!'+op_re+r')', Name.Variable),
+
+ # 'modules'
+ # although modules are not distinguished by their names as in caml
+            # the standard library seems to follow the convention that only
+            # modules are capitalized
+ (r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
+
+ # operators
+ # = has a special role because this is the only
+            # way to syntactically distinguish binding constructions
+ # unfortunately, this colors the equal in {x=2} too
+ (r'=(?!'+op_re+r')', Keyword),
+ (r'(%s)+' % op_re, Operator),
+ (r'(%s)+' % punc_re, Operator),
+
+ # coercions
+ (r':', Operator, 'type'),
+ # type variables
+ # we need this rule because we don't parse specially type
+ # definitions so in "type t('a) = ...", "'a" is parsed by 'root'
+ ("'"+ident_re, Keyword.Type),
+
+ # id literal, #something, or #{expr}
+ (r'#'+ident_re, String.Single),
+ (r'#(?={)', String.Single),
+
+ # identifiers
+            # this avoids coloring '2' in 'a2' as an integer
+ (ident_re, Text),
+
+ # default, not sure if that is needed or not
+ # (r'.', Text),
+ ],
+
+ # it is quite painful to have to parse types to know where they end
+ # this is the general rule for a type
+ # a type is either:
+ # * -> ty
+ # * type-with-slash
+ # * type-with-slash -> ty
+ # * type-with-slash (, type-with-slash)+ -> ty
+ #
+ # the code is pretty funky in here, but this code would roughly
+ # translate in caml to:
+ # let rec type stream =
+ # match stream with
+ # | [< "->"; stream >] -> type stream
+ # | [< ""; stream >] ->
+ # type_with_slash stream
+ # type_lhs_1 stream;
+ # and type_1 stream = ...
+ 'type': [
+ include('comments-and-spaces'),
+ (r'->', Keyword.Type),
+ (r'', Keyword.Type, ('#pop', 'type-lhs-1', 'type-with-slash')),
+ ],
+
+ # parses all the atomic or closed constructions in the syntax of type
+ # expressions: record types, tuple types, type constructors, basic type
+ # and type variables
+ 'type-1': [
+ include('comments-and-spaces'),
+ (r'\(', Keyword.Type, ('#pop', 'type-tuple')),
+ (r'~?{', Keyword.Type, ('#pop', 'type-record')),
+ (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
+ (ident_re, Keyword.Type, '#pop'),
+ ("'"+ident_re, Keyword.Type),
+ # this case is not in the syntax but sometimes
+ # we think we are parsing types when in fact we are parsing
+ # some css, so we just pop the states until we get back into
+ # the root state
+ (r'', Keyword.Type, '#pop'),
+ ],
+
+ # type-with-slash is either:
+ # * type-1
+ # * type-1 (/ type-1)+
+ 'type-with-slash': [
+ include('comments-and-spaces'),
+ (r'', Keyword.Type, ('#pop', 'slash-type-1', 'type-1')),
+ ],
+ 'slash-type-1': [
+ include('comments-and-spaces'),
+ ('/', Keyword.Type, ('#pop', 'type-1')),
+ # same remark as above
+ (r'', Keyword.Type, '#pop'),
+ ],
+
+ # we go in this state after having parsed a type-with-slash
+ # while trying to parse a type
+ # and at this point we must determine if we are parsing an arrow
+ # type (in which case we must continue parsing) or not (in which
+ # case we stop)
+ 'type-lhs-1': [
+ include('comments-and-spaces'),
+ (r'->', Keyword.Type, ('#pop', 'type')),
+ (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
+ (r'', Keyword.Type, '#pop'),
+ ],
+ 'type-arrow': [
+ include('comments-and-spaces'),
+            # the lookahead here allows parsing f(x : int, y : float -> truc)
+ # correctly
+ (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
+ (r'->', Keyword.Type, ('#pop', 'type')),
+ # same remark as above
+ (r'', Keyword.Type, '#pop'),
+ ],
+
+ # no need to do precise parsing for tuples and records
+ # because they are closed constructions, so we can simply
+ # find the closing delimiter
+        # note that this would not work if the source
+ # contained identifiers like `{)` (although it could be patched
+ # to support it)
+ 'type-tuple': [
+ include('comments-and-spaces'),
+ (r'[^\(\)/*]+', Keyword.Type),
+ (r'[/*]', Keyword.Type),
+ (r'\(', Keyword.Type, '#push'),
+ (r'\)', Keyword.Type, '#pop'),
+ ],
+ 'type-record': [
+ include('comments-and-spaces'),
+ (r'[^{}/*]+', Keyword.Type),
+ (r'[/*]', Keyword.Type),
+ (r'{', Keyword.Type, '#push'),
+ (r'}', Keyword.Type, '#pop'),
+ ],
+
+# 'type-tuple': [
+# include('comments-and-spaces'),
+# (r'\)', Keyword.Type, '#pop'),
+# (r'', Keyword.Type, ('#pop', 'type-tuple-1', 'type-1')),
+# ],
+# 'type-tuple-1': [
+# include('comments-and-spaces'),
+# (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,)
+# (r',', Keyword.Type, 'type-1'),
+# ],
+# 'type-record':[
+# include('comments-and-spaces'),
+# (r'}', Keyword.Type, '#pop'),
+# (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
+# ],
+# 'type-record-field-expr': [
+#
+# ],
+
+ 'nested-comment': [
+ (r'[^/*]+', Comment),
+ (r'/\*', Comment, '#push'),
+ (r'\*/', Comment, '#pop'),
+ (r'[/*]', Comment),
+ ],
+
+        # the copy-pasting between string and single-string
+        # is kinda sad. Is there a way to avoid that?
+ 'string': [
+ (r'[^\\"{]+', String.Double),
+ (r'"', String.Double, '#pop'),
+ (r'{', Operator, 'root'),
+ include('escape-sequence'),
+ ],
+ 'single-string': [
+ (r'[^\\\'{]+', String.Double),
+ (r'\'', String.Double, '#pop'),
+ (r'{', Operator, 'root'),
+ include('escape-sequence'),
+ ],
+
+ # all the html stuff
+ # can't really reuse some existing html parser
+ # because we must be able to parse embedded expressions
+
+ # we are in this state after someone parsed the '<' that
+ # started the html literal
+ 'html-open-tag': [
+ (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
+ (r'>', String.Single, ('#pop', 'html-content')),
+ ],
+
+ # we are in this state after someone parsed the '</' that
+ # started the end of the closing tag
+ 'html-end-tag': [
+ # this is a star, because </> is allowed
+ (r'[\w\-:]*>', String.Single, '#pop'),
+ ],
+
+ # we are in this state after having parsed '<ident(:ident)?'
+ # we thus parse a possibly empty list of attributes
+ 'html-attr': [
+ (r'\s+', Text),
+ (r'[\w\-:]+=', String.Single, 'html-attr-value'),
+ (r'/>', String.Single, '#pop'),
+ (r'>', String.Single, ('#pop', 'html-content')),
+ ],
+
+ 'html-attr-value': [
+ (r"'", String.Single, ('#pop', 'single-string')),
+ (r'"', String.Single, ('#pop', 'string')),
+ (r'#'+ident_re, String.Single, '#pop'),
+ (r'#(?={)', String.Single, ('#pop', 'root')),
+ (r'{', Operator, ('#pop', 'root')), # this is a tail call!
+ ],
+
+ # we should probably deal with '\' escapes here
+ 'html-content': [
+ (r'<!--', Comment, 'html-comment'),
+ (r'</', String.Single, ('#pop', 'html-end-tag')),
+ (r'<', String.Single, 'html-open-tag'),
+ (r'{', Operator, 'root'),
+ (r'.|\s+', String.Single),
+ ],
+
+ 'html-comment': [
+ (r'-->', Comment, '#pop'),
+ (r'[^\-]+|-', Comment),
+ ],
+ }
+
+
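
A short aside before CoqLexer: the comments in OpaLexer's 'root' state describe why '{' pushes a state and '}' pops one, so that an embedded expression inside a string returns to the string state when it closes. The toy lexer below is illustrative only, not part of the patch, and isolates that push/pop pattern using the documented RegexLexer state machinery:

# Minimal demonstration of the '{' push / '}' pop idiom described above.
from pygments.lexer import RegexLexer
from pygments.token import String, Operator, Number, Text

class ToyInterpLexer(RegexLexer):
    name = 'ToyInterp'
    tokens = {
        'root': [
            (r'"', String, 'string'),
            (r'{', Operator, '#push'),   # keep bare braces balanced
            (r'}', Operator, '#pop'),
            (r'\d+', Number),
            (r'[^"{}\d]+', Text),
        ],
        'string': [
            (r'[^"{]+', String),
            (r'{', Operator, 'root'),    # embedded expression starts here
            (r'"', String, '#pop'),
        ],
    }

# the nested braces pop back into the string state, not out of it
for token, value in ToyInterpLexer().get_tokens('"a {1 + {2}} b"'):
    print(token, repr(value))
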
+class CoqLexer(RegexLexer):
+ """
+ For the `Coq <http://coq.inria.fr/>`_ theorem prover.
+
+ *New in Pygments 1.5.*
+ """
+
+ name = 'Coq'
+ aliases = ['coq']
+ filenames = ['*.v']
+ mimetypes = ['text/x-coq']
+
+ keywords1 = [
+ # Vernacular commands
+ 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
+ 'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
+ 'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
+ 'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
+ 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
+ 'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
+ 'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
+ 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
+ 'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
+ 'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
+ 'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
+ 'outside',
+ ]
+ keywords2 = [
+ # Gallina
+ 'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
+ 'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
+ 'for', 'of', 'nosimpl', 'with', 'as',
+ ]
+ keywords3 = [
+ # Sorts
+ 'Type', 'Prop',
+ ]
+ keywords4 = [
+ # Tactics
+ 'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
+ 'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
+ 'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
+ 'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
+ 'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
+ 'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
+ 'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
+ 'split', 'left', 'right', 'autorewrite',
+ ]
+ keywords5 = [
+ # Terminators
+ 'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
+ 'assumption', 'solve', 'contradiction', 'discriminate',
+ ]
+ keywords6 = [
+ # Control
+ 'do', 'last', 'first', 'try', 'idtac', 'repeat',
+ ]
+ # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
+ # 'downto', 'else', 'end', 'exception', 'external', 'false',
+ # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
+ # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
+ # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
+ # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
+ # 'type', 'val', 'virtual', 'when', 'while', 'with'
+ keyopts = [
+ '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
+ r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
+ '<-', '=', '>', '>]', '>}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
+ r'\[\|', ']', '_', '`', '{', '{<', r'\|', r'\|]', '}', '~', '=>',
+ r'/\\', r'\\/',
+ u'Π', u'λ',
+ ]
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
+ word_operators = ['and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or']
+ prefix_syms = r'[!?~]'
+ infix_syms = r'[=<>@^|&+\*/$%-]'
+ primitives = ['unit', 'int', 'float', 'bool', 'string', 'char', 'list',
+ 'array']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'\(\*', Comment, 'comment'),
+ (r'\b(%s)\b' % '|'.join(keywords1), Keyword.Namespace),
+ (r'\b(%s)\b' % '|'.join(keywords2), Keyword),
+ (r'\b(%s)\b' % '|'.join(keywords3), Keyword.Type),
+ (r'\b(%s)\b' % '|'.join(keywords4), Keyword),
+ (r'\b(%s)\b' % '|'.join(keywords5), Keyword.Pseudo),
+ (r'\b(%s)\b' % '|'.join(keywords6), Keyword.Reserved),
+ (r'\b([A-Z][A-Za-z0-9_\']*)(?=\s*\.)',
+ Name.Namespace, 'dotted'),
+ (r'\b([A-Z][A-Za-z0-9_\']*)', Name.Class),
+ (r'(%s)' % '|'.join(keyopts), Operator),
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+ (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
+ (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+
+ (r"[^\W\d][\w']*", Name),
+
+ (r'\d[\d_]*', Number.Integer),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Binary),
+ (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+ String.Char),
+ (r"'.'", String.Char),
+ (r"'", Keyword), # a stray quote is another syntax element
+
+ (r'"', String.Double, 'string'),
+
+ (r'[~?][a-z][\w\']*:', Name.Variable),
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'string': [
+ (r'[^"]+', String.Double),
+ (r'""', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'dotted': [
+ (r'\s+', Text),
+ (r'\.', Punctuation),
+ (r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][A-Za-z0-9_\']*', Name.Class, '#pop'),
+ (r'[a-z][a-z0-9_\']*', Name, '#pop'),
+ (r'', Text, '#pop')
+ ],
+ }
+
+ def analyse_text(text):
+ if text.startswith('(*'):
+ return True
+
+
class NewLispLexer(RegexLexer):
"""
For `newLISP. <www.newlisp.org>`_ source code (version 10.3.0).
+ *New in Pygments 1.5.*
"""
-
+
name = 'NewLisp'
- aliases = ['newLISP', 'newlisp']
+ aliases = ['newlisp']
filenames = ['*.lsp', '*.nl']
mimetypes = ['text/x-newlisp', 'application/x-newlisp']
-
+
flags = re.IGNORECASE | re.MULTILINE | re.UNICODE
# list of built-in functions for newLISP version 10.3
builtins = [
- '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++', '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10', '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7', '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs', 'acos', 'acosh', 'add', 'address', 'amb', 'and', 'and', 'append-file', 'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin', 'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec', 'base64-enc', 'bayes-query', 'bayes-train', 'begin', 'begin', 'begin', 'beta', 'betai', 'bind', 'binomial', 'bits', 'callback', 'case', 'case', 'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean', 'close', 'command-event', 'cond', 'cond', 'cond', 'cons', 'constant', 'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count', 'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry', 'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec', 'def-new', 'default', 'define-macro', 'define-macro', 'define', 'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device', 'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while', 'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup', 'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event', 'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand', 'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter', 'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt', 'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln', 'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string', 'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc', 'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert', 'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error', 'last', 'legal?', 'length', 'let', 'let', 'let', 'letex', 'letn', 'letn', 'letn', 'list?', 'list', 'load', 'local', 'log', 'lookup', 'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat', 'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply', 'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error', 'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local', 'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping', 'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select', 'net-send-to', 'net-send-udp', 'net-send', 'net-service', 'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper', 'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack', 'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop', 'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print', 'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event', 'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand', 'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file', 'read-key', 'read-line', 'read-utf8', 'read', 'reader-event', 'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex', 'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse', 'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self', 'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all', 'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent', 'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt', 'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?', 'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term', 'throw-error', 
'throw', 'time-of-day', 'time', 'timer', 'title-case', 'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?', 'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until', 'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while', 'write', 'write-char', 'write-file', 'write-line', 'write', 'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?', "unless"
+ '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++',
+ '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10',
+ '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7',
+ '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs',
+ 'acos', 'acosh', 'add', 'address', 'amb', 'and', 'and', 'append-file',
+ 'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin',
+ 'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec',
+ 'base64-enc', 'bayes-query', 'bayes-train', 'begin', 'begin', 'begin',
+ 'beta', 'betai', 'bind', 'binomial', 'bits', 'callback', 'case', 'case',
+ 'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean',
+ 'close', 'command-event', 'cond', 'cond', 'cond', 'cons', 'constant',
+ 'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count',
+ 'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry',
+ 'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec',
+ 'def-new', 'default', 'define-macro', 'define-macro', 'define',
+ 'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device',
+ 'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while',
+ 'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup',
+ 'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event',
+ 'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand',
+ 'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter',
+ 'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt',
+ 'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln',
+ 'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string',
+ 'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc',
+ 'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert',
+ 'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error',
+ 'last', 'legal?', 'length', 'let', 'let', 'let', 'letex', 'letn',
+ 'letn', 'letn', 'list?', 'list', 'load', 'local', 'log', 'lookup',
+ 'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat',
+ 'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply',
+ 'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error',
+ 'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local',
+ 'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping',
+ 'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select',
+ 'net-send-to', 'net-send-udp', 'net-send', 'net-service',
+ 'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper',
+ 'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack',
+ 'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop',
+ 'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print',
+ 'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event',
+ 'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand',
+ 'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file',
+ 'read-key', 'read-line', 'read-utf8', 'read', 'reader-event',
+ 'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex',
+ 'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse',
+ 'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self',
+ 'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all',
+ 'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent',
+ 'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt',
+ 'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?',
+ 'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term',
+ 'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case',
+ 'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?',
+ 'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until',
+ 'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while',
+ 'write', 'write-char', 'write-file', 'write-line', 'write',
+ 'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?',
]
# valid names
valid_name = r'([a-zA-Z0-9!$%&*+.,/<=>?@^_~|-])+|(\[.*?\])+'
-
+
tokens = {
'root': [
# shebang
@@ -1115,39 +1634,37 @@ class NewLispLexer(RegexLexer):
# strings, symbols and characters
(r'"(\\\\|\\"|[^"])*"', String),
-
+
# braces
(r"{", String, "bracestring"),
-
+
# [text] ... [/text] delimited strings
(r'\[text\]*', String, "tagstring"),
-
+
# 'special' operators...
(r"('|:)", Operator),
-
+
# highlight the builtins
- ('(%s)' % '|'.join([
- re.escape(entry) + ' ' for entry in builtins]),
- Keyword
- ),
-
+ ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
+ Keyword),
+
# the remaining functions
(r'(?<=\()' + valid_name, Name.Variable),
# the remaining variables
(valid_name, String.Symbol),
-
+
# parentheses
(r'(\(|\))', Punctuation),
],
-
+
# braced strings...
'bracestring': [
("{", String, "#push"),
("}", String, "#pop"),
("[^{}]+", String),
],
-
+
# tagged [text]...[/text] delimited strings...
'tagstring': [
(r'(?s)(.*?)(\[/text\])', String, '#pop'),
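
One more aside before the hdl.py diff: the builtin-highlighting rule above now appends a \b word boundary instead of a literal space, so a builtin is still recognised before ')' or at end of line without also matching longer names. Illustrative comparison only:

# Old trailing-space pattern vs. new word-boundary pattern, for one builtin.
import re

old = re.compile(re.escape('sin') + ' ')
new = re.compile(re.escape('sin') + r'\b')

for text in ('sin x', 'sin)', 'sinh x'):
    print(text, bool(old.match(text)), bool(new.match(text)))
# sin x True True
# sin) False True
# sinh x False False
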
diff --git a/pygments/lexers/hdl.py b/pygments/lexers/hdl.py
index b176cac1..0e97df9e 100644
--- a/pygments/lexers/hdl.py
+++ b/pygments/lexers/hdl.py
@@ -5,18 +5,16 @@
Lexers for hardware descriptor languages.
- :copyright: Copyright 2010 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups
+from pygments.lexer import RegexLexer, bygroups
from pygments.token import \
Text, Comment, Operator, Keyword, Name, String, Number, Punctuation, \
Error
-__all__ = ['VerilogLexer']
+__all__ = ['VerilogLexer', 'SystemVerilogLexer']
class VerilogLexer(RegexLexer):
@@ -27,7 +25,7 @@ class VerilogLexer(RegexLexer):
"""
name = 'verilog'
aliases = ['v']
- filenames = ['*.v', '*.sv']
+ filenames = ['*.v']
mimetypes = ['text/x-verilog']
#: optional Comment or Whitespace
@@ -92,8 +90,143 @@ class VerilogLexer(RegexLexer):
r'\$showscopes|\$showvariables|\$showvars|\$sreadmemb|\$sreadmemh|'
r'\$stime|\$stop|\$strobe|\$time|\$timeformat|\$write)\b', Name.Builtin),
+ (r'(byte|shortint|int|longint|integer|time|'
+ r'bit|logic|reg|'
+ r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor'
+ r'shortreal|real|realtime)\b', Keyword.Type),
+ ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
+ ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'import': [
+ (r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop')
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ # Convention: mark all upper case names as constants
+ if token is Name:
+ if value.isupper():
+ token = Name.Constant
+ yield index, token, value
+
+
+class SystemVerilogLexer(RegexLexer):
+ """
+    Extends the Verilog lexer to recognise all SystemVerilog keywords from the
+    IEEE 1800-2009 standard.
+
+ *New in Pygments 1.5.*
+ """
+ name = 'systemverilog'
+ aliases = ['sv']
+ filenames = ['*.sv', '*.svh']
+ mimetypes = ['text/x-systemverilog']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ tokens = {
+ 'root': [
+ (r'^\s*`define', Comment.Preproc, 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'[{}#@]', Punctuation),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
+ (r'([0-9]+)|(\'b)[0-1]+', Number.Hex), # should be binary
+ (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
+ (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
+ (r'\'[01xz]', Number),
+ (r'\d+[Ll]?', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.;\']', Punctuation),
+ (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant),
+
+ (r'^\s*(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'^\s*(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+
+
+
+ (r'(accept_on|alias|always|always_comb|always_ff|always_latch|'
+ r'and|assert|assign|assume|automatic|before|begin|bind|bins|'
+ r'binsof|bit|break|buf|bufif0|bufif1|byte|case|casex|casez|'
+ r'cell|chandle|checker|class|clocking|cmos|config|const|constraint|'
+ r'context|continue|cover|covergroup|coverpoint|cross|deassign|'
+ r'default|defparam|design|disable|dist|do|edge|else|end|endcase|'
+ r'endchecker|endclass|endclocking|endconfig|endfunction|endgenerate|'
+ r'endgroup|endinterface|endmodule|endpackage|endprimitive|'
+ r'endprogram|endproperty|endsequence|endspecify|endtable|'
+ r'endtask|enum|event|eventually|expect|export|extends|extern|'
+ r'final|first_match|for|force|foreach|forever|fork|forkjoin|'
+ r'function|generate|genvar|global|highz0|highz1|if|iff|ifnone|'
+ r'ignore_bins|illegal_bins|implies|import|incdir|include|'
+ r'initial|inout|input|inside|instance|int|integer|interface|'
+ r'intersect|join|join_any|join_none|large|let|liblist|library|'
+ r'local|localparam|logic|longint|macromodule|matches|medium|'
+ r'modport|module|nand|negedge|new|nexttime|nmos|nor|noshowcancelled|'
+ r'not|notif0|notif1|null|or|output|package|packed|parameter|'
+ r'pmos|posedge|primitive|priority|program|property|protected|'
+ r'pull0|pull1|pulldown|pullup|pulsestyle_ondetect|pulsestyle_onevent|'
+ r'pure|rand|randc|randcase|randsequence|rcmos|real|realtime|'
+ r'ref|reg|reject_on|release|repeat|restrict|return|rnmos|'
+ r'rpmos|rtran|rtranif0|rtranif1|s_always|s_eventually|s_nexttime|'
+ r's_until|s_until_with|scalared|sequence|shortint|shortreal|'
+ r'showcancelled|signed|small|solve|specify|specparam|static|'
+ r'string|strong|strong0|strong1|struct|super|supply0|supply1|'
+ r'sync_accept_on|sync_reject_on|table|tagged|task|this|throughout|'
+ r'time|timeprecision|timeunit|tran|tranif0|tranif1|tri|tri0|'
+ r'tri1|triand|trior|trireg|type|typedef|union|unique|unique0|'
+ r'unsigned|until|until_with|untyped|use|uwire|var|vectored|'
+ r'virtual|void|wait|wait_order|wand|weak|weak0|weak1|while|'
+ r'wildcard|wire|with|within|wor|xnor|xor)\b', Keyword ),
+
+ (r'(`__FILE__|`__LINE__|`begin_keywords|`celldefine|`default_nettype|'
+ r'`define|`else|`elsif|`end_keywords|`endcelldefine|`endif|'
+ r'`ifdef|`ifndef|`include|`line|`nounconnected_drive|`pragma|'
+ r'`resetall|`timescale|`unconnected_drive|`undef|`undefineall)\b',
+ Comment.Preproc ),
+
+ (r'(\$display|\$displayb|\$displayh|\$displayo|\$dumpall|\$dumpfile|'
+ r'\$dumpflush|\$dumplimit|\$dumpoff|\$dumpon|\$dumpports|'
+ r'\$dumpportsall|\$dumpportsflush|\$dumpportslimit|\$dumpportsoff|'
+ r'\$dumpportson|\$dumpvars|\$fclose|\$fdisplay|\$fdisplayb|'
+ r'\$fdisplayh|\$fdisplayo|\$feof|\$ferror|\$fflush|\$fgetc|'
+ r'\$fgets|\$fmonitor|\$fmonitorb|\$fmonitorh|\$fmonitoro|'
+ r'\$fopen|\$fread|\$fscanf|\$fseek|\$fstrobe|\$fstrobeb|\$fstrobeh|'
+ r'\$fstrobeo|\$ftell|\$fwrite|\$fwriteb|\$fwriteh|\$fwriteo|'
+ r'\$monitor|\$monitorb|\$monitorh|\$monitoro|\$monitoroff|'
+ r'\$monitoron|\$plusargs|\$readmemb|\$readmemh|\$rewind|\$sformat|'
+ r'\$sformatf|\$sscanf|\$strobe|\$strobeb|\$strobeh|\$strobeo|'
+ r'\$swrite|\$swriteb|\$swriteh|\$swriteo|\$test|\$ungetc|'
+ r'\$value\$plusargs|\$write|\$writeb|\$writeh|\$writememb|'
+ r'\$writememh|\$writeo)\b' , Name.Builtin ),
+
(r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(byte|shortint|int|longint|interger|time|'
+ (r'(byte|shortint|int|longint|integer|time|'
r'bit|logic|reg|'
r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor'
r'shortreal|real|realtime)\b', Keyword.Type),
@@ -132,4 +265,6 @@ class VerilogLexer(RegexLexer):
token = Name.Constant
yield index, token, value
-
+ def analyse_text(text):
+ if text.startswith('//') or text.startswith('/*'):
+ return 0.5
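
Usage sketch for the hdl.py changes (assumes a checkout with this patch applied; not part of the diff): with '*.sv' removed from VerilogLexer's filenames, filename-based lookup resolves SystemVerilog sources to the new lexer.

from pygments.lexers import get_lexer_for_filename

print(get_lexer_for_filename('core.sv').name)    # systemverilog
print(get_lexer_for_filename('pkg.svh').name)    # systemverilog
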
diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py
new file mode 100644
index 00000000..8d4a74b3
--- /dev/null
+++ b/pygments/lexers/jvm.py
@@ -0,0 +1,664 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.jvm
+ ~~~~~~~~~~~~~~~~~~~
+
+ Pygments lexers for JVM languages.
+
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
+ this
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+
+__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
+ 'GroovyLexer', 'IokeLexer', 'ClojureLexer']
+
+
+class JavaLexer(RegexLexer):
+ """
+ For `Java <http://www.sun.com/java/>`_ source code.
+ """
+
+ name = 'Java'
+ aliases = ['java']
+ filenames = ['*.java']
+ mimetypes = ['text/x-java']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
+ (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
+ r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
+ Keyword),
+ (r'(abstract|const|enum|extends|final|implements|native|private|'
+ r'protected|public|static|strictfp|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Declaration),
+ (r'(boolean|byte|char|double|float|int|long|short|void)\b',
+ Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+ (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
+ (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
+ (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'class': [
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
+ ],
+ }
+
+
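
A quick sanity check on the method-name rule at the top of JavaLexer's 'root' state (illustrative only; assumes the new pygments.lexers.jvm module from this patch is importable): the identifier in front of '(' should come out as Name.Function.

from pygments.lexers.jvm import JavaLexer
from pygments.token import Name

tokens = list(JavaLexer().get_tokens('public static void main(String[] args) {}'))
print((Name.Function, 'main') in tokens)   # True
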
+class ScalaLexer(RegexLexer):
+ """
+ For `Scala <http://www.scala-lang.org>`_ source code.
+ """
+
+ name = 'Scala'
+ aliases = ['scala']
+ filenames = ['*.scala']
+ mimetypes = ['text/x-scala']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ # don't use raw unicode strings!
+ op = u'[-~\\^\\*!%&\\\\<>\\|+=:/?@\u00a6-\u00a7\u00a9\u00ac\u00ae\u00b0-\u00b1\u00b6\u00d7\u00f7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+'
+
+ letter = u'[a-zA-Z\\$_\u00aa\u00b5\u00ba\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u02af\u0370-\u0373\u0376-\u0377\u037b-\u037d\u0386\u0388-\u03f5\u03f7-\u0481\u048a-\u0556\u0561-\u0587\u05d0-\u05f2\u0621-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u097f\u0985-\u09b9\u09bd\u09ce\u09dc-\u09e1\u09f0-\u09f1\u0a05-\u0a39\u0a59-\u0a5e\u0a72-\u0a74\u0a85-\u0ab9\u0abd\u0ad0-\u0ae1\u0b05-\u0b39\u0b3d\u0b5c-\u0b61\u0b71\u0b83-\u0bb9\u0bd0\u0c05-\u0c3d\u0c58-\u0c61\u0c85-\u0cb9\u0cbd\u0cde-\u0ce1\u0d05-\u0d3d\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0eb0\u0eb2-\u0eb3\u0ebd-\u0ec4\u0edc-\u0f00\u0f40-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10fa\u1100-\u135a\u1380-\u138f\u13a0-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u16ee-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u18a8\u18aa-\u191c\u1950-\u19a9\u19c1-\u19c7\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1d00-\u1d2b\u1d62-\u1d77\u1d79-\u1d9a\u1e00-\u1fbc\u1fbe\u1fc2-\u1fcc\u1fd0-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ffc\u2071\u207f\u2102\u2107\u210a-\u2113\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c7c\u2c80-\u2ce4\u2d00-\u2d65\u2d80-\u2dde\u3006-\u3007\u3021-\u3029\u3038-\u303a\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff-\u318e\u31a0-\u31b7\u31f0-\u31ff\u3400-\u4db5\u4e00-\ua014\ua016-\ua48c\ua500-\ua60b\ua610-\ua61f\ua62a-\ua66e\ua680-\ua697\ua722-\ua76f\ua771-\ua787\ua78b-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uac00-\ud7a3\uf900-\ufb1d\ufb1f-\ufb28\ufb2a-\ufd3d\ufd50-\ufdfb\ufe70-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uff6f\uff71-\uff9d\uffa0-\uffdc]'
+
+ upper = u'[A-Z\\$_\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\
+\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]'
+
+ idrest = ur'%s(?:%s|[0-9])*(?:(?<=_)%s)?' % (letter, letter, op)
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
+ (ur"'%s" % idrest, Text.Symbol),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (ur'@%s' % idrest, Name.Decorator),
+ (ur'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|'
+ ur'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|'
+ ur'lazy|match|new|override|pr(?:ivate|otected)'
+ ur'|re(?:quires|turn)|s(?:ealed|uper)|'
+ ur't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\b|'
+ u'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])(\b|(?=\\s)|$)', Keyword),
+ (ur':(?!%s)' % op, Keyword, 'type'),
+ (ur'%s%s\b' % (upper, idrest), Name.Class),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(import|package)(\s+)', bygroups(Keyword, Text), 'import'),
+ (r'(type)(\s+)', bygroups(Keyword, Text), 'type'),
+ (r'""".*?"""', String),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (ur"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+# (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
+# Name.Attribute)),
+ (idrest, Name),
+ (r'`[^`]+`', Name),
+ (r'\[', Operator, 'typeparam'),
+ (r'[\(\)\{\};,.#]', Operator),
+ (op, Operator),
+ (ur'([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?',
+ Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'class': [
+ (ur'(%s|%s|`[^`]+`)(\s*)(\[)' % (idrest, op),
+ bygroups(Name.Class, Text, Operator), 'typeparam'),
+ (r'[\s\n]+', Text),
+ (r'{', Operator, '#pop'),
+ (r'\(', Operator, '#pop'),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (ur'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'),
+ ],
+ 'type': [
+ (r'\s+', Text),
+ (u'<[%:]|>:|[#_\u21D2]|forSome|type', Keyword),
+ (r'([,\);}]|=>|=)([\s\n]*)', bygroups(Operator, Text), '#pop'),
+ (r'[\(\{]', Operator, '#push'),
+ (ur'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)(\[)' %
+ (idrest, op, idrest, op),
+ bygroups(Keyword.Type, Text, Operator), ('#pop', 'typeparam')),
+ (ur'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)$' %
+ (idrest, op, idrest, op),
+ bygroups(Keyword.Type, Text), '#pop'),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (ur'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
+ ],
+ 'typeparam': [
+ (r'[\s\n,]+', Text),
+ (u'<[%:]|=>|>:|[#_\u21D2]|forSome|type', Keyword),
+ (r'([\]\)\}])', Operator, '#pop'),
+ (r'[\(\[\{]', Operator, '#push'),
+ (ur'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
+ ],
+ 'comment': [
+ (r'[^/\*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'import': [
+ (ur'(%s|\.)+' % idrest, Name.Namespace, '#pop')
+ ],
+ }
+
+
+class GosuLexer(RegexLexer):
+ """
+ For Gosu source code.
+
+ *New in Pygments 1.5.*
+ """
+
+ name = 'Gosu'
+ aliases = ['gosu']
+ filenames = ['*.gs', '*.gsx', '*.gsp', '*.vark']
+ mimetypes = ['text/x-gosu']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # modifiers etc.
+ r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
+ (r'(in|as|typeof|statictypeof|typeis|typeas|if|else|foreach|for|'
+ r'index|while|do|continue|break|return|try|catch|finally|this|'
+ r'throw|new|switch|case|default|eval|super|outer|classpath|'
+ r'using)\b', Keyword),
+ (r'(var|delegate|construct|function|private|internal|protected|'
+ r'public|abstract|override|final|static|extends|transient|'
+ r'implements|represents|readonly)\b', Keyword.Declaration),
+ (r'(property\s+)(get|set|)', Keyword.Declaration),
+ (r'(boolean|byte|char|double|float|int|long|short|void|block)\b',
+ Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(true|false|null|NaN|Infinity)\b', Keyword.Constant),
+ (r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
+ bygroups(Keyword.Declaration, Text, Name.Class)),
+ (r'(uses)(\s+)([a-zA-Z0-9_.]+\*?)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace)),
+ (r'"', String, 'string'),
+ (r'(\??[\.#])([a-zA-Z_][a-zA-Z0-9_]*)',
+ bygroups(Operator, Name.Attribute)),
+ (r'(:)([a-zA-Z_][a-zA-Z0-9_]*)',
+ bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
+ (r'and|or|not|[\\~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'templateText': [
+ (r'(\\<)|(\\\$)', String),
+ (r'(<%@\s+)(extends|params)',
+ bygroups(Operator, Name.Decorator), 'stringTemplate'),
+ (r'<%!--.*?--%>', Comment.Multiline),
+ (r'(<%)|(<%=)', Operator, 'stringTemplate'),
+ (r'\$\{', Operator, 'stringTemplateShorthand'),
+ (r'.', String)
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ include('templateText')
+ ],
+ 'stringTemplate': [
+ (r'"', String, 'string'),
+ (r'%>', Operator, '#pop'),
+ include('root')
+ ],
+ 'stringTemplateShorthand': [
+ (r'"', String, 'string'),
+ (r'\{', Operator, 'stringTemplateShorthand'),
+ (r'\}', Operator, '#pop'),
+ include('root')
+ ],
+ }
+
+
+class GosuTemplateLexer(Lexer):
+ """
+ For Gosu templates.
+
+ *New in Pygments 1.5.*
+ """
+
+ name = 'Gosu Template'
+ aliases = ['gst']
+ filenames = ['*.gst']
+ mimetypes = ['text/x-gosu-template']
+ lexer = GosuLexer()
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['templateText']
+ for item in self.lexer.get_tokens_unprocessed(text, stack):
+ yield item
+
+
+class GroovyLexer(RegexLexer):
+ """
+ For `Groovy <http://groovy.codehaus.org/>`_ source code.
+
+ *New in Pygments 1.5.*
+ """
+
+ name = 'Groovy'
+ aliases = ['groovy']
+ filenames = ['*.groovy']
+ mimetypes = ['text/x-groovy']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
+ (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
+ r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
+ Keyword),
+ (r'(abstract|const|enum|extends|final|implements|native|private|'
+ r'protected|public|static|strictfp|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Declaration),
+ (r'(def|boolean|byte|char|double|float|int|long|short|void)\b',
+ Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
+ 'class'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'\$/((?!/\$).)*/\$', String),
+ (r'/(\\\\|\\"|[^/])*/', String),
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+ (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
+ (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
+ (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'class': [
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
+ ],
+ }
+
+
+class IokeLexer(RegexLexer):
+ """
+ For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
+ prototype based programming language) source.
+
+ *New in Pygments 1.4.*
+ """
+ name = 'Ioke'
+ filenames = ['*.ik']
+ aliases = ['ioke', 'ik']
+ mimetypes = ['text/x-iokesrc']
+ tokens = {
+ 'interpolatableText': [
+ (r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
+ r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
+ (r'#{', Punctuation, 'textInterpolationRoot')
+ ],
+
+ 'text': [
+ (r'(?<!\\)"', String, '#pop'),
+ include('interpolatableText'),
+ (r'[^"]', String)
+ ],
+
+ 'documentation': [
+ (r'(?<!\\)"', String.Doc, '#pop'),
+ include('interpolatableText'),
+ (r'[^"]', String.Doc)
+ ],
+
+ 'textInterpolationRoot': [
+ (r'}', Punctuation, '#pop'),
+ include('root')
+ ],
+
+ 'slashRegexp': [
+ (r'(?<!\\)/[oxpniums]*', String.Regex, '#pop'),
+ include('interpolatableText'),
+ (r'\\/', String.Regex),
+ (r'[^/]', String.Regex)
+ ],
+
+ 'squareRegexp': [
+ (r'(?<!\\)][oxpniums]*', String.Regex, '#pop'),
+ include('interpolatableText'),
+ (r'\\]', String.Regex),
+ (r'[^\]]', String.Regex)
+ ],
+
+ 'squareText': [
+ (r'(?<!\\)]', String, '#pop'),
+ include('interpolatableText'),
+ (r'[^\]]', String)
+ ],
+
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+
+ # Comments
+ (r';(.*?)\n', Comment),
+ (r'\A#!(.*?)\n', Comment),
+
+ #Regexps
+ (r'#/', String.Regex, 'slashRegexp'),
+ (r'#r\[', String.Regex, 'squareRegexp'),
+
+ #Symbols
+ (r':[a-zA-Z0-9_!:?]+', String.Symbol),
+ (r'[a-zA-Z0-9_!:?]+:(?![a-zA-Z0-9_!?])', String.Other),
+ (r':"(\\\\|\\"|[^"])*"', String.Symbol),
+
+ #Documentation
+ (r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
+ r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
+ r'|(?<=dsyntax\())[\s\n\r]*"', String.Doc, 'documentation'),
+
+ #Text
+ (r'"', String, 'text'),
+ (r'#\[', String, 'squareText'),
+
+ #Mimic
+ (r'[a-zA-Z0-9_][a-zA-Z0-9!?_:]+(?=\s*=.*mimic\s)', Name.Entity),
+
+ #Assignment
+ (r'[a-zA-Z_][a-zA-Z0-9_!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
+ Name.Variable),
+
+ # keywords
+ (r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
+ r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
+ r'with)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+ # Origin
+ (r'(eval|mimic|print|println)(?![a-zA-Z0-9!:_?])', Keyword),
+
+ # Base
+ (r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
+ r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
+ r'(?![a-zA-Z0-9!:_?])', Keyword),
+
+ # Ground
+ (r'(stackTraceAsText)(?![a-zA-Z0-9!:_?])', Keyword),
+
+ #DefaultBehaviour Literals
+ (r'(dict|list|message|set)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+ #DefaultBehaviour Case
+ (r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
+ r'case:otherwise|case:xor)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+ #DefaultBehaviour Reflection
+ (r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
+ r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
+ r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
+ r'(?![a-zA-Z0-9!:_?])', Keyword),
+
+ #DefaultBehaviour Aspects
+ (r'(after|around|before)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+ # DefaultBehaviour
+ (r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
+ r'(?![a-zA-Z0-9!:_?])', Keyword),
+ (r'(use|destructuring)', Keyword.Reserved),
+
+ #DefaultBehavior BaseBehavior
+ (r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
+ r'documentation|identity|removeCell!|undefineCell)'
+ r'(?![a-zA-Z0-9!:_?])', Keyword),
+
+ #DefaultBehavior Internal
+ (r'(internal:compositeRegexp|internal:concatenateText|'
+ r'internal:createDecimal|internal:createNumber|'
+ r'internal:createRegexp|internal:createText)'
+ r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+ #DefaultBehaviour Conditions
+ (r'(availableRestarts|bind|error\!|findRestart|handle|'
+ r'invokeRestart|rescue|restart|signal\!|warn\!)'
+ r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+ # constants
+ (r'(nil|false|true)(?![a-zA-Z0-9!:_?])', Name.Constant),
+
+ # names
+ (r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
+ r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
+ r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
+ r'Conditions|Definitions|FlowControl|Internal|Literals|'
+ r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
+ r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
+ r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
+ r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
+ r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
+ r'System|Text|Tuple)(?![a-zA-Z0-9!:_?])', Name.Builtin),
+
+ # functions
+ (ur'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
+ ur'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
+ ur'(?![a-zA-Z0-9!:_?])', Name.Function),
+
+ # Numbers
+ (r'-?0[xX][0-9a-fA-F]+', Number.Hex),
+ (r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'-?\d+', Number.Integer),
+
+ (r'#\(', Punctuation),
+
+ # Operators
+ (ur'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
+ ur'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
+ ur'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
+ ur'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
+ ur'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
+ ur'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
+ ur'\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
+ (r'(and|nand|or|xor|nor|return|import)(?![a-zA-Z0-9_!?])',
+ Operator),
+
+ # Punctuation
+ (r'(\`\`|\`|\'\'|\'|\.|\,|@|@@|\[|\]|\(|\)|{|})', Punctuation),
+
+ #kinds
+ (r'[A-Z][a-zA-Z0-9_!:?]*', Name.Class),
+
+ #default cellnames
+ (r'[a-z_][a-zA-Z0-9_!:?]*', Name)
+ ]
+ }
+
+
+class ClojureLexer(RegexLexer):
+ """
+ Lexer for `Clojure <http://clojure.org/>`_ source code.
+
+ *New in Pygments 0.11.*
+ """
+ name = 'Clojure'
+ aliases = ['clojure', 'clj']
+ filenames = ['*.clj']
+ mimetypes = ['text/x-clojure', 'application/x-clojure']
+
+ keywords = [
+ 'fn', 'def', 'defn', 'defmacro', 'defmethod', 'defmulti', 'defn-',
+ 'defstruct', 'if', 'cond', 'let', 'for'
+ ]
+ builtins = [
+ '.', '..',
+ '*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=',
+ 'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
+ 'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
+ 'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float',
+ 'aset-int', 'aset-long', 'aset-short', 'assert', 'assoc', 'await',
+ 'await-for', 'bean', 'binding', 'bit-and', 'bit-not', 'bit-or',
+ 'bit-shift-left', 'bit-shift-right', 'bit-xor', 'boolean', 'branch?',
+ 'butlast', 'byte', 'cast', 'char', 'children', 'class',
+ 'clear-agent-errors', 'comment', 'commute', 'comp', 'comparator',
+ 'complement', 'concat', 'conj', 'cons', 'constantly',
+ 'construct-proxy', 'contains?', 'count', 'create-ns', 'create-struct',
+ 'cycle', 'dec', 'deref', 'difference', 'disj', 'dissoc', 'distinct',
+ 'doall', 'doc', 'dorun', 'doseq', 'dosync', 'dotimes', 'doto',
+ 'double', 'down', 'drop', 'drop-while', 'edit', 'end?', 'ensure',
+ 'eval', 'every?', 'false?', 'ffirst', 'file-seq', 'filter', 'find',
+ 'find-doc', 'find-ns', 'find-var', 'first', 'float', 'flush',
+ 'fnseq', 'frest', 'gensym', 'get-proxy-class', 'get',
+ 'hash-map', 'hash-set', 'identical?', 'identity', 'if-let', 'import',
+ 'in-ns', 'inc', 'index', 'insert-child', 'insert-left', 'insert-right',
+ 'inspect-table', 'inspect-tree', 'instance?', 'int', 'interleave',
+ 'intersection', 'into', 'into-array', 'iterate', 'join', 'key', 'keys',
+ 'keyword', 'keyword?', 'last', 'lazy-cat', 'lazy-cons', 'left',
+ 'lefts', 'line-seq', 'list*', 'list', 'load', 'load-file',
+ 'locking', 'long', 'loop', 'macroexpand', 'macroexpand-1',
+ 'make-array', 'make-node', 'map', 'map-invert', 'map?', 'mapcat',
+ 'max', 'max-key', 'memfn', 'merge', 'merge-with', 'meta', 'min',
+ 'min-key', 'name', 'namespace', 'neg?', 'new', 'newline', 'next',
+ 'nil?', 'node', 'not', 'not-any?', 'not-every?', 'not=', 'ns-imports',
+ 'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers',
+ 'ns-resolve', 'ns-unmap', 'nth', 'nthrest', 'or', 'parse', 'partial',
+ 'path', 'peek', 'pop', 'pos?', 'pr', 'pr-str', 'print', 'print-str',
+ 'println', 'println-str', 'prn', 'prn-str', 'project', 'proxy',
+ 'proxy-mappings', 'quot', 'rand', 'rand-int', 'range', 're-find',
+ 're-groups', 're-matcher', 're-matches', 're-pattern', 're-seq',
+ 'read', 'read-line', 'reduce', 'ref', 'ref-set', 'refer', 'rem',
+ 'remove', 'remove-method', 'remove-ns', 'rename', 'rename-keys',
+ 'repeat', 'replace', 'replicate', 'resolve', 'rest', 'resultset-seq',
+ 'reverse', 'rfirst', 'right', 'rights', 'root', 'rrest', 'rseq',
+ 'second', 'select', 'select-keys', 'send', 'send-off', 'seq',
+ 'seq-zip', 'seq?', 'set', 'short', 'slurp', 'some', 'sort',
+ 'sort-by', 'sorted-map', 'sorted-map-by', 'sorted-set',
+ 'special-symbol?', 'split-at', 'split-with', 'str', 'string?',
+ 'struct', 'struct-map', 'subs', 'subvec', 'symbol', 'symbol?',
+ 'sync', 'take', 'take-nth', 'take-while', 'test', 'time', 'to-array',
+ 'to-array-2d', 'tree-seq', 'true?', 'union', 'up', 'update-proxy',
+ 'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
+ 'vector?', 'when', 'when-first', 'when-let', 'when-not',
+ 'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
+ 'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper', 'ns']
+
+ # valid names for identifiers
+    # well, names just can't consist entirely of numbers
+ # but this should be good enough for now
+
+ # TODO / should divide keywords/symbols into namespace/rest
+ # but that's hard, so just pretend / is part of the name
+ valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
+
+ def _multi_escape(entries):
+ return '(%s)' % ('|'.join(re.escape(entry) + ' ' for entry in entries))
+
+ tokens = {
+ 'root': [
+ # the comments - always starting with semicolon
+ # and going to the end of the line
+ (r';.*$', Comment.Single),
+
+ # whitespaces - usually not relevant
+ (r'[,\s]+', Text),
+
+ # numbers
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+ (r'0x-?[abcdef\d]+', Number.Hex),
+
+ # strings, symbols and characters
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'" + valid_name, String.Symbol),
+ (r"\\(.|[a-z]+)", String.Char),
+
+ # keywords
+ (r':' + valid_name, Name.Constant),
+
+ # special operators
+ (r'~@|[`\'#^~&]', Operator),
+
+ # highlight the keywords
+ (_multi_escape(keywords), Keyword),
+
+ # highlight the builtins
+ (_multi_escape(builtins), Name.Builtin),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Function),
+
+ # find the remaining variables
+ (valid_name, Name.Variable),
+
+ # Clojure accepts vector notation
+ (r'(\[|\])', Punctuation),
+
+ # Clojure accepts map notation
+ (r'(\{|\})', Punctuation),
+
+ # the famous parentheses!
+ (r'(\(|\))', Punctuation),
+ ],
+ }
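
A quick way to exercise the JVM-family lexers added above is to push a sample through Pygments' standard highlight() entry point. The snippet below is only an illustrative sketch: get_lexer_by_name, highlight and TerminalFormatter are the stock Pygments APIs, the 'scala' alias comes from the ScalaLexer defined in this hunk, and the one-line Scala sample is made up for the example.

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    # 'scala' resolves to the ScalaLexer added in the hunk above;
    # the sample source line is purely illustrative.
    sample = 'object Hello { def main(args: Array[String]) = println("hi") }'
    print(highlight(sample, get_lexer_by_name('scala'), TerminalFormatter()))
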
diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py
index de804aa9..89839ab8 100644
--- a/pygments/lexers/math.py
+++ b/pygments/lexers/math.py
@@ -5,7 +5,7 @@
Lexers for math languages.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
index e73154aa..a64022e4 100644
--- a/pygments/lexers/other.py
+++ b/pygments/lexers/other.py
@@ -5,260 +5,133 @@
Lexers for other languages.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
- this, do_insertions, combined
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, combined, ExtendedRegexLexer
from pygments.token import Error, Punctuation, Literal, Token, \
Text, Comment, Operator, Keyword, Name, String, Number, Generic
-from pygments.util import shebang_matches
from pygments.lexers.web import HtmlLexer
-__all__ = ['SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer', 'BrainfuckLexer',
- 'BashLexer', 'BatchLexer', 'BefungeLexer', 'RedcodeLexer',
- 'MOOCodeLexer', 'SmalltalkLexer', 'TcshLexer', 'LogtalkLexer',
- 'GnuplotLexer', 'PovrayLexer', 'AppleScriptLexer',
- 'BashSessionLexer', 'ModelicaLexer', 'RebolLexer', 'ABAPLexer',
- 'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer',
- 'PostScriptLexer', 'AutohotkeyLexer', 'GoodDataCLLexer',
- 'MaqlLexer', 'ProtoBufLexer', 'HybrisLexer', 'AwkLexer',
- 'Cfengine3Lexer']
+# backwards compatibility
+from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
+from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
+ TcshLexer
-line_re = re.compile('.*?\n')
+__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer',
+ 'SmalltalkLexer', 'LogtalkLexer', 'GnuplotLexer', 'PovrayLexer',
+ 'AppleScriptLexer', 'ModelicaLexer', 'RebolLexer', 'ABAPLexer',
+ 'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer', 'PostScriptLexer',
+ 'AutohotkeyLexer', 'GoodDataCLLexer', 'MaqlLexer', 'ProtoBufLexer',
+ 'HybrisLexer', 'AwkLexer', 'Cfengine3Lexer', 'SnobolLexer',
+ 'ECLLexer', 'UrbiscriptLexer', 'OpenEdgeLexer', 'BroLexer']
-class SqlLexer(RegexLexer):
+class ECLLexer(RegexLexer):
"""
- Lexer for Structured Query Language. Currently, this lexer does
- not recognize any special syntax except ANSI SQL.
- """
-
- name = 'SQL'
- aliases = ['sql']
- filenames = ['*.sql']
- mimetypes = ['text/x-sql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'--.*?\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'(ABORT|ABS|ABSOLUTE|ACCESS|ADA|ADD|ADMIN|AFTER|AGGREGATE|'
- r'ALIAS|ALL|ALLOCATE|ALTER|ANALYSE|ANALYZE|AND|ANY|ARE|AS|'
- r'ASC|ASENSITIVE|ASSERTION|ASSIGNMENT|ASYMMETRIC|AT|ATOMIC|'
- r'AUTHORIZATION|AVG|BACKWARD|BEFORE|BEGIN|BETWEEN|BITVAR|'
- r'BIT_LENGTH|BOTH|BREADTH|BY|C|CACHE|CALL|CALLED|CARDINALITY|'
- r'CASCADE|CASCADED|CASE|CAST|CATALOG|CATALOG_NAME|CHAIN|'
- r'CHARACTERISTICS|CHARACTER_LENGTH|CHARACTER_SET_CATALOG|'
- r'CHARACTER_SET_NAME|CHARACTER_SET_SCHEMA|CHAR_LENGTH|CHECK|'
- r'CHECKED|CHECKPOINT|CLASS|CLASS_ORIGIN|CLOB|CLOSE|CLUSTER|'
- r'COALSECE|COBOL|COLLATE|COLLATION|COLLATION_CATALOG|'
- r'COLLATION_NAME|COLLATION_SCHEMA|COLUMN|COLUMN_NAME|'
- r'COMMAND_FUNCTION|COMMAND_FUNCTION_CODE|COMMENT|COMMIT|'
- r'COMMITTED|COMPLETION|CONDITION_NUMBER|CONNECT|CONNECTION|'
- r'CONNECTION_NAME|CONSTRAINT|CONSTRAINTS|CONSTRAINT_CATALOG|'
- r'CONSTRAINT_NAME|CONSTRAINT_SCHEMA|CONSTRUCTOR|CONTAINS|'
- r'CONTINUE|CONVERSION|CONVERT|COPY|CORRESPONTING|COUNT|'
- r'CREATE|CREATEDB|CREATEUSER|CROSS|CUBE|CURRENT|CURRENT_DATE|'
- r'CURRENT_PATH|CURRENT_ROLE|CURRENT_TIME|CURRENT_TIMESTAMP|'
- r'CURRENT_USER|CURSOR|CURSOR_NAME|CYCLE|DATA|DATABASE|'
- r'DATETIME_INTERVAL_CODE|DATETIME_INTERVAL_PRECISION|DAY|'
- r'DEALLOCATE|DECLARE|DEFAULT|DEFAULTS|DEFERRABLE|DEFERRED|'
- r'DEFINED|DEFINER|DELETE|DELIMITER|DELIMITERS|DEREF|DESC|'
- r'DESCRIBE|DESCRIPTOR|DESTROY|DESTRUCTOR|DETERMINISTIC|'
- r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|'
- r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|'
- r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|'
- r'EXCEPT|ESCEPTION|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
- r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|'
- r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|'
- r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|'
- r'GROUP|GROUPING|HANDLER|HAVING|HIERARCHY|HOLD|HOST|IDENTITY|'
- r'IGNORE|ILIKE|IMMEDIATE|IMMUTABLE|IMPLEMENTATION|IMPLICIT|IN|'
- r'INCLUDING|INCREMENT|INDEX|INDITCATOR|INFIX|INHERITS|INITIALIZE|'
- r'INITIALLY|INNER|INOUT|INPUT|INSENSITIVE|INSERT|INSTANTIABLE|'
- r'INSTEAD|INTERSECT|INTO|INVOKER|IS|ISNULL|ISOLATION|ITERATE|JOIN|'
- r'KEY|KEY_MEMBER|KEY_TYPE|LANCOMPILER|LANGUAGE|LARGE|LAST|'
- r'LATERAL|LEADING|LEFT|LENGTH|LESS|LEVEL|LIKE|LIMIT|LISTEN|LOAD|'
- r'LOCAL|LOCALTIME|LOCALTIMESTAMP|LOCATION|LOCATOR|LOCK|LOWER|'
- r'MAP|MATCH|MAX|MAXVALUE|MESSAGE_LENGTH|MESSAGE_OCTET_LENGTH|'
- r'MESSAGE_TEXT|METHOD|MIN|MINUTE|MINVALUE|MOD|MODE|MODIFIES|'
- r'MODIFY|MONTH|MORE|MOVE|MUMPS|NAMES|NATIONAL|NATURAL|NCHAR|'
- r'NCLOB|NEW|NEXT|NO|NOCREATEDB|NOCREATEUSER|NONE|NOT|NOTHING|'
- r'NOTIFY|NOTNULL|NULL|NULLABLE|NULLIF|OBJECT|OCTET_LENGTH|OF|OFF|'
- r'OFFSET|OIDS|OLD|ON|ONLY|OPEN|OPERATION|OPERATOR|OPTION|OPTIONS|'
- r'OR|ORDER|ORDINALITY|OUT|OUTER|OUTPUT|OVERLAPS|OVERLAY|OVERRIDING|'
- r'OWNER|PAD|PARAMETER|PARAMETERS|PARAMETER_MODE|PARAMATER_NAME|'
- r'PARAMATER_ORDINAL_POSITION|PARAMETER_SPECIFIC_CATALOG|'
- r'PARAMETER_SPECIFIC_NAME|PARAMATER_SPECIFIC_SCHEMA|PARTIAL|'
- r'PASCAL|PENDANT|PLACING|PLI|POSITION|POSTFIX|PRECISION|PREFIX|'
- r'PREORDER|PREPARE|PRESERVE|PRIMARY|PRIOR|PRIVILEGES|PROCEDURAL|'
- r'PROCEDURE|PUBLIC|READ|READS|RECHECK|RECURSIVE|REF|REFERENCES|'
- r'REFERENCING|REINDEX|RELATIVE|RENAME|REPEATABLE|REPLACE|RESET|'
- r'RESTART|RESTRICT|RESULT|RETURN|RETURNED_LENGTH|'
- r'RETURNED_OCTET_LENGTH|RETURNED_SQLSTATE|RETURNS|REVOKE|RIGHT|'
- r'ROLE|ROLLBACK|ROLLUP|ROUTINE|ROUTINE_CATALOG|ROUTINE_NAME|'
- r'ROUTINE_SCHEMA|ROW|ROWS|ROW_COUNT|RULE|SAVE_POINT|SCALE|SCHEMA|'
- r'SCHEMA_NAME|SCOPE|SCROLL|SEARCH|SECOND|SECURITY|SELECT|SELF|'
- r'SENSITIVE|SERIALIZABLE|SERVER_NAME|SESSION|SESSION_USER|SET|'
- r'SETOF|SETS|SHARE|SHOW|SIMILAR|SIMPLE|SIZE|SOME|SOURCE|SPACE|'
- r'SPECIFIC|SPECIFICTYPE|SPECIFIC_NAME|SQL|SQLCODE|SQLERROR|'
- r'SQLEXCEPTION|SQLSTATE|SQLWARNINIG|STABLE|START|STATE|STATEMENT|'
- r'STATIC|STATISTICS|STDIN|STDOUT|STORAGE|STRICT|STRUCTURE|STYPE|'
- r'SUBCLASS_ORIGIN|SUBLIST|SUBSTRING|SUM|SYMMETRIC|SYSID|SYSTEM|'
- r'SYSTEM_USER|TABLE|TABLE_NAME| TEMP|TEMPLATE|TEMPORARY|TERMINATE|'
- r'THAN|THEN|TIMESTAMP|TIMEZONE_HOUR|TIMEZONE_MINUTE|TO|TOAST|'
- r'TRAILING|TRANSATION|TRANSACTIONS_COMMITTED|'
- r'TRANSACTIONS_ROLLED_BACK|TRANSATION_ACTIVE|TRANSFORM|'
- r'TRANSFORMS|TRANSLATE|TRANSLATION|TREAT|TRIGGER|TRIGGER_CATALOG|'
- r'TRIGGER_NAME|TRIGGER_SCHEMA|TRIM|TRUE|TRUNCATE|TRUSTED|TYPE|'
- r'UNCOMMITTED|UNDER|UNENCRYPTED|UNION|UNIQUE|UNKNOWN|UNLISTEN|'
- r'UNNAMED|UNNEST|UNTIL|UPDATE|UPPER|USAGE|USER|'
- r'USER_DEFINED_TYPE_CATALOG|USER_DEFINED_TYPE_NAME|'
- r'USER_DEFINED_TYPE_SCHEMA|USING|VACUUM|VALID|VALIDATOR|VALUES|'
- r'VARIABLE|VERBOSE|VERSION|VIEW|VOLATILE|WHEN|WHENEVER|WHERE|'
- r'WITH|WITHOUT|WORK|WRITE|YEAR|ZONE)\b', Keyword),
- (r'(ARRAY|BIGINT|BINARY|BIT|BLOB|BOOLEAN|CHAR|CHARACTER|DATE|'
- r'DEC|DECIMAL|FLOAT|INT|INTEGER|INTERVAL|NUMBER|NUMERIC|REAL|'
- r'SERIAL|SMALLINT|VARCHAR|VARYING|INT8|SERIAL8|TEXT)\b',
- Name.Builtin),
- (r'[+*/<>=~!@#%^&|`?^-]', Operator),
- (r'[0-9]+', Number.Integer),
- # TODO: Backslash escapes?
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
- (r'[;:()\[\],\.]', Punctuation)
- ],
- 'multiline-comments': [
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[^/\*]+', Comment.Multiline),
- (r'[/*]', Comment.Multiline)
- ]
- }
-
+ Lexer for the declarative big-data `ECL
+ <http://hpccsystems.com/community/docs/ecl-language-reference/html>`_
+ language.
-class MySqlLexer(RegexLexer):
- """
- Special lexer for MySQL.
+ *New in Pygments 1.5.*
"""
- name = 'MySQL'
- aliases = ['mysql']
- mimetypes = ['text/x-mysql']
+ name = 'ECL'
+ aliases = ['ecl']
+ filenames = ['*.ecl']
+ mimetypes = ['application/x-ecl']
+
+ flags = re.IGNORECASE | re.MULTILINE
- flags = re.IGNORECASE
tokens = {
'root': [
+ include('whitespace'),
+ include('statements'),
+ ],
+ 'whitespace': [
(r'\s+', Text),
- (r'(#|--\s+).*?\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'[0-9]+', Number.Integer),
- (r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float),
- # TODO: add backslash escapes
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Double),
- (r"`(``|[^`])*`", String.Symbol),
- (r'[+*/<>=~!@#%^&|`?^-]', Operator),
- (r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|'
- r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|'
- r'tinyblob|mediumblob|longblob|blob|float|double|double\s+'
- r'precision|real|numeric|dec|decimal|timestamp|year|char|'
- r'varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?',
- bygroups(Keyword.Type, Text, Punctuation)),
- (r'\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|'
- r'bigint|binary|blob|both|by|call|cascade|case|change|char|'
- r'character|check|collate|column|condition|constraint|continue|'
- r'convert|create|cross|current_date|current_time|'
- r'current_timestamp|current_user|cursor|database|databases|'
- r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|'
- r'declare|default|delayed|delete|desc|describe|deterministic|'
- r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|'
- r'enclosed|escaped|exists|exit|explain|fetch|float|float4|float8'
- r'|for|force|foreign|from|fulltext|grant|group|having|'
- r'high_priority|hour_microsecond|hour_minute|hour_second|if|'
- r'ignore|in|index|infile|inner|inout|insensitive|insert|int|'
- r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|'
- r'join|key|keys|kill|leading|leave|left|like|limit|lines|load|'
- r'localtime|localtimestamp|lock|long|loop|low_priority|match|'
- r'minute_microsecond|minute_second|mod|modifies|natural|'
- r'no_write_to_binlog|not|numeric|on|optimize|option|optionally|'
- r'or|order|out|outer|outfile|precision|primary|procedure|purge|'
- r'raid0|read|reads|real|references|regexp|release|rename|repeat|'
- r'replace|require|restrict|return|revoke|right|rlike|schema|'
- r'schemas|second_microsecond|select|sensitive|separator|set|'
- r'show|smallint|soname|spatial|specific|sql|sql_big_result|'
- r'sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|'
- r'sqlwarning|ssl|starting|straight_join|table|terminated|then|'
- r'to|trailing|trigger|undo|union|unique|unlock|unsigned|update|'
- r'usage|use|using|utc_date|utc_time|utc_timestamp|values|'
- r'varying|when|where|while|with|write|x509|xor|year_month|'
- r'zerofill)\b', Keyword),
- # TODO: this list is not complete
- (r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo),
- (r'(true|false|null)', Name.Constant),
- (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
+ (r'\/\/.*', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ ],
+ 'statements': [
+ include('types'),
+ include('keywords'),
+ include('functions'),
+ include('hash'),
+ (r'"', String, 'string'),
+ (r'\'', String, 'string'),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]+', Operator),
+ (r'[{}()\[\],.;]', Punctuation),
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
- (r'@[A-Za-z0-9]*[._]*[A-Za-z0-9]*', Name.Variable),
- (r'[;:()\[\],\.]', Punctuation)
],
- 'multiline-comments': [
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[^/\*]+', Comment.Multiline),
- (r'[/*]', Comment.Multiline)
- ]
+ 'hash': [
+ (r'^#.*$', Comment.Preproc),
+ ],
+ 'types': [
+ (r'(RECORD|END)[^\d]', Keyword.Declaration),
+ (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|'
+ r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|'
+ r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)',
+ bygroups(Keyword.Type, Text)),
+ ],
+ 'keywords': [
+ (r'(APPLY|ASSERT|BUILD|BUILDINDEX|EVALUATE|FAIL|KEYDIFF|KEYPATCH|'
+             r'LOADXML|NOTHOR|NOTIFY|OUTPUT|PARALLEL|SEQUENTIAL|SOAPCALL|WAIT|'
+ r'CHECKPOINT|DEPRECATED|FAILCODE|FAILMESSAGE|FAILURE|GLOBAL|'
+ r'INDEPENDENT|ONWARNING|PERSIST|PRIORITY|RECOVERY|STORED|SUCCESS|'
+ r'WAIT|WHEN)\b', Keyword.Reserved),
+ # These are classed differently, check later
+ (r'(ALL|AND|ANY|AS|ATMOST|BEFORE|BEGINC\+\+|BEST|BETWEEN|CASE|CONST|'
+ r'COUNTER|CSV|DESCEND|ENCRYPT|ENDC\+\+|ENDMACRO|EXCEPT|EXCLUSIVE|'
+ r'EXPIRE|EXPORT|EXTEND|FALSE|FEW|FIRST|FLAT|FULL|FUNCTION|GROUP|'
+ r'HEADER|HEADING|HOLE|IFBLOCK|IMPORT|IN|JOINED|KEEP|KEYED|LAST|'
+ r'LEFT|LIMIT|LOAD|LOCAL|LOCALE|LOOKUP|MACRO|MANY|MAXCOUNT|'
+ r'MAXLENGTH|MIN SKEW|MODULE|INTERFACE|NAMED|NOCASE|NOROOT|NOSCAN|'
+ r'NOSORT|NOT|OF|ONLY|OPT|OR|OUTER|OVERWRITE|PACKED|PARTITION|'
+ r'PENALTY|PHYSICALLENGTH|PIPE|QUOTE|RELATIONSHIP|REPEAT|RETURN|'
+ r'RIGHT|SCAN|SELF|SEPARATOR|SERVICE|SHARED|SKEW|SKIP|SQL|STORE|'
+ r'TERMINATOR|THOR|THRESHOLD|TOKEN|TRANSFORM|TRIM|TRUE|TYPE|'
+ r'UNICODEORDER|UNSORTED|VALIDATE|VIRTUAL|WHOLE|WILD|WITHIN|XML|'
+ r'XPATH|__COMPRESSED__)\b', Keyword.Reserved),
+ ],
+ 'functions': [
+ (r'(ABS|ACOS|ALLNODES|ASCII|ASIN|ASSTRING|ATAN|ATAN2|AVE|CASE|'
+ r'CHOOSE|CHOOSEN|CHOOSESETS|CLUSTERSIZE|COMBINE|CORRELATION|COS|'
+ r'COSH|COUNT|COVARIANCE|CRON|DATASET|DEDUP|DEFINE|DENORMALIZE|'
+ r'DISTRIBUTE|DISTRIBUTED|DISTRIBUTION|EBCDIC|ENTH|ERROR|EVALUATE|'
+ r'EVENT|EVENTEXTRA|EVENTNAME|EXISTS|EXP|FAILCODE|FAILMESSAGE|'
+ r'FETCH|FROMUNICODE|GETISVALID|GLOBAL|GRAPH|GROUP|HASH|HASH32|'
+ r'HASH64|HASHCRC|HASHMD5|HAVING|IF|INDEX|INTFORMAT|ISVALID|'
+ r'ITERATE|JOIN|KEYUNICODE|LENGTH|LIBRARY|LIMIT|LN|LOCAL|LOG|LOOP|'
+ r'MAP|MATCHED|MATCHLENGTH|MATCHPOSITION|MATCHTEXT|MATCHUNICODE|'
+ r'MAX|MERGE|MERGEJOIN|MIN|NOLOCAL|NONEMPTY|NORMALIZE|PARSE|PIPE|'
+ r'POWER|PRELOAD|PROCESS|PROJECT|PULL|RANDOM|RANGE|RANK|RANKED|'
+ r'REALFORMAT|RECORDOF|REGEXFIND|REGEXREPLACE|REGROUP|REJECTED|'
+ r'ROLLUP|ROUND|ROUNDUP|ROW|ROWDIFF|SAMPLE|SET|SIN|SINH|SIZEOF|'
+ r'SOAPCALL|SORT|SORTED|SQRT|STEPPED|STORED|SUM|TABLE|TAN|TANH|'
+ r'THISNODE|TOPN|TOUNICODE|TRANSFER|TRIM|TRUNCATE|TYPEOF|UNGROUP|'
+ r'UNICODEORDER|VARIANCE|WHICH|WORKUNIT|XMLDECODE|XMLENCODE|'
+ r'XMLTEXT|XMLUNICODE)\b', Name.Function),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\'', String, '#pop'),
+ (r'[^"\']+', String),
+ ],
}
-class SqliteConsoleLexer(Lexer):
- """
- Lexer for example sessions using sqlite3.
-
- *New in Pygments 0.11.*
- """
-
- name = 'sqlite3con'
- aliases = ['sqlite3']
- filenames = ['*.sqlite3-console']
- mimetypes = ['text/x-sqlite3-console']
-
- def get_tokens_unprocessed(self, data):
- sql = SqlLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(data):
- line = match.group()
- if line.startswith('sqlite> ') or line.startswith(' ...> '):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:8])]))
- curcode += line[8:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- if line.startswith('SQL error: '):
- yield (match.start(), Generic.Traceback, line)
- else:
- yield (match.start(), Generic.Output, line)
- if curcode:
- for item in do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode)):
- yield item
-
-
class BrainfuckLexer(RegexLexer):
"""
Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
@@ -321,191 +194,6 @@ class BefungeLexer(RegexLexer):
}
-
-class BashLexer(RegexLexer):
- """
- Lexer for (ba|k|)sh shell scripts.
-
- *New in Pygments 0.6.*
- """
-
- name = 'Bash'
- aliases = ['bash', 'sh', 'ksh']
- filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass']
- mimetypes = ['application/x-sh', 'application/x-shellscript']
-
- tokens = {
- 'root': [
- include('basic'),
- (r'\$\(\(', Keyword, 'math'),
- (r'\$\(', Keyword, 'paren'),
- (r'\${#?', Keyword, 'curly'),
- (r'`', String.Backtick, 'backticks'),
- include('data'),
- ],
- 'basic': [
- (r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
- r'select|continue|until|esac|elif)\s*\b',
- Keyword),
- (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
- r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
- r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
- r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
- r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
- r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)',
- Name.Builtin),
- (r'#.*\n', Comment),
- (r'\\[\w\W]', String.Escape),
- (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
- (r'[\[\]{}()=]', Operator),
- (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- (r'&&|\|\|', Operator),
- ],
- 'data': [
- (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r';', Text),
- (r'\s+', Text),
- (r'[^=\s\n\[\]{}()$"\'`\\<]+', Text),
- (r'\d+(?= |\Z)', Number),
- (r'\$#?(\w+|.)', Name.Variable),
- (r'<', Text),
- ],
- 'curly': [
- (r'}', Keyword, '#pop'),
- (r':-', Keyword),
- (r'[a-zA-Z0-9_]+', Name.Variable),
- (r'[^}:"\'`$]+', Punctuation),
- (r':', Punctuation),
- include('root'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- 'math': [
- (r'\)\)', Keyword, '#pop'),
- (r'[-+*/%^|&]|\*\*|\|\|', Operator),
- (r'\d+', Number),
- include('root'),
- ],
- 'backticks': [
- (r'`', String.Backtick, '#pop'),
- include('root'),
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'(ba|z|)sh')
-
-
-class BashSessionLexer(Lexer):
- """
- Lexer for simplistic shell sessions.
-
- *New in Pygments 1.1.*
- """
-
- name = 'Bash Session'
- aliases = ['console']
- filenames = ['*.sh-session']
- mimetypes = ['application/x-shell-session']
-
- def get_tokens_unprocessed(self, text):
- bashlexer = BashLexer(**self.options)
-
- pos = 0
- curcode = ''
- insertions = []
-
- for match in line_re.finditer(text):
- line = match.group()
- m = re.match(r'^((?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)?|\[\S+[@:]'
- r'[^\n]+\].+)[$#%])(.*\n?)', line)
- if m:
- # To support output lexers (say diff output), the output
- # needs to be broken by prompts whenever the output lexer
- # changes.
- if not insertions:
- pos = match.start()
-
- insertions.append((len(curcode),
- [(0, Generic.Prompt, m.group(1))]))
- curcode += m.group(2)
- elif line.startswith('>'):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:1])]))
- curcode += line[1:]
- else:
- if insertions:
- toks = bashlexer.get_tokens_unprocessed(curcode)
- for i, t, v in do_insertions(insertions, toks):
- yield pos+i, t, v
- yield match.start(), Generic.Output, line
- insertions = []
- curcode = ''
- if insertions:
- for i, t, v in do_insertions(insertions,
- bashlexer.get_tokens_unprocessed(curcode)):
- yield pos+i, t, v
-
-
-class BatchLexer(RegexLexer):
- """
- Lexer for the DOS/Windows Batch file format.
-
- *New in Pygments 0.7.*
- """
- name = 'Batchfile'
- aliases = ['bat']
- filenames = ['*.bat', '*.cmd']
- mimetypes = ['application/x-dos-batch']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- # Lines can start with @ to prevent echo
- (r'^\s*@', Punctuation),
- (r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)),
- (r'".*?"', String.Double),
- (r"'.*?'", String.Single),
- # If made more specific, make sure you still allow expansions
- # like %~$VAR:zlt
- (r'%%?[~$:\w]+%?', Name.Variable),
- (r'::.*', Comment), # Technically :: only works at BOL
- (r'(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
- (r'(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
- (r'(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
- (r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|'
- r'setlocal|shift|errorlevel|exist|defined|cmdextversion|'
- r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword),
- (r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator),
- include('basic'),
- (r'.', Text),
- ],
- 'echo': [
- # Escapes only valid within echo args?
- (r'\^\^|\^<|\^>|\^\|', String.Escape),
- (r'\n', Text, '#pop'),
- include('basic'),
- (r'[^\'"^]+', Text),
- ],
- 'basic': [
- (r'".*?"', String.Double),
- (r"'.*?'", String.Single),
- (r'`.*?`', String.Backtick),
- (r'-?\d+', Number),
- (r',', Punctuation),
- (r'=', Operator),
- (r'/\S+', Name),
- (r':\w+', Name.Label),
- (r'\w:\w+', Text),
- (r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)),
- ],
- }
-
-
class RedcodeLexer(RegexLexer):
"""
A simple Redcode lexer based on ICWS'94.
@@ -707,73 +395,6 @@ class SmalltalkLexer(RegexLexer):
}
-class TcshLexer(RegexLexer):
- """
- Lexer for tcsh scripts.
-
- *New in Pygments 0.10.*
- """
-
- name = 'Tcsh'
- aliases = ['tcsh', 'csh']
- filenames = ['*.tcsh', '*.csh']
- mimetypes = ['application/x-csh']
-
- tokens = {
- 'root': [
- include('basic'),
- (r'\$\(', Keyword, 'paren'),
- (r'\${#?', Keyword, 'curly'),
- (r'`', String.Backtick, 'backticks'),
- include('data'),
- ],
- 'basic': [
- (r'\b(if|endif|else|while|then|foreach|case|default|'
- r'continue|goto|breaksw|end|switch|endsw)\s*\b',
- Keyword),
- (r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
- r'complete|dirs|echo|echotc|eval|exec|exit|'
- r'fg|filetest|getxvers|glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
- r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
- r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|set|shift|'
- r'sched|setenv|setpath|settc|setty|setxvers|shift|source|stop|suspend|'
- r'source|suspend|telltc|time|'
- r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
- r'ver|wait|warp|watchlog|where|which)\s*\b',
- Name.Builtin),
- (r'#.*\n', Comment),
- (r'\\[\w\W]', String.Escape),
- (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
- (r'[\[\]{}()=]+', Operator),
- (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- ],
- 'data': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r'\s+', Text),
- (r'[^=\s\n\[\]{}()$"\'`\\]+', Text),
- (r'\d+(?= |\Z)', Number),
- (r'\$#?(\w+|.)', Name.Variable),
- ],
- 'curly': [
- (r'}', Keyword, '#pop'),
- (r':-', Keyword),
- (r'[a-zA-Z0-9_]+', Name.Variable),
- (r'[^}:"\'`$]+', Punctuation),
- (r':', Punctuation),
- include('root'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- 'backticks': [
- (r'`', String.Backtick, '#pop'),
- include('root'),
- ],
- }
-
-
class LogtalkLexer(RegexLexer):
"""
For `Logtalk <http://logtalk.org/>`_ source code.
@@ -2915,11 +2536,12 @@ class Cfengine3Lexer(RegexLexer):
bygroups(Keyword.Reserved,Text,Operator,Text)),
(r'"', String, 'string'),
(r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
- (r'([\w.!&|]+)(::)', bygroups(Name.Class, Punctuation)),
+ (r'([\w.!&|\(\)]+)(::)', bygroups(Name.Class, Punctuation)),
(r'(\w+)(:)', bygroups(Keyword.Declaration,Punctuation)),
(r'@[\{\(][^\)\}]+[\}\)]', Name.Variable),
(r'[(){},;]', Punctuation),
(r'=>', Operator),
+ (r'->', Operator),
(r'\d+\.\d+', Number.Float),
(r'\d+', Number.Integer),
(r'\w+', Name.Function),
@@ -2944,3 +2566,775 @@ class Cfengine3Lexer(RegexLexer):
(r'\s+', Text),
],
}
+
+
+class SnobolLexer(RegexLexer):
+ """
+ Lexer for the SNOBOL4 programming language.
+
+ Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
+ Does not require spaces around binary operators.
+
+ *New in Pygments 1.5.*
+ """
+
+ name = "Snobol"
+ aliases = ["snobol"]
+ filenames = ['*.snobol']
+ mimetypes = ['text/x-snobol']
+
+ tokens = {
+ # root state, start of line
+ # comments, continuation lines, and directives start in column 1
+ # as do labels
+ 'root': [
+ (r'\*.*\n', Comment),
+ (r'[\+\.] ', Punctuation, 'statement'),
+ (r'-.*\n', Comment),
+ (r'END\s*\n', Name.Label, 'heredoc'),
+ (r'[A-Za-z\$][\w$]*', Name.Label, 'statement'),
+ (r'\s+', Text, 'statement'),
+ ],
+ # statement state, line after continuation or label
+ 'statement': [
+ (r'\s*\n', Text, '#pop'),
+ (r'\s+', Text),
+ (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
+ r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
+ r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
+ r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
+ Name.Builtin),
+ (r'[A-Za-z][\w\.]*', Name),
+ # ASCII equivalents of original operators
+ # | for the EBCDIC equivalent, ! likewise
+ # \ for EBCDIC negation
+ (r'\*\*|[\?\$\.!%\*/#+\-@\|&\\!=]', Operator),
+ (r'"[^"]*"', String),
+ (r"'[^']*'", String),
+ # Accept SPITBOL syntax for real numbers
+ # as well as Macro SNOBOL4
+ (r'[0-9]+(?=[^\.EeDd])', Number.Integer),
+ (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
+ # Goto
+ (r':', Punctuation, 'goto'),
+ (r'[\(\)<>,;]', Punctuation),
+ ],
+ # Goto block
+ 'goto': [
+ (r'\s*\n', Text, "#pop:2"),
+ (r'\s+', Text),
+ (r'F|S', Keyword),
+ (r'(\()([A-Za-z][\w.]*)(\))',
+ bygroups(Punctuation, Name.Label, Punctuation))
+ ],
+ # everything after the END statement is basically one
+ # big heredoc.
+ 'heredoc': [
+ (r'.*\n', String.Heredoc)
+ ]
+ }
+
+
+class UrbiscriptLexer(ExtendedRegexLexer):
+ """
+ For UrbiScript source code.
+
+ *New in Pygments 1.5.*
+ """
+
+ name = 'UrbiScript'
+ aliases = ['urbiscript']
+ filenames = ['*.u']
+ mimetypes = ['application/x-urbiscript']
+
+ flags = re.DOTALL
+
+ ## TODO
+ # - handle Experimental and deprecated tags with specific tokens
+ # - handle Angles and Durations with specific tokens
+
+ def blob_callback(lexer, match, ctx):
+ text_before_blob = match.group(1)
+ blob_start = match.group(2)
+ blob_size_str = match.group(3)
+ blob_size = int(blob_size_str)
+ yield match.start(), String, text_before_blob
+ ctx.pos += len(text_before_blob)
+
+ # if blob size doesn't match blob format (example : "\B(2)(aaa)")
+ # yield blob as a string
+ if ctx.text[match.end() + blob_size] != ")":
+ result = "\\B(" + blob_size_str + ")("
+ yield match.start(), String, result
+ ctx.pos += len(result)
+ return
+
+        # if blob is well formatted, yield as Escape
+ blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
+ yield match.start(), String.Escape, blob_text
+ ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")"
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ # comments
+ (r'//.*?\n', Comment),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'(?:every|for|loop|while)(?:;|&|\||,)',Keyword),
+ (r'(?:assert|at|break|case|catch|closure|compl|continue|'
+ r'default|else|enum|every|external|finally|for|freezeif|if|new|'
+ r'onleave|return|stopif|switch|this|throw|timeout|try|'
+ r'waituntil|whenever|while)\b', Keyword),
+ (r'(?:asm|auto|bool|char|const_cast|delete|double|dynamic_cast|'
+ r'explicit|export|extern|float|friend|goto|inline|int|'
+ r'long|mutable|namespace|register|reinterpret_cast|short|'
+ r'signed|sizeof|static_cast|struct|template|typedef|typeid|'
+ r'typename|union|unsigned|using|virtual|volatile|'
+ r'wchar_t)\b', Keyword.Reserved),
+            # deprecated keywords, use a meaningful token when available
+ (r'(?:emit|foreach|internal|loopn|static)\b', Keyword),
+            # ignored keywords, use a meaningful token when available
+ (r'(?:private|protected|public)\b', Keyword),
+ (r'(?:var|do|const|function|class)\b', Keyword.Declaration),
+ (r'(?:true|false|nil|void)\b', Keyword.Constant),
+ (r'(?:Barrier|Binary|Boolean|CallMessage|Channel|Code|'
+ r'Comparable|Container|Control|Date|Dictionary|Directory|'
+ r'Duration|Enumeration|Event|Exception|Executable|File|Finalizable|'
+ r'Float|FormatInfo|Formatter|Global|Group|Hash|InputStream|'
+ r'IoService|Job|Kernel|Lazy|List|Loadable|Lobby|Location|Logger|Math|'
+ r'Mutex|nil|Object|Orderable|OutputStream|Pair|Path|Pattern|Position|'
+ r'Primitive|Process|Profile|PseudoLazy|PubSub|RangeIterable|Regexp|'
+ r'Semaphore|Server|Singleton|Socket|StackFrame|Stream|String|System|'
+ r'Tag|Timeout|Traceable|TrajectoryGenerator|Triplet|Tuple'
+ r'|UObject|UValue|UVar)\b', Name.Builtin),
+ (r'(?:this)\b', Name.Builtin.Pseudo),
+ # don't match single | and &
+ (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
+ (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
+ Operator.Word),
+ (r'[{}\[\]()]+', Punctuation),
+ (r'(?:;|\||,|&|\?|!)+', Punctuation),
+ (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ # Float, Integer, Angle and Duration
+ (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
+ r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
+ # handle binary blob in strings
+ (r'"', String.Double, "string.double"),
+ (r"'", String.Single, "string.single"),
+ ],
+ 'string.double': [
+ (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
+ (r'(\\\\|\\"|[^"])*?"', String.Double, '#pop'),
+ ],
+ 'string.single': [
+ (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
+ (r"(\\\\|\\'|[^'])*?'", String.Single, '#pop'),
+ ],
+ # from http://pygments.org/docs/lexerdevelopment/#changing-states
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ]
+ }
+
+
+class OpenEdgeLexer(RegexLexer):
+ """
+ Lexer for `OpenEdge ABL (formerly Progress)
+ <http://web.progress.com/en/openedge/abl.html>`_ source code.
+
+ *New in Pygments 1.5.*
+ """
+ name = 'OpenEdge ABL'
+ aliases = ['openedge', 'abl', 'progress']
+ filenames = ['*.p', '*.cls']
+ mimetypes = ['text/x-openedge', 'application/x-openedge']
+
+ types = (r'(?i)(^|(?<=[^0-9a-z_\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|'
+ r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
+ r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|'
+ r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|'
+ r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^0-9a-z_\-]))')
+
+ keywords = (r'(?i)(^|(?<=[^0-9a-z_\-]))(ABSOLUTE|ABS|ABSO|ABSOL|ABSOLU|ABSOLUT|ACCELERATOR|'
+ r'ACCUM|ACCUMULATE|ACCUM|ACCUMU|ACCUMUL|ACCUMULA|ACCUMULAT|'
+ r'ACTIVE-FORM|ACTIVE-WINDOW|ADD|ADD-BUFFER|'
+ r'ADD-CALC-COLUMN|ADD-COLUMNS-FROM|ADD-EVENTS-PROCEDURE|'
+ r'ADD-FIELDS-FROM|ADD-FIRST|ADD-INDEX-FIELD|ADD-LAST|'
+ r'ADD-LIKE-COLUMN|ADD-LIKE-FIELD|ADD-LIKE-INDEX|'
+ r'ADD-NEW-FIELD|ADD-NEW-INDEX|ADD-SCHEMA-LOCATION|ADD-SUPER-PROCEDURE|'
+ r'ADM-DATA|ADVISE|ALERT-BOX|ALIAS|ALL|ALLOW-COLUMN-SEARCHING|'
+ r'ALLOW-REPLICATION|ALTER|ALWAYS-ON-TOP|AMBIGUOUS|AMBIG|AMBIGU|AMBIGUO|AMBIGUOU|'
+ r'ANALYZE|ANALYZ|AND|ANSI-ONLY|ANY|ANYWHERE|APPEND|APPL-ALERT-BOXES|'
+ r'APPL-ALERT|APPL-ALERT-|APPL-ALERT-B|APPL-ALERT-BO|APPL-ALERT-BOX|APPL-ALERT-BOXE|'
+ r'APPL-CONTEXT-ID|APPLICATION|APPLY|APPSERVER-INFO|APPSERVER-PASSWORD|'
+ r'APPSERVER-USERID|ARRAY-MESSAGE|AS|ASC|ASCENDING|ASCE|ASCEN|'
+ r'ASCEND|ASCENDI|ASCENDIN|ASK-OVERWRITE|ASSEMBLY|ASSIGN|'
+ r'ASYNCHRONOUS|ASYNC-REQUEST-COUNT|ASYNC-REQUEST-HANDLE|AT|'
+ r'ATTACHED-PAIRLIST|ATTR-SPACE|ATTR|ATTRI|ATTRIB|ATTRIBU|ATTRIBUT|'
+ r'AUDIT-CONTROL|AUDIT-ENABLED|AUDIT-EVENT-CONTEXT|AUDIT-POLICY|'
+ r'AUTHENTICATION-FAILED|AUTHORIZATION|AUTO-COMPLETION|AUTO-COMP|'
+ r'AUTO-COMPL|AUTO-COMPLE|AUTO-COMPLET|AUTO-COMPLETI|AUTO-COMPLETIO|'
+ r'AUTO-ENDKEY|AUTO-END-KEY|AUTO-GO|AUTO-INDENT|AUTO-IND|'
+ r'AUTO-INDE|AUTO-INDEN|AUTOMATIC|AUTO-RESIZE|AUTO-RETURN|AUTO-RET|'
+ r'AUTO-RETU|AUTO-RETUR|AUTO-SYNCHRONIZE|AUTO-ZAP|AUTO-Z|AUTO-ZA|'
+ r'AVAILABLE|AVAIL|AVAILA|AVAILAB|AVAILABL|AVAILABLE-FORMATS|'
+ r'AVERAGE|AVE|AVER|AVERA|AVERAG|AVG|BACKGROUND|BACK|BACKG|'
+ r'BACKGR|BACKGRO|BACKGROU|BACKGROUN|BACKWARDS|BACKWARD|'
+ r'BASE64-DECODE|BASE64-ENCODE|BASE-ADE|BASE-KEY|BATCH-MODE|BATCH|'
+ r'BATCH-|BATCH-M|BATCH-MO|BATCH-MOD|BATCH-SIZE|BEFORE-HIDE|BEFORE-H|'
+ r'BEFORE-HI|BEFORE-HID|BEGIN-EVENT-GROUP|BEGINS|BELL|BETWEEN|'
+ r'BGCOLOR|BGC|BGCO|BGCOL|BGCOLO|BIG-ENDIAN|BINARY|BIND|BIND-WHERE|'
+ r'BLANK|BLOCK-ITERATION-DISPLAY|BORDER-BOTTOM-CHARS|BORDER-B|'
+ r'BORDER-BO|BORDER-BOT|BORDER-BOTT|BORDER-BOTTO|BORDER-BOTTOM-PIXELS|'
+ r'BORDER-BOTTOM-P|BORDER-BOTTOM-PI|BORDER-BOTTOM-PIX|'
+ r'BORDER-BOTTOM-PIXE|BORDER-BOTTOM-PIXEL|BORDER-LEFT-CHARS|BORDER-L|'
+ r'BORDER-LE|BORDER-LEF|BORDER-LEFT|BORDER-LEFT-|BORDER-LEFT-C|'
+ r'BORDER-LEFT-CH|BORDER-LEFT-CHA|BORDER-LEFT-CHAR|BORDER-LEFT-PIXELS|'
+ r'BORDER-LEFT-P|BORDER-LEFT-PI|BORDER-LEFT-PIX|BORDER-LEFT-PIXE|'
+ r'BORDER-LEFT-PIXEL|BORDER-RIGHT-CHARS|BORDER-R|BORDER-RI|BORDER-RIG|'
+ r'BORDER-RIGH|BORDER-RIGHT|BORDER-RIGHT-|BORDER-RIGHT-C|BORDER-RIGHT-CH|'
+ r'BORDER-RIGHT-CHA|BORDER-RIGHT-CHAR|BORDER-RIGHT-PIXELS|BORDER-RIGHT-P|'
+ r'BORDER-RIGHT-PI|BORDER-RIGHT-PIX|BORDER-RIGHT-PIXE|BORDER-RIGHT-PIXEL|'
+ r'BORDER-TOP-CHARS|BORDER-T|BORDER-TO|BORDER-TOP|BORDER-TOP-|BORDER-TOP-C|'
+ r'BORDER-TOP-CH|BORDER-TOP-CHA|BORDER-TOP-CHAR|BORDER-TOP-PIXELS|'
+ r'BORDER-TOP-P|BORDER-TOP-PI|BORDER-TOP-PIX|BORDER-TOP-PIXE|BORDER-TOP-PIXEL|'
+ r'BOX|BOX-SELECTABLE|BOX-SELECT|BOX-SELECTA|BOX-SELECTAB|BOX-SELECTABL|'
+ r'BREAK|BROWSE|BUFFER|BUFFER-CHARS|BUFFER-COMPARE|BUFFER-COPY|BUFFER-CREATE|'
+ r'BUFFER-DELETE|BUFFER-FIELD|BUFFER-HANDLE|BUFFER-LINES|BUFFER-NAME|'
+ r'BUFFER-RELEASE|BUFFER-VALUE|BUTTON|BUTTONS|BUTTON|BY|BY-POINTER|'
+ r'BY-VARIANT-POINTER|CACHE|CACHE-SIZE|CALL|CALL-NAME|CALL-TYPE|CANCEL-BREAK|'
+ r'CANCEL-BUTTON|CAN-CREATE|CAN-DELETE|CAN-DO|CAN-FIND|CAN-QUERY|CAN-READ|'
+ r'CAN-SET|CAN-WRITE|CAPS|CAREFUL-PAINT|CASE|CASE-SENSITIVE|CASE-SEN|'
+ r'CASE-SENS|CASE-SENSI|CASE-SENSIT|CASE-SENSITI|CASE-SENSITIV|'
+ r'CAST|CATCH|CDECL|CENTERED|CENTER|CENTERE|CHAINED|CHARACTER_LENGTH|'
+ r'CHARSET|CHECK|CHECKED|CHOOSE|CHR|CLASS|CLASS-TYPE|CLEAR|'
+ r'CLEAR-APPL-CONTEXT|CLEAR-LOG|CLEAR-SELECTION|CLEAR-SELECT|'
+ r'CLEAR-SELECTI|CLEAR-SELECTIO|CLEAR-SORT-ARROWS|CLEAR-SORT-ARROW|'
+ r'CLIENT-CONNECTION-ID|CLIENT-PRINCIPAL|CLIENT-TTY|CLIENT-TYPE|'
+ r'CLIENT-WORKSTATION|CLIPBOARD|CLOSE|CLOSE-LOG|CODE|CODEBASE-LOCATOR|'
+ r'CODEPAGE|CODEPAGE-CONVERT|COLLATE|COL-OF|COLON|COLON-ALIGNED|'
+ r'COLON-ALIGN|COLON-ALIGNE|COLOR|COLOR-TABLE|COLUMN|COL|COLU|COLUM|'
+ r'COLUMN-BGCOLOR|COLUMN-DCOLOR|COLUMN-FGCOLOR|COLUMN-FONT|COLUMN-LABEL|'
+ r'COLUMN-LAB|COLUMN-LABE|COLUMN-MOVABLE|COLUMN-OF|COLUMN-PFCOLOR|'
+ r'COLUMN-READ-ONLY|COLUMN-RESIZABLE|COLUMNS|COLUMN-SCROLLING|'
+ r'COMBO-BOX|COMMAND|COMPARES|COMPILE|COMPILER|COMPLETE|COM-SELF|'
+ r'CONFIG-NAME|CONNECT|CONNECTED|CONSTRUCTOR|CONTAINS|CONTENTS|CONTEXT|'
+ r'CONTEXT-HELP|CONTEXT-HELP-FILE|CONTEXT-HELP-ID|CONTEXT-POPUP|'
+ r'CONTROL|CONTROL-BOX|CONTROL-FRAME|CONVERT|CONVERT-3D-COLORS|'
+ r'CONVERT-TO-OFFSET|CONVERT-TO-OFFS|CONVERT-TO-OFFSE|COPY-DATASET|'
+ r'COPY-LOB|COPY-SAX-ATTRIBUTES|COPY-TEMP-TABLE|COUNT|COUNT-OF|'
+ r'CPCASE|CPCOLL|CPINTERNAL|CPLOG|CPPRINT|CPRCODEIN|CPRCODEOUT|'
+ r'CPSTREAM|CPTERM|CRC-VALUE|CREATE|CREATE-LIKE|CREATE-LIKE-SEQUENTIAL|'
+ r'CREATE-NODE-NAMESPACE|CREATE-RESULT-LIST-ENTRY|CREATE-TEST-FILE|'
+ r'CURRENT|CURRENT_DATE|CURRENT_DATE|CURRENT-CHANGED|CURRENT-COLUMN|'
+ r'CURRENT-ENVIRONMENT|CURRENT-ENV|CURRENT-ENVI|CURRENT-ENVIR|'
+ r'CURRENT-ENVIRO|CURRENT-ENVIRON|CURRENT-ENVIRONM|CURRENT-ENVIRONME|'
+ r'CURRENT-ENVIRONMEN|CURRENT-ITERATION|CURRENT-LANGUAGE|CURRENT-LANG|'
+ r'CURRENT-LANGU|CURRENT-LANGUA|CURRENT-LANGUAG|CURRENT-QUERY|'
+ r'CURRENT-RESULT-ROW|CURRENT-ROW-MODIFIED|CURRENT-VALUE|CURRENT-WINDOW|'
+ r'CURSOR|CURS|CURSO|CURSOR-CHAR|CURSOR-LINE|CURSOR-OFFSET|DATABASE|'
+ r'DATA-BIND|DATA-ENTRY-RETURN|DATA-ENTRY-RET|DATA-ENTRY-RETU|'
+ r'DATA-ENTRY-RETUR|DATA-RELATION|DATA-REL|DATA-RELA|DATA-RELAT|'
+ r'DATA-RELATI|DATA-RELATIO|DATASERVERS|DATASET|DATASET-HANDLE|DATA-SOURCE|'
+ r'DATA-SOURCE-COMPLETE-MAP|DATA-SOURCE-MODIFIED|DATA-SOURCE-ROWID|'
+ r'DATA-TYPE|DATA-T|DATA-TY|DATA-TYP|DATE-FORMAT|DATE-F|DATE-FO|'
+ r'DATE-FOR|DATE-FORM|DATE-FORMA|DAY|DBCODEPAGE|DBCOLLATION|DBNAME|'
+ r'DBPARAM|DB-REFERENCES|DBRESTRICTIONS|DBREST|DBRESTR|DBRESTRI|'
+ r'DBRESTRIC|DBRESTRICT|DBRESTRICTI|DBRESTRICTIO|DBRESTRICTION|'
+ r'DBTASKID|DBTYPE|DBVERSION|DBVERS|DBVERSI|DBVERSIO|DCOLOR|'
+ r'DDE|DDE-ERROR|DDE-ID|DDE-I|DDE-ITEM|DDE-NAME|DDE-TOPIC|DEBLANK|'
+ r'DEBUG|DEBU|DEBUG-ALERT|DEBUGGER|DEBUG-LIST|DECIMALS|DECLARE|'
+ r'DECLARE-NAMESPACE|DECRYPT|DEFAULT|DEFAULT-BUFFER-HANDLE|'
+ r'DEFAULT-BUTTON|DEFAULT-B|DEFAULT-BU|DEFAULT-BUT|DEFAULT-BUTT|DEFAULT-BUTTO|'
+ r'DEFAULT-COMMIT|DEFAULT-EXTENSION|DEFAULT-EX|DEFAULT-EXT|DEFAULT-EXTE|'
+ r'DEFAULT-EXTEN|DEFAULT-EXTENS|DEFAULT-EXTENSI|DEFAULT-EXTENSIO|'
+ r'DEFAULT-NOXLATE|DEFAULT-NOXL|DEFAULT-NOXLA|DEFAULT-NOXLAT|'
+ r'DEFAULT-VALUE|DEFAULT-WINDOW|DEFINED|'
+ r'DEFINE-USER-EVENT-MANAGER|DELETE|DEL|DELE|DELET|DELETE-CHARACTER|'
+ r'DELETE-CHAR|DELETE-CHARA|DELETE-CHARAC|DELETE-CHARACT|DELETE-CHARACTE|'
+ r'DELETE-CURRENT-ROW|DELETE-LINE|DELETE-RESULT-LIST-ENTRY|DELETE-SELECTED-ROW|'
+ r'DELETE-SELECTED-ROWS|DELIMITER|DESC|DESCENDING|DESC|DESCE|DESCEN|'
+ r'DESCEND|DESCENDI|DESCENDIN|DESELECT-FOCUSED-ROW|DESELECTION|'
+ r'DESELECT-ROWS|DESELECT-SELECTED-ROW|DESTRUCTOR|DIALOG-BOX|'
+ r'DICTIONARY|DICT|DICTI|DICTIO|DICTION|DICTIONA|DICTIONAR|'
+ r'DIR|DISABLE|DISABLE-AUTO-ZAP|DISABLED|DISABLE-DUMP-TRIGGERS|'
+ r'DISABLE-LOAD-TRIGGERS|DISCONNECT|DISCON|DISCONN|DISCONNE|DISCONNEC|'
+ r'DISP|DISPLAY|DISP|DISPL|DISPLA|DISPLAY-MESSAGE|DISPLAY-TYPE|'
+ r'DISPLAY-T|DISPLAY-TY|DISPLAY-TYP|DISTINCT|DO|DOMAIN-DESCRIPTION|'
+ r'DOMAIN-NAME|DOMAIN-TYPE|DOS|DOUBLE|DOWN|DRAG-ENABLED|DROP|DROP-DOWN|'
+ r'DROP-DOWN-LIST|DROP-FILE-NOTIFY|DROP-TARGET|DUMP|DYNAMIC|'
+ r'DYNAMIC-FUNCTION|EACH|ECHO|EDGE-CHARS|EDGE|EDGE-|EDGE-C|'
+ r'EDGE-CH|EDGE-CHA|EDGE-CHAR|EDGE-PIXELS|EDGE-P|EDGE-PI|EDGE-PIX|'
+ r'EDGE-PIXE|EDGE-PIXEL|EDIT-CAN-PASTE|EDIT-CAN-UNDO|EDIT-CLEAR|'
+ r'EDIT-COPY|EDIT-CUT|EDITING|EDITOR|EDIT-PASTE|EDIT-UNDO|ELSE|'
+ r'EMPTY|EMPTY-TEMP-TABLE|ENABLE|ENABLED-FIELDS|ENCODE|ENCRYPT|'
+ r'ENCRYPT-AUDIT-MAC-KEY|ENCRYPTION-SALT|END|END-DOCUMENT|'
+ r'END-ELEMENT|END-EVENT-GROUP|END-FILE-DROP|ENDKEY|END-KEY|'
+ r'END-MOVE|END-RESIZE|END-ROW-RESIZE|END-USER-PROMPT|ENTERED|'
+ r'ENTRY|EQ|ERROR|ERROR-COLUMN|ERROR-COL|ERROR-COLU|ERROR-COLUM|'
+ r'ERROR-ROW|ERROR-STACK-TRACE|ERROR-STATUS|ERROR-STAT|ERROR-STATU|'
+ r'ESCAPE|ETIME|EVENT-GROUP-ID|EVENT-PROCEDURE|EVENT-PROCEDURE-CONTEXT|'
+ r'EVENTS|EVENT|EVENT-TYPE|EVENT-T|EVENT-TY|EVENT-TYP|EXCEPT|'
+ r'EXCLUSIVE-ID|EXCLUSIVE-LOCK|EXCLUSIVE|EXCLUSIVE-|EXCLUSIVE-L|'
+ r'EXCLUSIVE-LO|EXCLUSIVE-LOC|EXCLUSIVE-WEB-USER|EXECUTE|EXISTS|'
+ r'EXP|EXPAND|EXPANDABLE|EXPLICIT|EXPORT|EXPORT-PRINCIPAL|EXTENDED|'
+ r'EXTENT|EXTERNAL|FALSE|FETCH|FETCH-SELECTED-ROW|FGCOLOR|FGC|FGCO|'
+ r'FGCOL|FGCOLO|FIELD|FIELDS|FIELD|FILE|FILE-CREATE-DATE|'
+ r'FILE-CREATE-TIME|FILE-INFORMATION|FILE-INFO|FILE-INFOR|FILE-INFORM|'
+ r'FILE-INFORMA|FILE-INFORMAT|FILE-INFORMATI|FILE-INFORMATIO|FILE-MOD-DATE|'
+ r'FILE-MOD-TIME|FILENAME|FILE-NAME|FILE-OFFSET|FILE-OFF|FILE-OFFS|FILE-OFFSE|'
+ r'FILE-SIZE|FILE-TYPE|FILL|FILLED|FILL-IN|FILTERS|FINAL|FINALLY|FIND|'
+ r'FIND-BY-ROWID|FIND-CASE-SENSITIVE|FIND-CURRENT|FINDER|FIND-FIRST|'
+ r'FIND-GLOBAL|FIND-LAST|FIND-NEXT-OCCURRENCE|FIND-PREV-OCCURRENCE|'
+ r'FIND-SELECT|FIND-UNIQUE|FIND-WRAP-AROUND|FIRST|FIRST-ASYNCH-REQUEST|'
+ r'FIRST-CHILD|FIRST-COLUMN|FIRST-FORM|FIRST-OBJECT|FIRST-OF|'
+ r'FIRST-PROCEDURE|FIRST-PROC|FIRST-PROCE|FIRST-PROCED|FIRST-PROCEDU|FIRST-PROCEDUR|'
+ r'FIRST-SERVER|FIRST-TAB-ITEM|FIRST-TAB-I|FIRST-TAB-IT|FIRST-TAB-ITE|'
+ r'FIT-LAST-COLUMN|FIXED-ONLY|FLAT-BUTTON|FLOAT|FOCUS|FOCUSED-ROW|'
+ r'FOCUSED-ROW-SELECTED|FONT|FONT-TABLE|FOR|FORCE-FILE|'
+ r'FOREGROUND|FORE|FOREG|FOREGR|FOREGRO|FOREGROU|FOREGROUN|'
+ r'FORM|FORMAT|FORM|FORMA|FORMATTED|FORMATTE|FORM-LONG-INPUT|'
+ r'FORWARD|FORWARDS|FORWARD|FRAGMENT|FRAGMEN|FRAME|FRAM|'
+ r'FRAME-COL|FRAME-DB|FRAME-DOWN|FRAME-FIELD|FRAME-FILE|'
+ r'FRAME-INDEX|FRAME-INDE|FRAME-LINE|FRAME-NAME|FRAME-ROW|'
+ r'FRAME-SPACING|FRAME-SPA|FRAME-SPAC|FRAME-SPACI|FRAME-SPACIN|'
+ r'FRAME-VALUE|FRAME-VAL|FRAME-VALU|FRAME-X|FRAME-Y|FREQUENCY|FROM|'
+ r'FROM-CHARS|FROM-C|FROM-CH|FROM-CHA|FROM-CHAR|'
+ r'FROM-CURRENT|FROM-CUR|FROM-CURR|FROM-CURRE|FROM-CURREN|'
+ r'FROM-PIXELS|FROM-P|FROM-PI|FROM-PIX|FROM-PIXE|FROM-PIXEL|'
+ r'FULL-HEIGHT-CHARS|FULL-HEIGHT|FULL-HEIGHT-|FULL-HEIGHT-C|FULL-HEIGHT-CH|FULL-HEIGHT-CHA|FULL-HEIGHT-CHAR|'
+ r'FULL-HEIGHT-PIXELS|FULL-HEIGHT-P|FULL-HEIGHT-PI|FULL-HEIGHT-PIX|FULL-HEIGHT-PIXE|FULL-HEIGHT-PIXEL|'
+ r'FULL-PATHNAME|FULL-PATHN|FULL-PATHNA|FULL-PATHNAM|'
+ r'FULL-WIDTH-CHARS|FULL-WIDTH|FULL-WIDTH-|FULL-WIDTH-C|FULL-WIDTH-CH|FULL-WIDTH-CHA|FULL-WIDTH-CHAR|'
+ r'FULL-WIDTH-PIXELS|FULL-WIDTH-P|FULL-WIDTH-PI|FULL-WIDTH-PIX|FULL-WIDTH-PIXE|FULL-WIDTH-PIXEL|'
+ r'FUNCTION|FUNCTION-CALL-TYPE|GATEWAYS|GATEWAY|GE|GENERATE-MD5|'
+ r'GENERATE-PBE-KEY|GENERATE-PBE-SALT|GENERATE-RANDOM-KEY|GENERATE-UUID|GET|'
+ r'GET-ATTR-CALL-TYPE|GET-ATTRIBUTE-NODE|GET-BINARY-DATA|'
+ r'GET-BLUE-VALUE|GET-BLUE|GET-BLUE-|GET-BLUE-V|GET-BLUE-VA|GET-BLUE-VAL|GET-BLUE-VALU|'
+ r'GET-BROWSE-COLUMN|GET-BUFFER-HANDLE|GETBYTE|GET-BYTE|GET-CALLBACK-PROC-CONTEXT|'
+ r'GET-CALLBACK-PROC-NAME|GET-CGI-LIST|GET-CGI-LONG-VALUE|GET-CGI-VALUE|'
+ r'GET-CODEPAGES|GET-COLLATIONS|GET-CONFIG-VALUE|GET-CURRENT|GET-DOUBLE|'
+ r'GET-DROPPED-FILE|GET-DYNAMIC|GET-ERROR-COLUMN|GET-ERROR-ROW|GET-FILE|'
+ r'GET-FILE-NAME|GET-FILE-OFFSET|GET-FILE-OFFSE|GET-FIRST|GET-FLOAT|'
+ r'GET-GREEN-VALUE|GET-GREEN|GET-GREEN-|GET-GREEN-V|GET-GREEN-VA|GET-GREEN-VAL|GET-GREEN-VALU|'
+ r'GET-INDEX-BY-NAMESPACE-NAME|GET-INDEX-BY-QNAME|GET-INT64|GET-ITERATION|'
+ r'GET-KEY-VALUE|GET-KEY-VAL|GET-KEY-VALU|GET-LAST|GET-LOCALNAME-BY-INDEX|'
+ r'GET-LONG|GET-MESSAGE|GET-NEXT|GET-NUMBER|GET-POINTER-VALUE|'
+ r'GET-PREV|GET-PRINTERS|GET-PROPERTY|GET-QNAME-BY-INDEX|'
+ r'GET-RED-VALUE|GET-RED|GET-RED-|GET-RED-V|GET-RED-VA|GET-RED-VAL|GET-RED-VALU|'
+ r'GET-REPOSITIONED-ROW|GET-RGB-VALUE|'
+ r'GET-SELECTED-WIDGET|GET-SELECTED|GET-SELECTED-|GET-SELECTED-W|GET-SELECTED-WI|GET-SELECTED-WID|GET-SELECTED-WIDG|GET-SELECTED-WIDGE|'
+ r'GET-SHORT|GET-SIGNATURE|GET-SIZE|GET-STRING|GET-TAB-ITEM|'
+ r'GET-TEXT-HEIGHT-CHARS|GET-TEXT-HEIGHT|GET-TEXT-HEIGHT-|GET-TEXT-HEIGHT-C|GET-TEXT-HEIGHT-CH|GET-TEXT-HEIGHT-CHA|GET-TEXT-HEIGHT-CHAR|'
+ r'GET-TEXT-HEIGHT-PIXELS|GET-TEXT-HEIGHT-P|GET-TEXT-HEIGHT-PI|GET-TEXT-HEIGHT-PIX|GET-TEXT-HEIGHT-PIXE|GET-TEXT-HEIGHT-PIXEL|'
+ r'GET-TEXT-WIDTH-CHARS|GET-TEXT-WIDTH|GET-TEXT-WIDTH-|GET-TEXT-WIDTH-C|GET-TEXT-WIDTH-CH|GET-TEXT-WIDTH-CHA|GET-TEXT-WIDTH-CHAR|'
+ r'GET-TEXT-WIDTH-PIXELS|GET-TEXT-WIDTH-P|GET-TEXT-WIDTH-PI|GET-TEXT-WIDTH-PIX|GET-TEXT-WIDTH-PIXE|GET-TEXT-WIDTH-PIXEL|'
+ r'GET-TYPE-BY-INDEX|GET-TYPE-BY-NAMESPACE-NAME|GET-TYPE-BY-QNAME|GET-UNSIGNED-LONG|'
+ r'GET-UNSIGNED-SHORT|GET-URI-BY-INDEX|GET-VALUE-BY-INDEX|GET-VALUE-BY-NAMESPACE-NAME|'
+ r'GET-VALUE-BY-QNAME|GET-WAIT-STATE|GLOBAL|GO-ON|'
+ r'GO-PENDING|GO-PEND|GO-PENDI|GO-PENDIN|GRANT|'
+ r'GRAPHIC-EDGE|GRAPHIC-E|GRAPHIC-ED|GRAPHIC-EDG|'
+ r'GRID-FACTOR-HORIZONTAL|GRID-FACTOR-H|GRID-FACTOR-HO|GRID-FACTOR-HOR|GRID-FACTOR-HORI|GRID-FACTOR-HORIZ|GRID-FACTOR-HORIZO|GRID-FACTOR-HORIZON|GRID-FACTOR-HORIZONT|GRID-FACTOR-HORIZONTA|'
+ r'GRID-FACTOR-VERTICAL|GRID-FACTOR-V|GRID-FACTOR-VE|GRID-FACTOR-VER|GRID-FACTOR-VERT|GRID-FACTOR-VERT|GRID-FACTOR-VERTI|GRID-FACTOR-VERTIC|GRID-FACTOR-VERTICA|'
+ r'GRID-SNAP|'
+ r'GRID-UNIT-HEIGHT-CHARS|GRID-UNIT-HEIGHT|GRID-UNIT-HEIGHT-|GRID-UNIT-HEIGHT-C|GRID-UNIT-HEIGHT-CH|GRID-UNIT-HEIGHT-CHA|'
+ r'GRID-UNIT-HEIGHT-PIXELS|GRID-UNIT-HEIGHT-P|GRID-UNIT-HEIGHT-PI|GRID-UNIT-HEIGHT-PIX|GRID-UNIT-HEIGHT-PIXE|GRID-UNIT-HEIGHT-PIXEL|'
+ r'GRID-UNIT-WIDTH-CHARS|GRID-UNIT-WIDTH|GRID-UNIT-WIDTH-|GRID-UNIT-WIDTH-C|GRID-UNIT-WIDTH-CH|GRID-UNIT-WIDTH-CHA|GRID-UNIT-WIDTH-CHAR|'
+ r'GRID-UNIT-WIDTH-PIXELS|GRID-UNIT-WIDTH-P|GRID-UNIT-WIDTH-PI|GRID-UNIT-WIDTH-PIX|GRID-UNIT-WIDTH-PIXE|GRID-UNIT-WIDTH-PIXEL|'
+ r'GRID-VISIBLE|GROUP|GT|GUID|HANDLER|HAS-RECORDS|HAVING|HEADER|'
+ r'HEIGHT-CHARS|HEIGHT|HEIGHT-|HEIGHT-C|HEIGHT-CH|HEIGHT-CHA|HEIGHT-CHAR|'
+ r'HEIGHT-PIXELS|HEIGHT-P|HEIGHT-PI|HEIGHT-PIX|HEIGHT-PIXE|HEIGHT-PIXEL|'
+ r'HELP|HEX-DECODE|HEX-ENCODE|HIDDEN|HIDE|'
+ r'HORIZONTAL|HORI|HORIZ|HORIZO|HORIZON|HORIZONT|HORIZONTA|'
+ r'HOST-BYTE-ORDER|HTML-CHARSET|HTML-END-OF-LINE|HTML-END-OF-PAGE|'
+ r'HTML-FRAME-BEGIN|HTML-FRAME-END|HTML-HEADER-BEGIN|HTML-HEADER-END|'
+ r'HTML-TITLE-BEGIN|HTML-TITLE-END|HWND|ICON|IF|'
+ r'IMAGE|IMAGE-DOWN|IMAGE-INSENSITIVE|IMAGE-SIZE|'
+ r'IMAGE-SIZE-CHARS|IMAGE-SIZE-C|IMAGE-SIZE-CH|IMAGE-SIZE-CHA|IMAGE-SIZE-CHAR|'
+ r'IMAGE-SIZE-PIXELS|IMAGE-SIZE-P|IMAGE-SIZE-PI|IMAGE-SIZE-PIX|IMAGE-SIZE-PIXE|IMAGE-SIZE-PIXEL|'
+ r'IMAGE-UP|IMMEDIATE-DISPLAY|IMPLEMENTS|IMPORT|IMPORT-PRINCIPAL|'
+ r'IN|INCREMENT-EXCLUSIVE-ID|INDEX|INDEXED-REPOSITION|INDEX-HINT|'
+ r'INDEX-INFORMATION|INDICATOR|'
+ r'INFORMATION|INFO|INFOR|INFORM|INFORMA|INFORMAT|INFORMATI|INFORMATIO|'
+ r'IN-HANDLE|'
+ r'INHERIT-BGCOLOR|INHERIT-BGC|INHERIT-BGCO|INHERIT-BGCOL|INHERIT-BGCOLO|'
+ r'INHERIT-FGCOLOR|INHERIT-FGC|INHERIT-FGCO|INHERIT-FGCOL|INHERIT-FGCOLO|'
+ r'INHERITS|INITIAL|INIT|INITI|INITIA|INITIAL-DIR|INITIAL-FILTER|'
+ r'INITIALIZE-DOCUMENT-TYPE|INITIATE|INNER-CHARS|INNER-LINES|INPUT|'
+ r'INPUT-OUTPUT|INPUT-O|INPUT-OU|INPUT-OUT|INPUT-OUTP|INPUT-OUTPU|'
+ r'INPUT-VALUE|INSERT|INSERT-ATTRIBUTE|'
+ r'INSERT-BACKTAB|INSERT-B|INSERT-BA|INSERT-BAC|INSERT-BACK|INSERT-BACKT|INSERT-BACKTA|'
+ r'INSERT-FILE|INSERT-ROW|INSERT-STRING|INSERT-TAB|INSERT-T|INSERT-TA|'
+ r'INTERFACE|INTERNAL-ENTRIES|INTO|INVOKE|IS|'
+ r'IS-ATTR-SPACE|IS-ATTR|IS-ATTR-|IS-ATTR-S|IS-ATTR-SP|IS-ATTR-SPA|IS-ATTR-SPAC|'
+ r'IS-CLASS|IS-CLAS|IS-LEAD-BYTE|IS-ATTR|IS-OPEN|IS-PARAMETER-SET|IS-ROW-SELECTED|'
+ r'IS-SELECTED|ITEM|ITEMS-PER-ROW|JOIN|JOIN-BY-SQLDB|KBLABEL|KEEP-CONNECTION-OPEN|'
+ r'KEEP-FRAME-Z-ORDER|KEEP-FRAME-Z|KEEP-FRAME-Z-|KEEP-FRAME-Z-O|KEEP-FRAME-Z-OR|KEEP-FRAME-Z-ORD|KEEP-FRAME-Z-ORDE|'
+ r'KEEP-MESSAGES|KEEP-SECURITY-CACHE|KEEP-TAB-ORDER|KEY|KEYCODE|KEY-CODE|'
+ r'KEYFUNCTION|KEYFUNC|KEYFUNCT|KEYFUNCTI|KEYFUNCTIO|'
+ r'KEY-FUNCTION|KEY-FUNC|KEY-FUNCT|KEY-FUNCTI|KEY-FUNCTIO|'
+ r'KEYLABEL|KEY-LABEL|KEYS|KEYWORD|KEYWORD-ALL|LABEL|'
+ r'LABEL-BGCOLOR|LABEL-BGC|LABEL-BGCO|LABEL-BGCOL|LABEL-BGCOLO|'
+ r'LABEL-DCOLOR|LABEL-DC|LABEL-DCO|LABEL-DCOL|LABEL-DCOLO|'
+ r'LABEL-FGCOLOR|LABEL-FGC|LABEL-FGCO|LABEL-FGCOL|LABEL-FGCOLO|'
+ r'LABEL-FONT|'
+ r'LABEL-PFCOLOR|LABEL-PFC|LABEL-PFCO|LABEL-PFCOL|LABEL-PFCOLO|'
+ r'LABELS|LANDSCAPE|LANGUAGES|LANGUAGE|LARGE|LARGE-TO-SMALL|LAST|'
+ r'LAST-ASYNCH-REQUEST|LAST-BATCH|LAST-CHILD|LAST-EVENT|LAST-EVEN|LAST-FORM|'
+ r'LASTKEY|LAST-KEY|LAST-OBJECT|LAST-OF|'
+ r'LAST-PROCEDURE|LAST-PROCE|LAST-PROCED|LAST-PROCEDU|LAST-PROCEDUR|'
+ r'LAST-SERVER|LAST-TAB-ITEM|LAST-TAB-I|LAST-TAB-IT|LAST-TAB-ITE|'
+ r'LC|LDBNAME|LE|LEAVE|LEFT-ALIGNED|LEFT-ALIGN|LEFT-ALIGNE|LEFT-TRIM|'
+ r'LENGTH|LIBRARY|LIKE|LIKE-SEQUENTIAL|LINE|LINE-COUNTER|LINE-COUNT|LINE-COUNTE|'
+ r'LIST-EVENTS|LISTING|LISTI|LISTIN|LIST-ITEM-PAIRS|LIST-ITEMS|'
+ r'LIST-PROPERTY-NAMES|LIST-QUERY-ATTRS|LIST-SET-ATTRS|LIST-WIDGETS|'
+ r'LITERAL-QUESTION|LITTLE-ENDIAN|LOAD|LOAD-DOMAINS|LOAD-ICON|'
+ r'LOAD-IMAGE|LOAD-IMAGE-DOWN|LOAD-IMAGE-INSENSITIVE|LOAD-IMAGE-UP|'
+ r'LOAD-MOUSE-POINTER|LOAD-MOUSE-P|LOAD-MOUSE-PO|LOAD-MOUSE-POI|LOAD-MOUSE-POIN|LOAD-MOUSE-POINT|LOAD-MOUSE-POINTE|'
+ r'LOAD-PICTURE|LOAD-SMALL-ICON|LOCAL-NAME|LOCATOR-COLUMN-NUMBER|'
+ r'LOCATOR-LINE-NUMBER|LOCATOR-PUBLIC-ID|LOCATOR-SYSTEM-ID|LOCATOR-TYPE|'
+ r'LOCKED|LOCK-REGISTRATION|LOG|LOG-AUDIT-EVENT|LOGIN-EXPIRATION-TIMESTAMP|'
+ r'LOGIN-HOST|LOGIN-STATE|LOG-MANAGER|LOGOUT|LOOKAHEAD|LOOKUP|LT|'
+ r'MACHINE-CLASS|MANDATORY|MANUAL-HIGHLIGHT|MAP|MARGIN-EXTRA|'
+ r'MARGIN-HEIGHT-CHARS|MARGIN-HEIGHT|MARGIN-HEIGHT-|MARGIN-HEIGHT-C|MARGIN-HEIGHT-CH|MARGIN-HEIGHT-CHA|MARGIN-HEIGHT-CHAR|'
+ r'MARGIN-HEIGHT-PIXELS|MARGIN-HEIGHT-P|MARGIN-HEIGHT-PI|MARGIN-HEIGHT-PIX|MARGIN-HEIGHT-PIXE|MARGIN-HEIGHT-PIXEL|'
+ r'MARGIN-WIDTH-CHARS|MARGIN-WIDTH|MARGIN-WIDTH-|MARGIN-WIDTH-C|MARGIN-WIDTH-CH|MARGIN-WIDTH-CHA|MARGIN-WIDTH-CHAR|'
+ r'MARGIN-WIDTH-PIXELS|MARGIN-WIDTH-P|MARGIN-WIDTH-PI|MARGIN-WIDTH-PIX|MARGIN-WIDTH-PIXE|MARGIN-WIDTH-PIXEL|'
+ r'MARK-NEW|MARK-ROW-STATE|MATCHES|MAX|MAX-BUTTON|'
+ r'MAX-CHARS|MAX-DATA-GUESS|MAX-HEIGHT|'
+ r'MAX-HEIGHT-CHARS|MAX-HEIGHT-C|MAX-HEIGHT-CH|MAX-HEIGHT-CHA|MAX-HEIGHT-CHAR|'
+ r'MAX-HEIGHT-PIXELS|MAX-HEIGHT-P|MAX-HEIGHT-PI|MAX-HEIGHT-PIX|MAX-HEIGHT-PIXE|MAX-HEIGHT-PIXEL|'
+ r'MAXIMIZE|MAXIMUM|MAX|MAXI|MAXIM|MAXIMU|MAXIMUM-LEVEL|MAX-ROWS|'
+ r'MAX-SIZE|MAX-VALUE|MAX-VAL|MAX-VALU|MAX-WIDTH|'
+ r'MAX-WIDTH-CHARS|MAX-WIDTH|MAX-WIDTH-|MAX-WIDTH-C|MAX-WIDTH-CH|MAX-WIDTH-CHA|MAX-WIDTH-CHAR|'
+ r'MAX-WIDTH-PIXELS|MAX-WIDTH-P|MAX-WIDTH-PI|MAX-WIDTH-PIX|MAX-WIDTH-PIXE|MAX-WIDTH-PIXEL|'
+ r'MD5-DIGEST|MEMBER|MEMPTR-TO-NODE-VALUE|MENU|MENUBAR|MENU-BAR|MENU-ITEM|'
+ r'MENU-KEY|MENU-K|MENU-KE|MENU-MOUSE|MENU-M|MENU-MO|MENU-MOU|MENU-MOUS|'
+ r'MERGE-BY-FIELD|MESSAGE|MESSAGE-AREA|MESSAGE-AREA-FONT|MESSAGE-LINES|'
+ r'METHOD|MIN|MIN-BUTTON|'
+ r'MIN-COLUMN-WIDTH-CHARS|MIN-COLUMN-WIDTH-C|MIN-COLUMN-WIDTH-CH|MIN-COLUMN-WIDTH-CHA|MIN-COLUMN-WIDTH-CHAR|'
+ r'MIN-COLUMN-WIDTH-PIXELS|MIN-COLUMN-WIDTH-P|MIN-COLUMN-WIDTH-PI|MIN-COLUMN-WIDTH-PIX|MIN-COLUMN-WIDTH-PIXE|MIN-COLUMN-WIDTH-PIXEL|'
+ r'MIN-HEIGHT-CHARS|MIN-HEIGHT|MIN-HEIGHT-|MIN-HEIGHT-C|MIN-HEIGHT-CH|MIN-HEIGHT-CHA|MIN-HEIGHT-CHAR|'
+ r'MIN-HEIGHT-PIXELS|MIN-HEIGHT-P|MIN-HEIGHT-PI|MIN-HEIGHT-PIX|MIN-HEIGHT-PIXE|MIN-HEIGHT-PIXEL|'
+ r'MINIMUM|MIN|MINI|MINIM|MINIMU|MIN-SIZE|'
+ r'MIN-VALUE|MIN-VAL|MIN-VALU|'
+ r'MIN-WIDTH-CHARS|MIN-WIDTH|MIN-WIDTH-|MIN-WIDTH-C|MIN-WIDTH-CH|MIN-WIDTH-CHA|MIN-WIDTH-CHAR|'
+ r'MIN-WIDTH-PIXELS|MIN-WIDTH-P|MIN-WIDTH-PI|MIN-WIDTH-PIX|MIN-WIDTH-PIXE|MIN-WIDTH-PIXEL|'
+ r'MODIFIED|MODULO|MOD|MODU|MODUL|MONTH|MOUSE|'
+ r'MOUSE-POINTER|MOUSE-P|MOUSE-PO|MOUSE-POI|MOUSE-POIN|MOUSE-POINT|MOUSE-POINTE|'
+ r'MOVABLE|'
+ r'MOVE-AFTER-TAB-ITEM|MOVE-AFTER|MOVE-AFTER-|MOVE-AFTER-T|MOVE-AFTER-TA|MOVE-AFTER-TAB|MOVE-AFTER-TAB-|MOVE-AFTER-TAB-I|MOVE-AFTER-TAB-IT|MOVE-AFTER-TAB-ITE|'
+ r'MOVE-BEFORE-TAB-ITEM|MOVE-BEFOR|MOVE-BEFORE|MOVE-BEFORE-|MOVE-BEFORE-T|MOVE-BEFORE-TA|MOVE-BEFORE-TAB|MOVE-BEFORE-TAB-|MOVE-BEFORE-TAB-I|MOVE-BEFORE-TAB-IT|MOVE-BEFORE-TAB-ITE|'
+ r'MOVE-COLUMN|MOVE-COL|MOVE-COLU|MOVE-COLUM|'
+ r'MOVE-TO-BOTTOM|MOVE-TO-B|MOVE-TO-BO|MOVE-TO-BOT|MOVE-TO-BOTT|MOVE-TO-BOTTO|'
+ r'MOVE-TO-EOF|MOVE-TO-TOP|MOVE-TO-T|MOVE-TO-TO|MPE|MULTI-COMPILE|MULTIPLE|'
+ r'MULTIPLE-KEY|MULTITASKING-INTERVAL|MUST-EXIST|NAME|NAMESPACE-PREFIX|'
+ r'NAMESPACE-URI|NATIVE|NE|NEEDS-APPSERVER-PROMPT|NEEDS-PROMPT|NEW|'
+ r'NEW-INSTANCE|NEW-ROW|NEXT|NEXT-COLUMN|NEXT-PROMPT|NEXT-ROWID|'
+ r'NEXT-SIBLING|NEXT-TAB-ITEM|NEXT-TAB-I|NEXT-TAB-IT|NEXT-TAB-ITE|'
+ r'NEXT-VALUE|NO|NO-APPLY|NO-ARRAY-MESSAGE|NO-ASSIGN|'
+ r'NO-ATTR-LIST|NO-ATTR|NO-ATTR-|NO-ATTR-L|NO-ATTR-LI|NO-ATTR-LIS|'
+ r'NO-ATTR-SPACE|NO-ATTR|NO-ATTR-|NO-ATTR-S|NO-ATTR-SP|NO-ATTR-SPA|NO-ATTR-SPAC|'
+ r'NO-AUTO-VALIDATE|NO-BIND-WHERE|NO-BOX|NO-CONSOLE|NO-CONVERT|'
+ r'NO-CONVERT-3D-COLORS|NO-CURRENT-VALUE|NO-DEBUG|NODE-VALUE-TO-MEMPTR|'
+ r'NO-DRAG|NO-ECHO|NO-EMPTY-SPACE|NO-ERROR|NO-FILL|NO-F|NO-FI|'
+ r'NO-FIL|NO-FOCUS|NO-HELP|NO-HIDE|NO-INDEX-HINT|'
+ r'NO-INHERIT-BGCOLOR|NO-INHERIT-BGC|NO-INHERIT-BGCO|NO-INHERIT-BGCOL|NO-INHERIT-BGCOLO|'
+ r'NO-INHERIT-FGCOLOR|NO-INHERIT-FGC|NO-INHERIT-FGCO|NO-INHERIT-FGCOL|NO-INHERIT-FGCOLO|'
+ r'NO-JOIN-BY-SQLDB|NO-LABELS|NO-LABE|NO-LOBS|NO-LOCK|'
+ r'NO-LOOKAHEAD|NO-MAP|'
+ r'NO-MESSAGE|NO-MES|NO-MESS|NO-MESSA|NO-MESSAG|'
+ r'NONAMESPACE-SCHEMA-LOCATION|NONE|NO-PAUSE|'
+ r'NO-PREFETCH|NO-PREFE|NO-PREFET|NO-PREFETC|NORMALIZE|'
+ r'NO-ROW-MARKERS|NO-SCROLLBAR-VERTICAL|NO-SEPARATE-CONNECTION|'
+ r'NO-SEPARATORS|NOT|NO-TAB-STOP|NOT-ACTIVE|'
+ r'NO-UNDERLINE|NO-UND|NO-UNDE|NO-UNDER|NO-UNDERL|NO-UNDERLI|NO-UNDERLIN|'
+ r'NO-UNDO|'
+ r'NO-VALIDATE|NO-VAL|NO-VALI|NO-VALID|NO-VALIDA|NO-VALIDAT|NOW|'
+ r'NO-WAIT|NO-WORD-WRAP|NULL|NUM-ALIASES|NUM-ALI|NUM-ALIA|NUM-ALIAS|NUM-ALIASE|'
+ r'NUM-BUFFERS|NUM-BUTTONS|NUM-BUT|NUM-BUTT|NUM-BUTTO|NUM-BUTTON|'
+ r'NUM-COLUMNS|NUM-COL|NUM-COLU|NUM-COLUM|NUM-COLUMN|NUM-COPIES|'
+ r'NUM-DBS|NUM-DROPPED-FILES|NUM-ENTRIES|NUMERIC|'
+ r'NUMERIC-FORMAT|NUMERIC-F|NUMERIC-FO|NUMERIC-FOR|NUMERIC-FORM|NUMERIC-FORMA|'
+ r'NUM-FIELDS|NUM-FORMATS|NUM-ITEMS|NUM-ITERATIONS|NUM-LINES|'
+ r'NUM-LOCKED-COLUMNS|NUM-LOCKED-COL|NUM-LOCKED-COLU|NUM-LOCKED-COLUM|NUM-LOCKED-COLUMN|'
+ r'NUM-MESSAGES|NUM-PARAMETERS|NUM-REFERENCES|NUM-REPLACED|NUM-RESULTS|NUM-SELECTED-ROWS|'
+ r'NUM-SELECTED-WIDGETS|NUM-SELECTED|NUM-SELECTED-|NUM-SELECTED-W|NUM-SELECTED-WI|NUM-SELECTED-WID|NUM-SELECTED-WIDG|NUM-SELECTED-WIDGE|NUM-SELECTED-WIDGET|'
+ r'NUM-TABS|NUM-TO-RETAIN|NUM-VISIBLE-COLUMNS|OCTET-LENGTH|OF|'
+ r'OFF|OK|OK-CANCEL|OLD|ON|'
+ r'ON-FRAME-BORDER|ON-FRAME|ON-FRAME-|ON-FRAME-B|ON-FRAME-BO|ON-FRAME-BOR|ON-FRAME-BORD|ON-FRAME-BORDE|'
+ r'OPEN|OPSYS|OPTION|OR|ORDERED-JOIN|ORDINAL|'
+ r'OS-APPEND|OS-COMMAND|OS-COPY|OS-CREATE-DIR|OS-DELETE|OS-DIR|'
+ r'OS-DRIVES|OS-DRIVE|OS-ERROR|OS-GETENV|OS-RENAME|OTHERWISE|'
+ r'OUTPUT|OVERLAY|OVERRIDE|OWNER|PAGE|'
+ r'PAGE-BOTTOM|PAGE-BOT|PAGE-BOTT|PAGE-BOTTO|PAGED|'
+ r'PAGE-NUMBER|PAGE-NUM|PAGE-NUMB|PAGE-NUMBE|PAGE-SIZE|'
+ r'PAGE-TOP|PAGE-WIDTH|PAGE-WID|PAGE-WIDT|'
+ r'PARAMETER|PARAM|PARAME|PARAMET|PARAMETE|'
+ r'PARENT|PARSE-STATUS|PARTIAL-KEY|PASCAL|PASSWORD-FIELD|PATHNAME|PAUSE|'
+ r'PBE-HASH-ALGORITHM|PBE-HASH-ALG|PBE-HASH-ALGO|PBE-HASH-ALGOR|PBE-HASH-ALGORI|PBE-HASH-ALGORIT|PBE-HASH-ALGORITH|'
+ r'PBE-KEY-ROUNDS|PDBNAME|PERSISTENT|PERSIST|PERSISTE|PERSISTEN|'
+ r'PERSISTENT-CACHE-DISABLED|PFCOLOR|PFC|PFCO|PFCOL|PFCOLO|PIXELS|'
+ r'PIXELS-PER-COLUMN|PIXELS-PER-COL|PIXELS-PER-COLU|PIXELS-PER-COLUM|'
+ r'PIXELS-PER-ROW|POPUP-MENU|POPUP-M|POPUP-ME|POPUP-MEN|'
+ r'POPUP-ONLY|POPUP-O|POPUP-ON|POPUP-ONL|PORTRAIT|POSITION|'
+ r'PRECISION|PREFER-DATASET|PREPARED|PREPARE-STRING|'
+ r'PREPROCESS|PREPROC|PREPROCE|PREPROCES|'
+ r'PRESELECT|PRESEL|PRESELE|PRESELEC|PREV|PREV-COLUMN|'
+ r'PREV-SIBLING|'
+ r'PREV-TAB-ITEM|PREV-TAB-I|PREV-TAB-IT|PREV-TAB-ITE|'
+ r'PRIMARY|PRINTER|PRINTER-CONTROL-HANDLE|PRINTER-HDC|'
+ r'PRINTER-NAME|PRINTER-PORT|PRINTER-SETUP|PRIVATE|'
+ r'PRIVATE-DATA|PRIVATE-D|PRIVATE-DA|PRIVATE-DAT|'
+ r'PRIVILEGES|'
+ r'PROCEDURE|PROCE|PROCED|PROCEDU|PROCEDUR|'
+ r'PROCEDURE-CALL-TYPE|'
+ r'PROCESS|'
+ r'PROC-HANDLE|PROC-HA|PROC-HAN|PROC-HAND|PROC-HANDL|'
+ r'PROC-STATUS|PROC-ST|PROC-STA|PROC-STAT|PROC-STATU|'
+ r'proc-text|proc-text-buffer|'
+ r'PROFILER|PROGRAM-NAME|PROGRESS|'
+ r'PROGRESS-SOURCE|PROGRESS-S|PROGRESS-SO|PROGRESS-SOU|PROGRESS-SOUR|PROGRESS-SOURC|'
+ r'PROMPT|PROMPT-FOR|PROMPT-F|PROMPT-FO|PROMSGS|PROPATH|'
+ r'PROPERTY|PROTECTED|PROVERSION|PROVERS|PROVERSI|PROVERSIO|'
+ r'PROXY|PROXY-PASSWORD|PROXY-USERID|PUBLIC|PUBLIC-ID|'
+ r'PUBLISH|PUBLISHED-EVENTS|PUT|PUTBYTE|PUT-BYTE|PUT-DOUBLE|'
+ r'PUT-FLOAT|PUT-INT64|PUT-KEY-VALUE|PUT-KEY-VAL|PUT-KEY-VALU|PUT-LONG|'
+ r'PUT-SHORT|PUT-STRING|PUT-UNSIGNED-LONG|QUERY|QUERY-CLOSE|QUERY-OFF-END|'
+ r'QUERY-OPEN|QUERY-PREPARE|QUERY-TUNING|QUESTION|QUIT|QUOTER|'
+ r'RADIO-BUTTONS|RADIO-SET|RANDOM|RAW-TRANSFER|'
+ r'RCODE-INFORMATION|RCODE-INFO|RCODE-INFOR|RCODE-INFORM|RCODE-INFORMA|RCODE-INFORMAT|RCODE-INFORMATI|RCODE-INFORMATIO|'
+ r'READ-AVAILABLE|READ-EXACT-NUM|READ-FILE|READKEY|READ-ONLY|READ-XML|READ-XMLSCHEMA|'
+ r'REAL|RECORD-LENGTH|RECTANGLE|RECT|RECTA|RECTAN|RECTANG|RECTANGL|'
+ r'RECURSIVE|REFERENCE-ONLY|REFRESH|REFRESHABLE|REFRESH-AUDIT-POLICY|'
+ r'REGISTER-DOMAIN|RELEASE|REMOTE|REMOVE-EVENTS-PROCEDURE|REMOVE-SUPER-PROCEDURE|'
+ r'REPEAT|REPLACE|REPLACE-SELECTION-TEXT|REPOSITION|REPOSITION-BACKWARD|'
+ r'REPOSITION-FORWARD|REPOSITION-MODE|REPOSITION-TO-ROW|REPOSITION-TO-ROWID|'
+ r'REQUEST|RESET|RESIZABLE|RESIZA|RESIZAB|RESIZABL|RESIZE|RESTART-ROW|'
+ r'RESTART-ROWID|RETAIN|RETAIN-SHAPE|RETRY|RETRY-CANCEL|RETURN|'
+ r'RETURN-INSERTED|RETURN-INS|RETURN-INSE|RETURN-INSER|RETURN-INSERT|RETURN-INSERTE|'
+ r'RETURNS|RETURN-TO-START-DIR|RETURN-TO-START-DI|'
+ r'RETURN-VALUE|RETURN-VAL|RETURN-VALU|'
+ r'RETURN-VALUE-DATA-TYPE|REVERSE-FROM|REVERT|'
+ r'REVOKE|RGB-VALUE|RIGHT-ALIGNED|RIGHT-ALIGN|RIGHT-ALIGNE|'
+ r'RIGHT-TRIM|R-INDEX|ROLES|ROUND|ROUTINE-LEVEL|ROW|'
+ r'ROW-HEIGHT-CHARS|HEIGHT|ROW-HEIGHT-PIXELS|HEIGHT-P|ROW-MARKERS|'
+ r'ROW-OF|ROW-RESIZABLE|RULE|RUN|RUN-PROCEDURE|SAVE|SAVE-AS|'
+ r'SAVE-FILE|SAX-COMPLETE|SAX-COMPLE|SAX-COMPLET|SAX-PARSE|SAX-PARSE-FIRST|'
+ r'SAX-PARSE-NEXT|SAX-PARSER-ERROR|SAX-RUNNING|SAX-UNINITIALIZED|'
+ r'SAX-WRITE-BEGIN|SAX-WRITE-COMPLETE|SAX-WRITE-CONTENT|SAX-WRITE-ELEMENT|'
+ r'SAX-WRITE-ERROR|SAX-WRITE-IDLE|SAX-WRITER|SAX-WRITE-TAG|SCHEMA|'
+ r'SCHEMA-LOCATION|SCHEMA-MARSHAL|SCHEMA-PATH|SCREEN|SCREEN-IO|'
+ r'SCREEN-LINES|SCREEN-VALUE|SCREEN-VAL|SCREEN-VALU|SCROLL|SCROLLABLE|'
+ r'SCROLLBAR-HORIZONTAL|SCROLLBAR-H|SCROLLBAR-HO|SCROLLBAR-HOR|SCROLLBAR-HORI|SCROLLBAR-HORIZ|SCROLLBAR-HORIZO|SCROLLBAR-HORIZON|SCROLLBAR-HORIZONT|SCROLLBAR-HORIZONTA|'
+ r'SCROLL-BARS|'
+ r'SCROLLBAR-VERTICAL|SCROLLBAR-V|SCROLLBAR-VE|SCROLLBAR-VER|SCROLLBAR-VERT|SCROLLBAR-VERTI|SCROLLBAR-VERTIC|SCROLLBAR-VERTICA|'
+ r'SCROLL-DELTA|'
+ r'SCROLLED-ROW-POSITION|SCROLLED-ROW-POS|SCROLLED-ROW-POSI|SCROLLED-ROW-POSIT|SCROLLED-ROW-POSITI|SCROLLED-ROW-POSITIO|'
+ r'SCROLLING|SCROLL-OFFSET|SCROLL-TO-CURRENT-ROW|SCROLL-TO-ITEM|SCROLL-TO-I|SCROLL-TO-IT|SCROLL-TO-ITE|'
+ r'SCROLL-TO-SELECTED-ROW|SDBNAME|SEAL|SEAL-TIMESTAMP|SEARCH|SEARCH-SELF|SEARCH-TARGET|'
+ r'SECTION|SECURITY-POLICY|SEEK|SELECT|SELECTABLE|SELECT-ALL|'
+ r'SELECTED|SELECT-FOCUSED-ROW|SELECTION|SELECTION-END|SELECTION-LIST|'
+ r'SELECTION-START|SELECTION-TEXT|SELECT-NEXT-ROW|SELECT-PREV-ROW|'
+ r'SELECT-ROW|SELF|SEND|send-sql-statement|send-sql|SENSITIVE|'
+ r'SEPARATE-CONNECTION|SEPARATOR-FGCOLOR|SEPARATORS|SERVER|'
+ r'SERVER-CONNECTION-BOUND|SERVER-CONNECTION-BOUND-REQUEST|'
+ r'SERVER-CONNECTION-CONTEXT|SERVER-CONNECTION-ID|SERVER-OPERATING-MODE|'
+ r'SESSION|SESSION-ID|SET|SET-APPL-CONTEXT|SET-ATTR-CALL-TYPE|SET-ATTRIBUTE-NODE|'
+ r'SET-BLUE-VALUE|SET-BLUE|SET-BLUE-|SET-BLUE-V|SET-BLUE-VA|SET-BLUE-VAL|SET-BLUE-VALU|'
+ r'SET-BREAK|SET-BUFFERS|SET-CALLBACK|SET-CLIENT|SET-COMMIT|SET-CONTENTS|'
+ r'SET-CURRENT-VALUE|SET-DB-CLIENT|SET-DYNAMIC|SET-EVENT-MANAGER-OPTION|'
+ r'SET-GREEN-VALUE|SET-GREEN|SET-GREEN-|SET-GREEN-V|SET-GREEN-VA|SET-GREEN-VAL|SET-GREEN-VALU|'
+ r'SET-INPUT-SOURCE|SET-OPTION|SET-OUTPUT-DESTINATION|SET-PARAMETER|SET-POINTER-VALUE|'
+ r'SET-PROPERTY|SET-RED-VALUE|SET-RED|SET-RED-|SET-RED-V|SET-RED-VA|SET-RED-VAL|SET-RED-VALU|'
+ r'SET-REPOSITIONED-ROW|SET-RGB-VALUE|SET-ROLLBACK|SET-SELECTION|SET-SIZE|'
+ r'SET-SORT-ARROW|SETUSERID|SETUSER|SETUSERI|SET-WAIT-STATE|SHA1-DIGEST|SHARED|'
+ r'SHARE-LOCK|SHARE|SHARE-|SHARE-L|SHARE-LO|SHARE-LOC|SHOW-IN-TASKBAR|SHOW-STATS|SHOW-STAT|'
+ r'SIDE-LABEL-HANDLE|SIDE-LABEL-H|SIDE-LABEL-HA|SIDE-LABEL-HAN|SIDE-LABEL-HAND|SIDE-LABEL-HANDL|'
+ r'SIDE-LABELS|SIDE-LAB|SIDE-LABE|SIDE-LABEL|'
+ r'SILENT|SIMPLE|SINGLE|SIZE|'
+ r'SIZE-CHARS|SIZE-C|SIZE-CH|SIZE-CHA|SIZE-CHAR|'
+ r'SIZE-PIXELS|SIZE-P|SIZE-PI|SIZE-PIX|SIZE-PIXE|SIZE-PIXEL|SKIP|'
+ r'SKIP-DELETED-RECORD|SLIDER|SMALL-ICON|SMALLINT|SMALL-TITLE|SOME|SORT|'
+ r'SORT-ASCENDING|SORT-NUMBER|SOURCE|SOURCE-PROCEDURE|SPACE|SQL|SQRT|'
+ r'SSL-SERVER-NAME|STANDALONE|START|START-DOCUMENT|START-ELEMENT|START-MOVE|'
+ r'START-RESIZE|START-ROW-RESIZE|STATE-DETAIL|STATIC|STATUS|STATUS-AREA|STATUS-AREA-FONT|'
+ r'STDCALL|STOP|STOP-PARSING|STOPPED|STOPPE|'
+ r'STORED-PROCEDURE|STORED-PROC|STORED-PROCE|STORED-PROCED|STORED-PROCEDU|STORED-PROCEDUR|'
+ r'STREAM|STREAM-HANDLE|STREAM-IO|STRETCH-TO-FIT|STRICT|STRING|STRING-VALUE|STRING-XREF|'
+ r'SUB-AVERAGE|SUB-AVE|SUB-AVER|SUB-AVERA|SUB-AVERAG|'
+ r'SUB-COUNT|SUB-MAXIMUM|SUB-MAX|SUB-MAXI|SUB-MAXIM|SUB-MAXIMU|SUB-MENU|'
+ r'SUB-MINIMUM|SUB-MIN|SUBSCRIBE|SUBSTITUTE|SUBST|SUBSTI|SUBSTIT|SUBSTITU|SUBSTITUT|'
+ r'SUBSTRING|SUBSTR|SUBSTRI|SUBSTRIN|SUB-TOTAL|SUBTYPE|SUM|SUPER|SUPER-PROCEDURES|'
+ r'SUPPRESS-NAMESPACE-PROCESSING|'
+ r'SUPPRESS-WARNINGS|SUPPRESS-W|SUPPRESS-WA|SUPPRESS-WAR|SUPPRESS-WARN|SUPPRESS-WARNI|SUPPRESS-WARNIN|SUPPRESS-WARNING|'
+ r'SYMMETRIC-ENCRYPTION-ALGORITHM|SYMMETRIC-ENCRYPTION-IV|SYMMETRIC-ENCRYPTION-KEY|SYMMETRIC-SUPPORT|'
+ r'SYSTEM-ALERT-BOXES|SYSTEM-ALERT|SYSTEM-ALERT-|SYSTEM-ALERT-B|SYSTEM-ALERT-BO|SYSTEM-ALERT-BOX|SYSTEM-ALERT-BOXE|'
+ r'SYSTEM-DIALOG|SYSTEM-HELP|SYSTEM-ID|TABLE|TABLE-HANDLE|TABLE-NUMBER|TAB-POSITION|'
+ r'TAB-STOP|TARGET|TARGET-PROCEDURE|'
+ r'TEMP-DIRECTORY|TEMP-DIR|TEMP-DIRE|TEMP-DIREC|TEMP-DIRECT|TEMP-DIRECTO|TEMP-DIRECTOR|'
+ r'TEMP-TABLE|TEMP-TABLE-PREPARE|TERM|TERMINAL|TERM|TERMI|TERMIN|TERMINA|'
+ r'TERMINATE|TEXT|TEXT-CURSOR|TEXT-SEG-GROW|TEXT-SELECTED|THEN|'
+ r'THIS-OBJECT|THIS-PROCEDURE|THREE-D|THROW|THROUGH|THRU|TIC-MARKS|TIME|'
+ r'TIME-SOURCE|TITLE|'
+ r'TITLE-BGCOLOR|TITLE-BGC|TITLE-BGCO|TITLE-BGCOL|TITLE-BGCOLO|'
+ r'TITLE-DCOLOR|TITLE-DC|TITLE-DCO|TITLE-DCOL|TITLE-DCOLO|'
+ r'TITLE-FGCOLOR|TITLE-FGC|TITLE-FGCO|TITLE-FGCOL|TITLE-FGCOLO|'
+ r'TITLE-FONT|TITLE-FO|TITLE-FON|'
+ r'TO|TODAY|TOGGLE-BOX|TOOLTIP|TOOLTIPS|TOPIC|TOP-NAV-QUERY|TOP-ONLY|'
+ r'TO-ROWID|TOTAL|TRAILING|TRANS|TRANSACTION|TRANSACTION-MODE|'
+ r'TRANS-INIT-PROCEDURE|TRANSPARENT|TRIGGER|TRIGGERS|TRIM|'
+ r'TRUE|TRUNCATE|TRUNC|TRUNCA|TRUNCAT|TYPE|TYPE-OF|'
+ r'UNBOX|UNBUFFERED|UNBUFF|UNBUFFE|UNBUFFER|UNBUFFERE|'
+ r'UNDERLINE|UNDERL|UNDERLI|UNDERLIN|UNDO|'
+ r'UNFORMATTED|UNFORM|UNFORMA|UNFORMAT|UNFORMATT|UNFORMATTE|UNION|'
+ r'UNIQUE|UNIQUE-ID|UNIQUE-MATCH|UNIX|UNLESS-HIDDEN|UNLOAD|'
+ r'UNSIGNED-LONG|UNSUBSCRIBE|UP|UPDATE|UPDATE-ATTRIBUTE|'
+ r'URL|URL-DECODE|URL-ENCODE|URL-PASSWORD|URL-USERID|USE|'
+ r'USE-DICT-EXPS|USE-FILENAME|USE-INDEX|USER|USE-REVVIDEO|'
+ r'USERID|USER-ID|USE-TEXT|USE-UNDERLINE|USE-WIDGET-POOL|'
+ r'USING|V6DISPLAY|V6FRAME|VALIDATE|VALIDATE-EXPRESSION|'
+ r'VALIDATE-MESSAGE|VALIDATE-SEAL|VALIDATION-ENABLED|VALID-EVENT|'
+ r'VALID-HANDLE|VALID-OBJECT|VALUE|VALUE-CHANGED|VALUES|'
+ r'VARIABLE|VAR|VARI|VARIA|VARIAB|VARIABL|VERBOSE|'
+ r'VERSION|VERTICAL|VERT|VERTI|VERTIC|VERTICA|'
+ r'VIEW|VIEW-AS|VIEW-FIRST-COLUMN-ON-REOPEN|'
+ r'VIRTUAL-HEIGHT-CHARS|VIRTUAL-HEIGHT|VIRTUAL-HEIGHT-|VIRTUAL-HEIGHT-C|VIRTUAL-HEIGHT-CH|VIRTUAL-HEIGHT-CHA|VIRTUAL-HEIGHT-CHAR|'
+ r'VIRTUAL-HEIGHT-PIXELS|VIRTUAL-HEIGHT-P|VIRTUAL-HEIGHT-PI|VIRTUAL-HEIGHT-PIX|VIRTUAL-HEIGHT-PIXE|VIRTUAL-HEIGHT-PIXEL|'
+ r'VIRTUAL-WIDTH-CHARS|VIRTUAL-WIDTH|VIRTUAL-WIDTH-|VIRTUAL-WIDTH-C|VIRTUAL-WIDTH-CH|VIRTUAL-WIDTH-CHA|VIRTUAL-WIDTH-CHAR|'
+ r'VIRTUAL-WIDTH-PIXELS|VIRTUAL-WIDTH-P|VIRTUAL-WIDTH-PI|VIRTUAL-WIDTH-PIX|VIRTUAL-WIDTH-PIXE|VIRTUAL-WIDTH-PIXEL|'
+ r'VISIBLE|VOID|WAIT|WAIT-FOR|WARNING|WEB-CONTEXT|WEEKDAY|WHEN|'
+ r'WHERE|WHILE|WIDGET|'
+ r'WIDGET-ENTER|WIDGET-E|WIDGET-EN|WIDGET-ENT|WIDGET-ENTE|'
+ r'WIDGET-ID|'
+ r'WIDGET-LEAVE|WIDGET-L|WIDGET-LE|WIDGET-LEA|WIDGET-LEAV|'
+ r'WIDGET-POOL|WIDTH|'
+ r'WIDTH-CHARS|WIDTH|WIDTH-|WIDTH-C|WIDTH-CH|WIDTH-CHA|WIDTH-CHAR|'
+ r'WIDTH-PIXELS|WIDTH-P|WIDTH-PI|WIDTH-PIX|WIDTH-PIXE|WIDTH-PIXEL|'
+ r'WINDOW|'
+ r'WINDOW-MAXIMIZED|WINDOW-MAXIM|WINDOW-MAXIMI|WINDOW-MAXIMIZ|WINDOW-MAXIMIZE|'
+ r'WINDOW-MINIMIZED|WINDOW-MINIM|WINDOW-MINIMI|WINDOW-MINIMIZ|WINDOW-MINIMIZE|'
+ r'WINDOW-NAME|WINDOW-NORMAL|WINDOW-STATE|WINDOW-STA|WINDOW-STAT|'
+ r'WINDOW-SYSTEM|WITH|WORD-INDEX|WORD-WRAP|WORK-AREA-HEIGHT-PIXELS|'
+ r'WORK-AREA-WIDTH-PIXELS|WORK-AREA-X|WORK-AREA-Y|WORKFILE|'
+ r'WORK-TABLE|WORK-TAB|WORK-TABL|WRITE|WRITE-CDATA|WRITE-CHARACTERS|'
+ r'WRITE-COMMENT|WRITE-DATA-ELEMENT|WRITE-EMPTY-ELEMENT|WRITE-ENTITY-REF|'
+ r'WRITE-EXTERNAL-DTD|WRITE-FRAGMENT|WRITE-MESSAGE|'
+ r'WRITE-PROCESSING-INSTRUCTION|WRITE-STATUS|WRITE-XML|WRITE-XMLSCHEMA|'
+ r'X|XCODE|XML-DATA-TYPE|XML-NODE-TYPE|XML-SCHEMA-PATH|'
+ r'XML-SUPPRESS-NAMESPACE-PROCESSING|X-OF|XREF|'
+ r'XREF-XML|Y|YEAR|YEAR-OFFSET|YES|YES-NO|'
+ r'YES-NO-CANCEL|Y-OF)\s*($|(?=[^0-9a-z_\-]))')
+
+ tokens = {
+ 'root': [
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'\{', Comment.Preproc, 'preprocessor'),
+ (r'\s*&.*', Comment.Preproc),
+ (r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration),
+ (types, Keyword.Type),
+ (keywords, Name.Builtin),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\s+', Text),
+ (r'[\\+\\-\\*\\/\\=]', Operator),
+ (r'[\\.\\:\\(\\)]', Punctuation),
+ (r'.', Name.Variable), # Lazy catch-all
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'preprocessor': [
+ (r'[^{}]', Comment.Preproc),
+ (r'{', Comment.Preproc, '#push'),
+ (r'}', Comment.Preproc, '#pop'),
+ ],
+ }
+
+
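The 'comment' state above relies on Pygments' '#push'/'#pop' transitions so that nested /* ... */ comments only close once every opening delimiter has been matched. A minimal, self-contained sketch of the same idiom (illustration only, not part of the patch):

    from pygments.lexer import RegexLexer
    from pygments.token import Comment, Text

    class NestedCommentDemo(RegexLexer):
        """Toy lexer using the same '#push'/'#pop' idiom as the 'comment' state above."""
        name = 'nested-comment-demo'
        tokens = {
            'root': [
                (r'/\*', Comment.Multiline, 'comment'),
                (r'.', Text),
            ],
            'comment': [
                (r'[^*/]+', Comment.Multiline),
                (r'/\*', Comment.Multiline, '#push'),  # nested open: one more 'comment' level
                (r'\*/', Comment.Multiline, '#pop'),   # close: drop one level
                (r'[*/]', Comment.Multiline),
            ],
        }

    for token, value in NestedCommentDemo().get_tokens('/* a /* b */ c */ x'):
        print(token, repr(value))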
+class BroLexer(RegexLexer):
+ """
+ For `Bro <http://bro-ids.org/>`_ scripts.
+
+ *New in Pygments 1.5.*
+ """
+ name = 'Bro'
+ aliases = ['bro']
+ filenames = ['*.bro']
+
+ _hex = r'[0-9a-fA-F_]+'
+ _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
+ _h = r'[A-Za-z0-9][-A-Za-z0-9]*'
+
+ tokens = {
+ 'root': [
+ # Preprocessor lines, comments and whitespace
+ (r'^@.*?\n', Comment.Preproc),
+ (r'#.*?\n', Comment.Single),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text),
+ # Keywords
+ (r'(add|alarm|break|case|const|continue|delete|do|else|enum|event'
+ r'|export|for|function|if|global|local|module|next'
+ r'|of|print|redef|return|schedule|type|when|while)\b', Keyword),
+ (r'(addr|any|bool|count|counter|double|file|int|interval|net'
+ r'|pattern|port|record|set|string|subnet|table|time|timer'
+ r'|vector)\b', Keyword.Type),
+ (r'(T|F)\b', Keyword.Constant),
+ (r'(&)((?:add|delete|expire)_func|attr|(create|read|write)_expire'
+ r'|default|disable_print_hook|raw_output|encrypt|group|log'
+ r'|mergeable|optional|persistent|priority|redef'
+ r'|rotate_(?:interval|size)|synchronized)\b', bygroups(Punctuation,
+ Keyword)),
+ (r'\s+module\b', Keyword.Namespace),
+ # Addresses, ports and networks
+ (r'\d+/(tcp|udp|icmp|unknown)\b', Number),
+ (r'(\d+\.){3}\d+', Number),
+ (r'(' + _hex + r'){7}' + _hex, Number),
+ (r'0x' + _hex + r'(' + _hex + r'|:)*::(' + _hex + r'|:)*', Number),
+ (r'((\d+|:)(' + _hex + r'|:)*)?::(' + _hex + r'|:)*', Number),
+ (r'(\d+\.\d+\.|(\d+\.){2}\d+)', Number),
+ # Hostnames
+ (_h + r'(\.' + _h + r')+', String),
+ # Numeric
+ (_float + r'\s+(day|hr|min|sec|msec|usec)s?\b', Literal.Date),
+ (r'0[xX]' + _hex, Number.Hex),
+ (_float, Number.Float),
+ (r'\d+', Number.Integer),
+ (r'/', String.Regex, 'regex'),
+ (r'"', String, 'string'),
+ # Operators
+ (r'[!%*/+-:<=>?~|]', Operator),
+ (r'([-+=&|]{2}|[+-=!><]=)', Operator),
+ (r'(in|match)\b', Operator.Word),
+ (r'[{}()\[\]$.,;]', Punctuation),
+ # Identifiers
+ (r'([_a-zA-Z]\w*)(::)', bygroups(Name, Name.Namespace)),
+ (r'[a-zA-Z_][a-zA-Z_0-9]*', Name)
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String),
+ (r'\\\n', String),
+ (r'\\', String)
+ ],
+ 'regex': [
+ (r'/', String.Regex, '#pop'),
+ (r'\\[\\nt/]', String.Regex), # String.Escape is too intense here.
+ (r'[^\\/\n]+', String.Regex),
+ (r'\\\n', String.Regex),
+ (r'\\', String.Regex)
+ ]
+ }
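Assuming the 'bro' alias declared above gets registered in the lexer mapping, as new lexers normally are, the class can be exercised like any other; a minimal usage sketch, with the sample script invented for illustration:

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    script = 'event bro_init()\n    {\n    print "hello";\n    }\n'
    for token, value in lex(script, get_lexer_by_name('bro')):
        print(token, repr(value))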
diff --git a/pygments/lexers/parsers.py b/pygments/lexers/parsers.py
index b2310488..c4aacf22 100644
--- a/pygments/lexers/parsers.py
+++ b/pygments/lexers/parsers.py
@@ -5,7 +5,7 @@
Lexers for parser generators.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/postgres.py b/pygments/lexers/postgres.py
deleted file mode 100644
index 08bc92f9..00000000
--- a/pygments/lexers/postgres.py
+++ /dev/null
@@ -1,326 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.postgres
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for PostgreSQL-specific SQL and psql interactive session.
-
- `PostgresLexer`
- A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
- lexer are:
-
- - keywords and data types list parsed from the PG docs (run the
- `_postgres_builtins` module to update them);
- - Content of $-strings parsed using a specific lexer, e.g. the content
- of a PL/Python function is parsed using the Python lexer;
- - parse PG specific constructs: E-strings, $-strings, U&-strings,
- different operators and punctuation.
-
- `PlPgsqlLexer`
- A lexer for the PL/pgSQL language. Adds a few specific construct on
- top of the PG SQL lexer (such as <<label>>).
-
- `PostgresConsoleLexer`
- A lexer to highlight an interactive psql session:
-
- - identifies the prompt and does its best to detect the end of command
- in multiline statement where not all the lines are prefixed by a
- prompt, telling them apart from the output;
- - highlights errors in the output and notification levels;
- - handles psql backslash commands.
-
- The ``tests/examplefiles`` contains a few test files with data to be
- parsed by these lexers.
-
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-from copy import deepcopy
-
-from pygments.lexer import Lexer, RegexLexer, do_insertions
-from pygments.token import Punctuation, \
- Text, Comment, Operator, Keyword, Name, String, Number, Generic
-from pygments.lexers import get_lexer_by_name, ClassNotFound
-
-from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
- PSEUDO_TYPES, PLPGSQL_KEYWORDS
-
-
-__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer']
-
-line_re = re.compile('.*?\n')
-
-language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
-
-def language_callback(lexer, match):
- """Parse the content of a $-string using a lexer
-
- The lexer is chosen looking for a nearby LANGUAGE.
-
- Note: this function should have been a `PostgresBase` method, but the
- rules deepcopy fails in this case.
- """
- l = None
- m = language_re.match(lexer.text[match.end():match.end()+100])
- if m is not None:
- l = lexer._get_lexer(m.group(1))
- else:
- m = list(language_re.finditer(
- lexer.text[max(0, match.start()-100):match.start()]))
- if m:
- l = lexer._get_lexer(m[-1].group(1))
-
- if l:
- yield (match.start(1), String, match.group(1))
- for x in l.get_tokens_unprocessed(match.group(2)):
- yield x
- yield (match.start(3), String, match.group(3))
-
- else:
- yield (match.start(), String, match.group())
-
-
-class PostgresBase(object):
- """Base class for Postgres-related lexers.
-
- This is implemented as a mixin to avoid the Lexer metaclass kicking in.
- this way the different lexer don't have a common Lexer ancestor. If they
- had, _tokens could be created on this ancestor and not updated for the
- other classes, resulting e.g. in PL/pgSQL parsed as SQL. This shortcoming
- seem to suggest that regexp lexers are not really subclassable.
-
- `language_callback` should really be our method, but this breaks deepcopy.
- """
- def get_tokens_unprocessed(self, text, *args):
- # Have a copy of the entire text to be used by `language_callback`.
- self.text = text
- for x in super(PostgresBase, self).get_tokens_unprocessed(
- text, *args):
- yield x
-
- def _get_lexer(self, lang):
- if lang.lower() == 'sql':
- return get_lexer_by_name('postgresql', **self.options)
-
- tries = [ lang ]
- if lang.startswith('pl'):
- tries.append(lang[2:])
- if lang.endswith('u'):
- tries.append(lang[:-1])
- if lang.startswith('pl') and lang.endswith('u'):
- tries.append(lang[2:-1])
-
- for l in tries:
- try:
- return get_lexer_by_name(l, **self.options)
- except ClassNotFound:
- pass
- else:
- # TODO: better logging
- # print >>sys.stderr, "language not found:", lang
- return None
-
-
-class PostgresLexer(PostgresBase, RegexLexer):
- """
- Lexer for the PostgreSQL dialect of SQL.
-
- *New in Pygments 1.5.*
- """
-
- name = 'PostgreSQL SQL dialect'
- aliases = ['postgresql', 'postgres']
- mimetypes = ['text/x-postgresql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'--.*?\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'(' + '|'.join([s.replace(" ", "\s+")
- for s in DATATYPES + PSEUDO_TYPES])
- + r')\b', Name.Builtin),
- (r'(' + '|'.join(KEYWORDS) + r')\b', Keyword),
- (r'[+*/<>=~!@#%^&|`?^-]+', Operator),
- (r'::', Operator), # cast
- (r'\$\d+', Name.Variable),
- (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r"(E|U&)?'(''|[^'])*'", String.Single),
- (r'(U&)?"(""|[^"])*"', String.Name), # quoted identifier
- (r'(?ms)(\$[^\$]*\$)(.*?)(\1)', language_callback),
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
-
- # psql variable in SQL
- (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
-
- (r'[;:()\[\]\{\},\.]', Punctuation),
- ],
- 'multiline-comments': [
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[^/\*]+', Comment.Multiline),
- (r'[/*]', Comment.Multiline)
- ],
- }
-
-
-class PlPgsqlLexer(PostgresBase, RegexLexer):
- """
- Handle the extra syntax in Pl/pgSQL language.
-
- *New in Pygments 1.5.*
- """
- name = 'PL/pgSQL'
- aliases = ['plpgsql']
- mimetypes = ['text/x-plpgsql']
-
- flags = re.IGNORECASE
- tokens = deepcopy(PostgresLexer.tokens)
-
- # extend the keywords list
- for i, pattern in enumerate(tokens['root']):
- if pattern[1] == Keyword:
- tokens['root'][i] = (
- r'(' + '|'.join(KEYWORDS + PLPGSQL_KEYWORDS) + r')\b',
- Keyword)
- del i
- break
- else:
- assert 0, "SQL keywords not found"
-
- # Add specific PL/pgSQL rules (before the SQL ones)
- tokens['root'][:0] = [
- (r'\%[a-z][a-z0-9_]*\b', Name.Builtin), # actually, a datatype
- (r':=', Operator),
- (r'\<\<[a-z][a-z0-9_]*\>\>', Name.Label),
- (r'\#[a-z][a-z0-9_]*\b', Keyword.Pseudo), # #variable_conflict
- ]
-
-
-class PsqlRegexLexer(PostgresBase, RegexLexer):
- """
- Extend the PostgresLexer adding support specific for psql commands.
-
- This is not a complete psql lexer yet as it lacks prompt support
- and output rendering.
- """
-
- name = 'PostgreSQL console - regexp based lexer'
- aliases = [] # not public
-
- flags = re.IGNORECASE
- tokens = deepcopy(PostgresLexer.tokens)
-
- tokens['root'].append(
- (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
- tokens['psql-command'] = [
- (r'\n', Text, 'root'),
- (r'\s+', Text),
- (r'\\[^\s]+', Keyword.Pseudo),
- (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
- (r"'(''|[^'])*'", String.Single),
- (r"`([^`])*`", String.Backtick),
- (r"[^\s]+", String.Symbol),
- ]
-
-re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
-re_psql_command = re.compile(r'\s*\\')
-re_end_command = re.compile(r';\s*(--.*?)?$')
-re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$')
-re_error = re.compile(r'(ERROR|FATAL):')
-re_message = re.compile(
- r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|'
- r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)')
-
-def lookahead(x):
- """Wrap an iterator and allow pushing back an item."""
- for i in x:
- while 1:
- i = yield i
- if i is None:
- break
- yield i
-
-
-class PostgresConsoleLexer(Lexer):
- """
- Lexer for psql sessions.
-
- *New in Pygments 1.5.*
- """
-
- name = 'PostgreSQL console (psql)'
- aliases = ['psql', 'postgresql-console', 'postgres-console']
- mimetypes = ['text/x-postgresql-psql']
-
- def get_tokens_unprocessed(self, data):
- sql = PsqlRegexLexer(**self.options)
-
- lines = lookahead(line_re.findall(data))
-
- # prompt-output cycle
- while 1:
-
- # consume the lines of the command: start with an optional prompt
- # and continue until the end of command is detected
- curcode = ''
- insertions = []
- while 1:
- try:
- line = lines.next()
- except StopIteration:
- # allow the emission of partially collected items
- # the repl loop will be broken below
- break
-
- # Identify a shell prompt in case of psql commandline example
- if line.startswith('$') and not curcode:
- lexer = get_lexer_by_name('console', **self.options)
- for x in lexer.get_tokens_unprocessed(line):
- yield x
- break
-
- # Identify a psql prompt
- mprompt = re_prompt.match(line)
- if mprompt is not None:
- insertions.append((len(curcode),
- [(0, Generic.Prompt, mprompt.group())]))
- curcode += line[len(mprompt.group()):]
- else:
- curcode += line
-
- # Check if this is the end of the command
- # TODO: better handle multiline comments at the end with
- # a lexer with an external state?
- if re_psql_command.match(curcode) \
- or re_end_command.search(curcode):
- break
-
- # Emit the combined stream of command and prompt(s)
- for item in do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode)):
- yield item
-
- # Emit the output lines
- out_token = Generic.Output
- while 1:
- line = lines.next()
- mprompt = re_prompt.match(line)
- if mprompt is not None:
- # push the line back to have it processed by the prompt
- lines.send(line)
- break
-
- mmsg = re_message.match(line)
- if mmsg is not None:
- if mmsg.group(1).startswith("ERROR") \
- or mmsg.group(1).startswith("FATAL"):
- out_token = Generic.Error
- yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
- yield (mmsg.start(2), out_token, mmsg.group(2))
- else:
- yield (0, out_token, line)
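The PostgreSQL lexers deleted here are not dropped from Pygments: the same classes reappear in the new pygments/lexers/sql.py later in this diff. As the docstring notes, the body of a $-quoted string is re-lexed with a lexer picked from the nearby LANGUAGE clause; a hedged sketch of that behaviour (sample SQL invented for illustration):

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    sql = (
        "CREATE FUNCTION pymax(a integer, b integer) RETURNS integer AS $$\n"
        "    return max(a, b)\n"
        "$$ LANGUAGE plpythonu;\n"
    )
    # The body between the $$ markers should come back as Python tokens,
    # because language_callback spots the LANGUAGE plpythonu clause.
    for token, value in lex(sql, get_lexer_by_name('postgresql')):
        print(token, repr(value))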
diff --git a/pygments/lexers/pypylog.py b/pygments/lexers/pypylog.py
deleted file mode 100644
index c3af3387..00000000
--- a/pygments/lexers/pypylog.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.pypylog
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for pypy log files.
-
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include
-from pygments.token import Text, Keyword, Number, Comment, Punctuation, Name, \
- String
-
-
-__all__ = ["PyPyLogLexer"]
-
-
-class PyPyLogLexer(RegexLexer):
- """
- Lexer for PyPy log files.
-
- *New in Pygments 1.5.*
- """
- name = "PyPy Log"
- aliases = ["pypylog", "pypy"]
- filenames = ["*.pypylog"]
- mimetypes = ['application/x-pypylog']
-
- tokens = {
- "root": [
- (r"\[\w+\] {jit-log-.*?$", Keyword, "jit-log"),
- (r"\[\w+\] {jit-backend-counts$", Keyword, "jit-backend-counts"),
- include("extra-stuff"),
- ],
- "jit-log": [
- (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
-
- (r"[ifp]\d+", Name),
- (r"ptr\d+", Name),
- (r"(\()([\w_]+(?:\.[\w_]+)?)(\))",
- bygroups(Punctuation, Name.Builtin, Punctuation)),
- (r"[\[\]=,()]", Punctuation),
- (r"(\d+\.\d+|inf|-inf)", Number.Float),
- (r"-?\d+", Number.Integer),
- (r"'.*'", String),
- (r"(None|descr|ConstClass|ConstPtr)", Name),
- (r"<.*?>", Name.Builtin),
- (r"(debug_merge_point|jump|finish)", Name.Class),
- (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
- r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
- r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
- r"int_is_true|"
- r"uint_floordiv|uint_ge|uint_lt|"
- r"float_add|float_sub|float_mul|float_truediv|"
- r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
- r"ptr_eq|"
- r"cast_int_to_float|cast_float_to_int|cast_opaque_ptr|"
- r"force_token|quasiimmut_field|same_as|virtual_ref_finish|virtual_ref|"
- r"call_may_force|call_assembler|call_loopinvariant|call_release_gil|call_pure|call|"
- r"new_with_vtable|new_array|newstr|newunicode|new|"
- r"arraylen_gc|"
- r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
- r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|getfield_gc|"
- r"getfield_raw|setfield_gc|setfield_raw|"
- r"strgetitem|strsetitem|strlen|copystrcontent|"
- r"unicodegetitem|unicodesetitem|unicodelen|"
- r"guard_true|guard_false|guard_value|guard_isnull|"
- r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
- r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
- Name.Builtin),
- include("extra-stuff"),
- ],
- "jit-backend-counts": [
- (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
- (r"[:]", Punctuation),
- (r"\d+", Number),
- include("extra-stuff"),
- ],
- "extra-stuff": [
- (r"[\n\s]+", Text),
- (r"#.*?$", Comment),
- ],
- }
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
new file mode 100644
index 00000000..8c78d158
--- /dev/null
+++ b/pygments/lexers/shell.py
@@ -0,0 +1,360 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.shell
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various shells.
+
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, include
+from pygments.token import Punctuation, \
+ Text, Comment, Operator, Keyword, Name, String, Number, Generic
+from pygments.util import shebang_matches
+
+
+__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
+ 'PowerShellLexer']
+
+line_re = re.compile('.*?\n')
+
+
+class BashLexer(RegexLexer):
+ """
+ Lexer for (ba|k|)sh shell scripts.
+
+ *New in Pygments 0.6.*
+ """
+
+ name = 'Bash'
+ aliases = ['bash', 'sh', 'ksh']
+ filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass']
+ mimetypes = ['application/x-sh', 'application/x-shellscript']
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ (r'\$\(\(', Keyword, 'math'),
+ (r'\$\(', Keyword, 'paren'),
+ (r'\${#?', Keyword, 'curly'),
+ (r'`', String.Backtick, 'backticks'),
+ include('data'),
+ ],
+ 'basic': [
+ (r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
+ r'select|continue|until|esac|elif)\s*\b',
+ Keyword),
+ (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
+ r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
+ r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
+ r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
+ r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
+ r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)',
+ Name.Builtin),
+ (r'#.*\n', Comment),
+ (r'\\[\w\W]', String.Escape),
+ (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+ (r'[\[\]{}()=]', Operator),
+ (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r'&&|\|\|', Operator),
+ ],
+ 'data': [
+ (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r';', Text),
+ (r'\s+', Text),
+ (r'[^=\s\n\[\]{}()$"\'`\\<]+', Text),
+ (r'\d+(?= |\Z)', Number),
+ (r'\$#?(\w+|.)', Name.Variable),
+ (r'<', Text),
+ ],
+ 'curly': [
+ (r'}', Keyword, '#pop'),
+ (r':-', Keyword),
+ (r'[a-zA-Z0-9_]+', Name.Variable),
+ (r'[^}:"\'`$]+', Punctuation),
+ (r':', Punctuation),
+ include('root'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ 'math': [
+ (r'\)\)', Keyword, '#pop'),
+ (r'[-+*/%^|&]|\*\*|\|\|', Operator),
+ (r'\d+', Number),
+ include('root'),
+ ],
+ 'backticks': [
+ (r'`', String.Backtick, '#pop'),
+ include('root'),
+ ],
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'(ba|z|)sh')
+
+
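analyse_text above defers to pygments.util.shebang_matches, so lexer guessing only inspects the shebang line; a quick sketch of what that check accepts (examples invented for illustration):

    from pygments.util import shebang_matches

    print(shebang_matches('#!/bin/bash\necho hi\n', r'(ba|z|)sh'))            # True
    print(shebang_matches('#!/usr/bin/env zsh\necho hi\n', r'(ba|z|)sh'))     # True
    print(shebang_matches('#!/usr/bin/env python\nprint 1\n', r'(ba|z|)sh'))  # False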
+class BashSessionLexer(Lexer):
+ """
+ Lexer for simplistic shell sessions.
+
+ *New in Pygments 1.1.*
+ """
+
+ name = 'Bash Session'
+ aliases = ['console']
+ filenames = ['*.sh-session']
+ mimetypes = ['application/x-shell-session']
+
+ def get_tokens_unprocessed(self, text):
+ bashlexer = BashLexer(**self.options)
+
+ pos = 0
+ curcode = ''
+ insertions = []
+
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = re.match(r'^((?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)?|\[\S+[@:]'
+ r'[^\n]+\].+)[$#%])(.*\n?)', line)
+ if m:
+ # To support output lexers (say diff output), the output
+ # needs to be broken by prompts whenever the output lexer
+ # changes.
+ if not insertions:
+ pos = match.start()
+
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, m.group(1))]))
+ curcode += m.group(2)
+ elif line.startswith('>'):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:1])]))
+ curcode += line[1:]
+ else:
+ if insertions:
+ toks = bashlexer.get_tokens_unprocessed(curcode)
+ for i, t, v in do_insertions(insertions, toks):
+ yield pos+i, t, v
+ yield match.start(), Generic.Output, line
+ insertions = []
+ curcode = ''
+ if insertions:
+ for i, t, v in do_insertions(insertions,
+ bashlexer.get_tokens_unprocessed(curcode)):
+ yield pos+i, t, v
+
+
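Thanks to the prompt/output split implemented above, a transcript can be highlighted directly through the 'console' alias; a hedged usage sketch (sample session invented for illustration):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    session = '$ echo hello\nhello\n'
    # The '$ ' line is treated as a prompt plus Bash code; the bare 'hello'
    # line falls through to Generic.Output.
    print(highlight(session, get_lexer_by_name('console'), TerminalFormatter()))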
+class BatchLexer(RegexLexer):
+ """
+ Lexer for the DOS/Windows Batch file format.
+
+ *New in Pygments 0.7.*
+ """
+ name = 'Batchfile'
+ aliases = ['bat']
+ filenames = ['*.bat', '*.cmd']
+ mimetypes = ['application/x-dos-batch']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ # Lines can start with @ to prevent echo
+ (r'^\s*@', Punctuation),
+ (r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)),
+ (r'".*?"', String.Double),
+ (r"'.*?'", String.Single),
+ # If made more specific, make sure you still allow expansions
+ # like %~$VAR:zlt
+ (r'%%?[~$:\w]+%?', Name.Variable),
+ (r'::.*', Comment), # Technically :: only works at BOL
+ (r'(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
+ (r'(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
+ (r'(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
+ (r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|'
+ r'setlocal|shift|errorlevel|exist|defined|cmdextversion|'
+ r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword),
+ (r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator),
+ include('basic'),
+ (r'.', Text),
+ ],
+ 'echo': [
+ # Escapes only valid within echo args?
+ (r'\^\^|\^<|\^>|\^\|', String.Escape),
+ (r'\n', Text, '#pop'),
+ include('basic'),
+ (r'[^\'"^]+', Text),
+ ],
+ 'basic': [
+ (r'".*?"', String.Double),
+ (r"'.*?'", String.Single),
+ (r'`.*?`', String.Backtick),
+ (r'-?\d+', Number),
+ (r',', Punctuation),
+ (r'=', Operator),
+ (r'/\S+', Name),
+ (r':\w+', Name.Label),
+ (r'\w:\w+', Text),
+ (r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)),
+ ],
+ }
+
+
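A short sketch of the batch-file rules above in action, using the 'bat' alias declared for the lexer (sample script invented for illustration):

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    bat = '@echo off\nset NAME=world\necho Hello %NAME%\n'
    for token, value in lex(bat, get_lexer_by_name('bat')):
        print(token, repr(value))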
+class TcshLexer(RegexLexer):
+ """
+ Lexer for tcsh scripts.
+
+ *New in Pygments 0.10.*
+ """
+
+ name = 'Tcsh'
+ aliases = ['tcsh', 'csh']
+ filenames = ['*.tcsh', '*.csh']
+ mimetypes = ['application/x-csh']
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ (r'\$\(', Keyword, 'paren'),
+ (r'\${#?', Keyword, 'curly'),
+ (r'`', String.Backtick, 'backticks'),
+ include('data'),
+ ],
+ 'basic': [
+ (r'\b(if|endif|else|while|then|foreach|case|default|'
+ r'continue|goto|breaksw|end|switch|endsw)\s*\b',
+ Keyword),
+ (r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
+ r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
+ r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
+ r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
+ r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
+ r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
+ r'source|stop|suspend|source|suspend|telltc|time|'
+ r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
+ r'ver|wait|warp|watchlog|where|which)\s*\b',
+ Name.Builtin),
+ (r'#.*\n', Comment),
+ (r'\\[\w\W]', String.Escape),
+ (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+ (r'[\[\]{}()=]+', Operator),
+ (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ ],
+ 'data': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r'\s+', Text),
+ (r'[^=\s\n\[\]{}()$"\'`\\]+', Text),
+ (r'\d+(?= |\Z)', Number),
+ (r'\$#?(\w+|.)', Name.Variable),
+ ],
+ 'curly': [
+ (r'}', Keyword, '#pop'),
+ (r':-', Keyword),
+ (r'[a-zA-Z0-9_]+', Name.Variable),
+ (r'[^}:"\'`$]+', Punctuation),
+ (r':', Punctuation),
+ include('root'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ 'backticks': [
+ (r'`', String.Backtick, '#pop'),
+ include('root'),
+ ],
+ }
+
+
+class PowerShellLexer(RegexLexer):
+ """
+ For Windows PowerShell code.
+
+ *New in Pygments 1.5.*
+ """
+ name = 'PowerShell'
+ aliases = ['powershell', 'posh', 'ps1']
+ filenames = ['*.ps1']
+ mimetypes = ['text/x-powershell']
+
+ flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
+
+ keywords = (
+ 'while validateset validaterange validatepattern validatelength '
+ 'validatecount until trap switch return ref process param parameter in '
+ 'if global: function foreach for finally filter end elseif else '
+ 'dynamicparam do default continue cmdletbinding break begin alias \\? '
+ '% #script #private #local #global mandatory parametersetname position '
+ 'valuefrompipeline valuefrompipelinebypropertyname '
+ 'valuefromremainingarguments helpmessage try catch').split()
+
+ operators = (
+ 'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
+ 'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
+ 'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
+ 'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
+ 'lt match ne not notcontains notlike notmatch or regex replace '
+ 'wildcard').split()
+
+ verbs = (
+ 'write where wait use update unregister undo trace test tee take '
+ 'suspend stop start split sort skip show set send select scroll resume '
+ 'restore restart resolve resize reset rename remove register receive '
+ 'read push pop ping out new move measure limit join invoke import '
+ 'group get format foreach export expand exit enter enable disconnect '
+ 'disable debug cxnew copy convertto convertfrom convert connect '
+ 'complete compare clear checkpoint aggregate add').split()
+
+ commenthelp = (
+ 'component description example externalhelp forwardhelpcategory '
+ 'forwardhelptargetname forwardhelptargetname functionality inputs link '
+ 'notes outputs parameter remotehelprunspace role synopsis').split()
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
+ bygroups(Comment, String.Doc, Comment)),
+ (r'#[^\n]*?$', Comment),
+ (r'(&lt;|<)#', Comment.Multiline, 'multline'),
+ (r'@"\n.*?\n"@', String.Heredoc),
+ (r"@'\n.*?\n'@", String.Heredoc),
+ (r'"', String.Double, 'string'),
+ (r"'([^']|'')*'", String.Single),
+ (r'(\$|@@|@)((global|script|private|env):)?[a-z0-9_]+',
+ Name.Variable),
+ (r'(%s)\b' % '|'.join(keywords), Keyword),
+ (r'-(%s)\b' % '|'.join(operators), Operator),
+ (r'(%s)-[a-z_][a-z0-9_]*\b' % '|'.join(verbs), Name.Builtin),
+ (r'\[[a-z_\[][a-z0-9_. `,\[\]]*\]', Name.Constant), # .net [type]s
+ (r'-[a-z_][a-z0-9_]*', Name),
+ (r'\w+', Name),
+ (r'[.,{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
+ ],
+ 'multline': [
+ (r'[^#&.]+', Comment.Multiline),
+ (r'#(>|&gt;)', Comment.Multiline, '#pop'),
+ (r'\.(%s)' % '|'.join(commenthelp), String.Doc),
+ (r'[#&.]', Comment.Multiline),
+ ],
+ 'string': [
+ (r'[^$`"]+', String.Double),
+ (r'\$\(', String.Interpol, 'interpol'),
+ (r'`"|""', String.Double),
+ (r'[`$]', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'interpol': [
+ (r'[^$)]+', String.Interpol),
+ (r'\$\(', String.Interpol, '#push'),
+ (r'\)', String.Interpol, '#pop'),
+ ]
+ }
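Finally, a hedged sketch of the PowerShell lexer, exercising the 'string'/'interpol' states above that handle $() sub-expressions inside double-quoted strings (sample script invented for illustration):

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    ps1 = '$name = "world"\nWrite-Host "$(Get-Date) hello, $name"\n'
    for token, value in lex(ps1, get_lexer_by_name('powershell')):
        print(token, repr(value))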
diff --git a/pygments/lexers/special.py b/pygments/lexers/special.py
index 61cbc2c1..785ab73c 100644
--- a/pygments/lexers/special.py
+++ b/pygments/lexers/special.py
@@ -5,7 +5,7 @@
Special lexers.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py
new file mode 100644
index 00000000..61a08a3c
--- /dev/null
+++ b/pygments/lexers/sql.py
@@ -0,0 +1,556 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.sql
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various SQL dialects and related interactive sessions.
+
+ Postgres specific lexers:
+
+ `PostgresLexer`
+ A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
+ lexer are:
+
+ - keywords and data types lists are parsed from the PG docs (run the
+ `_postgres_builtins` module to update them);
+ - the content of $-strings is parsed using a specific lexer, e.g. the
+ content of a PL/Python function is parsed using the Python lexer;
+ - PG-specific constructs are parsed: E-strings, $-strings, U&-strings,
+ different operators and punctuation.
+
+ `PlPgsqlLexer`
+ A lexer for the PL/pgSQL language. Adds a few PL/pgSQL-specific
+ constructs on top of the PG SQL lexer (such as <<label>>).
+
+ `PostgresConsoleLexer`
+ A lexer to highlight an interactive psql session:
+
+ - identifies the prompt and does its best to detect the end of a command
+ in multiline statements where not all the lines are prefixed by a
+ prompt, telling them apart from the output;
+ - highlights errors in the output and notification levels;
+ - handles psql backslash commands.
+
+ The ``tests/examplefiles`` directory contains a few test files with data
+ to be parsed by these lexers.
+
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+from copy import deepcopy
+
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups
+from pygments.token import Punctuation, \
+ Text, Comment, Operator, Keyword, Name, String, Number, Generic
+from pygments.lexers import get_lexer_by_name, ClassNotFound
+
+from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
+ PSEUDO_TYPES, PLPGSQL_KEYWORDS
+
+
+__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
+ 'SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer']
+
+line_re = re.compile('.*?\n')
+
+language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
+
+def language_callback(lexer, match):
+ """Parse the content of a $-string using a lexer
+
+ The lexer is chosen looking for a nearby LANGUAGE.
+
+ Note: this function should have been a `PostgresBase` method, but the
+ rules deepcopy fails in this case.
+ """
+ l = None
+ m = language_re.match(lexer.text[match.end():match.end()+100])
+ if m is not None:
+ l = lexer._get_lexer(m.group(1))
+ else:
+ m = list(language_re.finditer(
+ lexer.text[max(0, match.start()-100):match.start()]))
+ if m:
+ l = lexer._get_lexer(m[-1].group(1))
+
+ if l:
+ yield (match.start(1), String, match.group(1))
+ for x in l.get_tokens_unprocessed(match.group(2)):
+ yield x
+ yield (match.start(3), String, match.group(3))
+
+ else:
+ yield (match.start(), String, match.group())
+
+
+class PostgresBase(object):
+ """Base class for Postgres-related lexers.
+
+ This is implemented as a mixin to avoid the Lexer metaclass kicking in.
+ This way the different lexers don't have a common Lexer ancestor. If they
+ had, _tokens could be created on this ancestor and not updated for the
+ other classes, resulting e.g. in PL/pgSQL being parsed as SQL. This
+ shortcoming seems to suggest that regexp lexers are not really subclassable.
+
+ `language_callback` should really be our method, but this breaks deepcopy.
+ """
+ def get_tokens_unprocessed(self, text, *args):
+ # Have a copy of the entire text to be used by `language_callback`.
+ self.text = text
+ for x in super(PostgresBase, self).get_tokens_unprocessed(
+ text, *args):
+ yield x
+
+ def _get_lexer(self, lang):
+ if lang.lower() == 'sql':
+ return get_lexer_by_name('postgresql', **self.options)
+
+ tries = [ lang ]
+ if lang.startswith('pl'):
+ tries.append(lang[2:])
+ if lang.endswith('u'):
+ tries.append(lang[:-1])
+ if lang.startswith('pl') and lang.endswith('u'):
+ tries.append(lang[2:-1])
+
+ for l in tries:
+ try:
+ return get_lexer_by_name(l, **self.options)
+ except ClassNotFound:
+ pass
+ else:
+ # TODO: better logging
+ # print >>sys.stderr, "language not found:", lang
+ return None
+
+
+class PostgresLexer(PostgresBase, RegexLexer):
+ """
+ Lexer for the PostgreSQL dialect of SQL.
+
+ *New in Pygments 1.5.*
+ """
+
+ name = 'PostgreSQL SQL dialect'
+ aliases = ['postgresql', 'postgres']
+ mimetypes = ['text/x-postgresql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'--.*?\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'(' + '|'.join([s.replace(" ", "\s+")
+ for s in DATATYPES + PSEUDO_TYPES])
+ + r')\b', Name.Builtin),
+ (r'(' + '|'.join(KEYWORDS) + r')\b', Keyword),
+ (r'[+*/<>=~!@#%^&|`?^-]+', Operator),
+ (r'::', Operator), # cast
+ (r'\$\d+', Name.Variable),
+ (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r"(E|U&)?'(''|[^'])*'", String.Single),
+ (r'(U&)?"(""|[^"])*"', String.Name), # quoted identifier
+ (r'(?ms)(\$[^\$]*\$)(.*?)(\1)', language_callback),
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+
+ # psql variable in SQL
+ (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
+
+ (r'[;:()\[\]\{\},\.]', Punctuation),
+ ],
+ 'multiline-comments': [
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[^/\*]+', Comment.Multiline),
+ (r'[/*]', Comment.Multiline)
+ ],
+ }
+
+
+class PlPgsqlLexer(PostgresBase, RegexLexer):
+ """
+ Handle the extra syntax in Pl/pgSQL language.
+
+ *New in Pygments 1.5.*
+ """
+ name = 'PL/pgSQL'
+ aliases = ['plpgsql']
+ mimetypes = ['text/x-plpgsql']
+
+ flags = re.IGNORECASE
+ tokens = deepcopy(PostgresLexer.tokens)
+
+ # extend the keywords list
+ for i, pattern in enumerate(tokens['root']):
+ if pattern[1] == Keyword:
+ tokens['root'][i] = (
+ r'(' + '|'.join(KEYWORDS + PLPGSQL_KEYWORDS) + r')\b',
+ Keyword)
+ del i
+ break
+ else:
+ assert 0, "SQL keywords not found"
+
+ # Add specific PL/pgSQL rules (before the SQL ones)
+ tokens['root'][:0] = [
+ (r'\%[a-z][a-z0-9_]*\b', Name.Builtin), # actually, a datatype
+ (r':=', Operator),
+ (r'\<\<[a-z][a-z0-9_]*\>\>', Name.Label),
+ (r'\#[a-z][a-z0-9_]*\b', Keyword.Pseudo), # #variable_conflict
+ ]
+
+
+class PsqlRegexLexer(PostgresBase, RegexLexer):
+ """
+ Extend the PostgresLexer, adding support for psql-specific commands.
+
+ This is not a complete psql lexer yet as it lacks prompt support
+ and output rendering.
+ """
+
+ name = 'PostgreSQL console - regexp based lexer'
+ aliases = [] # not public
+
+ flags = re.IGNORECASE
+ tokens = deepcopy(PostgresLexer.tokens)
+
+ tokens['root'].append(
+ (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
+ tokens['psql-command'] = [
+ (r'\n', Text, 'root'),
+ (r'\s+', Text),
+ (r'\\[^\s]+', Keyword.Pseudo),
+ (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
+ (r"'(''|[^'])*'", String.Single),
+ (r"`([^`])*`", String.Backtick),
+ (r"[^\s]+", String.Symbol),
+ ]
+
+re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
+re_psql_command = re.compile(r'\s*\\')
+re_end_command = re.compile(r';\s*(--.*?)?$')
+re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$')
+re_error = re.compile(r'(ERROR|FATAL):')
+re_message = re.compile(
+ r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|'
+ r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)')
+
+def lookahead(x):
+ """Wrap an iterator and allow pushing back an item."""
+ for i in x:
+ while 1:
+ i = yield i
+ if i is None:
+ break
+ yield i
+
+
+class PostgresConsoleLexer(Lexer):
+ """
+ Lexer for psql sessions.
+
+ *New in Pygments 1.5.*
+ """
+
+ name = 'PostgreSQL console (psql)'
+ aliases = ['psql', 'postgresql-console', 'postgres-console']
+ mimetypes = ['text/x-postgresql-psql']
+
+ def get_tokens_unprocessed(self, data):
+ sql = PsqlRegexLexer(**self.options)
+
+ lines = lookahead(line_re.findall(data))
+
+ # prompt-output cycle
+ while 1:
+
+ # consume the lines of the command: start with an optional prompt
+ # and continue until the end of command is detected
+ curcode = ''
+ insertions = []
+ while 1:
+ try:
+ line = lines.next()
+ except StopIteration:
+ # allow the emission of partially collected items
+ # the repl loop will be broken below
+ break
+
+ # Identify a shell prompt in case of psql commandline example
+ if line.startswith('$') and not curcode:
+ lexer = get_lexer_by_name('console', **self.options)
+ for x in lexer.get_tokens_unprocessed(line):
+ yield x
+ break
+
+ # Identify a psql prompt
+ mprompt = re_prompt.match(line)
+ if mprompt is not None:
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, mprompt.group())]))
+ curcode += line[len(mprompt.group()):]
+ else:
+ curcode += line
+
+ # Check if this is the end of the command
+ # TODO: better handle multiline comments at the end with
+ # a lexer with an external state?
+ if re_psql_command.match(curcode) \
+ or re_end_command.search(curcode):
+ break
+
+ # Emit the combined stream of command and prompt(s)
+ for item in do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode)):
+ yield item
+
+ # Emit the output lines
+ out_token = Generic.Output
+ while 1:
+ line = lines.next()
+ mprompt = re_prompt.match(line)
+ if mprompt is not None:
+ # push the line back to have it processed by the prompt
+ lines.send(line)
+ break
+
+ mmsg = re_message.match(line)
+ if mmsg is not None:
+ if mmsg.group(1).startswith("ERROR") \
+ or mmsg.group(1).startswith("FATAL"):
+ out_token = Generic.Error
+ yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
+ yield (mmsg.start(2), out_token, mmsg.group(2))
+ else:
+ yield (0, out_token, line)
+
+
+class SqlLexer(RegexLexer):
+ """
+ Lexer for Structured Query Language. Currently, this lexer does
+ not recognize any special syntax except ANSI SQL.
+ """
+
+ name = 'SQL'
+ aliases = ['sql']
+ filenames = ['*.sql']
+ mimetypes = ['text/x-sql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'--.*?\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'(ABORT|ABS|ABSOLUTE|ACCESS|ADA|ADD|ADMIN|AFTER|AGGREGATE|'
+ r'ALIAS|ALL|ALLOCATE|ALTER|ANALYSE|ANALYZE|AND|ANY|ARE|AS|'
+ r'ASC|ASENSITIVE|ASSERTION|ASSIGNMENT|ASYMMETRIC|AT|ATOMIC|'
+ r'AUTHORIZATION|AVG|BACKWARD|BEFORE|BEGIN|BETWEEN|BITVAR|'
+ r'BIT_LENGTH|BOTH|BREADTH|BY|C|CACHE|CALL|CALLED|CARDINALITY|'
+ r'CASCADE|CASCADED|CASE|CAST|CATALOG|CATALOG_NAME|CHAIN|'
+ r'CHARACTERISTICS|CHARACTER_LENGTH|CHARACTER_SET_CATALOG|'
+ r'CHARACTER_SET_NAME|CHARACTER_SET_SCHEMA|CHAR_LENGTH|CHECK|'
+ r'CHECKED|CHECKPOINT|CLASS|CLASS_ORIGIN|CLOB|CLOSE|CLUSTER|'
+ r'COALSECE|COBOL|COLLATE|COLLATION|COLLATION_CATALOG|'
+ r'COLLATION_NAME|COLLATION_SCHEMA|COLUMN|COLUMN_NAME|'
+ r'COMMAND_FUNCTION|COMMAND_FUNCTION_CODE|COMMENT|COMMIT|'
+ r'COMMITTED|COMPLETION|CONDITION_NUMBER|CONNECT|CONNECTION|'
+ r'CONNECTION_NAME|CONSTRAINT|CONSTRAINTS|CONSTRAINT_CATALOG|'
+ r'CONSTRAINT_NAME|CONSTRAINT_SCHEMA|CONSTRUCTOR|CONTAINS|'
+ r'CONTINUE|CONVERSION|CONVERT|COPY|CORRESPONTING|COUNT|'
+ r'CREATE|CREATEDB|CREATEUSER|CROSS|CUBE|CURRENT|CURRENT_DATE|'
+ r'CURRENT_PATH|CURRENT_ROLE|CURRENT_TIME|CURRENT_TIMESTAMP|'
+ r'CURRENT_USER|CURSOR|CURSOR_NAME|CYCLE|DATA|DATABASE|'
+ r'DATETIME_INTERVAL_CODE|DATETIME_INTERVAL_PRECISION|DAY|'
+ r'DEALLOCATE|DECLARE|DEFAULT|DEFAULTS|DEFERRABLE|DEFERRED|'
+ r'DEFINED|DEFINER|DELETE|DELIMITER|DELIMITERS|DEREF|DESC|'
+ r'DESCRIBE|DESCRIPTOR|DESTROY|DESTRUCTOR|DETERMINISTIC|'
+ r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|'
+ r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|'
+ r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|'
+ r'EXCEPT|ESCEPTION|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
+ r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|'
+ r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|'
+ r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|'
+ r'GROUP|GROUPING|HANDLER|HAVING|HIERARCHY|HOLD|HOST|IDENTITY|'
+ r'IGNORE|ILIKE|IMMEDIATE|IMMUTABLE|IMPLEMENTATION|IMPLICIT|IN|'
+ r'INCLUDING|INCREMENT|INDEX|INDITCATOR|INFIX|INHERITS|INITIALIZE|'
+ r'INITIALLY|INNER|INOUT|INPUT|INSENSITIVE|INSERT|INSTANTIABLE|'
+ r'INSTEAD|INTERSECT|INTO|INVOKER|IS|ISNULL|ISOLATION|ITERATE|JOIN|'
+ r'KEY|KEY_MEMBER|KEY_TYPE|LANCOMPILER|LANGUAGE|LARGE|LAST|'
+ r'LATERAL|LEADING|LEFT|LENGTH|LESS|LEVEL|LIKE|LIMIT|LISTEN|LOAD|'
+ r'LOCAL|LOCALTIME|LOCALTIMESTAMP|LOCATION|LOCATOR|LOCK|LOWER|'
+ r'MAP|MATCH|MAX|MAXVALUE|MESSAGE_LENGTH|MESSAGE_OCTET_LENGTH|'
+ r'MESSAGE_TEXT|METHOD|MIN|MINUTE|MINVALUE|MOD|MODE|MODIFIES|'
+ r'MODIFY|MONTH|MORE|MOVE|MUMPS|NAMES|NATIONAL|NATURAL|NCHAR|'
+ r'NCLOB|NEW|NEXT|NO|NOCREATEDB|NOCREATEUSER|NONE|NOT|NOTHING|'
+ r'NOTIFY|NOTNULL|NULL|NULLABLE|NULLIF|OBJECT|OCTET_LENGTH|OF|OFF|'
+ r'OFFSET|OIDS|OLD|ON|ONLY|OPEN|OPERATION|OPERATOR|OPTION|OPTIONS|'
+ r'OR|ORDER|ORDINALITY|OUT|OUTER|OUTPUT|OVERLAPS|OVERLAY|OVERRIDING|'
+ r'OWNER|PAD|PARAMETER|PARAMETERS|PARAMETER_MODE|PARAMATER_NAME|'
+ r'PARAMATER_ORDINAL_POSITION|PARAMETER_SPECIFIC_CATALOG|'
+ r'PARAMETER_SPECIFIC_NAME|PARAMATER_SPECIFIC_SCHEMA|PARTIAL|'
+ r'PASCAL|PENDANT|PLACING|PLI|POSITION|POSTFIX|PRECISION|PREFIX|'
+ r'PREORDER|PREPARE|PRESERVE|PRIMARY|PRIOR|PRIVILEGES|PROCEDURAL|'
+ r'PROCEDURE|PUBLIC|READ|READS|RECHECK|RECURSIVE|REF|REFERENCES|'
+ r'REFERENCING|REINDEX|RELATIVE|RENAME|REPEATABLE|REPLACE|RESET|'
+ r'RESTART|RESTRICT|RESULT|RETURN|RETURNED_LENGTH|'
+ r'RETURNED_OCTET_LENGTH|RETURNED_SQLSTATE|RETURNS|REVOKE|RIGHT|'
+ r'ROLE|ROLLBACK|ROLLUP|ROUTINE|ROUTINE_CATALOG|ROUTINE_NAME|'
+ r'ROUTINE_SCHEMA|ROW|ROWS|ROW_COUNT|RULE|SAVE_POINT|SCALE|SCHEMA|'
+ r'SCHEMA_NAME|SCOPE|SCROLL|SEARCH|SECOND|SECURITY|SELECT|SELF|'
+ r'SENSITIVE|SERIALIZABLE|SERVER_NAME|SESSION|SESSION_USER|SET|'
+ r'SETOF|SETS|SHARE|SHOW|SIMILAR|SIMPLE|SIZE|SOME|SOURCE|SPACE|'
+ r'SPECIFIC|SPECIFICTYPE|SPECIFIC_NAME|SQL|SQLCODE|SQLERROR|'
+ r'SQLEXCEPTION|SQLSTATE|SQLWARNINIG|STABLE|START|STATE|STATEMENT|'
+ r'STATIC|STATISTICS|STDIN|STDOUT|STORAGE|STRICT|STRUCTURE|STYPE|'
+ r'SUBCLASS_ORIGIN|SUBLIST|SUBSTRING|SUM|SYMMETRIC|SYSID|SYSTEM|'
+ r'SYSTEM_USER|TABLE|TABLE_NAME|TEMP|TEMPLATE|TEMPORARY|TERMINATE|'
+ r'THAN|THEN|TIMESTAMP|TIMEZONE_HOUR|TIMEZONE_MINUTE|TO|TOAST|'
+ r'TRAILING|TRANSATION|TRANSACTIONS_COMMITTED|'
+ r'TRANSACTIONS_ROLLED_BACK|TRANSATION_ACTIVE|TRANSFORM|'
+ r'TRANSFORMS|TRANSLATE|TRANSLATION|TREAT|TRIGGER|TRIGGER_CATALOG|'
+ r'TRIGGER_NAME|TRIGGER_SCHEMA|TRIM|TRUE|TRUNCATE|TRUSTED|TYPE|'
+ r'UNCOMMITTED|UNDER|UNENCRYPTED|UNION|UNIQUE|UNKNOWN|UNLISTEN|'
+ r'UNNAMED|UNNEST|UNTIL|UPDATE|UPPER|USAGE|USER|'
+ r'USER_DEFINED_TYPE_CATALOG|USER_DEFINED_TYPE_NAME|'
+ r'USER_DEFINED_TYPE_SCHEMA|USING|VACUUM|VALID|VALIDATOR|VALUES|'
+ r'VARIABLE|VERBOSE|VERSION|VIEW|VOLATILE|WHEN|WHENEVER|WHERE|'
+ r'WITH|WITHOUT|WORK|WRITE|YEAR|ZONE)\b', Keyword),
+ (r'(ARRAY|BIGINT|BINARY|BIT|BLOB|BOOLEAN|CHAR|CHARACTER|DATE|'
+ r'DEC|DECIMAL|FLOAT|INT|INTEGER|INTERVAL|NUMBER|NUMERIC|REAL|'
+ r'SERIAL|SMALLINT|VARCHAR|VARYING|INT8|SERIAL8|TEXT)\b',
+ Name.Builtin),
+ (r'[+*/<>=~!@#%^&|`?^-]', Operator),
+ (r'[0-9]+', Number.Integer),
+ # TODO: Backslash escapes?
+ (r"'(''|[^'])*'", String.Single),
+ (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ (r'[;:()\[\],\.]', Punctuation)
+ ],
+ 'multiline-comments': [
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[^/\*]+', Comment.Multiline),
+ (r'[/*]', Comment.Multiline)
+ ]
+ }
+
+
+class MySqlLexer(RegexLexer):
+ """
+ Special lexer for MySQL.
+ """
+
+ name = 'MySQL'
+ aliases = ['mysql']
+ mimetypes = ['text/x-mysql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'(#|--\s+).*?\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'[0-9]+', Number.Integer),
+ (r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float),
+ # TODO: add backslash escapes
+ (r"'(''|[^'])*'", String.Single),
+ (r'"(""|[^"])*"', String.Double),
+ (r"`(``|[^`])*`", String.Symbol),
+ (r'[+*/<>=~!@#%^&|`?^-]', Operator),
+ (r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|'
+ r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|'
+ r'tinyblob|mediumblob|longblob|blob|float|double|double\s+'
+ r'precision|real|numeric|dec|decimal|timestamp|year|char|'
+ r'varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?',
+ bygroups(Keyword.Type, Text, Punctuation)),
+ (r'\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|'
+ r'bigint|binary|blob|both|by|call|cascade|case|change|char|'
+ r'character|check|collate|column|condition|constraint|continue|'
+ r'convert|create|cross|current_date|current_time|'
+ r'current_timestamp|current_user|cursor|database|databases|'
+ r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|'
+ r'declare|default|delayed|delete|desc|describe|deterministic|'
+ r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|'
+ r'enclosed|escaped|exists|exit|explain|fetch|float|float4|float8'
+ r'|for|force|foreign|from|fulltext|grant|group|having|'
+ r'high_priority|hour_microsecond|hour_minute|hour_second|if|'
+ r'ignore|in|index|infile|inner|inout|insensitive|insert|int|'
+ r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|'
+ r'join|key|keys|kill|leading|leave|left|like|limit|lines|load|'
+ r'localtime|localtimestamp|lock|long|loop|low_priority|match|'
+ r'minute_microsecond|minute_second|mod|modifies|natural|'
+ r'no_write_to_binlog|not|numeric|on|optimize|option|optionally|'
+ r'or|order|out|outer|outfile|precision|primary|procedure|purge|'
+ r'raid0|read|reads|real|references|regexp|release|rename|repeat|'
+ r'replace|require|restrict|return|revoke|right|rlike|schema|'
+ r'schemas|second_microsecond|select|sensitive|separator|set|'
+ r'show|smallint|soname|spatial|specific|sql|sql_big_result|'
+ r'sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|'
+ r'sqlwarning|ssl|starting|straight_join|table|terminated|then|'
+ r'to|trailing|trigger|undo|union|unique|unlock|unsigned|update|'
+ r'usage|use|using|utc_date|utc_time|utc_timestamp|values|'
+ r'varying|when|where|while|with|write|x509|xor|year_month|'
+ r'zerofill)\b', Keyword),
+ # TODO: this list is not complete
+ (r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo),
+ (r'(true|false|null)', Name.Constant),
+ (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ (r'@[A-Za-z0-9]*[._]*[A-Za-z0-9]*', Name.Variable),
+ (r'[;:()\[\],\.]', Punctuation)
+ ],
+ 'multiline-comments': [
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[^/\*]+', Comment.Multiline),
+ (r'[/*]', Comment.Multiline)
+ ]
+ }
+
+
+class SqliteConsoleLexer(Lexer):
+ """
+ Lexer for example sessions using sqlite3.
+
+ *New in Pygments 0.11.*
+ """
+
+ name = 'sqlite3con'
+ aliases = ['sqlite3']
+ filenames = ['*.sqlite3-console']
+ mimetypes = ['text/x-sqlite3-console']
+
+ def get_tokens_unprocessed(self, data):
+ sql = SqlLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(data):
+ line = match.group()
+ if line.startswith('sqlite> ') or line.startswith(' ...> '):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:8])]))
+ curcode += line[8:]
+ else:
+ if curcode:
+ for item in do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ if line.startswith('SQL error: '):
+ yield (match.start(), Generic.Traceback, line)
+ else:
+ yield (match.start(), Generic.Output, line)
+ if curcode:
+ for item in do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode)):
+ yield item
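
The lexers added in sql.py register the aliases declared above ('postgresql', 'plpgsql', 'psql', 'sql', 'mysql', 'sqlite3'), so they can be looked up by name like any other lexer. A minimal sketch, assuming the diff is applied; the sample statement is illustrative:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    # Plain statements go through the PostgreSQL rules; a $-quoted body is
    # re-lexed via language_callback when a LANGUAGE clause is found nearby.
    stmt = "SELECT 'hello', 42::int, $$dollar-quoted$$ FROM greetings;"
    print(highlight(stmt, get_lexer_by_name('postgresql'), HtmlFormatter()))
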
diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py
index 77656bfa..717d689f 100644
--- a/pygments/lexers/templates.py
+++ b/pygments/lexers/templates.py
@@ -5,7 +5,7 @@
Lexers for various template engines' markup.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index cfe5c2e7..9c1a9d95 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -5,7 +5,7 @@
Lexers for non-source code file types.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -16,7 +16,7 @@ from pygments.lexer import Lexer, LexerContext, RegexLexer, ExtendedRegexLexer,
bygroups, include, using, this, do_insertions
from pygments.token import Punctuation, Text, Comment, Keyword, Name, String, \
Generic, Operator, Number, Whitespace, Literal
-from pygments.util import get_bool_opt
+from pygments.util import get_bool_opt, ClassNotFound
from pygments.lexers.other import BashLexer
__all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer',
@@ -24,7 +24,8 @@ __all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer
'GroffLexer', 'ApacheConfLexer', 'BBCodeLexer', 'MoinWikiLexer',
'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer',
'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer',
- 'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer']
+ 'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer', 'HttpLexer',
+ 'PyPyLogLexer']
class IniLexer(RegexLexer):
@@ -613,7 +614,6 @@ class RstLexer(RegexLexer):
def _handle_sourcecode(self, match):
from pygments.lexers import get_lexer_by_name
- from pygments.util import ClassNotFound
# section header
yield match.start(1), Punctuation, match.group(1)
@@ -774,15 +774,16 @@ class VimLexer(RegexLexer):
tokens = {
'root': [
- # Who decided that doublequote was a good comment character??
(r'^\s*".*', Comment),
- (r'(?<=\s)"[^\-:.%#=*].*', Comment),
(r'[ \t]+', Text),
# TODO: regexes can have other delims
(r'/(\\\\|\\/|[^\n/])*/', String.Regex),
(r'"(\\\\|\\"|[^\n"])*"', String.Double),
(r"'(\\\\|\\'|[^\n'])*'", String.Single),
+
+ # Who decided that doublequote was a good comment character??
+ (r'(?<=\s)"[^\-:.%#=*].*', Comment),
(r'-?\d+', Number),
(r'#[0-9a-f]{6}', Number.Hex),
(r'^:', Punctuation),
@@ -883,99 +884,99 @@ class SquidConfLexer(RegexLexer):
mimetypes = ['text/x-squidconf']
flags = re.IGNORECASE
- keywords = [ "access_log", "acl", "always_direct", "announce_host",
- "announce_period", "announce_port", "announce_to",
- "anonymize_headers", "append_domain", "as_whois_server",
- "auth_param_basic", "authenticate_children",
- "authenticate_program", "authenticate_ttl", "broken_posts",
- "buffered_logs", "cache_access_log", "cache_announce",
- "cache_dir", "cache_dns_program", "cache_effective_group",
- "cache_effective_user", "cache_host", "cache_host_acl",
- "cache_host_domain", "cache_log", "cache_mem",
- "cache_mem_high", "cache_mem_low", "cache_mgr",
- "cachemgr_passwd", "cache_peer", "cache_peer_access",
- "cahce_replacement_policy", "cache_stoplist",
- "cache_stoplist_pattern", "cache_store_log", "cache_swap",
- "cache_swap_high", "cache_swap_log", "cache_swap_low",
- "client_db", "client_lifetime", "client_netmask",
- "connect_timeout", "coredump_dir", "dead_peer_timeout",
- "debug_options", "delay_access", "delay_class",
- "delay_initial_bucket_level", "delay_parameters",
- "delay_pools", "deny_info", "dns_children", "dns_defnames",
- "dns_nameservers", "dns_testnames", "emulate_httpd_log",
- "err_html_text", "fake_user_agent", "firewall_ip",
- "forwarded_for", "forward_snmpd_port", "fqdncache_size",
- "ftpget_options", "ftpget_program", "ftp_list_width",
- "ftp_passive", "ftp_user", "half_closed_clients",
- "header_access", "header_replace", "hierarchy_stoplist",
- "high_response_time_warning", "high_page_fault_warning", "hosts_file",
- "htcp_port", "http_access", "http_anonymizer", "httpd_accel",
- "httpd_accel_host", "httpd_accel_port",
- "httpd_accel_uses_host_header", "httpd_accel_with_proxy",
- "http_port", "http_reply_access", "icp_access",
- "icp_hit_stale", "icp_port", "icp_query_timeout",
- "ident_lookup", "ident_lookup_access", "ident_timeout",
- "incoming_http_average", "incoming_icp_average",
- "inside_firewall", "ipcache_high", "ipcache_low",
- "ipcache_size", "local_domain", "local_ip", "logfile_rotate",
- "log_fqdn", "log_icp_queries", "log_mime_hdrs",
- "maximum_object_size", "maximum_single_addr_tries",
- "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
- "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
- "memory_pools_limit", "memory_replacement_policy",
- "mime_table", "min_http_poll_cnt", "min_icp_poll_cnt",
- "minimum_direct_hops", "minimum_object_size",
- "minimum_retry_timeout", "miss_access", "negative_dns_ttl",
- "negative_ttl", "neighbor_timeout", "neighbor_type_domain",
- "netdb_high", "netdb_low", "netdb_ping_period",
- "netdb_ping_rate", "never_direct", "no_cache",
- "passthrough_proxy", "pconn_timeout", "pid_filename",
- "pinger_program", "positive_dns_ttl", "prefer_direct",
- "proxy_auth", "proxy_auth_realm", "query_icmp", "quick_abort",
- "quick_abort", "quick_abort_max", "quick_abort_min",
- "quick_abort_pct", "range_offset_limit", "read_timeout",
- "redirect_children", "redirect_program",
- "redirect_rewrites_host_header", "reference_age",
- "reference_age", "refresh_pattern", "reload_into_ims",
- "request_body_max_size", "request_size", "request_timeout",
- "shutdown_lifetime", "single_parent_bypass",
- "siteselect_timeout", "snmp_access", "snmp_incoming_address",
- "snmp_port", "source_ping", "ssl_proxy",
- "store_avg_object_size", "store_objects_per_bucket",
- "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
- "tcp_incoming_address", "tcp_outgoing_address",
- "tcp_recv_bufsize", "test_reachability", "udp_hit_obj",
- "udp_hit_obj_size", "udp_incoming_address",
- "udp_outgoing_address", "unique_hostname", "unlinkd_program",
- "uri_whitespace", "useragent_log", "visible_hostname",
- "wais_relay", "wais_relay_host", "wais_relay_port",
- ]
-
- opts = [ "proxy-only", "weight", "ttl", "no-query", "default",
- "round-robin", "multicast-responder", "on", "off", "all",
- "deny", "allow", "via", "parent", "no-digest", "heap", "lru",
- "realm", "children", "credentialsttl", "none", "disable",
- "offline_toggle", "diskd", "q1", "q2",
- ]
-
- actions = [ "shutdown", "info", "parameter", "server_list",
- "client_list", r'squid\.conf',
- ]
-
- actions_stats = [ "objects", "vm_objects", "utilization",
- "ipcache", "fqdncache", "dns", "redirector", "io",
- "reply_headers", "filedescriptors", "netdb",
- ]
-
- actions_log = [ "status", "enable", "disable", "clear"]
-
- acls = [ "url_regex", "urlpath_regex", "referer_regex", "port",
- "proto", "req_mime_type", "rep_mime_type", "method",
- "browser", "user", "src", "dst", "time", "dstdomain", "ident",
- "snmp_community",
- ]
-
- ip_re = r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|:(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))'
+ keywords = [
+ "access_log", "acl", "always_direct", "announce_host",
+ "announce_period", "announce_port", "announce_to", "anonymize_headers",
+ "append_domain", "as_whois_server", "auth_param_basic",
+ "authenticate_children", "authenticate_program", "authenticate_ttl",
+ "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
+ "cache_dir", "cache_dns_program", "cache_effective_group",
+ "cache_effective_user", "cache_host", "cache_host_acl",
+ "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
+ "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
+ "cache_peer_access", "cahce_replacement_policy", "cache_stoplist",
+ "cache_stoplist_pattern", "cache_store_log", "cache_swap",
+ "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
+ "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
+ "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
+ "delay_initial_bucket_level", "delay_parameters", "delay_pools",
+ "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
+ "dns_testnames", "emulate_httpd_log", "err_html_text",
+ "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
+ "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
+ "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
+ "header_replace", "hierarchy_stoplist", "high_response_time_warning",
+ "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
+ "http_anonymizer", "httpd_accel", "httpd_accel_host",
+ "httpd_accel_port", "httpd_accel_uses_host_header",
+ "httpd_accel_with_proxy", "http_port", "http_reply_access",
+ "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
+ "ident_lookup", "ident_lookup_access", "ident_timeout",
+ "incoming_http_average", "incoming_icp_average", "inside_firewall",
+ "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
+ "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
+ "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
+ "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
+ "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
+ "memory_pools_limit", "memory_replacement_policy", "mime_table",
+ "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
+ "minimum_object_size", "minimum_retry_timeout", "miss_access",
+ "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
+ "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
+ "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
+ "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
+ "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
+ "quick_abort", "quick_abort", "quick_abort_max", "quick_abort_min",
+ "quick_abort_pct", "range_offset_limit", "read_timeout",
+ "redirect_children", "redirect_program",
+ "redirect_rewrites_host_header", "reference_age", "reference_age",
+ "refresh_pattern", "reload_into_ims", "request_body_max_size",
+ "request_size", "request_timeout", "shutdown_lifetime",
+ "single_parent_bypass", "siteselect_timeout", "snmp_access",
+ "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
+ "store_avg_object_size", "store_objects_per_bucket",
+ "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
+ "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
+ "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
+ "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
+ "unlinkd_program", "uri_whitespace", "useragent_log",
+ "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
+ ]
+
+ opts = [
+ "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
+ "multicast-responder", "on", "off", "all", "deny", "allow", "via",
+ "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
+ "credentialsttl", "none", "disable", "offline_toggle", "diskd",
+ ]
+
+ actions = [
+ "shutdown", "info", "parameter", "server_list", "client_list",
+ r'squid\.conf',
+ ]
+
+ actions_stats = [
+ "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
+ "redirector", "io", "reply_headers", "filedescriptors", "netdb",
+ ]
+
+ actions_log = ["status", "enable", "disable", "clear"]
+
+ acls = [
+ "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
+ "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
+ "dst", "time", "dstdomain", "ident", "snmp_community",
+ ]
+
+ ip_re = (
+ r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
+ r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
+ r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
+ r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
+ r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
+ r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
+ r'[1-9]?\d)){3}))'
+ )
def makelistre(list):
return r'\b(?:' + '|'.join(list) + r')\b'
@@ -1033,7 +1034,8 @@ class DebianControlLexer(RegexLexer):
(r'.', Text),
],
'description': [
- (r'(.*)(Homepage)(: )([^\s]+)', bygroups(Text, String, Name, Name.Class)),
+ (r'(.*)(Homepage)(: )([^\s]+)',
+ bygroups(Text, String, Name, Name.Class)),
(r':.*\n', Generic.Strong),
(r' .*\n', Text),
('', Text, '#pop'),
@@ -1613,3 +1615,138 @@ class CMakeLexer(RegexLexer):
]
}
+
+class HttpLexer(RegexLexer):
+ """
+ Lexer for HTTP sessions.
+
+ *New in Pygments 1.5.*
+ """
+
+ name = 'HTTP'
+ aliases = ['http']
+
+ flags = re.DOTALL
+
+ def header_callback(self, match):
+ if match.group(1).lower() == 'content-type':
+ content_type = match.group(5).strip()
+ if ';' in content_type:
+ content_type = content_type[:content_type.find(';')].strip()
+ self.content_type = content_type
+ yield match.start(1), Name.Attribute, match.group(1)
+ yield match.start(2), Text, match.group(2)
+ yield match.start(3), Operator, match.group(3)
+ yield match.start(4), Text, match.group(4)
+ yield match.start(5), Literal, match.group(5)
+ yield match.start(6), Text, match.group(6)
+
+ def content_callback(self, match):
+ content_type = getattr(self, 'content_type', None)
+ content = match.group()
+ offset = match.start()
+ if content_type:
+ from pygments.lexers import get_lexer_for_mimetype
+ try:
+ lexer = get_lexer_for_mimetype(content_type)
+ except ClassNotFound:
+ pass
+ else:
+ for idx, token, value in lexer.get_tokens_unprocessed(content):
+ yield offset + idx, token, value
+ return
+ yield offset, Text, content
+
+ tokens = {
+ 'root': [
+ (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE)( +)([^ ]+)( +)'
+ r'(HTTPS?)(/)(1\.[01])(\r?\n|$)',
+ bygroups(Name.Function, Text, Name.Namespace, Text,
+ Keyword.Reserved, Operator, Number, Text),
+ 'headers'),
+ (r'(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
+ bygroups(Keyword.Reserved, Operator, Number, Text, Number,
+ Text, Name.Exception, Text),
+ 'headers'),
+ ],
+ 'headers': [
+ (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
+ (r'\r?\n', Text, 'content')
+ ],
+ 'content': [
+ (r'.+', content_callback)
+ ]
+ }
+
+
+class PyPyLogLexer(RegexLexer):
+ """
+ Lexer for PyPy log files.
+
+ *New in Pygments 1.5.*
+ """
+ name = "PyPy Log"
+ aliases = ["pypylog", "pypy"]
+ filenames = ["*.pypylog"]
+ mimetypes = ['application/x-pypylog']
+
+ tokens = {
+ "root": [
+ (r"\[\w+\] {jit-log-.*?$", Keyword, "jit-log"),
+ (r"\[\w+\] {jit-backend-counts$", Keyword, "jit-backend-counts"),
+ include("extra-stuff"),
+ ],
+ "jit-log": [
+ (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
+
+ (r"^\+\d+: ", Comment),
+ (r"[ifp]\d+", Name),
+ (r"ptr\d+", Name),
+ (r"(\()([\w_]+(?:\.[\w_]+)?)(\))",
+ bygroups(Punctuation, Name.Builtin, Punctuation)),
+ (r"[\[\]=,()]", Punctuation),
+ (r"(\d+\.\d+|inf|-inf)", Number.Float),
+ (r"-?\d+", Number.Integer),
+ (r"'.*'", String),
+ (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
+ (r"<.*?>", Name.Builtin),
+ (r"(label|debug_merge_point|jump|finish)", Name.Class),
+ (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
+ r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
+ r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
+ r"int_is_true|"
+ r"uint_floordiv|uint_ge|uint_lt|"
+ r"float_add|float_sub|float_mul|float_truediv|"
+ r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
+ r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
+ r"cast_int_to_float|cast_float_to_int|"
+ r"force_token|quasiimmut_field|same_as|virtual_ref_finish|"
+ r"virtual_ref|mark_opaque_ptr|"
+ r"call_may_force|call_assembler|call_loopinvariant|"
+ r"call_release_gil|call_pure|call|"
+ r"new_with_vtable|new_array|newstr|newunicode|new|"
+ r"arraylen_gc|"
+ r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
+ r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|"
+ r"getfield_gc|getinteriorfield_gc|"
+ r"getinteriorfield_gc|setinteriorfield_gc|"
+ r"getfield_raw|setfield_gc|setfield_raw|"
+ r"strgetitem|strsetitem|strlen|copystrcontent|"
+ r"unicodegetitem|unicodesetitem|unicodelen|"
+ r"guard_true|guard_false|guard_value|guard_isnull|"
+ r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
+ r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
+ Name.Builtin),
+ include("extra-stuff"),
+ ],
+ "jit-backend-counts": [
+ (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
+ (r"[:]", Punctuation),
+ (r"\d+", Number),
+ include("extra-stuff"),
+ ],
+ "extra-stuff": [
+ (r"[\n\s]+", Text),
+ (r"#.*?$", Comment),
+ ],
+ }
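
The two lexers added to text.py (aliases 'http' and 'pypylog') can be driven through the standard API. The sketch below highlights an HTTP exchange; the request text is illustrative. The body is re-lexed according to its Content-Type header, here via the JSON lexer introduced later in this diff:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    request = (
        'POST /api/answer HTTP/1.1\r\n'
        'Content-Type: application/json\r\n'
        '\r\n'
        '{"answer": 42}\r\n'
    )
    # header_callback records the Content-Type; content_callback then looks
    # up a lexer for that mimetype and re-lexes the body with it.
    print(highlight(request, get_lexer_by_name('http'), TerminalFormatter()))
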
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
index 148762fd..38f75619 100644
--- a/pygments/lexers/web.py
+++ b/pygments/lexers/web.py
@@ -5,7 +5,7 @@
Lexers for web-related languages and markup.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,11 +22,11 @@ from pygments.lexers.agile import RubyLexer
from pygments.lexers.compiled import ScalaLexer
-__all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'CssLexer',
+__all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'JSONLexer', 'CssLexer',
'PhpLexer', 'ActionScriptLexer', 'XsltLexer', 'ActionScript3Lexer',
'MxmlLexer', 'HaxeLexer', 'HamlLexer', 'SassLexer', 'ScssLexer',
'ObjectiveJLexer', 'CoffeeScriptLexer', 'DuelLexer', 'ScamlLexer',
- 'JadeLexer', 'XQueryLexer', 'DtdLexer']
+ 'JadeLexer', 'XQueryLexer', 'DtdLexer', 'DartLexer']
class JavascriptLexer(RegexLexer):
@@ -36,9 +36,9 @@ class JavascriptLexer(RegexLexer):
name = 'JavaScript'
aliases = ['js', 'javascript']
- filenames = ['*.js', '*.json']
+ filenames = ['*.js', ]
mimetypes = ['application/javascript', 'application/x-javascript',
- 'text/x-javascript', 'text/javascript', 'application/json']
+ 'text/x-javascript', 'text/javascript', ]
flags = re.DOTALL
tokens = {
@@ -89,6 +89,74 @@ class JavascriptLexer(RegexLexer):
}
+class JSONLexer(RegexLexer):
+ """
+ For JSON data structures.
+
+ *New in Pygments 1.5.*
+ """
+
+ name = 'JSON'
+ aliases = ['json']
+ filenames = ['*.json']
+ mimetypes = [ 'application/json', ]
+
+ flags = re.DOTALL
+ tokens = {
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+
+ # represents a simple terminal value
+ 'simplevalue':[
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'-?[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ ],
+
+
+ # the right hand side of an object, after the attribute name
+ 'objectattribute': [
+ include('value'),
+ (r':', Punctuation),
+ # comma terminates the attribute but expects more
+ (r',', Punctuation, '#pop'),
+ # a closing bracket terminates the entire object, so pop twice
+ (r'}', Punctuation, ('#pop', '#pop')),
+ ],
+
+ # a json object - { attr, attr, ... }
+ 'objectvalue': [
+ include('whitespace'),
+ (r'"(\\\\|\\"|[^"])*"', Name.Tag, 'objectattribute'),
+ (r'}', Punctuation, '#pop'),
+ ],
+
+ # a json array - [ value, value, ... ]
+ 'arrayvalue': [
+ include('whitespace'),
+ include('value'),
+ (r',', Punctuation),
+ (r']', Punctuation, '#pop'),
+ ],
+
+ # a json value - either a simple value or a complex value (object or array)
+ 'value': [
+ include('whitespace'),
+ include('simplevalue'),
+ (r'{', Punctuation, 'objectvalue'),
+ (r'\[', Punctuation, 'arrayvalue'),
+ ],
+
+
+ # the root of a json document should be a value
+ 'root': [
+ include('value'),
+ ],
+
+ }
+
+
class ActionScriptLexer(RegexLexer):
"""
For ActionScript source code.
@@ -388,7 +456,7 @@ class CssLexer(RegexLexer):
(r'[\[\]();]+', Punctuation),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'[a-zA-Z][a-zA-Z0-9]+', Name)
+ (r'[a-zA-Z_][a-zA-Z0-9_]+', Name)
]
}
@@ -1728,12 +1796,13 @@ class CoffeeScriptLexer(RegexLexer):
'slashstartsregex': [
include('commentsandwhitespace'),
(r'///', String.Regex, ('#pop', 'multilineregex')),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gim]+\b|\B)', String.Regex, '#pop'),
(r'', Text, '#pop'),
],
'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ # this next expr leads to an infinite loop: root -> slashstartsregex
+ #(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
(r'\+\+|--|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|=|'
r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*`%&\|\^/])=?',
@@ -1766,8 +1835,9 @@ class CoffeeScriptLexer(RegexLexer):
("'", String, 'sqs'),
],
'strings': [
- (r'[^#\\\'"]+', String) # note that all coffee script strings are multi-line.
- # hashmarks, quotes and backslashes must be parsed one at a time
+ (r'[^#\\\'"]+', String),
+ # note that all coffee script strings are multi-line.
+ # hashmarks, quotes and backslashes must be parsed one at a time
],
'interpoling_string' : [
(r'}', String.Interpol, "#pop"),
@@ -2707,3 +2777,69 @@ class XQueryLexer(ExtendedRegexLexer):
]
}
+
+class DartLexer(RegexLexer):
+ """
+ For `Dart <http://dartlang.org/>`_ source code.
+
+ *New in Pygments 1.5.*
+ """
+
+ name = 'Dart'
+ aliases = ['dart']
+ filenames = ['*.dart']
+ mimetypes = ['text/x-dart']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ (r'#!(.*?)$', Comment.Preproc),
+ (r'(#)(import|library|source)', bygroups(Text, Keyword)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(class|interface)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'class'),
+ (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
+ r'if|in|is|new|return|super|switch|this|throw|try|while)\b',
+ Keyword),
+ (r'(abstract|const|extends|factory|final|get|implements|'
+ r'native|operator|set|static|typedef|var)\b', Keyword.Declaration),
+ (r'(bool|double|Dynamic|int|num|Object|String|void)', Keyword.Type),
+ (r'(false|null|true)', Keyword.Constant),
+ (r'@"(\\\\|\\"|[^"])*"', String.Double), # raw string
+ (r"@'(\\\\|\\'|[^'])*'", String.Single), # raw string
+ (r'"', String.Double, 'string_double'),
+ (r"'", String.Single, 'string_single'),
+ (r'[a-zA-Z_$][a-zA-Z0-9_]*:', Name.Label),
+ (r'[a-zA-Z_$][a-zA-Z0-9_]*', Name),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[(){}\[\],.;]', Punctuation),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ # DIGIT+ (‘.’ DIGIT*)? EXPONENT?
+ (r'\d+(\.\d*)?([eE][+-]?\d+)?', Number),
+ (r'\.\d+([eE][+-]?\d+)?', Number), # ‘.’ DIGIT+ EXPONENT?
+ (r'\n', Text)
+ # pseudo-keyword negate intentionally left out
+ ],
+ 'class': [
+ (r'[a-zA-Z_$][a-zA-Z0-9_]*', Name.Class, '#pop')
+ ],
+ 'string_double': [
+ (r'"', String.Double, '#pop'),
+ (r'[^"$]+', String.Double),
+ (r'(\$)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(String.Interpol, Name)),
+ (r'(\$\{)(.*?)(\})',
+ bygroups(String.Interpol, using(this), String.Interpol)),
+ (r'\$+', String.Double)
+ ],
+ 'string_single': [
+ (r"'", String.Single, '#pop'),
+ (r"[^'$]+", String.Single),
+ (r'(\$)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(String.Interpol, Name)),
+ (r'(\$\{)(.*?)(\})',
+ bygroups(String.Interpol, using(this), String.Interpol)),
+ (r'\$+', String.Single)
+ ]
+ }
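
The JSON and Dart lexers added to web.py are reachable through their new aliases; a minimal sketch, with illustrative input strings:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    json_doc = '{"name": "pygments", "version": "1.5", "tags": ["lexer"]}'
    print(highlight(json_doc, get_lexer_by_name('json'), HtmlFormatter()))

    dart_code = 'class Point { final num x; Point(this.x); }'
    print(highlight(dart_code, get_lexer_by_name('dart'), HtmlFormatter()))
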
diff --git a/pygments/plugin.py b/pygments/plugin.py
index 3f0b6703..ea606b9c 100644
--- a/pygments/plugin.py
+++ b/pygments/plugin.py
@@ -32,7 +32,7 @@
yourfilter = yourfilter:YourFilter
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
try:
diff --git a/pygments/scanner.py b/pygments/scanner.py
index 337eb778..2acd6e6b 100644
--- a/pygments/scanner.py
+++ b/pygments/scanner.py
@@ -12,7 +12,7 @@
Have a look at the `DelphiLexer` to get an idea of how to use
this scanner.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
diff --git a/pygments/style.py b/pygments/style.py
index 96a415ff..470fe99f 100644
--- a/pygments/style.py
+++ b/pygments/style.py
@@ -5,7 +5,7 @@
Basic style object.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/__init__.py b/pygments/styles/__init__.py
index 15503f28..2be93da4 100644
--- a/pygments/styles/__init__.py
+++ b/pygments/styles/__init__.py
@@ -5,7 +5,7 @@
Contains built-in styles.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -33,6 +33,7 @@ STYLE_MAP = {
'vim': 'vim::VimStyle',
'vs': 'vs::VisualStudioStyle',
'tango': 'tango::TangoStyle',
+ 'rrt': 'rrt::RrtStyle',
}
diff --git a/pygments/styles/autumn.py b/pygments/styles/autumn.py
index 61f2adb1..a311487c 100644
--- a/pygments/styles/autumn.py
+++ b/pygments/styles/autumn.py
@@ -5,7 +5,7 @@
A colorful style, inspired by the terminal highlighting style.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/borland.py b/pygments/styles/borland.py
index b119ea72..ce6120b5 100644
--- a/pygments/styles/borland.py
+++ b/pygments/styles/borland.py
@@ -5,7 +5,7 @@
Style similar to the style used in the Borland IDEs.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/bw.py b/pygments/styles/bw.py
index 34930f17..fdec8a2f 100644
--- a/pygments/styles/bw.py
+++ b/pygments/styles/bw.py
@@ -5,7 +5,7 @@
Simple black/white only style.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/colorful.py b/pygments/styles/colorful.py
index e091d1d8..819e81ba 100644
--- a/pygments/styles/colorful.py
+++ b/pygments/styles/colorful.py
@@ -5,7 +5,7 @@
A colorful style, inspired by CodeRay.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/default.py b/pygments/styles/default.py
index 1e0450c0..d90f08d8 100644
--- a/pygments/styles/default.py
+++ b/pygments/styles/default.py
@@ -5,7 +5,7 @@
The default highlighting style.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/emacs.py b/pygments/styles/emacs.py
index d165d6fd..96640f87 100644
--- a/pygments/styles/emacs.py
+++ b/pygments/styles/emacs.py
@@ -5,7 +5,7 @@
A highlighting style for Pygments, inspired by Emacs.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/friendly.py b/pygments/styles/friendly.py
index 92d2e3d6..25dda6ca 100644
--- a/pygments/styles/friendly.py
+++ b/pygments/styles/friendly.py
@@ -5,7 +5,7 @@
A modern style based on the VIM pyte theme.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/fruity.py b/pygments/styles/fruity.py
index ce381f42..6f6f2864 100644
--- a/pygments/styles/fruity.py
+++ b/pygments/styles/fruity.py
@@ -5,7 +5,7 @@
pygments version of my "fruity" vim theme.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -30,7 +30,6 @@ class FruityStyle(Style):
Number: '#0086f7 bold',
Name.Tag: '#fb660a bold',
Name.Variable: '#fb660a',
- Name.Constant: '#fb660a',
Comment: '#008800 bg:#0f140f italic',
Name.Attribute: '#ff0086 bold',
String: '#0086d2',
diff --git a/pygments/styles/manni.py b/pygments/styles/manni.py
index 39bb31ee..2f689419 100644
--- a/pygments/styles/manni.py
+++ b/pygments/styles/manni.py
@@ -8,7 +8,7 @@
This is a port of the style used in the `php port`_ of pygments
by Manni. The style is called 'default' there.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/monokai.py b/pygments/styles/monokai.py
index e80d8919..b5338122 100644
--- a/pygments/styles/monokai.py
+++ b/pygments/styles/monokai.py
@@ -7,7 +7,7 @@
http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/murphy.py b/pygments/styles/murphy.py
index c72d128e..03494fb6 100644
--- a/pygments/styles/murphy.py
+++ b/pygments/styles/murphy.py
@@ -5,7 +5,7 @@
Murphy's style from CodeRay.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/native.py b/pygments/styles/native.py
index f2b96588..53749e64 100644
--- a/pygments/styles/native.py
+++ b/pygments/styles/native.py
@@ -5,7 +5,7 @@
pygments version of my "native" vim theme.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/pastie.py b/pygments/styles/pastie.py
index c9cc126b..ca8f6206 100644
--- a/pygments/styles/pastie.py
+++ b/pygments/styles/pastie.py
@@ -7,7 +7,7 @@
.. _pastie: http://pastie.caboo.se/
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/perldoc.py b/pygments/styles/perldoc.py
index 8542b855..73f9bd3c 100644
--- a/pygments/styles/perldoc.py
+++ b/pygments/styles/perldoc.py
@@ -7,7 +7,7 @@
.. _perldoc: http://perldoc.perl.org/
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/rrt.py b/pygments/styles/rrt.py
new file mode 100644
index 00000000..1c363858
--- /dev/null
+++ b/pygments/styles/rrt.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.rrt
+ ~~~~~~~~~~~~~~~~~~~
+
+ pygments "rrt" theme, based on Zap and Emacs defaults.
+
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Comment, Name, Keyword, String
+
+
+class RrtStyle(Style):
+ """
+ Minimalistic "rrt" theme, based on Zap and Emacs defaults.
+ """
+
+ background_color = '#000000'
+ highlight_color = '#0000ff'
+
+ styles = {
+ Comment: '#00ff00',
+ Name.Function: '#ffff00',
+ Name.Variable: '#eedd82',
+ Name.Constant: '#7fffd4',
+ Keyword: '#ff0000',
+ Comment.Preproc: '#e5e5e5',
+ String: '#87ceeb',
+ Keyword.Type: '#ee82ee',
+ }
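
Once the 'rrt' entry is registered in STYLE_MAP (see the pygments/styles/__init__.py hunk above), the style can be selected by name like any other. A minimal sketch:

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import Terminal256Formatter

    # style='rrt' resolves through STYLE_MAP to the RrtStyle defined above.
    print(highlight('print "hello rrt"', PythonLexer(),
                    Terminal256Formatter(style='rrt')))
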
diff --git a/pygments/styles/tango.py b/pygments/styles/tango.py
index dc6fd79e..fea7ae5d 100644
--- a/pygments/styles/tango.py
+++ b/pygments/styles/tango.py
@@ -33,7 +33,7 @@
have been chosen to have the same style. Similarly, keywords (Keyword.*),
and Operator.Word (and, or, in) have been assigned the same style.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/trac.py b/pygments/styles/trac.py
index f08d7eb2..468a5822 100644
--- a/pygments/styles/trac.py
+++ b/pygments/styles/trac.py
@@ -5,7 +5,7 @@
Port of the default trac highlighter design.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/vim.py b/pygments/styles/vim.py
index 6ab93ea2..0aab8bcd 100644
--- a/pygments/styles/vim.py
+++ b/pygments/styles/vim.py
@@ -5,7 +5,7 @@
A highlighting style for Pygments, inspired by vim.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/vs.py b/pygments/styles/vs.py
index a056c69b..5cdda4ae 100644
--- a/pygments/styles/vs.py
+++ b/pygments/styles/vs.py
@@ -5,7 +5,7 @@
Simple style with MS Visual Studio colors.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/token.py b/pygments/token.py
index 2c962c10..cf78cca2 100644
--- a/pygments/token.py
+++ b/pygments/token.py
@@ -5,7 +5,7 @@
Basic token types and the standard tokens.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/unistring.py b/pygments/unistring.py
index b6db21cf..847bc3ea 100644
--- a/pygments/unistring.py
+++ b/pygments/unistring.py
@@ -8,7 +8,7 @@
Inspired by chartypes_create.py from the MoinMoin project.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.util import u_prefix
diff --git a/pygments/util.py b/pygments/util.py
index 429e40a1..f8c6c824 100644
--- a/pygments/util.py
+++ b/pygments/util.py
@@ -5,7 +5,7 @@
Utility functions.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -118,7 +118,7 @@ def make_analysator(f):
return 0.0
try:
return min(1.0, max(0.0, float(rv)))
- except ValueError:
+ except (ValueError, TypeError):
return 0.0
text_analyse.__doc__ = f.__doc__
return staticmethod(text_analyse)
diff --git a/scripts/check_sources.py b/scripts/check_sources.py
index 1816d2a7..e1c7544d 100755
--- a/scripts/check_sources.py
+++ b/scripts/check_sources.py
@@ -7,7 +7,7 @@
Make sure each Python file has a correct file header
including copyright and license information.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -30,7 +30,7 @@ def checker(*suffixes, **kwds):
name_mail_re = r'[\w ]+(<.*?>)?'
-copyright_re = re.compile(r'^ :copyright: Copyright (?:\d{4}-)?2010 by '
+copyright_re = re.compile(r'^ :copyright: Copyright 2006-2012 by '
r'the Pygments team, see AUTHORS\.$', re.UNICODE)
copyright_2_re = re.compile(r'^ %s(, %s)*[,.]$' %
(name_mail_re, name_mail_re), re.UNICODE)
diff --git a/scripts/find_codetags.py b/scripts/find_codetags.py
index 69fa5798..35db143f 100755
--- a/scripts/find_codetags.py
+++ b/scripts/find_codetags.py
@@ -7,7 +7,7 @@
Find code tags in specified files and/or directories
and create a report in HTML format.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/scripts/find_error.py b/scripts/find_error.py
index 199e9c5c..453f16ed 100755
--- a/scripts/find_error.py
+++ b/scripts/find_error.py
@@ -8,7 +8,7 @@
the text where Error tokens are being generated, along
with some context.
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -101,6 +101,9 @@ def main(fn, lexer=None, options={}):
if lx.__class__.__bases__ == (RegexLexer,):
lx.__class__.__bases__ = (DebuggingRegexLexer,)
debug_lexer = True
+ elif lx.__class__.__bases__ == (DebuggingRegexLexer,):
+ # already debugged before
+ debug_lexer = True
lno = 1
text = file(fn, 'U').read()
text = text.strip('\n') + '\n'
@@ -123,7 +126,7 @@ def main(fn, lexer=None, options={}):
for tok, state in map(None, tokens, states):
show_token(tok, state)
else:
- for i in range(len(tokens) - num, len(tokens)):
+ for i in range(max(len(tokens) - num, 0), len(tokens)):
show_token(tokens[i], states[i])
print 'Error token:'
l = len(repr(val))
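Clamping the range start with max(..., 0) matters because a negative start is legal in Python: the lookup either walks off the front of the token list (IndexError) or wraps around and re-prints tokens from the end. A small standalone illustration with a plain list standing in for the collected token buffer; the token names are made up:

    tokens = ['Name', 'Whitespace', 'Operator']   # only three context tokens collected
    num = 10                                      # but the last ten were requested

    # Unclamped start: range(-7, 3) begins at -7, which is out of range for a
    # three-element list, so the lookup raises instead of printing context.
    try:
        print([tokens[i] for i in range(len(tokens) - num, len(tokens))])
    except IndexError as err:
        print('unclamped: %s' % err)

    # Clamped start: only the tokens that actually exist are shown.
    print([tokens[i] for i in range(max(len(tokens) - num, 0), len(tokens))])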
diff --git a/scripts/get_vimkw.py b/scripts/get_vimkw.py
index 1ecf7148..153c88c3 100644
--- a/scripts/get_vimkw.py
+++ b/scripts/get_vimkw.py
@@ -22,11 +22,16 @@ def getkw(input, output):
# Extract all the shortened versions
for i in r_item.finditer(m.group(2)):
- d.append((i.group(1), "%s%s" % (i.group(1), i.group(2) or '')))
- d.sort()
+ d.append('(%r,%r)' %
+ (i.group(1), "%s%s" % (i.group(1), i.group(2) or '')))
+
+ output_info['option'].append("('nnoremap','nnoremap')")
+ output_info['option'].append("('inoremap','inoremap')")
+ output_info['option'].append("('vnoremap','vnoremap')")
for a, b in output_info.items():
- print >>out, '%s=%r' % (a, b)
+ b.sort()
+ print >>out, '%s=[%s]' % (a, ','.join(b))
def is_keyword(w, keywords):
for i in range(len(w), 0, -1):
@@ -35,4 +40,4 @@ def is_keyword(w, keywords):
return False
if __name__ == "__main__":
- getkw("/usr/share/vim/vim70/syntax/vim.vim", "temp.py")
+ getkw("/usr/share/vim/vim73/syntax/vim.vim", "temp.py")
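get_vimkw.py now renders each keyword entry as a "('full','short')" string, sorts the entries, and joins them into a single name=[...] line instead of printing the repr of a Python list. A reduced sketch of that output step; the three noremap entries come from the script itself, the rest is illustrative:

    # Entries under 'option' are taken from the script; the dict is otherwise illustrative.
    output_info = {'option': []}
    output_info['option'].append("('nnoremap','nnoremap')")
    output_info['option'].append("('inoremap','inoremap')")
    output_info['option'].append("('vnoremap','vnoremap')")

    for name, entries in output_info.items():
        entries.sort()
        print('%s=[%s]' % (name, ','.join(entries)))
    # option=[('inoremap','inoremap'),('nnoremap','nnoremap'),('vnoremap','vnoremap')]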
diff --git a/setup.py b/setup.py
index 085841f8..3297e471 100755
--- a/setup.py
+++ b/setup.py
@@ -23,7 +23,7 @@
.. _Pygments tip:
http://bitbucket.org/birkenfeld/pygments-main/get/tip.zip#egg=Pygments-dev
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/examplefiles/classes.dylan b/tests/examplefiles/classes.dylan
index ff435b77..6dd55ff2 100644
--- a/tests/examplefiles/classes.dylan
+++ b/tests/examplefiles/classes.dylan
@@ -22,3 +22,19 @@ end function;
define constant $blue-car = make(<car>, model: "Viper");
define constant $black-car = make(<car>, model: "Town Car", sunroof?: #t);
define constant $red-car = make(<car>, model: "F40", sunroof?: #f);
+
+define method foo() => _ :: <boolean>
+ #t
+end method;
+
+define method foo() => _ :: <boolean>;
+ #t
+end method;
+
+define method \+()
+end;
+
+define constant $symbol = #"hello";
+define variable *vector* = #[3.5, 5]
+define constant $list = #(1, 2);
+define constant $pair = #(1 . "foo")
diff --git a/tests/examplefiles/coq_RelationClasses b/tests/examplefiles/coq_RelationClasses
new file mode 100644
index 00000000..94c51bf1
--- /dev/null
+++ b/tests/examplefiles/coq_RelationClasses
@@ -0,0 +1,447 @@
+(* -*- coding: utf-8 -*- *)
+(************************************************************************)
+(* v * The Coq Proof Assistant / The Coq Development Team *)
+(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2011 *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(************************************************************************)
+
+(** * Typeclass-based relations, tactics and standard instances
+
+ This is the basic theory needed to formalize morphisms and setoids.
+
+ Author: Matthieu Sozeau
+ Institution: LRI, CNRS UMR 8623 - University Paris Sud
+*)
+
+(* $Id: RelationClasses.v 14641 2011-11-06 11:59:10Z herbelin $ *)
+
+Require Export Coq.Classes.Init.
+Require Import Coq.Program.Basics.
+Require Import Coq.Program.Tactics.
+Require Import Coq.Relations.Relation_Definitions.
+
+(** We allow to unfold the [relation] definition while doing morphism search. *)
+
+Notation inverse R := (flip (R:relation _) : relation _).
+
+Definition complement {A} (R : relation A) : relation A := fun x y => R x y -> False.
+
+(** Opaque for proof-search. *)
+Typeclasses Opaque complement.
+
+(** These are convertible. *)
+
+Lemma complement_inverse : forall A (R : relation A), complement (inverse R) = inverse (complement R).
+Proof. reflexivity. Qed.
+
+(** We rebind relations in separate classes to be able to overload each proof. *)
+
+Set Implicit Arguments.
+Unset Strict Implicit.
+
+Class Reflexive {A} (R : relation A) :=
+ reflexivity : forall x, R x x.
+
+Class Irreflexive {A} (R : relation A) :=
+ irreflexivity : Reflexive (complement R).
+
+Hint Extern 1 (Reflexive (complement _)) => class_apply @irreflexivity : typeclass_instances.
+
+Class Symmetric {A} (R : relation A) :=
+ symmetry : forall x y, R x y -> R y x.
+
+Class Asymmetric {A} (R : relation A) :=
+ asymmetry : forall x y, R x y -> R y x -> False.
+
+Class Transitive {A} (R : relation A) :=
+ transitivity : forall x y z, R x y -> R y z -> R x z.
+
+Hint Resolve @irreflexivity : ord.
+
+Unset Implicit Arguments.
+
+(** A HintDb for relations. *)
+
+Ltac solve_relation :=
+ match goal with
+ | [ |- ?R ?x ?x ] => reflexivity
+ | [ H : ?R ?x ?y |- ?R ?y ?x ] => symmetry ; exact H
+ end.
+
+Hint Extern 4 => solve_relation : relations.
+
+(** We can already dualize all these properties. *)
+
+Generalizable Variables A B C D R S T U l eqA eqB eqC eqD.
+
+Lemma flip_Reflexive `{Reflexive A R} : Reflexive (flip R).
+Proof. tauto. Qed.
+
+Hint Extern 3 (Reflexive (flip _)) => apply flip_Reflexive : typeclass_instances.
+
+Program Definition flip_Irreflexive `(Irreflexive A R) : Irreflexive (flip R) :=
+ irreflexivity (R:=R).
+
+Program Definition flip_Symmetric `(Symmetric A R) : Symmetric (flip R) :=
+ fun x y H => symmetry (R:=R) H.
+
+Program Definition flip_Asymmetric `(Asymmetric A R) : Asymmetric (flip R) :=
+ fun x y H H' => asymmetry (R:=R) H H'.
+
+Program Definition flip_Transitive `(Transitive A R) : Transitive (flip R) :=
+ fun x y z H H' => transitivity (R:=R) H' H.
+
+Hint Extern 3 (Irreflexive (flip _)) => class_apply flip_Irreflexive : typeclass_instances.
+Hint Extern 3 (Symmetric (flip _)) => class_apply flip_Symmetric : typeclass_instances.
+Hint Extern 3 (Asymmetric (flip _)) => class_apply flip_Asymmetric : typeclass_instances.
+Hint Extern 3 (Transitive (flip _)) => class_apply flip_Transitive : typeclass_instances.
+
+Definition Reflexive_complement_Irreflexive `(Reflexive A (R : relation A))
+ : Irreflexive (complement R).
+Proof. firstorder. Qed.
+
+Definition complement_Symmetric `(Symmetric A (R : relation A)) : Symmetric (complement R).
+Proof. firstorder. Qed.
+
+Hint Extern 3 (Symmetric (complement _)) => class_apply complement_Symmetric : typeclass_instances.
+Hint Extern 3 (Irreflexive (complement _)) => class_apply Reflexive_complement_Irreflexive : typeclass_instances.
+
+(** * Standard instances. *)
+
+Ltac reduce_hyp H :=
+ match type of H with
+ | context [ _ <-> _ ] => fail 1
+ | _ => red in H ; try reduce_hyp H
+ end.
+
+Ltac reduce_goal :=
+ match goal with
+ | [ |- _ <-> _ ] => fail 1
+ | _ => red ; intros ; try reduce_goal
+ end.
+
+Tactic Notation "reduce" "in" hyp(Hid) := reduce_hyp Hid.
+
+Ltac reduce := reduce_goal.
+
+Tactic Notation "apply" "*" constr(t) :=
+ first [ refine t | refine (t _) | refine (t _ _) | refine (t _ _ _) | refine (t _ _ _ _) |
+ refine (t _ _ _ _ _) | refine (t _ _ _ _ _ _) | refine (t _ _ _ _ _ _ _) ].
+
+Ltac simpl_relation :=
+ unfold flip, impl, arrow ; try reduce ; program_simpl ;
+ try ( solve [ intuition ]).
+
+Local Obligation Tactic := simpl_relation.
+
+(** Logical implication. *)
+
+Program Instance impl_Reflexive : Reflexive impl.
+Program Instance impl_Transitive : Transitive impl.
+
+(** Logical equivalence. *)
+
+Program Instance iff_Reflexive : Reflexive iff.
+Program Instance iff_Symmetric : Symmetric iff.
+Program Instance iff_Transitive : Transitive iff.
+
+(** Leibniz equality. *)
+
+Instance eq_Reflexive {A} : Reflexive (@eq A) := @eq_refl A.
+Instance eq_Symmetric {A} : Symmetric (@eq A) := @eq_sym A.
+Instance eq_Transitive {A} : Transitive (@eq A) := @eq_trans A.
+
+(** Various combinations of reflexivity, symmetry and transitivity. *)
+
+(** A [PreOrder] is both Reflexive and Transitive. *)
+
+Class PreOrder {A} (R : relation A) : Prop := {
+ PreOrder_Reflexive :> Reflexive R ;
+ PreOrder_Transitive :> Transitive R }.
+
+(** A partial equivalence relation is Symmetric and Transitive. *)
+
+Class PER {A} (R : relation A) : Prop := {
+ PER_Symmetric :> Symmetric R ;
+ PER_Transitive :> Transitive R }.
+
+(** Equivalence relations. *)
+
+Class Equivalence {A} (R : relation A) : Prop := {
+ Equivalence_Reflexive :> Reflexive R ;
+ Equivalence_Symmetric :> Symmetric R ;
+ Equivalence_Transitive :> Transitive R }.
+
+(** An Equivalence is a PER plus reflexivity. *)
+
+Instance Equivalence_PER `(Equivalence A R) : PER R | 10 :=
+ { PER_Symmetric := Equivalence_Symmetric ;
+ PER_Transitive := Equivalence_Transitive }.
+
+(** We can now define antisymmetry w.r.t. an equivalence relation on the carrier. *)
+
+Class Antisymmetric A eqA `{equ : Equivalence A eqA} (R : relation A) :=
+ antisymmetry : forall {x y}, R x y -> R y x -> eqA x y.
+
+Program Definition flip_antiSymmetric `(Antisymmetric A eqA R) :
+ Antisymmetric A eqA (flip R).
+Proof. firstorder. Qed.
+
+(** Leibniz equality [eq] is an equivalence relation.
+ The instance has low priority as it is always applicable
+ if only the type is constrained. *)
+
+Program Instance eq_equivalence : Equivalence (@eq A) | 10.
+
+(** Logical equivalence [iff] is an equivalence relation. *)
+
+Program Instance iff_equivalence : Equivalence iff.
+
+(** We now develop a generalization of results on relations for arbitrary predicates.
+ The resulting theory can be applied to homogeneous binary relations but also to
+ arbitrary n-ary predicates. *)
+
+Local Open Scope list_scope.
+
+(* Notation " [ ] " := nil : list_scope. *)
+(* Notation " [ x ; .. ; y ] " := (cons x .. (cons y nil) ..) (at level 1) : list_scope. *)
+
+(** A compact representation of non-dependent arities, with the codomain singled-out. *)
+
+Fixpoint arrows (l : list Type) (r : Type) : Type :=
+ match l with
+ | nil => r
+ | A :: l' => A -> arrows l' r
+ end.
+
+(** We can define abbreviations for operation and relation types based on [arrows]. *)
+
+Definition unary_operation A := arrows (A::nil) A.
+Definition binary_operation A := arrows (A::A::nil) A.
+Definition ternary_operation A := arrows (A::A::A::nil) A.
+
+(** We define n-ary [predicate]s as functions into [Prop]. *)
+
+Notation predicate l := (arrows l Prop).
+
+(** Unary predicates, or sets. *)
+
+Definition unary_predicate A := predicate (A::nil).
+
+(** Homogeneous binary relations, equivalent to [relation A]. *)
+
+Definition binary_relation A := predicate (A::A::nil).
+
+(** We can close a predicate by universal or existential quantification. *)
+
+Fixpoint predicate_all (l : list Type) : predicate l -> Prop :=
+ match l with
+ | nil => fun f => f
+ | A :: tl => fun f => forall x : A, predicate_all tl (f x)
+ end.
+
+Fixpoint predicate_exists (l : list Type) : predicate l -> Prop :=
+ match l with
+ | nil => fun f => f
+ | A :: tl => fun f => exists x : A, predicate_exists tl (f x)
+ end.
+
+(** Pointwise extension of a binary operation on [T] to a binary operation
+ on functions whose codomain is [T].
+ For an operator on [Prop] this lifts the operator to a binary operation. *)
+
+Fixpoint pointwise_extension {T : Type} (op : binary_operation T)
+ (l : list Type) : binary_operation (arrows l T) :=
+ match l with
+ | nil => fun R R' => op R R'
+ | A :: tl => fun R R' =>
+ fun x => pointwise_extension op tl (R x) (R' x)
+ end.
+
+(** Pointwise lifting, equivalent to doing [pointwise_extension] and closing using [predicate_all]. *)
+
+Fixpoint pointwise_lifting (op : binary_relation Prop) (l : list Type) : binary_relation (predicate l) :=
+ match l with
+ | nil => fun R R' => op R R'
+ | A :: tl => fun R R' =>
+ forall x, pointwise_lifting op tl (R x) (R' x)
+ end.
+
+(** The n-ary equivalence relation, defined by lifting the 0-ary [iff] relation. *)
+
+Definition predicate_equivalence {l : list Type} : binary_relation (predicate l) :=
+ pointwise_lifting iff l.
+
+(** The n-ary implication relation, defined by lifting the 0-ary [impl] relation. *)
+
+Definition predicate_implication {l : list Type} :=
+ pointwise_lifting impl l.
+
+(** Notations for pointwise equivalence and implication of predicates. *)
+
+Infix "<∙>" := predicate_equivalence (at level 95, no associativity) : predicate_scope.
+Infix "-∙>" := predicate_implication (at level 70, right associativity) : predicate_scope.
+
+Open Local Scope predicate_scope.
+
+(** The pointwise liftings of conjunction and disjunctions.
+ Note that these are [binary_operation]s, building new relations out of old ones. *)
+
+Definition predicate_intersection := pointwise_extension and.
+Definition predicate_union := pointwise_extension or.
+
+Infix "/∙\" := predicate_intersection (at level 80, right associativity) : predicate_scope.
+Infix "\∙/" := predicate_union (at level 85, right associativity) : predicate_scope.
+
+(** The always [True] and always [False] predicates. *)
+
+Fixpoint true_predicate {l : list Type} : predicate l :=
+ match l with
+ | nil => True
+ | A :: tl => fun _ => @true_predicate tl
+ end.
+
+Fixpoint false_predicate {l : list Type} : predicate l :=
+ match l with
+ | nil => False
+ | A :: tl => fun _ => @false_predicate tl
+ end.
+
+Notation "∙⊤∙" := true_predicate : predicate_scope.
+Notation "∙⊥∙" := false_predicate : predicate_scope.
+
+(** Predicate equivalence is an equivalence, and predicate implication defines a preorder. *)
+
+Program Instance predicate_equivalence_equivalence : Equivalence (@predicate_equivalence l).
+ Next Obligation.
+ induction l ; firstorder.
+ Qed.
+ Next Obligation.
+ induction l ; firstorder.
+ Qed.
+ Next Obligation.
+ fold pointwise_lifting.
+ induction l. firstorder.
+ intros. simpl in *. pose (IHl (x x0) (y x0) (z x0)).
+ firstorder.
+ Qed.
+
+Program Instance predicate_implication_preorder :
+ PreOrder (@predicate_implication l).
+ Next Obligation.
+ induction l ; firstorder.
+ Qed.
+ Next Obligation.
+ induction l. firstorder.
+ unfold predicate_implication in *. simpl in *.
+ intro. pose (IHl (x x0) (y x0) (z x0)). firstorder.
+ Qed.
+
+(** We define the various operations which define the algebra on binary relations,
+ from the general ones. *)
+
+Definition relation_equivalence {A : Type} : relation (relation A) :=
+ @predicate_equivalence (_::_::nil).
+
+Class subrelation {A:Type} (R R' : relation A) : Prop :=
+ is_subrelation : @predicate_implication (A::A::nil) R R'.
+
+Implicit Arguments subrelation [[A]].
+
+Definition relation_conjunction {A} (R : relation A) (R' : relation A) : relation A :=
+ @predicate_intersection (A::A::nil) R R'.
+
+Definition relation_disjunction {A} (R : relation A) (R' : relation A) : relation A :=
+ @predicate_union (A::A::nil) R R'.
+
+(** Relation equivalence is an equivalence, and subrelation defines a partial order. *)
+
+Set Automatic Introduction.
+
+Instance relation_equivalence_equivalence (A : Type) :
+ Equivalence (@relation_equivalence A).
+Proof. exact (@predicate_equivalence_equivalence (A::A::nil)). Qed.
+
+Instance relation_implication_preorder A : PreOrder (@subrelation A).
+Proof. exact (@predicate_implication_preorder (A::A::nil)). Qed.
+
+(** *** Partial Order.
+ A partial order is a preorder which is additionally antisymmetric.
+ We give an equivalent definition, up-to an equivalence relation
+ on the carrier. *)
+
+Class PartialOrder {A} eqA `{equ : Equivalence A eqA} R `{preo : PreOrder A R} :=
+ partial_order_equivalence : relation_equivalence eqA (relation_conjunction R (inverse R)).
+
+(** The equivalence proof is sufficient for proving that [R] must be a morphism
+ for equivalence (see Morphisms).
+ It is also sufficient to show that [R] is antisymmetric w.r.t. [eqA] *)
+
+Instance partial_order_antisym `(PartialOrder A eqA R) : ! Antisymmetric A eqA R.
+Proof with auto.
+ reduce_goal.
+ pose proof partial_order_equivalence as poe. do 3 red in poe.
+ apply <- poe. firstorder.
+Qed.
+
+(** The partial order defined by subrelation and relation equivalence. *)
+
+Program Instance subrelation_partial_order :
+ ! PartialOrder (relation A) relation_equivalence subrelation.
+
+ Next Obligation.
+ Proof.
+ unfold relation_equivalence in *. firstorder.
+ Qed.
+
+Typeclasses Opaque arrows predicate_implication predicate_equivalence
+ relation_equivalence pointwise_lifting.
+
+(** Rewrite relation on a given support: declares a relation as a rewrite
+ relation for use by the generalized rewriting tactic.
+ It helps choosing if a rewrite should be handled
+ by the generalized or the regular rewriting tactic using leibniz equality.
+ Users can declare a [RewriteRelation A RA] anywhere to declare default
+ relations. This is also done automatically by the [Declare Relation A RA]
+ commands. *)
+
+Class RewriteRelation {A : Type} (RA : relation A).
+
+Instance: RewriteRelation impl.
+Instance: RewriteRelation iff.
+Instance: RewriteRelation (@relation_equivalence A).
+
+(** Any [Equivalence] declared in the context is automatically considered
+ a rewrite relation. *)
+
+Instance equivalence_rewrite_relation `(Equivalence A eqA) : RewriteRelation eqA.
+
+(** Strict Order *)
+
+Class StrictOrder {A : Type} (R : relation A) := {
+ StrictOrder_Irreflexive :> Irreflexive R ;
+ StrictOrder_Transitive :> Transitive R
+}.
+
+Instance StrictOrder_Asymmetric `(StrictOrder A R) : Asymmetric R.
+Proof. firstorder. Qed.
+
+(** Inversing a [StrictOrder] gives another [StrictOrder] *)
+
+Lemma StrictOrder_inverse `(StrictOrder A R) : StrictOrder (inverse R).
+Proof. firstorder. Qed.
+
+(** Same for [PartialOrder]. *)
+
+Lemma PreOrder_inverse `(PreOrder A R) : PreOrder (inverse R).
+Proof. firstorder. Qed.
+
+Hint Extern 3 (StrictOrder (inverse _)) => class_apply StrictOrder_inverse : typeclass_instances.
+Hint Extern 3 (PreOrder (inverse _)) => class_apply PreOrder_inverse : typeclass_instances.
+
+Lemma PartialOrder_inverse `(PartialOrder A eqA R) : PartialOrder eqA (inverse R).
+Proof. firstorder. Qed.
+
+Hint Extern 3 (PartialOrder (inverse _)) => class_apply PartialOrder_inverse : typeclass_instances.
diff --git a/tests/examplefiles/example.cls b/tests/examplefiles/example.cls
new file mode 100644
index 00000000..d36ad6f0
--- /dev/null
+++ b/tests/examplefiles/example.cls
@@ -0,0 +1,15 @@
+USING Progress.Lang.*.
+
+CLASS Test INHERITS Progress.Sucks:
+
+ DEFINE PRIVATE VARIABLE cTest AS CHAR NO-UNDO.
+
+ CONSTRUCTOR PUBLIC Test():
+ SUPER().
+ MESSAGE "Why are you punishing yourself by coding in this language?".
+ END CONSTRUCTOR.
+
+ METHOD PUBLIC LOGICAL Blowup(INPUT iTime AS INT):
+ END.
+
+END CLASS.
diff --git a/tests/examplefiles/example.moon b/tests/examplefiles/example.moon
new file mode 100644
index 00000000..d4415e32
--- /dev/null
+++ b/tests/examplefiles/example.moon
@@ -0,0 +1,629 @@
+-- transform.moon
+-- Leaf Corcoran (leafot@gmail.com) 2011
+--
+-- This is part of the MoonScript compiler. See <http://moonscript.org>
+-- MoonScript is licensed under the MIT License
+--
+
+module "moonscript.transform", package.seeall
+
+types = require "moonscript.types"
+util = require "moonscript.util"
+data = require "moonscript.data"
+
+import reversed from util
+import ntype, build, smart_node, is_slice from types
+import insert from table
+
+export Statement, Value, NameProxy, LocalName, Run
+
+-- always declares as local
+class LocalName
+ new: (@name) => self[1] = "temp_name"
+ get_name: => @name
+
+class NameProxy
+ new: (@prefix) =>
+ self[1] = "temp_name"
+
+ get_name: (scope) =>
+ if not @name
+ @name = scope\free_name @prefix, true
+ @name
+
+ chain: (...) =>
+ items = {...} -- todo: fix ... propagation
+ items = for i in *items
+ if type(i) == "string"
+ {"dot", i}
+ else
+ i
+
+ build.chain {
+ base: self
+ unpack items
+ }
+
+ index: (key) =>
+ build.chain {
+ base: self, {"index", key}
+ }
+
+ __tostring: =>
+ if @name
+ ("name<%s>")\format @name
+ else
+ ("name<prefix(%s)>")\format @prefix
+
+class Run
+ new: (@fn) =>
+ self[1] = "run"
+
+ call: (state) =>
+ self.fn state
+
+-- transform the last stm in a list of stms
+-- will puke on group
+apply_to_last = (stms, fn) ->
+ -- find last (real) exp
+ last_exp_id = 0
+ for i = #stms, 1, -1
+ stm = stms[i]
+ if stm and util.moon.type(stm) != Run
+ last_exp_id = i
+ break
+
+ return for i, stm in ipairs stms
+ if i == last_exp_id
+ fn stm
+ else
+ stm
+
+-- is a body a single expression/statement
+is_singular = (body) ->
+ return false if #body != 1
+ if "group" == ntype body
+ is_singular body[2]
+ else
+ true
+
+constructor_name = "new"
+
+class Transformer
+ new: (@transformers, @scope) =>
+ @seen_nodes = {}
+
+ transform: (scope, node, ...) =>
+ -- print scope, node, ...
+ return node if @seen_nodes[node]
+ @seen_nodes[node] = true
+ while true
+ transformer = @transformers[ntype node]
+ res = if transformer
+ transformer(scope, node, ...) or node
+ else
+ node
+ return node if res == node
+ node = res
+
+ __call: (node, ...) =>
+ @transform @scope, node, ...
+
+ instance: (scope) =>
+ Transformer @transformers, scope
+
+ can_transform: (node) =>
+ @transformers[ntype node] != nil
+
+construct_comprehension = (inner, clauses) ->
+ current_stms = inner
+ for _, clause in reversed clauses
+ t = clause[1]
+ current_stms = if t == "for"
+ _, names, iter = unpack clause
+ {"foreach", names, iter, current_stms}
+ elseif t == "when"
+ _, cond = unpack clause
+ {"if", cond, current_stms}
+ else
+ error "Unknown comprehension clause: "..t
+ current_stms = {current_stms}
+
+ current_stms[1]
+
+Statement = Transformer {
+ assign: (node) =>
+ _, names, values = unpack node
+ -- bubble cascading assigns
+ if #values == 1 and types.cascading[ntype values[1]]
+ values[1] = @transform.statement values[1], (stm) ->
+ t = ntype stm
+ if types.is_value stm
+ {"assign", names, {stm}}
+ else
+ stm
+
+ build.group {
+ {"declare", names}
+ values[1]
+ }
+ else
+ node
+
+ export: (node) =>
+ -- assign values if they are included
+ if #node > 2
+ if node[2] == "class"
+ cls = smart_node node[3]
+ build.group {
+ {"export", {cls.name}}
+ cls
+ }
+ else
+ build.group {
+ node
+ build.assign {
+ names: node[2]
+ values: node[3]
+ }
+ }
+ else
+ nil
+
+ update: (node) =>
+ _, name, op, exp = unpack node
+ op_final = op\match "^(.+)=$"
+ error "Unknown op: "..op if not op_final
+ build.assign_one name, {"exp", name, op_final, exp}
+
+ import: (node) =>
+ _, names, source = unpack node
+
+ stubs = for name in *names
+ if type(name) == "table"
+ name
+ else
+ {"dot", name}
+
+ real_names = for name in *names
+ type(name) == "table" and name[2] or name
+
+ if type(source) == "string"
+ build.assign {
+ names: real_names
+ values: [build.chain { base: source, stub} for stub in *stubs]
+ }
+ else
+ source_name = NameProxy "table"
+ build.group {
+ {"declare", real_names}
+ build["do"] {
+ build.assign_one source_name, source
+ build.assign {
+ names: real_names
+ values: [build.chain { base: source_name, stub} for stub in *stubs]
+ }
+ }
+ }
+
+ comprehension: (node, action) =>
+ _, exp, clauses = unpack node
+
+ action = action or (exp) -> {exp}
+ construct_comprehension action(exp), clauses
+
+ -- handle cascading return decorator
+ if: (node, ret) =>
+ if ret
+ smart_node node
+ -- mutate all the bodies
+ node['then'] = apply_to_last node['then'], ret
+ for i = 4, #node
+ case = node[i]
+ body_idx = #node[i]
+ case[body_idx] = apply_to_last case[body_idx], ret
+ node
+
+ with: (node, ret) =>
+ _, exp, block = unpack node
+ scope_name = NameProxy "with"
+ build["do"] {
+ build.assign_one scope_name, exp
+ Run => @set "scope_var", scope_name
+ build.group block
+ if ret
+ ret scope_name
+ }
+
+ foreach: (node) =>
+ smart_node node
+ if ntype(node.iter) == "unpack"
+ list = node.iter[2]
+
+ index_name = NameProxy "index"
+ list_name = NameProxy "list"
+
+ slice_var = nil
+ bounds = if is_slice list
+ slice = list[#list]
+ table.remove list
+ table.remove slice, 1
+
+ slice[2] = if slice[2] and slice[2] != ""
+ max_tmp_name = NameProxy "max"
+ slice_var = build.assign_one max_tmp_name, slice[2]
+ {"exp", max_tmp_name, "<", 0
+ "and", {"length", list_name}, "+", max_tmp_name
+ "or", max_tmp_name }
+ else
+ {"length", list_name}
+
+ slice
+ else
+ {1, {"length", list_name}}
+
+ build.group {
+ build.assign_one list_name, list
+ slice_var
+ build["for"] {
+ name: index_name
+ bounds: bounds
+ body: {
+ {"assign", node.names, {list_name\index index_name}}
+ build.group node.body
+ }
+ }
+ }
+
+ switch: (node, ret) =>
+ _, exp, conds = unpack node
+ exp_name = NameProxy "exp"
+
+ -- convert switch conds into if statement conds
+ convert_cond = (cond) ->
+ t, case_exp, body = unpack cond
+ out = {}
+ insert out, t == "case" and "elseif" or "else"
+ if t != "else"
+ insert out, {"exp", case_exp, "==", exp_name} if t != "else"
+ else
+ body = case_exp
+
+ if ret
+ body = apply_to_last body, ret
+
+ insert out, body
+
+ out
+
+ first = true
+ if_stm = {"if"}
+ for cond in *conds
+ if_cond = convert_cond cond
+ if first
+ first = false
+ insert if_stm, if_cond[2]
+ insert if_stm, if_cond[3]
+ else
+ insert if_stm, if_cond
+
+ build.group {
+ build.assign_one exp_name, exp
+ if_stm
+ }
+
+ class: (node) =>
+ _, name, parent_val, body = unpack node
+
+ -- split apart properties and statements
+ statements = {}
+ properties = {}
+ for item in *body
+ switch item[1]
+ when "stm"
+ insert statements, item[2]
+ when "props"
+ for tuple in *item[2,]
+ insert properties, tuple
+
+ -- find constructor
+ constructor = nil
+ properties = for tuple in *properties
+ if tuple[1] == constructor_name
+ constructor = tuple[2]
+ nil
+ else
+ tuple
+
+ parent_cls_name = NameProxy "parent"
+ base_name = NameProxy "base"
+ self_name = NameProxy "self"
+ cls_name = NameProxy "class"
+
+ if not constructor
+ constructor = build.fndef {
+ args: {{"..."}}
+ arrow: "fat"
+ body: {
+ build["if"] {
+ cond: parent_cls_name
+ then: {
+ build.chain { base: "super", {"call", {"..."}} }
+ }
+ }
+ }
+ }
+ else
+ smart_node constructor
+ constructor.arrow = "fat"
+
+ cls = build.table {
+ {"__init", constructor}
+ {"__base", base_name}
+ {"__name", {"string", '"', name}} -- "quote the string"
+ {"__parent", parent_cls_name}
+ }
+
+ -- look up a name in the class object
+ class_lookup = build["if"] {
+ cond: {"exp", "val", "==", "nil", "and", parent_cls_name}
+ then: {
+ parent_cls_name\index"name"
+ }
+ }
+ insert class_lookup, {"else", {"val"}}
+
+ cls_mt = build.table {
+ {"__index", build.fndef {
+ args: {{"cls"}, {"name"}}
+ body: {
+ build.assign_one LocalName"val", build.chain {
+ base: "rawget", {"call", {base_name, "name"}}
+ }
+ class_lookup
+ }
+ }}
+ {"__call", build.fndef {
+ args: {{"cls"}, {"..."}}
+ body: {
+ build.assign_one self_name, build.chain {
+ base: "setmetatable"
+ {"call", {"{}", base_name}}
+ }
+ build.chain {
+ base: "cls.__init"
+ {"call", {self_name, "..."}}
+ }
+ self_name
+ }
+ }}
+ }
+
+ cls = build.chain {
+ base: "setmetatable"
+ {"call", {cls, cls_mt}}
+ }
+
+ value = nil
+ with build
+ value = .block_exp {
+ Run =>
+ @set "super", (block, chain) ->
+ if chain
+ slice = [item for item in *chain[3,]]
+ new_chain = {"chain", parent_cls_name}
+
+ head = slice[1]
+
+ if head == nil
+ return parent_cls_name
+
+ switch head[1]
+ -- calling super, inject calling name and self into chain
+ when "call"
+ calling_name = block\get"current_block"
+ slice[1] = {"call", {"self", unpack head[2]}}
+ act = if ntype(calling_name) != "value" then "index" else "dot"
+ insert new_chain, {act, calling_name}
+
+ -- colon call on super, replace class with self as first arg
+ when "colon"
+ call = head[3]
+ insert new_chain, {"dot", head[2]}
+ slice[1] = { "call", { "self", unpack call[2] } }
+
+ insert new_chain, item for item in *slice
+
+ new_chain
+ else
+ parent_cls_name
+
+ .assign_one parent_cls_name, parent_val == "" and "nil" or parent_val
+ .assign_one base_name, {"table", properties}
+ .assign_one base_name\chain"__index", base_name
+
+ build["if"] {
+ cond: parent_cls_name
+ then: {
+ .chain {
+ base: "setmetatable"
+ {"call", {
+ base_name,
+ .chain { base: parent_cls_name, {"dot", "__base"}}
+ }}
+ }
+ }
+ }
+
+ .assign_one cls_name, cls
+ .assign_one base_name\chain"__class", cls_name
+
+ .group if #statements > 0 {
+ .assign_one LocalName"self", cls_name
+ .group statements
+ } else {}
+
+ cls_name
+ }
+
+ value = .group {
+ .declare names: {name}
+ .assign {
+ names: {name}
+ values: {value}
+ }
+ }
+
+ value
+}
+
+class Accumulator
+ body_idx: { for: 4, while: 3, foreach: 4 }
+
+ new: =>
+ @accum_name = NameProxy "accum"
+ @value_name = NameProxy "value"
+ @len_name = NameProxy "len"
+
+ -- wraps node and mutates body
+ convert: (node) =>
+ index = @body_idx[ntype node]
+ node[index] = @mutate_body node[index]
+ @wrap node
+
+ -- wrap the node into a block_exp
+ wrap: (node) =>
+ build.block_exp {
+ build.assign_one @accum_name, build.table!
+ build.assign_one @len_name, 0
+ node
+ @accum_name
+ }
+
+ -- mutates the body of a loop construct to save last value into accumulator
+ -- can optionally skip nil results
+ mutate_body: (body, skip_nil=true) =>
+ val = if not skip_nil and is_singular body
+ with body[1]
+ body = {}
+ else
+ body = apply_to_last body, (n) ->
+ build.assign_one @value_name, n
+ @value_name
+
+ update = {
+ {"update", @len_name, "+=", 1}
+ build.assign_one @accum_name\index(@len_name), val
+ }
+
+ if skip_nil
+ table.insert body, build["if"] {
+ cond: {"exp", @value_name, "!=", "nil"}
+ then: update
+ }
+ else
+ table.insert body, build.group update
+
+ body
+
+default_accumulator = (node) =>
+ Accumulator!\convert node
+
+
+implicitly_return = (scope) ->
+ fn = (stm) ->
+ t = ntype stm
+ if types.manual_return[t] or not types.is_value stm
+ stm
+ elseif types.cascading[t]
+ scope.transform.statement stm, fn
+ else
+ if t == "comprehension" and not types.comprehension_has_value stm
+ stm
+ else
+ {"return", stm}
+
+ fn
+
+Value = Transformer {
+ for: default_accumulator
+ while: default_accumulator
+ foreach: default_accumulator
+
+ comprehension: (node) =>
+ a = Accumulator!
+ node = @transform.statement node, (exp) ->
+ a\mutate_body {exp}, false
+ a\wrap node
+
+ tblcomprehension: (node) =>
+ _, key_exp, value_exp, clauses = unpack node
+
+ accum = NameProxy "tbl"
+ dest = build.chain { base: accum, {"index", key_exp} }
+ inner = build.assign_one dest, value_exp
+
+ build.block_exp {
+ build.assign_one accum, build.table!
+ construct_comprehension {inner}, clauses
+ accum
+ }
+
+ fndef: (node) =>
+ smart_node node
+ node.body = apply_to_last node.body, implicitly_return self
+ node
+
+ if: (node) => build.block_exp { node }
+ with: (node) => build.block_exp { node }
+ switch: (node) =>
+ build.block_exp { node }
+
+ -- pull out colon chain
+ chain: (node) =>
+ stub = node[#node]
+ if type(stub) == "table" and stub[1] == "colon_stub"
+ table.remove node, #node
+
+ base_name = NameProxy "base"
+ fn_name = NameProxy "fn"
+
+ is_super = node[2] == "super"
+ @transform.value build.block_exp {
+ build.assign {
+ names: {base_name}
+ values: {node}
+ }
+
+ build.assign {
+ names: {fn_name}
+ values: {
+ build.chain { base: base_name, {"dot", stub[2]} }
+ }
+ }
+
+ build.fndef {
+ args: {{"..."}}
+ body: {
+ build.chain {
+ base: fn_name, {"call", {is_super and "self" or base_name, "..."}}
+ }
+ }
+ }
+ }
+
+ block_exp: (node) =>
+ _, body = unpack node
+
+ fn = nil
+ arg_list = {}
+
+ insert body, Run =>
+ if @has_varargs
+ insert arg_list, "..."
+ insert fn.args, {"..."}
+
+ fn = smart_node build.fndef body: body
+ build.chain { base: {"parens", fn}, {"call", arg_list} }
+}
+
diff --git a/tests/examplefiles/example.p b/tests/examplefiles/example.p
new file mode 100644
index 00000000..e8c17e33
--- /dev/null
+++ b/tests/examplefiles/example.p
@@ -0,0 +1,34 @@
+{include.i}
+{nested.i {include.i}}
+
+&SCOPED-DEFINE MY_NAME "Abe"
+
+DEF VAR i AS INT NO-UNDO.
+i = 0xABE + 1337 / (1 * 1.00)
+
+def var clowercasetest as char no-undo.
+DEF VAR vardashtest AS DATETIME-TZ NO-UNDO.
+
+DEFINE TEMP-TABLE ttNames NO-UNDO
+ FIELD cName AS CHAR
+ INDEX IXPK_ttNames IS PRIMARY UNIQUE cName.
+
+/* One-line comment */
+/* Two-line
+ Comment */
+/*
+ Nested
+ /*
+ Multiline
+ /*
+ Comment
+ */
+ */
+*/
+
+CREATE ttNames.
+ASSIGN ttNames.cName = {&MY_NAME}.
+
+FOR EACH ttNames:
+ MESSAGE "Hello, " + ttNames.cName + '!' VIEW-AS ALERT-BOX.
+END.
diff --git a/tests/examplefiles/example.snobol b/tests/examplefiles/example.snobol
new file mode 100644
index 00000000..26ca5cf4
--- /dev/null
+++ b/tests/examplefiles/example.snobol
@@ -0,0 +1,15 @@
+-SOME RANDOM DIRECTIVE WOULD GO HERE
+*
+* SNOBOL4 example file for lexer
+*
+ SOME.THING_OR_OTHER32 = 1 + 1.0 - 1E3 * 1E-3 ** 2.718284590E0
++ :F(END)S(IN_LOOP)
+ PATTERN = LEN(3) ("GAR" | "BAR")
+IN_LOOP THING = INPUT :F(END)
+ THING LEN(3) ("GAR" | "BAR") :S(OK)
+ OUTPUT = THING " : Failure!" :(IN_LOOP)
+OK OUTPUT = THING ' : "Success"!' :(IN_LOOP)
+END
+FOOBAR
+FOOGAR
+THiNIg
diff --git a/tests/examplefiles/example.u b/tests/examplefiles/example.u
new file mode 100644
index 00000000..42c85902
--- /dev/null
+++ b/tests/examplefiles/example.u
@@ -0,0 +1,548 @@
+ // This is a one line comment.
+ /* an inner comment */
+
+ /* nested /* comments */ */
+
+ /*
+ /*
+ Multi-line.
+ */
+ */
+
+// Binary blob escape.
+//"some text \B(3)("\") ouhyeah" == "\"\\\"";
+"some text \B(3)("\") ouhyeah" == "\"\\\"";
+'some text \B(3)('\') ouhyeah' == '\'\\\'';
+
+//"\B(4)()"'()";
+"\B(4)()"'()";
+'\B(4)()'"()';
+
+//blob size limits
+"hey ! \B(0)() oh !"
+
+//blob format is wrong
+"hey ! \B(2)(aaa) oh !"
+"hey ! \B(100)(aaa) oh !"
+
+//multiple blob in a string
+"hey ! \B(3)(aaa) hey ! \B(3)(aaa) oh !"
+
+// multiple digits blob size
+"hey ! \B(10)(aaaaaaaaaa) !"
+"hey ! \B(10)(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa) !"
+"hey ! \B(100)(a) !"
+
+// multiple digits blob size
+"hey ! \B(007)(aaaaaaa) !"
+"hey ! \B(007)(aa) !"
+"hey ! \B(007)(aaaaaaaaaaaaaaaaaa) !"
+
+// deprecated and restricted keywords
+emit Event.new;
+static int main();
+
+loopn (2) {echo("a");};
+
+foreach (var i : [1,2,3,4]) {
+ echo(i);
+};
+
+function() {};
+
+var 'if';
+var this.'else';
+
+var '%x';
+var '1 2 3';
+var this.'[]';
+
+// angles
+pi == 180deg;
+pi == 200grad;
+
+// Dictionary
+[ => ]; // The empty dictionary
+
+// duration
+1d == 24h;
+0.5d == 12h;
+1h == 60min;
+1min == 60s;
+1s == 1000ms;
+
+1s == 1;
+1s 2s 3s == 6;
+1s 1ms == 1.001;
+1ms 1s == 1.001;
+
+
+ 1 == 1;
+ 1 == 1.0;
+ 1.2 == 1.2000;
+ 1.234e6 == 1234000;
+ 1e+11 == 1E+11;
+ 1e10 == 10000000000;
+ 1e30 == 1e10 * 1e10 * 1e10;
+
+
+0.000001;
+
+0.0000001;
+
+0.00000000001;
+
+1e+3;
+
+1E-5;
+
+
+1.;
+// [00004701:error] !!! syntax error: unexpected ;
+
+ 0x2a == 42;
+ 0x2A == 42;
+ 0xabcdef == 11259375;
+ 0xABCDEF == 11259375;
+0xFFFFFFFF == 4294967295;
+
+
+//123foo;
+//[00005658:error] !!! syntax error: invalid token: '123foo'
+//12.3foo;
+//[00018827:error] !!! syntax error: invalid token: '12.3foo'
+0xabcdef;
+//[00060432] 11259375
+//0xabcdefg;
+//[00061848:error] !!! syntax error: invalid token: '0xabcdefg'
+
+
+[]; // The empty list
+[1, 2, 3];
+
+// Special characters.
+"\"" == "\"";
+"\\" == "\\";
+
+// ASCII characters.
+"\a" == "\007"; "\a" == "\x07";
+"\b" == "\010"; "\b" == "\x08";
+"\f" == "\014"; "\f" == "\x0c";
+"\n" == "\012"; "\n" == "\x0a";
+"\r" == "\015"; "\r" == "\x0d";
+"\t" == "\011"; "\t" == "\x09";
+"\v" == "\013"; "\v" == "\x0b";
+
+// Octal escapes.
+"\0" == "\00"; "\0" == "\000";
+"\0000" == "\0""0";
+"\062\063" == "23";
+
+// Hexadecimal escapes.
+"\x00" == "\0";
+"\x32\x33" == "23";
+
+
+
+"foo" "bar" "baz" == "foobarbaz";
+
+// Tuples
+();
+[00000000] ()
+(1,);
+[00000000] (1,)
+(1, 2);
+[00000000] (1, 2)
+(1, 2, 3, 4,);
+[00000000] (1, 2, 3, 4)
+
+function Global.verboseId(var x)
+{
+ echo(x) | x
+}|;
+class verboseId(Global).math : verboseId(Math)
+{
+};
+
+{
+ for (3)
+ {
+ sleep(1s);
+ echo("ping");
+ },
+ sleep(0.5s);
+ for (3)
+ {
+ sleep(1s);
+ echo("pong");
+ },
+};
+
+ 1 + 1 == 2;
+ 1 - 2 == -1;
+ 2 * 3 == 6;
+ 10 / 2 == 5;
+ 2 ** 10 == 1024;
+ -(1 + 2) == -3;
+ 1 + 2 * 3 == 7;
+ (1 + 2) * 3 == 9;
+ -2 ** 2 == -4;
+ - - - - 1 == 1;
+
+a = b
+a += b
+a -= b
+a *= b
+a /= b
+a %= b
+a ^= b
+
+
+var value = 0|;
+var valueAlias = value|;
+value += 10;
+valueAlias;
+var myList = []|;
+var myList.specialFeature = 42|;
+myList += [1, 2, 3];
+myList.specialFeature;
+var myOtherList = myList + [4, 5];
+myOtherList.specialFeature;
+var something = []|;
+var somethingElse = something|;
+something += [1, 2];
+somethingElse += [3, 4];
+something;
+
+
+class Counter
+{
+ var count = 0;
+ function init (n) { var this.count = n };
+ // Display the value, and the identity.
+ function asString() { "%s @ %s" % [count, uid ] };
+ function '+'(var n) { new(count + n) };
+ function '-'(var n) { new(count - n) };
+}|;
+
+
+class ImmutableCounter : Counter
+{
+ function '+='(var n) { this + n };
+ function '-='(var n) { this - n };
+}|;
+
+var ic1 = ImmutableCounter.new(0);
+var ic2 = ic1;
+
+ic1 += 1;
+ic1;
+ic2;
+
+
+a << b
+a >> b
+a ^ b
+
+4 << 2 == 16;
+4 >> 2 == 1;
+
+!a
+a && b
+a || b
+
+true && true;
+true || false;
+!true == false;
+true || (1 / 0);
+(false && (1 / 0)) == false;
+
+a == b
+a != b
+a === b
+a !== b
+a ~= b
+a =~= b
+a < b
+a <= b
+a > b
+a >= b
+
+assert{
+ ! (0 < 0);
+ 0 <= 0;
+ 0 == 0;
+ 0 !== 0;
+};
+
+a in b
+a not in b
+a[args]
+a[args] = v
+
+1 in [0, 1, 2];
+3 not in [0, 1, 2];
+
+"one" in ["zero" => 0, "one" => 1, "two" => 2];
+"three" not in ["zero" => 0, "one" => 1, "two" => 2];
+
+a.b
+a.b(args)
+a->b
+a->b = v
+a.&b
+
+var obj = Object.new|;
+function obj.f() { 24 }|;
+
+
+var f = function(a, b) {
+ echo(b + a);
+}|
+f(1, 0);
+
+
+function g3()
+{
+ return; // Stop execution at this point and return void
+ echo(0); // This is not executed
+}|
+
+Object.setProperty, to define/set a property.
+Object.getProperty, to get a property.
+Object.removeProperty, to delete a property.
+Object.hasProperty, to test for the existence of a property.
+Object.properties, to get all the properties of a slot.
+
+enum Suit
+{
+ hearts,
+ diamonds,
+ clubs,
+ spades, // Last comma is optional
+};
+
+for (var suit in Suit)
+ echo("%s the ace of %s." % [find_ace(suit), suit]);
+
+switch ( ("foo", [1, 2]) )
+{
+ // The pattern does not match the values of the list.
+ case ("foo", [2, 1]):
+ echo("fail");
+
+ // The pattern does not match the tuple.
+ case ["foo", [1, 2]]:
+ echo("fail");
+
+ // The pattern matches and binds the variable "l"
+ // but the condition is not verified.
+ case ("foo", var l) if l.size == 0:
+ echo("fail");
+
+ // The pattern matches.
+ case ("foo", [var a, var b]):
+ echo("foo(%s, %s)" % [a, b]);
+};
+//[00000000] *** foo(1, 2)
+
+{
+ ["b" => var b, "a" => var a] = ["a" => 1, "b" => 2, "c" => 3];
+ echo("a = %d, b = %d" % [a, b]);
+};
+//[00000000] *** a = 1, b = 2
+
+
+switch (["speed" => 2, "time" => 6s])
+{
+ case ["speed" => var s] if s > 3:
+ echo("Too fast");
+ case ["speed" => var s, "time" => var t] if s * t > 10:
+ echo("Too far");
+};
+//[00000000] *** Too far
+
+
+try
+{
+ throw ("message", 0)
+}
+catch (var e if e.isA(Exception))
+{
+ echo(e.message)
+}
+catch ((var msg, var value) if value.isA(Float))
+{
+ echo("%s: %d" % [msg, value])
+};
+//[00000000] *** message: 0
+
+
+{
+ var e = Event.new;
+ at (e?(var msg, var value) if value % 2 == 0)
+ echo("%s: %d" % [msg, value]);
+
+ // Does not trigger the "at" because the guard is not verified.
+ e!("message", 1);
+
+ // Trigger the "at".
+ e!("message", 2);
+};
+//[00000000] *** message: 2
+
+for (var i = 0; i < 8; i++)
+{
+ if (i % 2 != 0)
+ continue;
+ echo(i);
+};
+
+do (1024)
+{
+ assert(this == 1024);
+ assert(sqrt == 32);
+ setSlot("y", 23);
+}.y;
+
+{
+ var n = 10|;
+ var res = []|;
+ loop;{
+ n--;
+ res << n;
+ if (n == 0)
+ break
+ };
+ res
+}
+
+
+{
+ var n = 10|;
+ var res = []|;
+ loop|{
+ n--;
+ res << n;
+ if (n == 0)
+ break
+ };
+ res
+}
+
+
+var j = 3|
+while (0 < j)
+{
+ echo(j);
+ j--;
+};
+
+
+{
+ var i = 4|
+ while| (true)
+ {
+ i -= 1;
+ echo ("in: " + i);
+ if (i == 1)
+ break
+ else if (i == 2)
+ continue;
+ echo ("out: " + i);
+ };
+};
+
+
+
+function test(e)
+{
+ try
+ { throw e; }
+ catch (0)
+ { echo("zero") }
+ catch ([var x, var y])
+ { echo(x + y) }
+} | {};
+
+try { echo("try") }
+catch { echo("catch")}
+else { echo("else")};
+
+
+try
+{
+ echo("inside");
+}
+finally
+{
+ echo("finally");
+};
+//[00000001] *** inside
+//[00000002] *** finally
+
+at (e?(var start) ~ 1s)
+ echo("in : %s" % (time - start).round)
+onleave
+ echo("out: %s" % (time - start).round);
+
+// This emission is too short to trigger the at.
+e!(time);
+
+// This one is long enough.
+// The body triggers 1s after the emission started.
+e!(time) ~ 2s;
+//[00001000] *** in : 1
+//[00002000] *** out: 2
+
+
+timeout (2.1s)
+ every (1s)
+ echo("Are you still there?");
+//[00000000] *** Are you still there?
+//[00001000] *** Are you still there?
+//[00002000] *** Are you still there?
+
+ every| (1s)
+ {
+ echo("aba");
+ };
+
+for, (var i = 3; 0 < i; i -= 1)
+{
+ echo (i);
+};
+
+
+for& (var i: [0, 1, 2])
+{
+ echo (i * i);
+};
+
+loop,{
+};
+
+
+waituntil (e?(1, var b));
+
+whenever (e?("arg", var arg) if arg % 2)
+ echo("e (%s) on" % arg)
+else
+ echo("e off");
+
+
+ while, (i)
+ {
+ var j = i -= 1;
+ }|
+
+
+var y = 0;
+{
+ sleep(0.5s);
+ y = 100 smooth:3s,
+},
+
+
+
+
diff --git a/tests/examplefiles/http_request_example b/tests/examplefiles/http_request_example
new file mode 100644
index 00000000..5d2a1d52
--- /dev/null
+++ b/tests/examplefiles/http_request_example
@@ -0,0 +1,14 @@
+POST /demo/submit/ HTTP/1.1
+Host: pygments.org
+Connection: keep-alive
+Cache-Control: max-age=0
+Origin: http://pygments.org
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.63 Safari/535.7
+Content-Type: application/x-www-form-urlencoded
+Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
+Referer: http://pygments.org/
+Accept-Encoding: gzip,deflate,sdch
+Accept-Language: en-US,en;q=0.8
+Accept-Charset: windows-949,utf-8;q=0.7,*;q=0.3
+
+name=test&lang=text&code=asdf&user=
diff --git a/tests/examplefiles/http_response_example b/tests/examplefiles/http_response_example
new file mode 100644
index 00000000..bf53d61d
--- /dev/null
+++ b/tests/examplefiles/http_response_example
@@ -0,0 +1,27 @@
+HTTP/1.1 200 OK
+Date: Tue, 13 Dec 2011 00:11:44 GMT
+Status: 200 OK
+X-Transaction: 50b85fff78dab4a3
+X-RateLimit-Limit: 150
+ETag: "b31143be48ebfe7512b65fe64fe092f3"
+X-Frame-Options: SAMEORIGIN
+Last-Modified: Tue, 13 Dec 2011 00:11:44 GMT
+X-RateLimit-Remaining: 145
+X-Runtime: 0.01190
+X-Transaction-Mask: a6183ffa5f8ca943ff1b53b5644ef1145f6f285d
+Content-Type: application/json; charset=utf-8
+Content-Length: 2389
+Pragma: no-cache
+X-RateLimit-Class: api
+X-Revision: DEV
+Expires: Tue, 31 Mar 1981 05:00:00 GMT
+Cache-Control: no-cache, no-store, must-revalidate, pre-check=0, post-check=0
+X-MID: a55f21733bc52bb11d1fc58f9b51b4974fbb8f83
+X-RateLimit-Reset: 1323738416
+Set-Cookie: k=10.34.234.116.1323735104238974; path=/; expires=Tue, 20-Dec-11 00:11:44 GMT; domain=.twitter.com
+Set-Cookie: guest_id=v1%3A13237351042425496; domain=.twitter.com; path=/; expires=Thu, 12-Dec-2013 12:11:44 GMT
+Set-Cookie: _twitter_sess=BAh7CDoPY3JlYXRlZF9hdGwrCPS6wjQ0AToHaWQiJTFiMTlhY2E1ZjczYThk%250ANDUwMWQxNjMwZGU2YTQ1ODBhIgpmbGFzaElDOidBY3Rpb25Db250cm9sbGVy%250AOjpGbGFzaDo6Rmxhc2hIYXNoewAGOgpAdXNlZHsA--6b502f30a083e8a41a64f10930e142ea362b1561; domain=.twitter.com; path=/; HttpOnly
+Vary: Accept-Encoding
+Server: tfe
+
+[{"contributors_enabled":false,"profile_background_tile":true,"followers_count":644,"protected":false,"profile_image_url":"http:\/\/a0.twimg.com\/profile_images\/69064242\/gb_normal.jpg","screen_name":"birkenfeld","default_profile_image":false,"following":null,"friends_count":88,"profile_sidebar_fill_color":"7AC3EE","url":"http:\/\/pythonic.pocoo.org\/","name":"Georg Brandl","default_profile":false,"is_translator":false,"utc_offset":3600,"profile_sidebar_border_color":"65B0DA","description":"","profile_background_image_url_https":"https:\/\/si0.twimg.com\/images\/themes\/theme10\/bg.gif","favourites_count":0,"profile_use_background_image":true,"created_at":"Tue Dec 30 22:25:11 +0000 2008","status":{"retweet_count":10,"favorited":false,"geo":null,"possibly_sensitive":false,"coordinates":null,"in_reply_to_screen_name":null,"in_reply_to_status_id_str":null,"retweeted":false,"in_reply_to_status_id":null,"in_reply_to_user_id_str":null,"created_at":"Sat Jul 09 13:42:35 +0000 2011","truncated":false,"id_str":"89690914515206144","contributors":null,"place":null,"source":"web","in_reply_to_user_id":null,"id":89690914515206144,"retweeted_status":{"retweet_count":10,"favorited":false,"geo":null,"possibly_sensitive":false,"coordinates":null,"in_reply_to_screen_name":null,"in_reply_to_status_id_str":null,"retweeted":false,"in_reply_to_status_id":null,"in_reply_to_user_id_str":null,"created_at":"Sat Jul 09 13:07:04 +0000 2011","truncated":false,"id_str":"89681976755372032","contributors":null,"place":null,"source":"web","in_reply_to_user_id":null,"id":89681976755372032,"text":"Excellent Python posts from @mitsuhiko - http:\/\/t.co\/k1wt6e4 and @ncoghlan_dev - http:\/\/t.co\/eTxacgZ (links fixed)"},"text":"RT @jessenoller: Excellent Python posts from @mitsuhiko - http:\/\/t.co\/k1wt6e4 and @ncoghlan_dev - http:\/\/t.co\/eTxacgZ (links fixed)"},"follow_request_sent":null,"statuses_count":553,"geo_enabled":false,"notifications":null,"profile_text_color":"3D1957","id_str":"18490730","lang":"en","profile_background_image_url":"http:\/\/a1.twimg.com\/images\/themes\/theme10\/bg.gif","profile_image_url_https":"https:\/\/si0.twimg.com\/profile_images\/69064242\/gb_normal.jpg","show_all_inline_media":true,"listed_count":65,"profile_link_color":"FF0000","verified":false,"id":18490730,"time_zone":"Berlin","profile_background_color":"642D8B","location":"Bavaria, Germany"}]
diff --git a/tests/examplefiles/nemerle_sample.n b/tests/examplefiles/nemerle_sample.n
index 2c05033a..5236857d 100644
--- a/tests/examplefiles/nemerle_sample.n
+++ b/tests/examplefiles/nemerle_sample.n
@@ -13,13 +13,15 @@ namespace Demo.Ns
public virtual someMethod(str : string) : list[double]
{
def x = "simple string";
- def x = $"simple $splice string $(spliceMethod())";
+ def x = $"simple $splice string $(spliceMethod() + 1)";
def x = <#
recursive <# string #> sample
#>;
def x = $<#
recursive $splice <# string #> sample
+ ..$(lst; "; "; x => $"x * 2 = $(x * 2)") str
#>;
+ def x = @"somestring \";
def localFunc(arg)
{
@@ -80,6 +82,6 @@ namespace Demo.Ns
macro sampleMacro(expr)
syntax ("write", expr)
{
- <[ WriteLine($expr) ]>
+ <[ WriteLine($(expr : dyn)) ]>
}
}
diff --git a/tests/examplefiles/newlisp-parser.lsp b/tests/examplefiles/newlisp-parser.lsp
deleted file mode 100644
index 985615b3..00000000
--- a/tests/examplefiles/newlisp-parser.lsp
+++ /dev/null
@@ -1,298 +0,0 @@
-#!/usr/bin/env newlisp
-
-;; @module Nlex
-;; @author cormullion
-;; @description newLISP source code lexer/tokenizer/parser
-;; @location somewhere on github
-;; @version 0.1 of 2011-09-19 08:55:19
-;;<h4>About this module</h4>
-;;<p>The Nlex module is a lexer/tokenizer/parser for newLISP source code.
-;; An expert from StackOverflow xplains:
-;; A tokenizer breaks a stream of text into tokens.
-;; A lexer is basically a tokenizer, but it usually attaches extra context to the tokens.
-;; A parser takes the stream of tokens from the lexer and turns it into an abstract syntax tree representing the program represented by the original text.</p>
-;;<p><b>Usage</b></p>
-;;<p>To tokenize/parse source code stored in symbol 'original, use <b>parse-newlisp</b>, To convert the parsed source tree back to plain source, use <b>nlx-to-plaintext</b>:</p>
-;;<pre>
-;;(letn ((converted (Nlex:parse-newlisp original-source)) ; parses
-;; (new-original (Nlex:nlx-to-plaintext converted))) ; converts back to plain text
-;;</pre>
-;;<p>After this round trip, original-source and new-original should be identical.</p>
-;;<p></p>
-
-(context 'Nlex)
-
-; class variables
-
-(define *cursor*)
-(define *source-length*)
-(define *source-list*)
-(define *depth*)
-(define *tree*)
-(define *loc*)
-
-(define (get-next-char)
- (let ((nch ""))
- (if (< *cursor* *source-length*)
- (begin
- (set 'nch (*source-list* *cursor*))
- (inc *cursor* (utf8len nch)))
- (set 'nch nil))
- nch))
-
-(define (peek-char)
- (let ((pch ""))
- (if (< *cursor* *source-length*)
- (set 'pch (*source-list* *cursor*))
- (set 'pch nil))))
-
-(define (char-identifier-first? c)
- (not (find (lower-case c) [text] #;"'(){}.0123456789[/text])))
-
-(define (char-identifier? c)
- (not (find (lower-case c) { "':,()})))
-
-(define (char-numeric-first? c)
- (find c {123456789+-.0}))
-
-(define (char-numeric? c)
- (find c {0123456789+-.xXabcdefABracketedCommandDEF}))
-
-(define (char-whitespace? c)
- (or (= c " ") (= c "\n") (= c "\t")))
-
-(define (open-paren-token)
- (add-to-parse-tree '(LeftParen "(")))
-
-(define (close-paren-token)
- (add-to-parse-tree '(RightParen ")")))
-
-(define (read-comment c)
- (let ((res c) (ch ""))
- (while (and (!= (set 'ch (get-next-char)) "\n") ch)
- (push ch res -1))
- (add-to-parse-tree (list 'Comment (string res "\n")))))
-
-(define (read-identifier c)
- (let ((res c) (ch ""))
- ; look for end of identifier
- (while (and (not (find (set 'ch (peek-char)) " \"',()\n\t\r")) (!= ch nil))
- (push (get-next-char) res -1))
- (add-to-parse-tree (list 'Symbol res))))
-
-(define (read-number-scanner list-so-far)
- (let ((next-char (peek-char)))
- ;; if next-char is a digit then recurse
- (if (and (char-numeric? next-char) next-char)
- (read-number-scanner (cons (get-next-char) list-so-far))
- (reverse list-so-far))))
-
-(define (precise-float str)
-; more faithful to original format than newLISP's float?
- (let ((p "") (q ""))
- (map set '(p q) (parse str "."))
- (append p "." q)))
-
-(define (scientific-float str)
- (let ((p "") (q ""))
- (map set '(p q) (parse str "e"))
- (append p "e" q)))
-
-(define (read-number c)
- (let ((res '() number-as-string ""))
- (set 'number-as-string (join (read-number-scanner (list c))))
- (cond
- ; try hex first
- ((starts-with (lower-case number-as-string) "0x")
- (set 'res (list 'Hex number-as-string)))
- ; scientific notation if there's an e
- ((find "e" (lower-case number-as-string))
- (set 'res (list 'Scientific (scientific-float number-as-string))))
- ; float?
- ((find "." number-as-string)
- ; newLISP's float function isn't quite what we want here
- (set 'res (list 'Float (precise-float number-as-string))))
- ; octal, not hex or float? 017 is OK, 019 is read as 10
- ((and (starts-with (lower-case number-as-string) "0")
- (> (length number-as-string) 1)
- (empty? (difference (explode number-as-string) (explode "01234567"))))
- (set 'res (list 'Octal number-as-string)))
- ; perhaps an integer? 019 is read as 19 ...
- ((integer? (int number-as-string 0 10))
- (set 'res (list 'Integer (int number-as-string 0 10))))
- ; give up
- (true
- (set 'res (list 'NaN "NaN"))))
- (add-to-parse-tree res)))
-
-(define (read-quote)
- (add-to-parse-tree '(Quote "'")))
-
-(define (read-quoted-string)
- (let ((res {}) (ch {}))
- (while (and (!= (set 'ch (get-next-char)) {"}) ch)
- (push ch res -1)
- ; check for backslashed quotes
- (when (= ch {\})
- (set 'ch (get-next-char))
- (push ch res -1)))
- (add-to-parse-tree (list 'QuotedString res))))
-
-(define (read-braced-string)
- (let ((res "") (ch {}) (level 1))
- ; we've already seen the first { so we're up to level 1
- (while (> level 0)
- (set 'ch (get-next-char))
- (if (= ch "{") (inc level))
- (if (= ch "}") (dec level))
- (if (or (< level 0) (= ch nil)) (throw-error (string "error in a braced string at character " *cursor*)))
- ; don't push final "}"
- (if (and (> level 0)) (push ch res -1)))
- (add-to-parse-tree (list 'BracedString res))))
-
-(define (read-bracketed-string ch)
- (let ((res "") (ch {}))
- (cond
- ; bracketed TEXT?
- ((= (lower-case (join (slice *source-list* (- *cursor* 1) 6))) "[text]")
- ; look for final [/text]
- (inc *cursor* 5)
- ; look for end
- (while (and (< *cursor* (- *source-length* 7))
- (!= (lower-case (join (*cursor* 7 *source-list*))) "[/text]")
- ch)
- (push (get-next-char) res -1))
- (inc *cursor* 7)
- (add-to-parse-tree (list 'BracketedText res)))
- ; bracketed CMD?
- ((= (lower-case (join (slice *source-list* (- *cursor* 1) 5))) "[cmd]")
- ; look for final [/cmd]
- (inc *cursor* 4)
- (while (and (< *cursor* (- *source-length* 6))
- (!= (lower-case (join (*cursor* 6 *source-list*))) "[/cmd]")
- ch)
- (push (get-next-char) res -1))
- (inc *cursor* 6)
- (add-to-parse-tree (list 'BracketedCommand res)))
- ; must be those weird bracketed identifiers
- (true
- (while (and (!= (set 'ch (get-next-char)) {]}) ch)
- (push ch res -1)
- ; check for backslashed quotes
- (when (= ch {\})
- (set 'ch (get-next-char))
- (push ch res -1)))
- (add-to-parse-tree (list 'BracketedIdentifier res))))))
-
-(define (read-whitespace ch)
- (let ((res ch))
- (while (find (set 'ch (peek-char)) " \n\t")
- (push (get-next-char) res -1))
- (add-to-parse-tree (list 'WhiteSpace (base64-enc res)))))
-
-(define (get-token)
- (let ((first-char (get-next-char)))
- (if first-char
- (cond
- ; a - or + could be the start of a symbol or a number, so look at the next char
- ((or (= first-char "-") (= first-char "+"))
- (if (find (peek-char) "1234567890")
- (read-number first-char)
- (read-identifier first-char)))
- ((char-whitespace? first-char)
- (read-whitespace first-char))
- ((= first-char {(})
- (open-paren-token))
- ((= first-char {)})
- (close-paren-token))
- ((= first-char {#})
- (read-comment first-char))
- ((= first-char {;})
- (read-comment first-char))
- ((= first-char {"})
- (read-quoted-string))
- ((= first-char "{")
- (read-braced-string))
- ((= first-char "[")
- (read-bracketed-string first-char))
- ((= first-char {'})
- (read-quote))
- ((char-numeric-first? first-char)
- (read-number first-char))
- ((char-identifier-first? first-char)
- (read-identifier first-char))
- (true (throw-error (string "{" first-char "} is an unrecognized token")))))))
-
-(define (add-to-parse-tree token-pair)
- (let (token (first token-pair))
- (cond
- ((= token 'LeftParen)
- (inc *depth*)
- (push '((LeftParen "(")) *tree* *loc*)
- (push -1 *loc*))
- ((= token 'RightParen)
- (push '(RightParen ")") *tree* *loc*)
- (dec *depth*)
- (pop *loc*))
- (true
- (push token-pair *tree* *loc*)
- true))))
-
-(define (parse-newlisp src)
- ; main function: tokenize/lex/parse the string in src
- (set '*depth* 0
- '*tree* '()
- '*loc* '(-1)
- '*cursor* 0
- '*source-list* (explode src)
- '*source-length* (utf8len src)
- '*source-length* (length *source-list*))
- (while (< *cursor* *source-length*)
- (get-token))
- *tree*)
-
-(define (nlx-to-plaintext nlx (depth 0))
- (if (= depth 0) (set 'buff {})) ; if first pass, initialize a buffer
- (dolist (element nlx)
- (set 'token-type (first element) 'token-value (last element))
- (if (atom? token-type)
- (cond
- ((= token-type 'LeftParen) ; left parenthesis
- (extend buff {(}))
- ((= token-type 'RightParen) ; right parenthesis
- (extend buff {)}))
- ((= token-type 'WhiteSpace) ; whitespace
- (dostring (s (base64-dec token-value))
- (extend buff (string (char s)))))
- ((= token-type 'BracedString) ; braced string
- (extend buff (string "{" token-value "}")))
- ((= token-type 'QuotedString) ; quoted string
- (extend buff (string {"} token-value {"})))
- ((= token-type 'BracketedText) ; bracketed text
- (extend buff (string {[text]} token-value {[/text]})))
- ((= token-type 'Quote); quote
- (extend buff (string "'")))
- ((= token-type 'Comment) ; comment
- (extend buff (string (last element) "\n")))
- ((= token-type 'Integer) ; int
- (extend buff (string (int (last element)))))
- ((= token-type 'Float) ; float
- (extend buff (string (precise-float (last element)))))
- ((= token-type 'Scientific) ; scientific notation
- (extend buff (scientific-float (last element))))
- ((= token-type 'BracketedCommand) ; bracketed command
- (extend buff (string {[cmd]} (last element) {[/cmd]})))
- ((or
- (= token-type 'Symbol) ; close parenthesis
- (= token-type 'Hex) ; hex
- (= token-type 'NaN) ; not a number
- (= token-type 'Octal) ; octal
- )
- (extend buff (string (last element))))
- ((= token-type 'BracketedIdentifier) ; bracketed identifier
- (extend buff (string {[} (last element) {]}))))
- ; not an atom, so recurse but don't initialize buffer
- (nlx-to-plaintext element 1)))
- buff)
-
-;eof
diff --git a/tests/examplefiles/test.bro b/tests/examplefiles/test.bro
new file mode 100644
index 00000000..9a1b42de
--- /dev/null
+++ b/tests/examplefiles/test.bro
@@ -0,0 +1,250 @@
+@load notice
+@load utils/thresholds
+
+module SSH;
+
+export {
+ redef enum Log::ID += { SSH };
+
+ redef enum Notice::Type += {
+ Login,
+ Password_Guessing,
+ Login_By_Password_Guesser,
+ Login_From_Interesting_Hostname,
+ Bytecount_Inconsistency,
+ };
+
+ type Info: record {
+ ts: time &log;
+ uid: string &log;
+ id: conn_id &log;
+ status: string &log &optional;
+ direction: string &log &optional;
+ remote_location: geo_location &log &optional;
+ client: string &log &optional;
+ server: string &log &optional;
+ resp_size: count &log &default=0;
+
+ ## Indicate if the SSH session is done being watched.
+ done: bool &default=F;
+ };
+
+ const password_guesses_limit = 30 &redef;
+
+ # The size in bytes at which the SSH connection is presumed to be
+ # successful.
+ const authentication_data_size = 5500 &redef;
+
+ # The amount of time to remember presumed non-successful logins to build
+ # a model of a password guesser.
+ const guessing_timeout = 30 mins &redef;
+
+ # The set of countries for which you'd like to throw notices upon successful login.
+ # Requires Bro to be compiled with libGeoIP support.
+ const watched_countries: set[string] = {"RO"} &redef;
+
+ # Strange/bad host names to originate successful SSH logins
+ const interesting_hostnames =
+ /^d?ns[0-9]*\./ |
+ /^smtp[0-9]*\./ |
+ /^mail[0-9]*\./ |
+ /^pop[0-9]*\./ |
+ /^imap[0-9]*\./ |
+ /^www[0-9]*\./ |
+ /^ftp[0-9]*\./ &redef;
+
+ # This is a table with orig subnet as the key, and subnet as the value.
+ const ignore_guessers: table[subnet] of subnet &redef;
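+ # A hypothetical entry (illustration only, not part of this policy) could
+ # ignore guesses from an internal scanner subnet against a lab network:
+ #   redef SSH::ignore_guessers += { [10.0.0.0/8] = 192.168.0.0/16 };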
+
+ # If true, we tell the event engine to not look at further data
+ # packets after the initial SSH handshake. Helps with performance
+ # (especially with large file transfers) but precludes some
+ # kinds of analyses (e.g., tracking connection size).
+ const skip_processing_after_detection = F &redef;
+
+ # Keeps count of how many rejections a host has had
+ global password_rejections: table[addr] of TrackCount
+ &write_expire=guessing_timeout
+ &synchronized;
+
+ # Keeps track of hosts identified as guessing passwords
+ # TODO: guessing_timeout doesn't work correctly here. If a user redefs
+ # the variable, it won't take effect.
+ global password_guessers: set[addr] &read_expire=guessing_timeout+1hr &synchronized;
+
+ global log_ssh: event(rec: Info);
+}
+
+# Configure DPD and the packet filter
+redef capture_filters += { ["ssh"] = "tcp port 22" };
+redef dpd_config += { [ANALYZER_SSH] = [$ports = set(22/tcp)] };
+
+redef record connection += {
+ ssh: Info &optional;
+};
+
+event bro_init()
+{
+ Log::create_stream(SSH, [$columns=Info, $ev=log_ssh]);
+}
+
+function set_session(c: connection)
+ {
+ if ( ! c?$ssh )
+ {
+ local info: Info;
+ info$ts=network_time();
+ info$uid=c$uid;
+ info$id=c$id;
+ c$ssh = info;
+ }
+ }
+
+function check_ssh_connection(c: connection, done: bool)
+ {
+ # If done watching this connection, just return.
+ if ( c$ssh$done )
+ return;
+
+ # If this is still a live connection and the byte count has not
+ # crossed the threshold, just return and let the rescheduled check happen later.
+ if ( !done && c$resp$size < authentication_data_size )
+ return;
+
+ # Make sure the server has sent back more than 50 bytes to filter out
+ # hosts that are just port scanning. Nothing is ever logged if the server
+ # doesn't send back at least 50 bytes.
+ if ( c$resp$size < 50 )
+ return;
+
+ local status = "failure";
+ local direction = Site::is_local_addr(c$id$orig_h) ? "to" : "from";
+ local location: geo_location;
+ location = (direction == "to") ? lookup_location(c$id$resp_h) : lookup_location(c$id$orig_h);
+
+ if ( done && c$resp$size < authentication_data_size )
+ {
+ # presumed failure
+ if ( c$id$orig_h !in password_rejections )
+ password_rejections[c$id$orig_h] = new_track_count();
+
+ # Track the number of rejections
+ if ( !(c$id$orig_h in ignore_guessers &&
+ c$id$resp_h in ignore_guessers[c$id$orig_h]) )
+ ++password_rejections[c$id$orig_h]$n;
+
+ if ( default_check_threshold(password_rejections[c$id$orig_h]) )
+ {
+ add password_guessers[c$id$orig_h];
+ NOTICE([$note=Password_Guessing,
+ $conn=c,
+ $msg=fmt("SSH password guessing by %s", c$id$orig_h),
+ $sub=fmt("%d failed logins", password_rejections[c$id$orig_h]$n),
+ $n=password_rejections[c$id$orig_h]$n]);
+ }
+ }
+ # TODO: This is to work around a quasi-bug in Bro which occasionally
+ # causes the byte count to be oversized.
+ # Watch for Gregor's work that adds an actual counter of bytes transferred.
+ else if ( c$resp$size < 20000000 )
+ {
+ # presumed successful login
+ status = "success";
+ c$ssh$done = T;
+
+ if ( c$id$orig_h in password_rejections &&
+ password_rejections[c$id$orig_h]$n > password_guesses_limit &&
+ c$id$orig_h !in password_guessers )
+ {
+ add password_guessers[c$id$orig_h];
+ NOTICE([$note=Login_By_Password_Guesser,
+ $conn=c,
+ $n=password_rejections[c$id$orig_h]$n,
+ $msg=fmt("Successful SSH login by password guesser %s", c$id$orig_h),
+ $sub=fmt("%d failed logins", password_rejections[c$id$orig_h]$n)]);
+ }
+
+ local message = fmt("SSH login %s %s \"%s\" \"%s\" %f %f %s (triggered with %d bytes)",
+ direction, location$country_code, location$region, location$city,
+ location$latitude, location$longitude,
+ id_string(c$id), c$resp$size);
+ NOTICE([$note=Login,
+ $conn=c,
+ $msg=message,
+ $sub=location$country_code]);
+
+ # Check to see if this login came from an interesting hostname
+ when ( local hostname = lookup_addr(c$id$orig_h) )
+ {
+ if ( interesting_hostnames in hostname )
+ {
+ NOTICE([$note=Login_From_Interesting_Hostname,
+ $conn=c,
+ $msg=fmt("Strange login from %s", hostname),
+ $sub=hostname]);
+ }
+ }
+
+ if ( location$country_code in watched_countries )
+ {
+
+ }
+
+ }
+ else if ( c$resp$size >= 200000000 )
+ {
+ NOTICE([$note=Bytecount_Inconsistency,
+ $conn=c,
+ $msg="During byte counting in SSH analysis, an overly large value was seen.",
+ $sub=fmt("%d",c$resp$size)]);
+ }
+
+ c$ssh$remote_location = location;
+ c$ssh$status = status;
+ c$ssh$direction = direction;
+ c$ssh$resp_size = c$resp$size;
+
+ Log::write(SSH, c$ssh);
+
+ # Set the "done" flag to prevent the watching event from rescheduling
+ # after detection is done.
+ c$ssh$done = T;
+
+ # Stop watching this connection, we don't care about it anymore.
+ if ( skip_processing_after_detection )
+ {
+ skip_further_processing(c$id);
+ set_record_packets(c$id, F);
+ }
+ }
+
+event connection_state_remove(c: connection) &priority=-5
+ {
+ if ( c?$ssh )
+ check_ssh_connection(c, T);
+ }
+
+event ssh_watcher(c: connection)
+ {
+ local id = c$id;
+ # don't go any further if this connection is gone already!
+ if ( !connection_exists(id) )
+ return;
+
+ check_ssh_connection(c, F);
+ if ( ! c$ssh$done )
+ schedule +15secs { ssh_watcher(c) };
+ }
+
+event ssh_server_version(c: connection, version: string) &priority=5
+ {
+ set_session(c);
+ c$ssh$server = version;
+ }
+
+event ssh_client_version(c: connection, version: string) &priority=5
+ {
+ set_session(c);
+ c$ssh$client = version;
+ schedule +15secs { ssh_watcher(c) };
+ }
diff --git a/tests/examplefiles/test.cs b/tests/examplefiles/test.cs
index ffa9bfea..faab7e42 100644
--- a/tests/examplefiles/test.cs
+++ b/tests/examplefiles/test.cs
@@ -153,6 +153,29 @@ namespace Diva.Core {
public OpenerTask (string fileName)
{
this.fileName = fileName;
+ var verbatimString = @"c:\test\";
+
+ var verbatimStringWithNewline = @"test \\ \n \t \r
+a
+b
+c";
+ var verbatimStringWithEscapedQuotes = @"He said
+""she says \"" is not an escaped character in verbatimstrings""
+";
+
+ int[] numbers = { 5,6,4,2,4,6,8,9,7,0 };
+ var linqExample = from n in numbers
+ where n > 5
+ select n;
+
+ var anotherlinqExample = from n in numbers
+ orderby n descending
+ select n;
+
+ int[] someMoreNumbers = { 8,2,17,34,8,9,9,5,3,4,2,1,5 };
+ var moreLinq = from n in numbers
+ join mn in someMoreNumbers on n equals mn + 2
+ select new {n, mn};
}
public override void Reset ()
diff --git a/tests/examplefiles/test.dart b/tests/examplefiles/test.dart
new file mode 100644
index 00000000..aa1fb0ed
--- /dev/null
+++ b/tests/examplefiles/test.dart
@@ -0,0 +1,23 @@
+// Greeter example from
+// <http://www.dartlang.org/docs/getting-started/interface.html>
+class Greeter implements Comparable {
+ String prefix = 'Hello,';
+ Greeter() {}
+ Greeter.withPrefix(this.prefix);
+ greet(String name) => print('$prefix $name');
+
+ int compareTo(Greeter other) => prefix.compareTo(other.prefix);
+}
+
+void main() {
+ Greeter greeter = new Greeter();
+ Greeter greeter2 = new Greeter.withPrefix('Hi,');
+
+ num result = greeter2.compareTo(greeter);
+ if (result == 0) {
+ greeter2.greet('you are the same.');
+ } else {
+ greeter2.greet('you are different.');
+ }
+}
+
diff --git a/tests/examplefiles/test.ecl b/tests/examplefiles/test.ecl
new file mode 100644
index 00000000..b686492a
--- /dev/null
+++ b/tests/examplefiles/test.ecl
@@ -0,0 +1,58 @@
+/*##############################################################################
+
+ Copyright (C) 2011 HPCC Systems.
+
+ All rights reserved. This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as
+ published by the Free Software Foundation, either version 3 of the
+ License, or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+############################################################################## */
+
+#option ('slidingJoins', true);
+
+namesRecord :=
+ RECORD
+string20 surname;
+string10 forename;
+integer2 age;
+integer2 dadAge;
+integer2 mumAge;
+ END;
+
+namesRecord2 :=
+ record
+string10 extra;
+namesRecord;
+ end;
+
+namesTable := dataset('x',namesRecord,FLAT);
+namesTable2 := dataset('y',namesRecord2,FLAT);
+
+integer2 aveAgeL(namesRecord l) := (l.dadAge+l.mumAge)/2;
+integer2 aveAgeR(namesRecord2 r) := (r.dadAge+r.mumAge)/2;
+
+// Standard join on a function of left and right
+output(join(namesTable, namesTable2, aveAgeL(left) = aveAgeR(right)));
+
+//Several simple examples of sliding join syntax
+output(join(namesTable, namesTable2, left.age >= right.age - 10 and left.age <= right.age +10));
+output(join(namesTable, namesTable2, left.age between right.age - 10 and right.age +10));
+output(join(namesTable, namesTable2, left.age between right.age + 10 and right.age +30));
+output(join(namesTable, namesTable2, left.age between (right.age + 20) - 10 and (right.age +20) + 10));
+output(join(namesTable, namesTable2, aveAgeL(left) between aveAgeR(right)+10 and aveAgeR(right)+40));
+
+//Same, but on strings. Also includes age to ensure sort is done by non-sliding before sliding.
+output(join(namesTable, namesTable2, left.surname between right.surname[1..10]+'AAAAAAAAAA' and right.surname[1..10]+'ZZZZZZZZZZ' and left.age=right.age));
+output(join(namesTable, namesTable2, left.surname between right.surname[1..10]+'AAAAAAAAAA' and right.surname[1..10]+'ZZZZZZZZZZ' and left.age=right.age,all));
+
+//This should not generate a self join
+output(join(namesTable, namesTable, left.age between right.age - 10 and right.age +10));
+
diff --git a/tests/examplefiles/test.fan b/tests/examplefiles/test.fan
new file mode 100755
index 00000000..00e80b60
--- /dev/null
+++ b/tests/examplefiles/test.fan
@@ -0,0 +1,818 @@
+//
+// Copyright (c) 2008, Brian Frank and Andy Frank
+// Licensed under the Academic Free License version 3.0
+//
+// History:
+// 17 Nov 08 Brian Frank Creation
+//
+
+using compiler
+
+**
+** JavaBridge is the compiler plugin for bringing Java
+** classes into the Fantom type system.
+**
+class JavaBridge : CBridge
+{
+
+//////////////////////////////////////////////////////////////////////////
+// Constructor
+//////////////////////////////////////////////////////////////////////////
+
+ **
+ ** Construct a JavaBridge for current environment
+ **
+ new make(Compiler c, ClassPath cp := ClassPath.makeForCurrent)
+ : super(c)
+ {
+ this.cp = cp
+ }
+
+//////////////////////////////////////////////////////////////////////////
+// Namespace
+//////////////////////////////////////////////////////////////////////////
+
+ **
+ ** Map a FFI "podName" to a Java package.
+ **
+ override CPod resolvePod(Str name, Loc? loc)
+ {
+ // the empty package is used to represent primitives
+ if (name == "") return primitives
+
+ // look for package name in classpath
+ classes := cp.classes[name]
+ if (classes == null)
+ throw CompilerErr("Java package '$name' not found", loc)
+
+ // map package to JavaPod
+ return JavaPod(this, name, classes)
+ }
+
+ **
+ ** Map class meta-data and Java members to Fantom slots
+ ** for the specified JavaType.
+ **
+ virtual Void loadType(JavaType type, Str:CSlot slots)
+ {
+ JavaReflect.loadType(type, slots)
+ }
+
+//////////////////////////////////////////////////////////////////////////
+// Call Resolution
+//////////////////////////////////////////////////////////////////////////
+
+ **
+ ** Resolve a construction call to a Java constructor.
+ **
+ override Expr resolveConstruction(CallExpr call)
+ {
+ // if the last argument is an it-block, then we know
+ // right away that we will not be passing it thru to Java,
+ // so strip it off to be appended as call to Obj.with
+ itBlock := call.args.last as ClosureExpr
+ if (itBlock != null && itBlock.isItBlock)
+ call.args.removeAt(-1)
+ else
+ itBlock = null
+
+ // if this is an interop array like IntArray/int[] use make
+ // factory otherwise look for Java constructor called <init>
+ JavaType base := call.target.ctype
+ if (base.isInteropArray)
+ call.method = base.method("make")
+ else
+ call.method = base.method("<init>")
+
+ // call resolution to deal with overloading
+ call = resolveCall(call)
+
+ // we need to create an implicit target for the Java runtime
+ // to perform the new opcode to ensure it is on the stack
+ // before the args (we don't do this for interop Array classes)
+ if (!base.isInteropArray)
+ {
+ loc := call.loc
+ call.target = CallExpr.makeWithMethod(loc, null, base.newMethod) { synthetic=true }
+ }
+
+ // if we stripped an it-block argument,
+ // add it as trailing call to Obj.with
+ if (itBlock != null) return itBlock.toWith(call)
+ return call
+ }
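+
+ // Illustration (hypothetical Fantom call, assuming a Java ArrayList type is
+ // visible through the FFI): a construction such as
+ //   ArrayList(16) { it.add("x") }
+ // is handled here by stripping the trailing it-block, resolving the Java
+ // <init> overload that takes an int, and re-appending the it-block as
+ //   ArrayList(16).with { it.add("x") }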
+
+ **
+ ** Resolve a construction chain call where a Fantom constructor
+ ** calls the super-class constructor. Type check the arguments
+ ** and insert any conversions needed.
+ **
+ override Expr resolveConstructorChain(CallExpr call)
+ {
+ // we don't allow chaining to a this ctor for Java FFI
+ if (call.target.id !== ExprId.superExpr)
+ throw err("Must use super constructor call in Java FFI", call.loc)
+
+ // route to a superclass constructor
+ JavaType base := call.target.ctype.deref
+ call.method = base.method("<init>")
+
+ // call resolution to deal with overloading
+ return resolveCall(call)
+ }
+
+ **
+ ** Given a dot operator slot access on the given foreign
+ ** base type, determine the appropriate slot to use based on
+ ** whether parens were used
+ ** base.name => noParens = true
+ ** base.name() => noParens = false
+ **
+ ** In Java a given name could be bound to both a field and
+ ** a method. In this case we only resolve the field if
+ ** no parens are used. We also handle the special case of
+ ** Java annotations here because their element methods are
+ ** also mapped as Fantom fields (instance based mixin field).
+ **
+ override CSlot? resolveSlotAccess(CType base, Str name, Bool noParens)
+ {
+ // first try to resolve as a field
+ field := base.field(name)
+ if (field != null)
+ {
+ // if no () were used and this isn't an annotation field
+ if (noParens && (field.isStatic || !base.isMixin))
+ return field
+
+ // if we did find a field, then make sure we use that
+ // field's parent type to resolve a method (because the
+ // base type might be a sub-class of a Java type in which
+ // case it is unaware of field/method overloads)
+ return field.parent.method(name)
+ }
+
+ // lookup method
+ return base.method(name)
+ }
+
+ **
+ ** Resolve a method call: try to find the best match
+ ** and apply any coercions needed.
+ **
+ override CallExpr resolveCall(CallExpr call)
+ {
+ // try to match against all the overloaded methods
+ matches := CallMatch[,]
+ CMethod? m := call.method
+ while (m != null)
+ {
+ match := matchCall(call, m)
+ if (match != null) matches.add(match)
+ m = m is JavaMethod ? ((JavaMethod)m).next : null
+ }
+
+ // if we have exactly one match then use that one
+ if (matches.size == 1) return matches[0].apply(call)
+
+ // if we have multiple matches; resolve to
+ // most specific match according to JLS rules
+ // TODO: this does not correctly resolve when using Fantom implicit casting
+ if (matches.size > 1)
+ {
+ best := resolveMostSpecific(matches)
+ if (best != null) return best.apply(call)
+ }
+
+ // zero or multiple ambiguous matches is a compiler error
+ s := StrBuf()
+ s.add(matches.isEmpty ? "Invalid args " : "Ambiguous call ")
+ s.add(call.name).add("(")
+ s.add(call.args.join(", ") |Expr arg->Str| { return arg.toTypeStr })
+ s.add(")")
+ throw err(s.toStr, call.loc)
+ }
+
+ **
+ ** Check if the call matches the specified overload method.
+ ** If so return method and coerced args otherwise return null.
+ **
+ internal CallMatch? matchCall(CallExpr call, CMethod m)
+ {
+ // first check if we have matching numbers of args and params
+ args := call.args
+ if (m.params.size < args.size) return null
+
+ // check if each argument is ok or can be coerced
+ isErr := false
+ newArgs := args.dup
+ m.params.each |CParam p, Int i|
+ {
+ if (i >= args.size)
+ {
+ // if the param has a default value, then that is ok
+ if (!p.hasDefault) isErr = true
+ }
+ else
+ {
+ // ensure arg fits parameter type (or auto-cast)
+ newArgs[i] = coerce(args[i], p.paramType) |->| { isErr = true }
+ }
+ }
+ if (isErr) return null
+ return CallMatch { it.method = m; it.args = newArgs }
+ }
+
+ **
+ ** Given a list of overloaded methods find the most specific method
+ ** according to Java Language Specification 15.11.2.2. The "informal
+ ** intuition" rule is that a method is more specific than another
+ ** if the first could be passed on to the second one.
+ **
+ internal static CallMatch? resolveMostSpecific(CallMatch[] matches)
+ {
+ CallMatch? best := matches[0]
+ for (i:=1; i<matches.size; ++i)
+ {
+ x := matches[i]
+ if (isMoreSpecific(best, x)) { continue }
+ if (isMoreSpecific(x, best)) { best = x; continue }
+ return null
+ }
+ return best
+ }
+
+ **
+ ** Is 'a' more specific than 'b', such that 'a' could be
+ ** passed to 'b' without a compile time error.
+ **
+ internal static Bool isMoreSpecific(CallMatch a, CallMatch b)
+ {
+ return a.method.params.all |CParam ap, Int i->Bool|
+ {
+ bp := b.method.params[i]
+ return ap.paramType.fits(bp.paramType)
+ }
+ }
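+
+ // Illustration (hypothetical overloads): for write(Object) and write(String),
+ // a match against write(String) is more specific, because its String argument
+ // could also be passed to write(Object) but not vice versa, so
+ // resolveMostSpecific would pick the write(String) match.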
+
+//////////////////////////////////////////////////////////////////////////
+// Overrides
+//////////////////////////////////////////////////////////////////////////
+
+ **
+ ** Called during Inherit step when a Fantom slot overrides a FFI slot.
+ ** Log and throw compiler error if there is a problem.
+ **
+ override Void checkOverride(TypeDef t, CSlot base, SlotDef def)
+ {
+ // we don't allow Fantom to override Java methods with multiple
+ // overloaded versions since the Fantom type system can't actually
+ // override all the overloaded versions
+ jslot := base as JavaSlot
+ if (jslot?.next != null)
+ throw err("Cannot override Java overloaded method: '$jslot.name'", def.loc)
+
+ // route to method override checking
+ if (base is JavaMethod && def is MethodDef)
+ checkMethodOverride(t, base, def)
+ }
+
+ **
+ ** Called on method/method overrides in the checkOverride callback.
+ **
+ private Void checkMethodOverride(TypeDef t, JavaMethod base, MethodDef def)
+ {
+ // bail early if we know things aren't going to work out
+ if (base.params.size != def.params.size) return
+
+ // if the return type is primitive or Java array and the
+ ** Fantom declaration matches how it is inferred into the Fantom
+ ** type system, then just change the return type - the compiler
+ ** will implicitly do all the return coercions
+ if (isOverrideInferredType(base.returnType, def.returnType))
+ {
+ def.ret = def.inheritedRet = base.returnType
+ }
+
+ // if any of the parameters is a primitive or Java array
+ // and the Fantom declaration matches how it is inferred into
+ ** the Fantom type system, then change the parameter type to
+ // the Java override type and make the Fantom type a local
+ // variable:
+ // Java: void foo(int a) { ... }
+ // Fantom: Void foo(Int a) { ... }
+ // Result: Void foo(int a_$J) { Int a := a_$J; ... }
+ //
+ base.params.eachr |CParam bp, Int i|
+ {
+ dp := def.paramDefs[i]
+ if (!isOverrideInferredType(bp.paramType, dp.paramType)) return
+
+ // add local variable: Int bar := bar_$J
+ local := LocalDefStmt(def.loc)
+ local.ctype = dp.paramType
+ local.name = dp.name
+ local.init = UnknownVarExpr(def.loc, null, dp.name + "_\$J")
+ def.code.stmts.insert(0, local)
+
+ // rename parameter Int bar -> int bar_$J
+ dp.name = dp.name + "_\$J"
+ dp.paramType = bp.paramType
+ }
+ }
+
+ **
+ ** When overriding a Java method, check if the base type is
+ ** a Java primitive or array and the override definition
+ ** matches how the Java type is inferred in the Fantom type system.
+ ** If we have a match return true and we'll swizzle things in
+ ** checkMethodOverride.
+ **
+ static private Bool isOverrideInferredType(CType base, CType def)
+ {
+ // check if base class slot is a JavaType
+ java := base.toNonNullable as JavaType
+ if (java != null)
+ {
+ // allow primitives if it matches the inferred type
+ if (java.isPrimitive) return java.inferredAs == def
+
+ // allow arrays if mapped as Foo[] -> Foo?[]?
+ if (java.isArray) return java.inferredAs == def.toNonNullable && def.isNullable
+ }
+ return false
+ }
+
+//////////////////////////////////////////////////////////////////////////
+// CheckErrors
+//////////////////////////////////////////////////////////////////////////
+
+ **
+ ** Called during CheckErrors step for a type which extends
+ ** a FFI class or implements any FFI mixins.
+ **
+ override Void checkType(TypeDef def)
+ {
+ // can't subclass a primitive array like ByteArray/byte[]
+ if (def.base.deref is JavaType && def.base.deref->isInteropArray)
+ {
+ err("Cannot subclass from Java interop array: $def.base", def.loc)
+ return
+ }
+
+ // we don't allow deep inheritance of Java classes because
+ // the Fantom constructor and Java constructor model don't match
+ // up past one level of inheritance
+ // NOTE: when we remove this restriction we need to
+ // test how field initialization works because instance$init
+ // is almost certain to break with the current emit design
+ javaBase := def.base
+ while (javaBase != null && !javaBase.isForeign) javaBase = javaBase.base
+ if (javaBase != null && javaBase !== def.base)
+ {
+ err("Cannot subclass Java class more than one level: $javaBase", def.loc)
+ return
+ }
+
+ // ensure that when we map Fantom constructors to Java
+ // constructors that we don't have duplicate signatures
+ ctors := def.ctorDefs
+ ctors.each |MethodDef a, Int i|
+ {
+ ctors.each |MethodDef b, Int j|
+ {
+ if (i > j && areParamsSame(a, b))
+ err("Duplicate Java FFI constructor signatures: '$b.name' and '$a.name'", a.loc)
+ }
+ }
+ }
+
+ **
+ ** Do the two methods have the exact same parameter types.
+ **
+ static Bool areParamsSame(CMethod a, CMethod b)
+ {
+ if (a.params.size != b.params.size) return false
+ for (i:=0; i<a.params.size; ++i)
+ {
+ if (a.params[i].paramType != b.params[i].paramType)
+ return false
+ }
+ return true
+ }
+
+//////////////////////////////////////////////////////////////////////////
+// Coercion
+//////////////////////////////////////////////////////////////////////////
+
+ **
+ ** Return if we can make the actual type fit the expected
+ ** type, potentially using a coercion.
+ **
+ Bool fits(CType actual, CType expected)
+ {
+ // use dummy expression and route to coerce code
+ dummy := UnknownVarExpr(Loc("dummy"), null, "dummy") { ctype = actual }
+ fits := true
+ coerce(dummy, expected) |->| { fits=false }
+ return fits
+ }
+
+ **
+ ** Coerce expression to expected type. If not a type match
+ ** then run the onErr function.
+ **
+ override Expr coerce(Expr expr, CType expected, |->| onErr)
+ {
+ // handle easy case
+ actual := expr.ctype
+ expected = expected.deref
+ if (actual == expected) return expr
+
+ // handle null literal
+ if (expr.id === ExprId.nullLiteral && expected.isNullable)
+ return expr
+
+ // handle Fantom to Java primitives
+ if (expected.pod == primitives)
+ return coerceToPrimitive(expr, expected, onErr)
+
+ // handle Java primitives to Fan
+ if (actual.pod == primitives)
+ return coerceFromPrimitive(expr, expected, onErr)
+
+ // handle Java array to Fantom list
+ if (actual.name[0] == '[')
+ return coerceFromArray(expr, expected, onErr)
+
+ // handle Fantom list to Java array
+ if (expected.name[0] == '[')
+ return coerceToArray(expr, expected, onErr)
+
+ // handle sys::Func -> Java interface
+ if (actual is FuncType && expected.isMixin && expected.toNonNullable is JavaType)
+ return coerceFuncToInterface(expr, expected.toNonNullable, onErr)
+
+ // handle special classes and interfaces for built-in Fantom
+ // classes which actually map directly to Java built-in types
+ if (actual.isBool && boolTypes.contains(expected.toNonNullable.signature)) return box(expr)
+ if (actual.isInt && intTypes.contains(expected.toNonNullable.signature)) return box(expr)
+ if (actual.isFloat && floatTypes.contains(expected.toNonNullable.signature)) return box(expr)
+ if (actual.isDecimal && decimalTypes.contains(expected.toNonNullable.signature)) return expr
+ if (actual.isStr && strTypes.contains(expected.toNonNullable.signature)) return expr
+
+ // use normal Fantom coercion behavior
+ return super.coerce(expr, expected, onErr)
+ }
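+
+ // For example (hypothetical call sites): a sys::Int argument passed where a
+ // Java method expects a primitive 'int' is routed through coerceToPrimitive,
+ // while a Str[] list passed where 'String[]' is expected is routed through
+ // coerceToArray; anything not handled above falls back to the standard
+ // Fantom coercion in super.coerce.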
+
+ **
+ ** Ensure value type is boxed.
+ **
+ private Expr box(Expr expr)
+ {
+ if (expr.ctype.isVal)
+ return TypeCheckExpr.coerce(expr, expr.ctype.toNullable)
+ else
+ return expr
+ }
+
+ **
+ ** Coerce a Fantom expression to a Java primitive (other
+ ** than the ones we support natively)
+ **
+ Expr coerceToPrimitive(Expr expr, JavaType expected, |->| onErr)
+ {
+ actual := expr.ctype
+
+ // sys::Int (long) -> int, short, byte
+ if (actual.isInt && expected.isPrimitiveIntLike)
+ return TypeCheckExpr.coerce(expr, expected)
+
+ // sys::Float (double) -> float
+ if (actual.isFloat && expected.isPrimitiveFloat)
+ return TypeCheckExpr.coerce(expr, expected)
+
+ // no coercion - type error
+ onErr()
+ return expr
+ }
+
+ **
+ ** Coerce a Java primitive to a Fantom type.
+ **
+ Expr coerceFromPrimitive(Expr expr, CType expected, |->| onErr)
+ {
+ actual := (JavaType)expr.ctype
+
+ // int, short, byte -> sys::Int (long)
+ if (actual.isPrimitiveIntLike)
+ {
+ if (expected.isInt || expected.isObj)
+ return TypeCheckExpr.coerce(expr, expected)
+ }
+
+ // float -> sys::Float (double)
+ if (actual.isPrimitiveFloat)
+ {
+ if (expected.isFloat || expected.isObj)
+ return TypeCheckExpr.coerce(expr, expected)
+ }
+
+ // no coercion - type error
+ onErr()
+ return expr
+ }
+
+ **
+ ** Coerce a Java array to a Fantom list.
+ **
+ Expr coerceFromArray(Expr expr, CType expected, |->| onErr)
+ {
+ actual := (JavaType)expr.ctype.toNonNullable
+
+ // if expected is array type
+ if (expected is JavaType && ((JavaType)expected).isArray)
+ if (actual.arrayOf.fits(((JavaType)expected).arrayOf)) return expr
+
+ // if expected is Obj
+ if (expected.isObj) return arrayToList(expr, actual.inferredArrayOf)
+
+ // if expected is list type
+ if (expected.toNonNullable is ListType)
+ {
+ expectedOf := ((ListType)expected.toNonNullable).v
+ if (actual.inferredArrayOf.fits(expectedOf)) return arrayToList(expr, expectedOf)
+ }
+
+ // no coercion available
+ onErr()
+ return expr
+ }
+
+ **
+ ** Generate List.make(of, expr) where expr is Object[]
+ **
+ private Expr arrayToList(Expr expr, CType of)
+ {
+ loc := expr.loc
+ ofExpr := LiteralExpr(loc, ExprId.typeLiteral, ns.typeType, of)
+ call := CallExpr.makeWithMethod(loc, null, listMakeFromArray, [ofExpr, expr])
+ call.synthetic = true
+ return call
+ }
+
+ **
+ ** Coerce a Fantom list to Java array.
+ **
+ Expr coerceToArray(Expr expr, CType expected, |->| onErr)
+ {
+ loc := expr.loc
+ expectedOf := ((JavaType)expected.toNonNullable).inferredArrayOf
+ actual := expr.ctype
+
+ // if actual is list type
+ if (actual.toNonNullable is ListType)
+ {
+ actualOf := ((ListType)actual.toNonNullable).v
+ if (actualOf.fits(expectedOf))
+ {
+ // (Foo[])list.asArray(cls)
+ clsLiteral := CallExpr.makeWithMethod(loc, null, JavaType.classLiteral(this, expectedOf))
+ asArray := CallExpr.makeWithMethod(loc, expr, listAsArray, [clsLiteral])
+ return TypeCheckExpr.coerce(asArray, expected)
+ }
+ }
+
+ // no coercion available
+ onErr()
+ return expr
+ }
+
+ **
+ ** Attempt to coerce a parameterized sys::Func expr to a Java
+ ** interface if the interface supports exactly one matching method.
+ **
+ Expr coerceFuncToInterface(Expr expr, JavaType expected, |->| onErr)
+ {
+ // check if we have exactly one abstract method in the expected type
+ loc := expr.loc
+ abstracts := expected.methods.findAll |CMethod m->Bool| { return m.isAbstract }
+ if (abstracts.size != 1) { onErr(); return expr }
+ method := abstracts.first
+
+ // check if we have a match
+ FuncType funcType := (FuncType)expr.ctype
+ if (!isFuncToInterfaceMatch(funcType, method)) { onErr(); return expr }
+
+ // check if we've already generated a wrapper for this combo
+ key := "${funcType.signature}+${method.qname}"
+ ctor := funcWrappers[key]
+ if (ctor == null)
+ {
+ ctor = generateFuncToInterfaceWrapper(expr.loc, funcType, expected, method)
+ funcWrappers[key] = ctor
+ }
+
+ // replace expr with FuncWrapperX(expr)
+ call := CallExpr.makeWithMethod(loc, null, ctor, [expr])
+ call.synthetic = true
+ return call
+ }
+
+ **
+ ** Return if the specified function type can be used to implement
+ ** the specified interface method.
+ **
+ Bool isFuncToInterfaceMatch(FuncType funcType, CMethod method)
+ {
+ // sanity check to map to callX method - can't handle more than 8 args
+ if (method.params.size > 8) return false
+
+ // check if method is match for function; first check is that
+ // method must supply all the arguments required by the function
+ if (funcType.params.size > method.params.size) return false
+
+ // check that func return type fits method return
+ retOk := method.returnType.isVoid || fits(funcType.ret, method.returnType)
+ if (!retOk) return false
+
+ // check all the method parameters fit the function parameters
+ paramsOk := funcType.params.all |CType f, Int i->Bool| { return fits(f, method.params[i].paramType) }
+ if (!paramsOk) return false
+
+ return true
+ }
+
+ **
+ ** Generate the wrapper which implements the specified expected interface
+ ** and overrides the specified method which calls the function.
+ **
+ CMethod generateFuncToInterfaceWrapper(Loc loc, FuncType funcType, CType expected, CMethod method)
+ {
+ // Fantom: func typed as |Str|
+ // Java: interface Foo { void bar(String) }
+ // Result: FuncWrapperX(func)
+ //
+ // class FuncWrapperX : Foo
+ // {
+ // new make(Func f) { _func = f }
+ // override Void bar(Str a) { _func.call(a) }
+ // Func _func
+ // }
+
+ // generate FuncWrapper class
+ name := "FuncWrapper" + funcWrappers.size
+ cls := TypeDef(ns, loc, compiler.types[0].unit, name, FConst.Internal + FConst.Synthetic)
+ cls.base = ns.objType
+ cls.mixins = [expected]
+ addTypeDef(cls)
+
+ // generate FuncWrapper._func field
+ field := FieldDef(loc, cls)
+ ((SlotDef)field).name = "_func"
+ ((DefNode)field).flags = FConst.Private + FConst.Storage + FConst.Synthetic
+ field.fieldType = funcType
+ cls.addSlot(field)
+
+ // generate FuncWrapper.make constructor
+ ctor := MethodDef(loc, cls, "make", FConst.Internal + FConst.Ctor + FConst.Synthetic)
+ ctor.ret = ns.voidType
+ ctor.paramDefs = [ParamDef(loc, funcType, "f")]
+ ctor.code = Block.make(loc)
+ ctor.code.stmts.add(BinaryExpr.makeAssign(
+ FieldExpr(loc, ThisExpr(loc), field),
+ UnknownVarExpr(loc, null, "f")).toStmt)
+ ctor.code.stmts.add(ReturnStmt.make(loc))
+ cls.addSlot(ctor)
+
+ // generate FuncWrapper override of abstract method
+ over := MethodDef(loc, cls, method.name, FConst.Public + FConst.Override + FConst.Synthetic)
+ over.ret = method.returnType
+ over.paramDefs = ParamDef[,]
+ over.code = Block.make(loc)
+ callArity := "call"
+ call := CallExpr.makeWithMethod(loc, FieldExpr(loc, ThisExpr(loc), field), funcType.method(callArity))
+ method.params.each |CParam param, Int i|
+ {
+ paramName := "p$i"
+ over.params.add(ParamDef(loc, param.paramType, paramName))
+ if (i < funcType.params.size)
+ call.args.add(UnknownVarExpr(loc, null, paramName))
+ }
+ if (method.returnType.isVoid)
+ over.code.stmts.add(call.toStmt).add(ReturnStmt(loc))
+ else
+ over.code.stmts.add(ReturnStmt(loc, call))
+ cls.addSlot(over)
+
+ // return the ctor which we use for coercion
+ return ctor
+ }
+
+//////////////////////////////////////////////////////////////////////////
+// Reflection
+//////////////////////////////////////////////////////////////////////////
+
+ **
+ ** Get a CMethod representation for 'List.make(Type, Object[])'
+ **
+ once CMethod listMakeFromArray()
+ {
+ return JavaMethod(
+ this.ns.listType,
+ "make",
+ FConst.Public + FConst.Static,
+ this.ns.listType.toNullable,
+ [
+ JavaParam("of", this.ns.typeType),
+ JavaParam("array", objectArrayType)
+ ])
+ }
+
+ **
+ ** Get a CMethod representation for 'Object[] List.asArray()'
+ **
+ once CMethod listAsArray()
+ {
+ return JavaMethod(
+ this.ns.listType,
+ "asArray",
+ FConst.Public,
+ objectArrayType,
+ [JavaParam("cls", classType)])
+ }
+
+ **
+ ** Get a CType representation for 'java.lang.Class'
+ **
+ once JavaType classType()
+ {
+ return ns.resolveType("[java]java.lang::Class")
+ }
+
+ **
+ ** Get a CType representation for 'java.lang.Object[]'
+ **
+ once JavaType objectArrayType()
+ {
+ return ns.resolveType("[java]java.lang::[Object")
+ }
+
+//////////////////////////////////////////////////////////////////////////
+// Fields
+//////////////////////////////////////////////////////////////////////////
+
+ const static Str[] boolTypes := Str[
+ "[java]java.io::Serializable",
+ "[java]java.lang::Comparable",
+ ]
+
+ const static Str[] intTypes := Str[
+ "[java]java.lang::Number",
+ "[java]java.io::Serializable",
+ "[java]java.lang::Comparable",
+ ]
+
+ const static Str[] floatTypes := Str[
+ "[java]java.lang::Number",
+ "[java]java.io::Serializable",
+ "[java]java.lang::Comparable",
+ ]
+
+ const static Str[] decimalTypes := Str[
+ "[java]java.lang::Number",
+ "[java]java.io::Serializable",
+ "[java]java.lang::Comparable",
+ ]
+
+ const static Str[] strTypes := Str[
+ "[java]java.io::Serializable",
+ "[java]java.lang::CharSequence",
+ "[java]java.lang::Comparable",
+ ]
+
+ JavaPrimitives primitives := JavaPrimitives(this)
+ ClassPath cp
+
+ private Str:CMethod funcWrappers := Str:CMethod[:] // funcType+method:ctor
+
+}
+
+**************************************************************************
+** CallMatch
+**************************************************************************
+
+internal class CallMatch
+{
+ CallExpr apply(CallExpr call)
+ {
+ call.args = args
+ call.method = method
+ call.ctype = method.isCtor ? method.parent : method.returnType
+ return call
+ }
+
+ override Str toStr() { return method.signature }
+
+ CMethod? method // matched method
+ Expr[]? args // coerced arguments
+} \ No newline at end of file
diff --git a/tests/examplefiles/test.ps1 b/tests/examplefiles/test.ps1
new file mode 100644
index 00000000..385fb6f4
--- /dev/null
+++ b/tests/examplefiles/test.ps1
@@ -0,0 +1,108 @@
+<#
+.SYNOPSIS
+Runs a T-SQL query and optionally outputs results to a delimited file.
+.DESCRIPTION
+The Invoke-Sql script runs a T-SQL query or stored procedure and optionally outputs a delimited file.
+.EXAMPLE
+PowerShell.exe -File "C:\Scripts\Invoke-Sql.ps1" -ServerInstance "Z003\sqlprod2" -Database orders -Query "EXEC usp_accounts '12445678'"
+This example connects to Z003\sqlprod2.Orders and executes a stored procedure which does not return a result set
+.EXAMPLE
+PowerShell.exe -File "C:\Scripts\Invoke-Sql.ps1" -ServerInstance "Z003\sqlprod2" -Database orders -Query "SELECT * FROM dbo.accounts" -FilePath "C:\Scripts\accounts.txt" -Delimiter ","
+This example connects to Z003\sqlprod2.Orders and selects the records from the accounts table; the data is output to a CSV file
+.NOTES
+Version History
+v1.0 - Chad Miller - 12/14/2010 - Initial release
+IMPORTANT!!! The EventLog source which is set to the application needs to be registered with
+the Event log:
+New-EventLog -LogName Application -Source $Application
+#>
+param(
+#ServerInstance is Mandatory!
+[Parameter(Position=0, Mandatory=$false)] [string]$ServerInstance,
+#Database is Mandatory!
+[Parameter(Position=1, Mandatory=$false)] [string]$Database,
+#Query is Mandatory!
+[Parameter(Position=2, Mandatory=$false)] [string]$Query,
+[Parameter(Position=3, Mandatory=$false)] [string]$Application="Invoke-Sql.ps1",
+[Parameter(Position=4, Mandatory=$false)] [string]$FilePath,
+[Parameter(Position=7, Mandatory=$false)] [string]$Delimiter="|",
+#If UserName isn't supplied a trusted connection will be used
+[Parameter(Position=5, Mandatory=$false)] [string]$UserName,
+[Parameter(Position=6, Mandatory=$false)] [string]$Password,
+[Parameter(Position=8, Mandatory=$false)] [Int32]$QueryTimeout=600,
+[Parameter(Position=9, Mandatory=$false)] [Int32]$ConnectionTimeout=15
+)
+
+
+#This must be run as administrator on Windows 2008 and higher!
+New-EventLog -LogName Application -Source $Application -EA SilentlyContinue
+$Error.Clear()
+
+#######################
+function Invoke-SqlCmd2
+{
+ param(
+ [Parameter(Position=0, Mandatory=$true)] [string]$ServerInstance,
+ [Parameter(Position=1, Mandatory=$true)] [string]$Database,
+ [Parameter(Position=2, Mandatory=$true)] [string]$Query,
+ [Parameter(Position=3, Mandatory=$false)] [string]$UserName,
+ [Parameter(Position=4, Mandatory=$false)] [string]$Password,
+ [Parameter(Position=5, Mandatory=$false)] [Int32]$QueryTimeout,
+ [Parameter(Position=6, Mandatory=$false)] [Int32]$ConnectionTimeout
+ )
+
+ try {
+ if ($Username)
+ { $ConnectionString = "Server={0};Database={1};User ID={2};Password={3};Trusted_Connection=False;Connect Timeout={4}" -f $ServerInstance,$Database,$Username,$Password,$ConnectionTimeout }
+ else
+ { $ConnectionString = "Server={0};Database={1};Integrated Security=True;Connect Timeout={2}" -f $ServerInstance,$Database,$ConnectionTimeout }
+ $conn=new-object System.Data.SqlClient.SQLConnection
+ $conn.ConnectionString=$ConnectionString
+ $conn.Open()
+ $cmd=new-object system.Data.SqlClient.SqlCommand($Query,$conn)
+ $cmd.CommandTimeout=$QueryTimeout
+ $ds=New-Object system.Data.DataSet
+ $da=New-Object system.Data.SqlClient.SqlDataAdapter($cmd)
+ [void]$da.fill($ds)
+ Write-Output ($ds.Tables[0])
+ }
+ finally {
+ $conn.Dispose()
+ }
+
+} #Invoke-SqlCmd2
+
+#######################
+# MAIN #
+#######################
+if ($PSBoundParameters.Count -eq 0)
+{
+ get-help $myInvocation.MyCommand.Path -full
+ break
+}
+
+try {
+ $msg = $null
+ $msg += "Application/Job Name: $Application`n"
+ $msg += "Query: $Query`n"
+ $msg += "ServerInstance: $ServerInstance`n"
+ $msg += "Database: $Database`n"
+ $msg += "FilePath: $FilePath`n"
+
+ Write-EventLog -LogName Application -Source "$Application" -EntryType Information -EventId 12345 -Message "Starting`n$msg"
+ $dt = Invoke-SqlCmd2 -ServerInstance $ServerInstance -Database $Database -Query $Query -UserName $UserName -Password $Password -QueryTimeOut $QueryTimeOut -ConnectionTimeout $ConnectionTimeout
+ if ($FilePath)
+ {
+ if ($dt)
+ { $dt | export-csv -Delimiter $Delimiter -Path $FilePath -NoTypeInformation }
+ else #Query Returned No Output!
+ {Write-EventLog -LogName Application -Source "$Application" -EntryType Warning -EventId 12345 -Message "NoOutput`n$msg" }
+ }
+
+ Write-EventLog -LogName Application -Source "$Application" -EntryType Information -EventId 12345 -Message "Completed`n$msg"
+}
+catch {
+ $Exception = "{0}, {1}" -f $_.Exception.GetType().FullName,$( $_.Exception.Message -replace "'" )
+ Write-EventLog -LogName Application -Source "$Application" -EntryType Error -EventId 12345 -Message "Error`n$msg`n$Exception"
+ throw
+}
diff --git a/tests/old_run.py b/tests/old_run.py
index ae410146..7929d743 100644
--- a/tests/old_run.py
+++ b/tests/old_run.py
@@ -8,7 +8,7 @@
python run.py [testfile ...]
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/run.py b/tests/run.py
index c803e276..ef92fe09 100644
--- a/tests/run.py
+++ b/tests/run.py
@@ -8,7 +8,7 @@
python run.py [testfile ...]
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_basic_api.py b/tests/test_basic_api.py
index 02261d24..e86f1579 100644
--- a/tests/test_basic_api.py
+++ b/tests/test_basic_api.py
@@ -3,7 +3,7 @@
Pygments basic API tests
~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -57,7 +57,7 @@ def test_lexer_classes():
assert 'root' in cls._tokens, \
'%s has no root state' % cls
- if cls.name == 'XQuery': # XXX temporary
+ if cls.name in ['XQuery', 'Opa']: # XXX temporary
return
tokens = list(inst.get_tokens(test_content))
diff --git a/tests/test_clexer.py b/tests/test_clexer.py
index 996f7038..08fb42cf 100644
--- a/tests/test_clexer.py
+++ b/tests/test_clexer.py
@@ -3,7 +3,7 @@
Basic CLexer Test
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_cmdline.py b/tests/test_cmdline.py
index 00baef3b..6a285fcc 100644
--- a/tests/test_cmdline.py
+++ b/tests/test_cmdline.py
@@ -3,7 +3,7 @@
Command line test
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_examplefiles.py b/tests/test_examplefiles.py
index e5dbcf4c..4ab2912e 100644
--- a/tests/test_examplefiles.py
+++ b/tests/test_examplefiles.py
@@ -3,7 +3,7 @@
Pygments tests with example files
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -60,8 +60,9 @@ def check_lexer(lx, absfn, outfn):
tokens = []
for type, val in lx.get_tokens(text):
ntext.append(val)
- assert type != Error, 'lexer %s generated error token for %s' % \
- (lx, absfn)
+ assert type != Error, \
+ 'lexer %s generated error token for %s: %r at position %d' % \
+ (lx, absfn, val, len(u''.join(ntext)))
tokens.append((type, val))
if u''.join(ntext) != text:
print '\n'.join(difflib.unified_diff(u''.join(ntext).splitlines(),
diff --git a/tests/test_html_formatter.py b/tests/test_html_formatter.py
index 5a506755..b0b36c4d 100644
--- a/tests/test_html_formatter.py
+++ b/tests/test_html_formatter.py
@@ -3,7 +3,7 @@
Pygments HTML formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -75,6 +75,38 @@ class HtmlFormatterTest(unittest.TestCase):
fmt = HtmlFormatter(**optdict)
fmt.format(tokensource, outfile)
+ def test_linenos(self):
+ optdict = dict(linenos=True)
+ outfile = StringIO.StringIO()
+ fmt = HtmlFormatter(**optdict)
+ fmt.format(tokensource, outfile)
+ html = outfile.getvalue()
+ self.assert_(re.search("<pre>\s+1\s+2\s+3", html))
+
+ def test_linenos_with_startnum(self):
+ optdict = dict(linenos=True, linenostart=5)
+ outfile = StringIO.StringIO()
+ fmt = HtmlFormatter(**optdict)
+ fmt.format(tokensource, outfile)
+ html = outfile.getvalue()
+ self.assert_(re.search("<pre>\s+5\s+6\s+7", html))
+
+ def test_lineanchors(self):
+ optdict = dict(lineanchors="foo")
+ outfile = StringIO.StringIO()
+ fmt = HtmlFormatter(**optdict)
+ fmt.format(tokensource, outfile)
+ html = outfile.getvalue()
+ self.assert_(re.search("<pre><a name=\"foo-1\">", html))
+
+ def test_lineanchors_with_startnum(self):
+ optdict = dict(lineanchors="foo", linenostart=5)
+ outfile = StringIO.StringIO()
+ fmt = HtmlFormatter(**optdict)
+ fmt.format(tokensource, outfile)
+ html = outfile.getvalue()
+ self.assert_(re.search("<pre><a name=\"foo-5\">", html))
+
def test_valid_output(self):
# test all available wrappers
fmt = HtmlFormatter(full=True, linenos=True, noclasses=True,
diff --git a/tests/test_latex_formatter.py b/tests/test_latex_formatter.py
index 501eb198..0c9c9122 100644
--- a/tests/test_latex_formatter.py
+++ b/tests/test_latex_formatter.py
@@ -3,7 +3,7 @@
Pygments LaTeX formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_perllexer.py b/tests/test_perllexer.py
new file mode 100644
index 00000000..b9c3cb74
--- /dev/null
+++ b/tests/test_perllexer.py
@@ -0,0 +1,137 @@
+# -*- coding: utf-8 -*-
+"""
+ Pygments regex lexer tests
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import time
+import unittest
+
+from pygments.token import String
+from pygments.lexers.agile import PerlLexer
+
+
+class RunawayRegexTest(unittest.TestCase):
+ # A previous version of the Perl lexer would spend a great deal of
+ # time backtracking when given particular strings. These tests show that
+ # the runaway backtracking doesn't happen any more (at least for the given
+ # cases).
+
+ lexer = PerlLexer()
+
+ ### Test helpers.
+
+ def assert_single_token(self, s, token):
+ """Show that a given string generates only one token."""
+ tokens = list(self.lexer.get_tokens_unprocessed(s))
+ self.assertEqual(len(tokens), 1, tokens)
+ self.assertEqual(s, tokens[0][2])
+ self.assertEqual(token, tokens[0][1])
+
+ def assert_tokens(self, strings, expected_tokens):
+ """Show that a given string generates the expected tokens."""
+ tokens = list(self.lexer.get_tokens_unprocessed(''.join(strings)))
+ self.assertEqual(len(tokens), len(expected_tokens), tokens)
+ for index, s in enumerate(strings):
+ self.assertEqual(s, tokens[index][2])
+ self.assertEqual(expected_tokens[index], tokens[index][1])
+
+ def assert_fast_tokenization(self, s):
+ """Show that a given string is tokenized quickly."""
+ start = time.time()
+ tokens = list(self.lexer.get_tokens_unprocessed(s))
+ end = time.time()
+ # Isn't 10 seconds kind of a long time? Yes, but we don't want false
+ # positives when the tests are starved for CPU time.
+ if end-start > 10:
+ self.fail('tokenization took too long')
+ return tokens
+
+ ### Strings.
+
+ def test_single_quote_strings(self):
+ self.assert_single_token(r"'foo\tbar\\\'baz'", String)
+ self.assert_fast_tokenization("'" + '\\'*999)
+
+ def test_double_quote_strings(self):
+ self.assert_single_token(r'"foo\tbar\\\"baz"', String)
+ self.assert_fast_tokenization('"' + '\\'*999)
+
+ def test_backtick_strings(self):
+ self.assert_single_token(r'`foo\tbar\\\`baz`', String.Backtick)
+ self.assert_fast_tokenization('`' + '\\'*999)
+
+ ### Regex matches with various delimiters.
+
+ def test_match(self):
+ self.assert_single_token(r'/aa\tbb/', String.Regex)
+ self.assert_fast_tokenization('/' + '\\'*999)
+
+ def test_match_with_slash(self):
+ self.assert_tokens(['m', '/\n\\t\\\\/'], [String.Regex, String.Regex])
+ self.assert_fast_tokenization('m/xxx\n' + '\\'*999)
+
+ def test_match_with_bang(self):
+ self.assert_tokens(['m', r'!aa\t\!bb!'], [String.Regex, String.Regex])
+ self.assert_fast_tokenization('m!' + '\\'*999)
+
+ def test_match_with_brace(self):
+ self.assert_tokens(['m', r'{aa\t\}bb}'], [String.Regex, String.Regex])
+ self.assert_fast_tokenization('m{' + '\\'*999)
+
+ def test_match_with_angle_brackets(self):
+ self.assert_tokens(['m', r'<aa\t\>bb>'], [String.Regex, String.Regex])
+ self.assert_fast_tokenization('m<' + '\\'*999)
+
+ def test_match_with_parenthesis(self):
+ self.assert_tokens(['m', r'(aa\t\)bb)'], [String.Regex, String.Regex])
+ self.assert_fast_tokenization('m(' + '\\'*999)
+
+ def test_match_with_at_sign(self):
+ self.assert_tokens(['m', r'@aa\t\@bb@'], [String.Regex, String.Regex])
+ self.assert_fast_tokenization('m@' + '\\'*999)
+
+ def test_match_with_percent_sign(self):
+ self.assert_tokens(['m', r'%aa\t\%bb%'], [String.Regex, String.Regex])
+ self.assert_fast_tokenization('m%' + '\\'*999)
+
+ def test_match_with_dollar_sign(self):
+ self.assert_tokens(['m', r'$aa\t\$bb$'], [String.Regex, String.Regex])
+ self.assert_fast_tokenization('m$' + '\\'*999)
+
+ ### Regex substitutions with various delimiters.
+
+ def test_substitution_with_slash(self):
+ self.assert_single_token('s/aaa/bbb/g', String.Regex)
+ self.assert_fast_tokenization('s/foo/' + '\\'*999)
+
+ def test_substitution_with_at_sign(self):
+ self.assert_single_token(r's@aaa@bbb@g', String.Regex)
+ self.assert_fast_tokenization('s@foo@' + '\\'*999)
+
+ def test_substitution_with_percent_sign(self):
+ self.assert_single_token(r's%aaa%bbb%g', String.Regex)
+ self.assert_fast_tokenization('s%foo%' + '\\'*999)
+
+ def test_substitution_with_brace(self):
+ self.assert_single_token(r's{aaa}', String.Regex)
+ self.assert_fast_tokenization('s{' + '\\'*999)
+
+ def test_substitution_with_angle_bracket(self):
+ self.assert_single_token(r's<aaa>', String.Regex)
+ self.assert_fast_tokenization('s<' + '\\'*999)
+
+ def test_substitution_with_square_bracket(self):
+ self.assert_single_token(r's[aaa]', String.Regex)
+ self.assert_fast_tokenization('s[' + '\\'*999)
+
+ def test_substitution_with_parenthesis(self):
+ self.assert_single_token(r's(aaa)', String.Regex)
+ self.assert_fast_tokenization('s(' + '\\'*999)
diff --git a/tests/test_regexlexer.py b/tests/test_regexlexer.py
index e0f167ab..fbb71ad6 100644
--- a/tests/test_regexlexer.py
+++ b/tests/test_regexlexer.py
@@ -3,7 +3,7 @@
Pygments regex lexer tests
~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_token.py b/tests/test_token.py
index d7abd218..490c966c 100644
--- a/tests/test_token.py
+++ b/tests/test_token.py
@@ -3,7 +3,7 @@
Test suite for the token module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_using_api.py b/tests/test_using_api.py
index b1b6877d..e645a881 100644
--- a/tests/test_using_api.py
+++ b/tests/test_using_api.py
@@ -3,7 +3,7 @@
Pygments tests for using()
~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_util.py b/tests/test_util.py
index 0876cf70..d994e5fa 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -3,7 +3,7 @@
Test suite for the util module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2011 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -13,6 +13,12 @@ import os
from pygments import util
+class FakeLexer(object):
+ def analyse(text):
+ return float(text)
+ analyse = util.make_analysator(analyse)
+
+
class UtilTest(unittest.TestCase):
def test_getoptions(self):
@@ -53,12 +59,36 @@ class UtilTest(unittest.TestCase):
self.assertEquals(util.docstring_headline(f1), "docstring headline")
self.assertEquals(util.docstring_headline(f2), "docstring headline")
- def test_analysator(self):
- class X(object):
+ def test_analysator_returns_float(self):
+ # If an analysator wrapped by make_analysator returns a floating point
+ # number, then that number will be returned by the wrapper.
+ self.assertEquals(FakeLexer.analyse('0.5'), 0.5)
+
+ def test_analysator_returns_boolean(self):
+ # If an analysator wrapped by make_analysator returns a boolean value,
+ # then the wrapper will return 1.0 if the boolean was True or 0.0 if
+ # it was False.
+ self.assertEquals(FakeLexer.analyse(True), 1.0)
+ self.assertEquals(FakeLexer.analyse(False), 0.0)
+
+ def test_analysator_raises_exception(self):
+ # If an analysator wrapped by make_analysator raises an exception,
+ # then the wrapper will return 0.0.
+ class ErrorLexer(object):
def analyse(text):
- return 0.5
+ raise RuntimeError('something bad happened')
analyse = util.make_analysator(analyse)
- self.assertEquals(X.analyse(''), 0.5)
+ self.assertEquals(ErrorLexer.analyse(''), 0.0)
+
+ def test_analysator_value_error(self):
+ # When converting the analysator's return value to a float a
+ # ValueError may occur. If that happens 0.0 is returned instead.
+ self.assertEquals(FakeLexer.analyse('bad input'), 0.0)
+
+ def test_analysator_type_error(self):
+ # When converting the analysator's return value to a float a
+ # TypeError may occur. If that happens 0.0 is returned instead.
+ self.assertEquals(FakeLexer.analyse(None), 0.0)
def test_shebang_matches(self):
self.assert_(util.shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?'))