summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--CHANGES42
-rw-r--r--docs/generate.py3
-rw-r--r--docs/src/tokens.txt4
-rw-r--r--external/markdown-processor.py14
-rw-r--r--pygments/cmdline.py50
-rw-r--r--pygments/formatters/latex.py7
-rw-r--r--pygments/formatters/other.py29
-rw-r--r--pygments/lexers/_mapping.py15
-rw-r--r--pygments/lexers/agile.py175
-rw-r--r--pygments/lexers/asm.py75
-rw-r--r--pygments/lexers/compiled.py65
-rw-r--r--pygments/lexers/dotnet.py10
-rw-r--r--pygments/lexers/functional.py6
-rw-r--r--pygments/lexers/math.py3
-rw-r--r--pygments/lexers/other.py348
-rw-r--r--pygments/lexers/special.py8
-rw-r--r--pygments/lexers/templates.py115
-rw-r--r--pygments/lexers/text.py102
-rw-r--r--pygments/lexers/web.py18
-rw-r--r--pygments/style.py13
-rw-r--r--pygments/styles/vim.py11
-rw-r--r--pygments/token.py1
-rw-r--r--pygments/util.py15
-rw-r--r--tests/examplefiles/cheetah_example.html13
-rw-r--r--tests/examplefiles/genclass.clj510
-rw-r--r--tests/examplefiles/lighttpd_config.conf13
-rw-r--r--tests/examplefiles/nasm_aoutso.asm96
-rw-r--r--tests/examplefiles/nasm_objexe.asm30
-rw-r--r--tests/examplefiles/nginx_nginx.conf118
-rw-r--r--tests/examplefiles/objc_example.m11
-rw-r--r--tests/examplefiles/objc_example2.m24
-rw-r--r--tests/examplefiles/regex-delims.pl120
-rw-r--r--tests/examplefiles/sphere.pov18
-rw-r--r--tests/examplefiles/sqlite3.sqlite3-console27
-rw-r--r--tests/examplefiles/test.plot333
-rw-r--r--tests/examplefiles/while.pov13
-rw-r--r--tests/test_basic_api.py7
-rw-r--r--tests/test_examplefiles.py7
38 files changed, 2352 insertions, 117 deletions
diff --git a/CHANGES b/CHANGES
index 1f50f4b1..ee8306f3 100644
--- a/CHANGES
+++ b/CHANGES
@@ -5,11 +5,49 @@ Version 0.11
------------
(codename not selected, release XXX XX, 2008)
+- Lexers added:
+
+ * Nasm-style assembly language, thanks to delroth
+ * YAML, thanks to Kirill Simonov
+ * ActionScript 3, thanks to Pierre Bourdon
+ * Cheetah/Spitfire templates, thanks to Matt Good
+ * Lighttpd config files
+ * Nginx config files
+ * Gnuplot plotting scripts
+ * Clojure
+ * POV-Ray scene files
+ * Sqlite3 interactive console sessions
+
+- Lexers improved:
+ * C lexer highlights standard library functions now and supports
+ C99 types.
+ * Bash lexer now correctly highlights heredocs without preceding
+ whitespace.
+ * Vim lexer now highlights hex colors properly and knows a couple
+ more keywords.
+ * Irc logs lexer now handles xchat's default time format (#340)
+ and correctly highlights lines ending in ``>``
+ * Support more delimiters for perl regular expressions (#258)
+
- Added "Visual Studio" style.
-- Added ActionScript 3 lexer, thanks to Pierre Bourdon.
+- Updated markdown processor to Markdown 1.7.
+
+- Support roman/sans/mono style defs and use them in the LaTeX
+ formatter.
+
+- The RawTokenFormatter is no longer registered to ``*.raw``
+ and it's documented that tokenization with this lexer may
+ raise exceptions.
+
+- New option *-g* to pygmentize, to allow lexer guessing based
+ on filetext (can be slowish, so file extensions are still checked
+ first).
-- Added YAML lexer, thanks to Kirill Simonov.
+- ``guess_lexer()`` now makes its decision much faster due to a
+ cache of whether data is xml-like (a check which is used in several
+ versions of ``analyse_text()``). Several lexers also have more
+ accurate ``analyse_text()`` now.
Version 0.10
diff --git a/docs/generate.py b/docs/generate.py
index 7702cfe5..658ada4b 100644
--- a/docs/generate.py
+++ b/docs/generate.py
@@ -69,7 +69,8 @@ def generate_formatter_docs():
from pygments.formatters import FORMATTERS
out = []
- for cls, data in FORMATTERS.iteritems():
+ for cls, data in sorted(FORMATTERS.iteritems(),
+ key=lambda x: x[0].__name__):
heading = cls.__name__
out.append('`' + heading + '`\n' + '-'*(2+len(heading)) + '\n')
out.append(cls.__doc__)
diff --git a/docs/src/tokens.txt b/docs/src/tokens.txt
index 5451ec8c..9ef0df8d 100644
--- a/docs/src/tokens.txt
+++ b/docs/src/tokens.txt
@@ -117,6 +117,10 @@ Keyword Tokens
For keywords used for variable declaration (e.g. ``var`` in some programming
languages like JavaScript).
+`Keyword.Namespace`
+ For keywords used for namespace declarations (e.g. ``import`` in Python and
+ Java and ``package`` in Java).
+
`Keyword.Pseudo`
For keywords that aren't really keywords (e.g. ``None`` in old Python
versions).
diff --git a/external/markdown-processor.py b/external/markdown-processor.py
index 94d02576..e7c8d6f9 100644
--- a/external/markdown-processor.py
+++ b/external/markdown-processor.py
@@ -9,8 +9,8 @@
from markdown import Markdown
md = Markdown()
- md.preprocessors.insert(0, CodeBlockPreprocessor())
- markdown = md.__str__
+ md.textPreprocessors.insert(0, CodeBlockPreprocessor())
+ html = md.convert(someText)
markdown is then a callable that can be passed to the context of
a template and used in that template, for example.
@@ -40,14 +40,14 @@ INLINESTYLES = False
import re
-from markdown import Preprocessor
+from markdown import TextPreprocessor
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name, TextLexer
-class CodeBlockPreprocessor(Preprocessor):
+class CodeBlockPreprocessor(TextPreprocessor):
pattern = re.compile(
r'\[sourcecode:(.+?)\](.+?)\[/sourcecode\]', re.S)
@@ -60,8 +60,8 @@ class CodeBlockPreprocessor(Preprocessor):
lexer = get_lexer_by_name(m.group(1))
except ValueError:
lexer = TextLexer()
- code = highlight(m.group(2), lexer, formatter)
- code = code.replace('\n\n', '\n \n')
+ code = highlight(m.group(2), lexer, self.formatter)
+ code = code.replace('\n\n', '\n&nbsp;\n').replace('\n', '<br />')
return '\n\n<div class="code">%s</div>\n\n' % code
return self.pattern.sub(
- repl, '\n'.join(lines)).split('\n')
+ repl, lines) \ No newline at end of file
diff --git a/pygments/cmdline.py b/pygments/cmdline.py
index 996207aa..de10debe 100644
--- a/pygments/cmdline.py
+++ b/pygments/cmdline.py
@@ -5,7 +5,7 @@
Command line interface.
- :copyright: 2006-2007 by Georg Brandl.
+ :copyright: 2006-2008 by Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
import sys
@@ -15,7 +15,7 @@ from textwrap import dedent
from pygments import __version__, __author__, highlight
from pygments.util import ClassNotFound, OptionError, docstring_headline
from pygments.lexers import get_all_lexers, get_lexer_by_name, get_lexer_for_filename, \
- find_lexer_class
+ find_lexer_class, guess_lexer, TextLexer
from pygments.formatters import get_all_formatters, get_formatter_by_name, \
get_formatter_for_filename, find_formatter_class, \
TerminalFormatter # pylint:disable-msg=E0611
@@ -24,7 +24,7 @@ from pygments.styles import get_all_styles, get_style_by_name
USAGE = """\
-Usage: %s [-l <lexer>] [-F <filter>[:<options>]] [-f <formatter>]
+Usage: %s [-l <lexer> | -g] [-F <filter>[:<options>]] [-f <formatter>]
[-O <options>] [-P <option=value>] [-o <outfile>] [<infile>]
%s -S <style> -f <formatter> [-a <arg>] [-O <options>] [-P <option=value>]
@@ -38,7 +38,9 @@ If no input file is given, use stdin, if -o is not given, use stdout.
<lexer> is a lexer name (query all lexer names with -L). If -l is not
given, the lexer is guessed from the extension of the input file name
-(this obviously doesn't work if the input is stdin).
+(this obviously doesn't work if the input is stdin). If -g is passed,
+attempt to guess the lexer from the file contents, or pass through as
+plain text if this fails (this can work for stdin).
Likewise, <formatter> is a formatter name, and will be guessed from
the extension of the output file name. If no output file is given,
@@ -186,7 +188,7 @@ def main(args=sys.argv):
usage = USAGE % ((args[0],) * 5)
try:
- popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:hVH")
+ popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:hVHg")
except getopt.GetoptError, err:
print >>sys.stderr, usage
return 2
@@ -212,7 +214,7 @@ def main(args=sys.argv):
return 0
if opts.pop('-V', None) is not None:
- print 'Pygments version %s, (c) 2006-2007 by %s.' % (__version__, __author__)
+ print 'Pygments version %s, (c) 2006-2008 by %s.' % (__version__, __author__)
return 0
# handle ``pygmentize -L``
@@ -333,23 +335,41 @@ def main(args=sys.argv):
return 2
infn = args[0]
+ try:
+ code = file(infn).read()
+ except Exception, err:
+ print >>sys.stderr, 'Error: cannot read infile:', err
+ return 1
+
if not lexer:
try:
lexer = get_lexer_for_filename(infn, **parsed_opts)
- except (OptionError, ClassNotFound), err:
+ except ClassNotFound, err:
+ if '-g' in opts:
+ try:
+ lexer = guess_lexer(code)
+ except ClassNotFound:
+ lexer = TextLexer()
+ else:
+ print >>sys.stderr, 'Error:', err
+ return 1
+ except OptionError, err:
print >>sys.stderr, 'Error:', err
return 1
- try:
- code = file(infn).read()
- except Exception, err:
- print >>sys.stderr, 'Error: cannot read infile:', err
- return 1
else:
- if not lexer:
- print >>sys.stderr, 'Error: no lexer name given and reading from stdin'
+ if '-g' in opts:
+ code = sys.stdin.read()
+ try:
+ lexer = guess_lexer(code)
+ except ClassNotFound:
+ lexer = TextLexer()
+ elif not lexer:
+ print >>sys.stderr, 'Error: no lexer name given and reading ' + \
+ 'from stdin (try using -g or -l <lexer>)'
return 2
- code = sys.stdin.read()
+ else:
+ code = sys.stdin.read()
# No encoding given? Use latin1 if output file given,
# stdin/stdout encoding otherwise.
diff --git a/pygments/formatters/latex.py b/pygments/formatters/latex.py
index ae3ce26d..529fa8f9 100644
--- a/pygments/formatters/latex.py
+++ b/pygments/formatters/latex.py
@@ -109,6 +109,7 @@ class LatexFormatter(Formatter):
The LaTeX commands used to produce colored output are constructed
using this prefix and some letters (default: ``'PY'``).
*New in Pygments 0.7.*
+
*New in Pygments 0.10:* the default is now ``'PY'`` instead of ``'C'``.
"""
name = 'LaTeX'
@@ -154,6 +155,12 @@ class LatexFormatter(Formatter):
cmndef = r'\textit{' + cmndef + '}'
if ndef['underline']:
cmndef = r'\underline{' + cmndef + '}'
+ if ndef['roman']:
+ cmndef = r'\textrm{' + cmndef + '}'
+ if ndef['sans']:
+ cmndef = r'\textsf{' + cmndef + '}'
+ if ndef['mono']:
+ cmndef = r'\texttt{' + cmndef + '}'
if ndef['color']:
cmndef = r'\textcolor[rgb]{%s}{%s}' % (
rgbcolor(ndef['color']),
diff --git a/pygments/formatters/other.py b/pygments/formatters/other.py
index bd375f32..fb123689 100644
--- a/pygments/formatters/other.py
+++ b/pygments/formatters/other.py
@@ -11,7 +11,8 @@
from pygments.formatter import Formatter
from pygments.util import get_choice_opt
-
+from pygments.token import Token
+from pygments.console import colorize
__all__ = ['NullFormatter', 'RawTokenFormatter']
@@ -46,6 +47,11 @@ class RawTokenFormatter(Formatter):
`compress`
If set to ``'gz'`` or ``'bz2'``, compress the output with the given
compression algorithm after encoding (default: ``''``).
+ `error_color`
+ If set to a color name, highlight error tokens using that color. If
+ set but with no value, defaults to ``'red'``.
+ *New in Pygments 0.11.*
+
"""
name = 'Raw tokens'
aliases = ['raw', 'tokens']
@@ -57,6 +63,15 @@ class RawTokenFormatter(Formatter):
Formatter.__init__(self, **options)
self.compress = get_choice_opt(options, 'compress',
['', 'none', 'gz', 'bz2'], '')
+ self.error_color = options.get('error_color', None)
+ if self.error_color is True:
+ self.error_color = 'red'
+ if self.error_color is not None:
+ try:
+ colorize(self.error_color, '')
+ except KeyError:
+ raise ValueError("Invalid color %r specified" %
+ self.error_color)
def format(self, tokensource, outfile):
if self.compress == 'gz':
@@ -78,6 +93,14 @@ class RawTokenFormatter(Formatter):
lasttype = None
lastval = u''
- for ttype, value in tokensource:
- write("%s\t%r\n" % (ttype, value))
+ if self.error_color:
+ for ttype, value in tokensource:
+ line = "%s\t%r\n" % (ttype, value)
+ if ttype is Token.Error:
+ write(colorize(self.error_color, line))
+ else:
+ write(line)
+ else:
+ for ttype, value in tokensource:
+ write("%s\t%r\n" % (ttype, value))
flush()
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index 690942cd..ceaac104 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -27,6 +27,11 @@ LEXERS = {
'CLexer': ('pygments.lexers.compiled', 'C', ('c',), ('*.c', '*.h'), ('text/x-chdr', 'text/x-csrc')),
'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
+ 'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire'), (), ('text/html+cheetah', 'text/html+spitfire')),
+ 'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
+ 'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
+ 'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
+ 'ClojureLexer': ('pygments.lexers.agile', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
'CommonLispLexer': ('pygments.lexers.functional', 'Common Lisp', ('common-lisp', 'cl'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)),
'CppLexer': ('pygments.lexers.compiled', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx'), ('text/x-c++hdr', 'text/x-c++src')),
'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
@@ -51,6 +56,7 @@ LEXERS = {
'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
'GettextLexer': ('pygments.lexers.text', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
+ 'GnuplotLexer': ('pygments.lexers.other', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
'GroffLexer': ('pygments.lexers.text', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
'HaskellLexer': ('pygments.lexers.functional', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja'), (), ('text/html+django', 'text/html+jinja')),
@@ -69,6 +75,7 @@ LEXERS = {
'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
+ 'LighttpdConfLexer': ('pygments.lexers.text', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)),
'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell'), ('*.lhs',), ('text/x-literate-haskell',)),
'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)),
@@ -91,18 +98,21 @@ LEXERS = {
'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
- 'NumPyLexer': ('pygments.lexers.math', 'NumPy', ('numpy',), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript'), ('text/x-python', 'application/x-python')),
+ 'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
+ 'NginxConfLexer': ('pygments.lexers.text', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)),
+ 'NumPyLexer': ('pygments.lexers.math', 'NumPy', ('numpy',), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript'), ()),
'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
'ObjectiveCLexer': ('pygments.lexers.compiled', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m',), ('text/x-objective-c',)),
'OcamlLexer': ('pygments.lexers.compiled', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
'OcamlLexer': ('pygments.lexers.functional', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm'), ('text/x-perl', 'application/x-perl')),
'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]'), ('text/x-php',)),
+ 'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
'Python3Lexer': ('pygments.lexers.agile', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript'), ('text/x-python', 'application/x-python')),
'PythonTracebackLexer': ('pygments.lexers.agile', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)),
- 'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), ('*.raw',), ('application/x-pygments-tokens',)),
+ 'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)),
'RedcodeLexer': ('pygments.lexers.other', 'Redcode', ('redcode',), ('*.cw',), ()),
'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst',)),
@@ -115,6 +125,7 @@ LEXERS = {
'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list'), ('sources.list',), ()),
'SqlLexer': ('pygments.lexers.other', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
+ 'SqliteConsoleLexer': ('pygments.lexers.other', 'sqlite3con', (), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
'TclLexer': ('pygments.lexers.agile', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
'TcshLexer': ('pygments.lexers.other', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
index 30779ebd..14222409 100644
--- a/pygments/lexers/agile.py
+++ b/pygments/lexers/agile.py
@@ -7,7 +7,8 @@
:copyright: 2006-2008 by Georg Brandl, Armin Ronacher,
Lukas Meuser, Tim Hatch, Jarrett Billingsley,
- Tassilo Schweyer, Steven Hazel, Nick Efford.
+ Tassilo Schweyer, Steven Hazel, Nick Efford,
+ Davy Wybiral.
:license: BSD, see LICENSE for more details.
"""
@@ -27,7 +28,7 @@ from pygments import unistring as uni
__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
'RubyLexer', 'RubyConsoleLexer', 'PerlLexer', 'LuaLexer',
- 'MiniDLexer', 'IoLexer', 'TclLexer', 'Python3Lexer']
+ 'MiniDLexer', 'IoLexer', 'TclLexer', 'Python3Lexer', 'ClojureLexer']
# b/w compatibility
from pygments.lexers.functional import SchemeLexer
@@ -60,8 +61,8 @@ class PythonLexer(RegexLexer):
include('keywords'),
(r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
(r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
- (r'(import)(\s+)', bygroups(Keyword, Text), 'import'),
+ (r'(from)(\s+)', bygroups(Keyword.Namespace, Text), 'fromimport'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
include('builtins'),
include('backtick'),
('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
@@ -126,13 +127,13 @@ class PythonLexer(RegexLexer):
('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
],
'import': [
- (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'(\s+)(as)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
(r'[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace),
(r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
(r'', Text, '#pop') # all else: go back
],
'fromimport': [
- (r'(\s+)(import)\b', bygroups(Text, Keyword), '#pop'),
+ (r'(\s+)(import)\b', bygroups(Text, Keyword.Namespace), '#pop'),
(r'[a-zA-Z_.][a-zA-Z0-9_.]*', Name.Namespace),
],
'stringescape': [
@@ -758,6 +759,19 @@ class PerlLexer(RegexLexer):
flags = re.DOTALL | re.MULTILINE
# TODO: give this a perl guy who knows how to parse perl...
tokens = {
+ 'balanced-regex': [
+ (r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'),
+ (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'),
+ (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
+ (r'{(\\\\|\\}|[^}])*}[egimosx]*', String.Regex, '#pop'),
+ (r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'),
+ (r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'),
+ (r'\((\\\\|\\\)|[^\)])*\)[egimosx]*', String.Regex, '#pop'),
+ (r'@(\\\\|\\\@|[^\@])*@[egimosx]*', String.Regex, '#pop'),
+ (r'%(\\\\|\\\%|[^\%])*%[egimosx]*', String.Regex, '#pop'),
+ (r'\$(\\\\|\\\$|[^\$])*\$[egimosx]*', String.Regex, '#pop'),
+ (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'),
+ ],
'root': [
(r'\#.*?$', Comment.Single),
(r'=[a-zA-Z0-9]+\s+.*?\n[.\n]*?\n\s*=cut', Comment.Multiline),
@@ -767,7 +781,18 @@ class PerlLexer(RegexLexer):
(r'(format)(\s+)([a-zA-Z0-9_]+)(\s*)(=)(\s*\n)',
bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
(r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
+ # common delimiters
(r's/(\\\\|\\/|[^/])*/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex),
+ (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
+ (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
+ (r's@(\\\\|\\@|[^@])*@(\\\\|\\@|[^@])*@[egimosx]*', String.Regex),
+ (r's%(\\\\|\\%|[^%])*%(\\\\|\\%|[^%])*%[egimosx]*', String.Regex),
+ # balanced delimiters
+ (r's{(\\\\|\\}|[^}])*}\s*', String.Regex, 'balanced-regex'),
+ (r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'),
+ (r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'),
+ (r's\((\\\\|\\\)|[^\)])*\)\s*', String.Regex, 'balanced-regex'),
+
(r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex),
(r'((?<==~)|(?<=\())\s*/(\\\\|\\/|[^/])*/[gcimosx]*', String.Regex),
(r'\s+', Text),
@@ -811,6 +836,7 @@ class PerlLexer(RegexLexer):
(r"'(\\\\|\\'|[^'])*'", String),
(r'"(\\\\|\\"|[^"])*"', String),
(r'`(\\\\|\\`|[^`])*`', String.Backtick),
+ (r'<([^\s>]+)>', String.Regex),
(r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
(r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
(r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
@@ -1131,7 +1157,7 @@ class TclLexer(RegexLexer):
"""
keyword_cmds_re = (
- r'\b(after|apply|array|break|catch|continue|else|elseif|error|'
+ r'\b(after|apply|array|break|catch|continue|elseif|else|error|'
r'eval|expr|for|foreach|global|if|namespace|proc|rename|return|'
r'set|switch|then|trace|unset|update|uplevel|upvar|variable|'
r'vwait|while)\b'
@@ -1250,3 +1276,138 @@ class TclLexer(RegexLexer):
def analyse_text(text):
return shebang_matches(text, r'(tcl)')
+
+
+class ClojureLexer(RegexLexer):
+ """
+ Lexer for `Clojure <http://clojure.org/>`_ source code.
+
+ *New in Pygments 0.11.*
+ """
+ name = 'Clojure'
+ aliases = ['clojure', 'clj']
+ filenames = ['*.clj']
+ mimetypes = ['text/x-clojure', 'application/x-clojure']
+
+ keywords = [
+ 'fn', 'def', 'defn', 'defmacro', 'defmethod', 'defmulti', 'defn-',
+ 'defstruct',
+ 'if', 'cond',
+ 'let', 'for'
+ ]
+ builtins = [
+ '.', '..',
+ '*', '+', '-', '->', '..', '/', '<', '<=', '=', '==', '>', '>=',
+ 'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
+ 'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
+ 'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float',
+ 'aset-int', 'aset-long', 'aset-short', 'assert', 'assoc', 'await',
+ 'await-for', 'bean', 'binding', 'bit-and', 'bit-not', 'bit-or',
+ 'bit-shift-left', 'bit-shift-right', 'bit-xor', 'boolean', 'branch?',
+ 'butlast', 'byte', 'cast', 'char', 'children', 'class',
+ 'clear-agent-errors', 'comment', 'commute', 'comp', 'comparator',
+ 'complement', 'concat', 'conj', 'cons', 'constantly',
+ 'construct-proxy', 'contains?', 'count', 'create-ns', 'create-struct',
+ 'cycle', 'dec', 'deref', 'difference', 'disj', 'dissoc', 'distinct',
+ 'doall', 'doc', 'dorun', 'doseq', 'dosync', 'dotimes', 'doto',
+ 'double', 'down', 'drop', 'drop-while', 'edit', 'end?', 'ensure',
+ 'eval', 'every?', 'false?', 'ffirst', 'file-seq', 'filter', 'find',
+ 'find-doc', 'find-ns', 'find-var', 'first', 'float', 'flush',
+ 'fnseq', 'frest', 'gensym', 'get', 'get-proxy-class',
+ 'hash-map', 'hash-set', 'identical?', 'identity', 'if-let', 'import',
+ 'in-ns', 'inc', 'index', 'insert-child', 'insert-left', 'insert-right',
+ 'inspect-table', 'inspect-tree', 'instance?', 'int', 'interleave',
+ 'intersection', 'into', 'into-array', 'iterate', 'join', 'key', 'keys',
+ 'keyword', 'keyword?', 'last', 'lazy-cat', 'lazy-cons', 'left',
+ 'lefts', 'line-seq', 'list', 'list*', 'load', 'load-file',
+ 'locking', 'long', 'loop', 'macroexpand', 'macroexpand-1',
+ 'make-array', 'make-node', 'map', 'map-invert', 'map?', 'mapcat',
+ 'max', 'max-key', 'memfn', 'merge', 'merge-with', 'meta', 'min',
+ 'min-key', 'name', 'namespace', 'neg?', 'new', 'newline', 'next',
+ 'nil?', 'node', 'not', 'not-any?', 'not-every?', 'not=', 'ns-imports',
+ 'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers',
+ 'ns-resolve', 'ns-unmap', 'nth', 'nthrest', 'or', 'parse', 'partial',
+ 'path', 'peek', 'pop', 'pos?', 'pr', 'pr-str', 'print', 'print-str',
+ 'println', 'println-str', 'prn', 'prn-str', 'project', 'proxy',
+ 'proxy-mappings', 'quot', 'rand', 'rand-int', 'range', 're-find',
+ 're-groups', 're-matcher', 're-matches', 're-pattern', 're-seq',
+ 'read', 'read-line', 'reduce', 'ref', 'ref-set', 'refer', 'rem',
+ 'remove', 'remove-method', 'remove-ns', 'rename', 'rename-keys',
+ 'repeat', 'replace', 'replicate', 'resolve', 'rest', 'resultset-seq',
+ 'reverse', 'rfirst', 'right', 'rights', 'root', 'rrest', 'rseq',
+ 'second', 'select', 'select-keys', 'send', 'send-off', 'seq',
+ 'seq-zip', 'seq?', 'set', 'short', 'slurp', 'some', 'sort',
+ 'sort-by', 'sorted-map', 'sorted-map-by', 'sorted-set',
+ 'special-symbol?', 'split-at', 'split-with', 'str', 'string?',
+ 'struct', 'struct-map', 'subs', 'subvec', 'symbol', 'symbol?',
+ 'sync', 'take', 'take-nth', 'take-while', 'test', 'time', 'to-array',
+ 'to-array-2d', 'tree-seq', 'true?', 'union', 'up', 'update-proxy',
+ 'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
+ 'vector?', 'when', 'when-first', 'when-let', 'when-not',
+ 'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
+ 'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper']
+
+ # valid names for identifiers
+ # well, names can only not consist fully of numbers
+ # but this should be good enough for now
+ valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~-]+'
+
+ tokens = {
+ 'root' : [
+ # the comments - always starting with semicolon
+ # and going to the end of the line
+ (r';.*$', Comment.Single),
+
+ # whitespaces - usually not relevant
+ (r'\s+', Text),
+
+ # numbers
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+ # support for uncommon kinds of numbers -
+ # have to figure out what the characters mean
+ #(r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
+
+ # strings, symbols and characters
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'" + valid_name, String.Symbol),
+ (r"\\([()/'\".'_!§$%& ?=+-]{1}|[a-zA-Z0-9]+)", String.Char),
+
+ # constants
+ (r'(#t|#f)', Name.Constant),
+
+ # special operators
+ (r"('|#|`|,@|,|\.)", Operator),
+
+ # highlight the keywords
+ ('(%s)' % '|'.join([
+ re.escape(entry) + ' ' for entry in keywords]),
+ Keyword
+ ),
+
+ # first variable in a quoted string like
+ # '(this is syntactic sugar)
+ (r"(?<='\()" + valid_name, Name.Variable),
+ (r"(?<=#\()" + valid_name, Name.Variable),
+
+ # highlight the builtins
+ ("(?<=\()(%s)" % '|'.join([
+ re.escape(entry) + ' ' for entry in builtins]),
+ Name.Builtin
+ ),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Function),
+ # find the remaining variables
+ (valid_name, Name.Variable),
+
+ # Clojure accepts vector notation
+ (r'(\[|\])', Punctuation),
+
+ # Clojure accepts map notation
+ (r'(\{|\})', Punctuation),
+
+ # the famous parentheses!
+ (r'(\(|\))', Punctuation),
+ ],
+ }
diff --git a/pygments/lexers/asm.py b/pygments/lexers/asm.py
index 7d5a28c1..fffadcaa 100644
--- a/pygments/lexers/asm.py
+++ b/pygments/lexers/asm.py
@@ -21,7 +21,7 @@ from pygments.lexers.compiled import DLexer, CppLexer, CLexer
from pygments.token import *
__all__ = ['GasLexer', 'ObjdumpLexer','DObjdumpLexer', 'CppObjdumpLexer',
- 'CObjdumpLexer', 'LlvmLexer']
+ 'CObjdumpLexer', 'LlvmLexer', 'NasmLexer']
class GasLexer(RegexLexer):
@@ -62,7 +62,7 @@ class GasLexer(RegexLexer):
],
'instruction-args': [
# For objdump-disassembled code, shouldn't occur in
- # actual assember input
+ # actual assembler input
('([a-z0-9]+)( )(<)('+identifier+')(>)',
bygroups(Number.Hex, Text, Punctuation, Name.Constant,
Punctuation)),
@@ -259,3 +259,74 @@ class LlvmLexer(RegexLexer):
(r'i[1-9]\d*', Keyword)
]
}
+
+
+class NasmLexer(RegexLexer):
+ """
+ For Nasm (Intel) assembly code.
+ """
+ name = 'NASM'
+ aliases = ['nasm']
+ filenames = ['*.asm', '*.ASM']
+ mimetypes = ['text/x-nasm']
+
+ identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?#@~]*'
+ hexn = r'(?:0[xX][0-9a-fA-F]+|$0[0-9a-fA-F]*|[0-9a-fA-F]+h)'
+ octn = r'[0-7]+q'
+ binn = r'[01]+b'
+ decn = r'[0-9]+'
+ floatn = decn + r'\.e?' + decn
+ string = r'"(\\"|[^"])*"|' + r"'(\\'|[^'])*'"
+ declkw = r'(?:res|d)[bwdqt]|times'
+ register = (r'[a-d][lh]|e?[a-d]x|e?[sb]p|e?[sd]i|[c-gs]s|st[0-7]|'
+ r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]')
+ wordop = r'seg|wrt|strict'
+ type = r'byte|[dq]?word'
+ directives = (r'BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|'
+ r'COMMON|CPU|GROUP|UPPERCASE|IMPORT|EXPORT|LIBRARY|MODULE')
+
+ flags = re.IGNORECASE | re.MULTILINE
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ (r'^\s*%', Comment.Preproc, 'preproc'),
+ (identifier + ':', Name.Label),
+ (directives, Keyword, 'instruction-args'),
+ (r'(%s)\s+(equ)' % identifier,
+ bygroups(Name.Constant, Keyword.Declaration),
+ 'instruction-args'),
+ (declkw, Keyword.Declaration, 'instruction-args'),
+ (identifier, Name.Function, 'instruction-args'),
+ (r'[\r\n]+', Text)
+ ],
+ 'instruction-args': [
+ (string, String),
+ (hexn, Number.Hex),
+ (octn, Number.Oct),
+ (binn, Number),
+ (floatn, Number.Float),
+ (decn, Number.Integer),
+ include('punctuation'),
+ (register, Name.Builtin),
+ (identifier, Name.Variable),
+ (r'[\r\n]+', Text, '#pop'),
+ include('whitespace')
+ ],
+ 'preproc': [
+ (r'[^;\n]+', Comment.Preproc),
+ (r';.*?\n', Comment.Single, '#pop'),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'whitespace': [
+ (r'\n', Text),
+ (r'[ \t]+', Text),
+ (r';.*', Comment.Single)
+ ],
+ 'punctuation': [
+ (r'[,():\[\]]+', Punctuation),
+ (r'[&|^<>+*/%~-]+', Operator),
+ (r'[$]+', Keyword.Constant),
+ (wordop, Operator.Word),
+ (type, Keyword.Type)
+ ],
+ }
diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
index 7a41de90..b6f68c2b 100644
--- a/pygments/lexers/compiled.py
+++ b/pygments/lexers/compiled.py
@@ -67,8 +67,8 @@ class CLexer(RegexLexer):
(r'(auto|break|case|const|continue|default|do|else|enum|extern|'
r'for|goto|if|register|restricted|return|sizeof|static|struct|'
r'switch|typedef|union|volatile|virtual|while)\b', Keyword),
- (r'(int|long|float|short|double|char|unsigned|signed|void|'
- r'_Complex|_Imaginary|_Bool)\b', Keyword.Type),
+ (r'(int|long|float|short|double|char|unsigned|signed|void)\b',
+ Keyword.Type),
(r'(_{0,2}inline|naked|restrict|thread|typename)\b', Keyword.Reserved),
(r'__(asm|int8|based|except|int16|stdcall|cdecl|fastcall|int32|'
r'declspec|finally|int64|try|leave)\b', Keyword.Reserved),
@@ -83,14 +83,14 @@ class CLexer(RegexLexer):
r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
r'(\s*\([^;]*?\))' # signature
r'(' + _ws + r')({)',
- bygroups(using(this), Name.Function, using(this), Text, Punctuation),
+ bygroups(using(this), Name.Function, using(this), using(this), Punctuation),
'function'),
# function declarations
(r'((?:[a-zA-Z0-9_*\s])+?(?:\s|[*]))' # return arguments
r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
r'(\s*\([^;]*?\))' # signature
r'(' + _ws + r')(;)',
- bygroups(using(this), Name.Function, using(this), Text, Punctuation)),
+ bygroups(using(this), Name.Function, using(this), using(this), Punctuation)),
('', Text, 'statement'),
],
'statement' : [
@@ -123,11 +123,40 @@ class CLexer(RegexLexer):
],
'if0': [
(r'^\s*#if.*?(?<!\\)\n', Comment, '#push'),
+ (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
(r'^\s*#endif.*?(?<!\\)\n', Comment, '#pop'),
(r'.*?\n', Comment),
]
}
+ stdlib_types = ['size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t',
+ 'sig_atomic_t', 'fpos_t', 'clock_t', 'time_t', 'va_list',
+ 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t', 'mbstate_t',
+ 'wctrans_t', 'wint_t', 'wctype_t']
+ c99_types = ['_Bool', '_Complex', 'int8_t', 'int16_t', 'int32_t', 'int64_t',
+ 'uint8_t', 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t',
+ 'int_least16_t', 'int_least32_t', 'int_least64_t',
+ 'uint_least8_t', 'uint_least16_t', 'uint_least32_t',
+ 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
+ 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t',
+ 'uint_fast64_t', 'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t']
+
+ def __init__(self, **options):
+ self.stdlibhighlighting = get_bool_opt(options,
+ 'stdlibhighlighting', True)
+ self.c99highlighting = get_bool_opt(options,
+ 'c99highlighting', True)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if self.stdlibhighlighting and value in self.stdlib_types:
+ token = Keyword.Type
+ elif self.c99highlighting and value in self.c99_types:
+ token = Keyword.Type
+ yield index, token, value
class CppLexer(RegexLexer):
"""
@@ -864,17 +893,18 @@ class JavaLexer(RegexLexer):
(r'//.*?\n', Comment),
(r'/\*.*?\*/', Comment),
(r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
- (r'(abstract|assert|break|case|catch|'
- r'const|continue|default|do|else|enum|extends|final|'
- r'finally|for|if|goto|implements|instanceof|'
- r'native|new|package|private|protected|public|'
- r'return|static|strictfp|super|switch|synchronized|this|'
- r'throw|throws|transient|try|volatile|while)\b', Keyword),
+ (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
+ r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
+ Keyword),
+ (r'(abstract|const|enum|extends|final|implements|native|private|'
+ r'protected|public|static|strictfp|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Declaration),
(r'(boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface)(\s+)', bygroups(Keyword, Text), 'class'),
- (r'(import)(\s+)', bygroups(Keyword, Text), 'import'),
+ (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
(r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
@@ -1011,8 +1041,8 @@ class ObjectiveCLexer(RegexLexer):
tokens = {
'whitespace': [
- (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
- (r'^\s*#', Comment.Preproc, 'macro'),
+ (r'^(\s*)(#if\s+0)', bygroups(Text, Comment.Preproc), 'if0'),
+ (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'macro'),
(r'\n', Text),
(r'\s+', Text),
(r'\\\n', Text), # line continuation
@@ -1031,9 +1061,10 @@ class ObjectiveCLexer(RegexLexer):
(r'[()\[\],.]', Punctuation),
(r'(auto|break|case|const|continue|default|do|else|enum|extern|'
r'for|goto|if|register|restricted|return|sizeof|static|struct|'
- r'switch|typedef|union|volatile|virtual|while|@selector|'
+ r'switch|typedef|union|volatile|virtual|while|in|@selector|'
r'@private|@protected|@public|@encode|'
- r'@synchronized|@try|@throw|@catch|@finally|@end)\b', Keyword),
+ r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
+ r'@synthesize|@dynamic)\b', Keyword),
(r'(int|long|float|short|double|char|unsigned|signed|void|'
r'id|BOOL|IBOutlet|IBAction|SEL)\b', Keyword.Type),
(r'(_{0,2}inline|naked|restrict|thread|typename)\b', Keyword.Reserved),
@@ -1068,7 +1099,7 @@ class ObjectiveCLexer(RegexLexer):
('([a-zA-Z_][a-zA-Z0-9_]*)(\s*:\s*)([a-zA-Z_][a-zA-Z0-9_]*)?',
bygroups(Name.Class, Text, Name.Class), '#pop'),
# interface definition for a category
- ('([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\([a-zA-Z_][a-zA-Z0-9_]\)*)',
+ ('([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\([a-zA-Z_][a-zA-Z0-9_]*\))',
bygroups(Name.Class, Text, Name.Label), '#pop'),
# simple interface / implementation
('([a-zA-Z_][a-zA-Z0-9_]*)', Name.Class, '#pop')
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index 832f21f5..6b990004 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -94,7 +94,10 @@ class CSharpLexer(RegexLexer):
(r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
(r'#[ \t]*(if|endif|else|elif|define|undef|'
- r'line|error|warning|region|endregion)\b.*?\n', Comment.Preproc),
+ r'line|error|warning|region|endregion|pragma)\b.*?\n',
+ Comment.Preproc),
+ (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
+ Keyword)),
(r'(abstract|as|base|break|case|catch|'
r'checked|const|continue|default|delegate|'
r'do|else|enum|event|explicit|extern|false|finally|'
@@ -104,9 +107,10 @@ class CSharpLexer(RegexLexer):
r'ref|return|sealed|sizeof|stackalloc|static|'
r'switch|this|throw|true|try|typeof|'
r'unchecked|unsafe|virtual|void|while|'
- r'get|set|new|partial)\b', Keyword),
+ r'get|set|new|partial|yield|add|remove|value)\b', Keyword),
+ (r'(global)(::)', bygroups(Keyword, Punctuation)),
(r'(bool|byte|char|decimal|double|float|int|long|object|sbyte|'
- r'short|string|uint|ulong|ushort)\b', Keyword.Type),
+ r'short|string|uint|ulong|ushort)\b\??', Keyword.Type),
(r'(class|struct)(\s+)', bygroups(Keyword, Text), 'class'),
(r'(namespace|using)(\s+)', bygroups(Keyword, Text), 'namespace'),
(cs_ident, Name),
diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py
index a7a50f7d..18432f9a 100644
--- a/pygments/lexers/functional.py
+++ b/pygments/lexers/functional.py
@@ -97,7 +97,7 @@ class SchemeLexer(RegexLexer):
# valid names for identifiers
# well, names can only not consist fully of numbers
# but this should be good enough for now
- valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~-]+'
+ valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~|-]+'
tokens = {
'root' : [
@@ -587,13 +587,13 @@ class OcamlLexer(RegexLexer):
(r'[~?][a-z][\w\']*:', Name.Variable),
],
'comment': [
- (r'[^(*)]', Comment),
+ (r'[^(*)]+', Comment),
(r'\(\*', Comment, '#push'),
(r'\*\)', Comment, '#pop'),
(r'[(*)]', Comment),
],
'string': [
- (r'[^\\"]', String.Double),
+ (r'[^\\"]+', String.Double),
include('escape-sequence'),
(r'\\\n', String.Double),
(r'"', String.Double, '#pop'),
diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py
index cf628c04..95aa2686 100644
--- a/pygments/lexers/math.py
+++ b/pygments/lexers/math.py
@@ -252,6 +252,9 @@ class NumPyLexer(PythonLexer):
name = 'NumPy'
aliases = ['numpy']
+ # override the mimetypes to not inherit them from python
+ mimetypes = []
+
EXTRA_KEYWORDS = set([
'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
index 0701caa4..5d5346e6 100644
--- a/pygments/lexers/other.py
+++ b/pygments/lexers/other.py
@@ -12,15 +12,19 @@
import re
-from pygments.lexer import RegexLexer, include, bygroups, using, this
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, this, \
+ do_insertions
from pygments.token import Error, Punctuation, \
- Text, Comment, Operator, Keyword, Name, String, Number
+ Text, Comment, Operator, Keyword, Name, String, Number, Generic
from pygments.util import shebang_matches
-__all__ = ['SqlLexer', 'MySqlLexer', 'BrainfuckLexer', 'BashLexer',
- 'BatchLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer',
- 'SmalltalkLexer', 'TcshLexer', 'LogtalkLexer']
+__all__ = ['SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer', 'BrainfuckLexer',
+ 'BashLexer', 'BatchLexer', 'BefungeLexer', 'RedcodeLexer',
+ 'MOOCodeLexer', 'SmalltalkLexer', 'TcshLexer', 'LogtalkLexer',
+ 'GnuplotLexer', 'PovrayLexer']
+
+line_re = re.compile('.*?\n')
class SqlLexer(RegexLexer):
@@ -210,6 +214,44 @@ class MySqlLexer(RegexLexer):
}
+class SqliteConsoleLexer(Lexer):
+ """
+ Lexer for example sessions using sqlite3.
+ """
+
+ name = 'sqlite3con'
+ aliases = []
+ filenames = ['*.sqlite3-console']
+ mimetypes = ['text/x-sqlite3-console']
+
+ def get_tokens_unprocessed(self, data):
+ sql = SqlLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(data):
+ line = match.group()
+ if line.startswith('sqlite> ') or line.startswith(' ...> '):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:8])]))
+ curcode += line[8:]
+ else:
+ if curcode:
+ for item in do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ if line.startswith('SQL error: '):
+ yield (match.start(), Generic.Traceback, line)
+ else:
+ yield (match.start(), Generic.Output, line)
+ if curcode:
+ for item in do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode)):
+ yield item
+
+
class BrainfuckLexer(RegexLexer):
"""
Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
@@ -312,13 +354,14 @@ class BashLexer(RegexLexer):
(r'&&|\|\|', Operator),
],
'data': [
- (r'"(\\\\|\\[0-7]+|\\.|[^"])*"', String.Double),
- (r"'(\\\\|\\[0-7]+|\\.|[^'])*'", String.Single),
+ (r'\$?"(\\\\|\\[0-7]+|\\.|[^"])*"', String.Double),
+ (r"\$?'(\\\\|\\[0-7]+|\\.|[^'])*'", String.Single),
(r';', Text),
(r'\s+', Text),
- (r'[^=\s\n\[\]{}()$"\'`\\]+', Text),
+ (r'[^=\s\n\[\]{}()$"\'`\\<]+', Text),
(r'\d+(?= |\Z)', Number),
(r'\$#?(\w+|.)', Name.Variable),
+ (r'<', Text),
],
'curly': [
(r'}', Keyword, '#pop'),
@@ -660,7 +703,8 @@ class LogtalkLexer(RegexLexer):
# DCGs and term expansion
(r'(expand_term|(goal|term)_expansion|phrase)(?=[(])', Keyword),
# Entity
- (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword),
+ (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])',
+ Keyword),
(r'(object|protocol|category)_property(?=[(])', Keyword),
# Entity relations
(r'complements_object(?=[(])', Keyword),
@@ -682,7 +726,8 @@ class LogtalkLexer(RegexLexer):
# All solutions
(r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
# Multi-threading meta-predicates
- (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
+ (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])',
+ Keyword),
# Term unification
(r'unify_with_occurs_check(?=[(])', Keyword),
# Term creation and decomposition
@@ -694,13 +739,15 @@ class LogtalkLexer(RegexLexer):
# Other arithmetic functors
(r'(cos|atan|exp|log|s(in|qrt))(?=[(])', Keyword),
# Term testing
- (r'(var|atom(ic)?|integer|float|compound|n(onvar|umber))(?=[(])', Keyword),
+ (r'(var|atom(ic)?|integer|float|compound|n(onvar|umber))(?=[(])',
+ Keyword),
# Stream selection and control
(r'(curren|se)t_(in|out)put(?=[(])', Keyword),
(r'(open|close)(?=[(])', Keyword),
(r'flush_output(?=[(])', Keyword),
(r'(at_end_of_stream|flush_output)\b', Keyword),
- (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword),
+ (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])',
+ Keyword),
# Character and byte input/output
(r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
(r'\bnl\b', Keyword),
@@ -768,21 +815,25 @@ class LogtalkLexer(RegexLexer):
'directive': [
# Entity directives
- (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
+ (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
(r'(end_(category|object|protocol))[.]',Keyword, 'root'),
# Predicate scope directives
(r'(public|protected|private)(?=[(])', Keyword, 'root'),
# Other directives
- (r'\be(ncoding|xport)(?=[(])', Keyword, 'root'),
- (r'\bin(fo|itialization)(?=[(])', Keyword, 'root'),
- (r'\b(dynamic|synchronized|threaded)[.]', Keyword, 'root'),
- (r'\b(alias|d(ynamic|iscontiguous)|m(eta_predicate|ode|ultifile)|synchronized)(?=[(])', Keyword, 'root'),
- (r'\bop(?=[(])', Keyword, 'root'),
- (r'\b(calls|use(s|_module))(?=[(])', Keyword, 'root'),
+ (r'e(ncoding|xport)(?=[(])', Keyword, 'root'),
+ (r'in(fo|itialization)(?=[(])', Keyword, 'root'),
+ (r'(dynamic|synchronized|threaded)[.]', Keyword, 'root'),
+ (r'(alias|d(ynamic|iscontiguous)|m(eta_predicate|ode|ultifile)'
+ r'|synchronized)(?=[(])', Keyword, 'root'),
+ (r'op(?=[(])', Keyword, 'root'),
+ (r'(calls|use(s|_module))(?=[(])', Keyword, 'root'),
+ (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
+ (r'[a-z][a-zA-Z0-9_]*[.]', Text, 'root'),
],
'entityrelations': [
- (r'(extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
+ (r'(extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])',
+ Keyword),
# Numbers
(r"0'.", Number),
(r'0b[01]+', Number),
@@ -797,13 +848,268 @@ class LogtalkLexer(RegexLexer):
# Strings
(r'"(\\\\|\\"|[^"])*"', String),
# End of entity-opening directive
- (r'([)]\.\n)', Text, 'root'),
+ (r'([)]\.)', Text, 'root'),
# Scope operator
(r'(::)', Operator),
# Ponctuation
(r'[()\[\],.|]', Text),
+ # Comments
+ (r'%.*?\n', Comment),
+ (r'/\*(.|\n)*?\*/',Comment),
# Whitespace
(r'\n', Text),
(r'\s+', Text),
]
}
+
+
+def _shortened(word):
+ dpos = word.find('$')
+ return '|'.join(word[:dpos] + word[dpos+1:i] + r'\b'
+ for i in range(len(word), dpos, -1))
+def _shortened_many(*words):
+ return '|'.join(map(_shortened, words))
+
+class GnuplotLexer(RegexLexer):
+ """
+ For `Gnuplot <http://gnuplot.info/>`_ plotting scripts.
+
+ *New in Pygments 0.11.*
+ """
+
+ name = 'Gnuplot'
+ aliases = ['gnuplot']
+ filenames = ['*.plot', '*.plt']
+ mimetypes = ['text/x-gnuplot']
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ (_shortened('bi$nd'), Keyword, 'bind'),
+ (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'),
+ (_shortened('f$it'), Keyword, 'fit'),
+ (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'),
+ (r'else\b', Keyword),
+ (_shortened('pa$use'), Keyword, 'pause'),
+ (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'),
+ (_shortened('sa$ve'), Keyword, 'save'),
+ (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')),
+ (_shortened_many('sh$ow', 'uns$et'),
+ Keyword, ('noargs', 'optionarg')),
+ (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear',
+ 'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int',
+ 'pwd$', 're$read', 'res$et', 'scr$eendump',
+ 'she$ll', 'sy$stem', 'up$date'),
+ Keyword, 'genericargs'),
+ (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump',
+ 'she$ll', 'test$'),
+ Keyword, 'noargs'),
+ ('([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(=)',
+ bygroups(Name.Variable, Text, Operator), 'genericargs'),
+ ('([a-zA-Z_][a-zA-Z0-9_]*)(\s*\(.*?\)\s*)(=)',
+ bygroups(Name.Function, Text, Operator), 'genericargs'),
+ (r'@[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant), # macros
+ (r';', Keyword),
+ ],
+ 'comment': [
+ (r'[^\\\n]', Comment),
+ (r'\\\n', Comment),
+ (r'\\', Comment),
+ # don't add the newline to the Comment token
+ ('', Comment, '#pop'),
+ ],
+ 'whitespace': [
+ ('#', Comment, 'comment'),
+ (r'[ \t\v\f]+', Text),
+ ],
+ 'noargs': [
+ include('whitespace'),
+ # semicolon and newline end the argument list
+ (r';', Punctuation, '#pop'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'dqstring': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ (r'\n', String, '#pop'), # newline ends the string too
+ ],
+ 'sqstring': [
+ (r"''", String), # escaped single quote
+ (r"'", String, '#pop'),
+ (r"[^\\'\n]+", String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # normal backslash
+ (r'\n', String, '#pop'), # newline ends the string too
+ ],
+ 'genericargs': [
+ include('noargs'),
+ (r'"', String, 'dqstring'),
+ (r"'", String, 'sqstring'),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
+ (r'(\d+\.\d*|\.\d+)', Number.Float),
+ (r'-?\d+', Number.Integer),
+ ('[,.~!%^&*+=|?:<>/-]', Operator),
+ ('[{}()\[\]]', Punctuation),
+ (r'(eq|ne)\b', Operator.Word),
+ (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ (r'@[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant), # macros
+ (r'\\\n', Text),
+ ],
+ 'optionarg': [
+ include('whitespace'),
+ (_shortened_many(
+ "a$ll","an$gles","ar$row","au$toscale","b$ars","bor$der",
+ "box$width","cl$abel","c$lip","cn$trparam","co$ntour","da$ta",
+ "data$file","dg$rid3d","du$mmy","enc$oding","dec$imalsign",
+ "fit$","font$path","fo$rmat","fu$nction","fu$nctions","g$rid",
+ "hid$den3d","his$torysize","is$osamples","k$ey","keyt$itle",
+ "la$bel","li$nestyle","ls$","loa$dpath","loc$ale","log$scale",
+ "mac$ros","map$ping","map$ping3d","mar$gin","lmar$gin",
+ "rmar$gin","tmar$gin","bmar$gin","mo$use","multi$plot",
+ "mxt$ics","nomxt$ics","mx2t$ics","nomx2t$ics","myt$ics",
+ "nomyt$ics","my2t$ics","nomy2t$ics","mzt$ics","nomzt$ics",
+ "mcbt$ics","nomcbt$ics","of$fsets","or$igin","o$utput",
+ "pa$rametric","pm$3d","pal$ette","colorb$ox","p$lot",
+ "poi$ntsize","pol$ar","pr$int","obj$ect","sa$mples","si$ze",
+ "st$yle","su$rface","table$","t$erminal","termo$ptions","ti$cs",
+ "ticsc$ale","ticsl$evel","timef$mt","tim$estamp","tit$le",
+ "v$ariables","ve$rsion","vi$ew","xyp$lane","xda$ta","x2da$ta",
+ "yda$ta","y2da$ta","zda$ta","cbda$ta","xl$abel","x2l$abel",
+ "yl$abel","y2l$abel","zl$abel","cbl$abel","xti$cs","noxti$cs",
+ "x2ti$cs","nox2ti$cs","yti$cs","noyti$cs","y2ti$cs","noy2ti$cs",
+ "zti$cs","nozti$cs","cbti$cs","nocbti$cs","xdti$cs","noxdti$cs",
+ "x2dti$cs","nox2dti$cs","ydti$cs","noydti$cs","y2dti$cs",
+ "noy2dti$cs","zdti$cs","nozdti$cs","cbdti$cs","nocbdti$cs",
+ "xmti$cs","noxmti$cs","x2mti$cs","nox2mti$cs","ymti$cs",
+ "noymti$cs","y2mti$cs","noy2mti$cs","zmti$cs","nozmti$cs",
+ "cbmti$cs","nocbmti$cs","xr$ange","x2r$ange","yr$ange",
+ "y2r$ange","zr$ange","cbr$ange","rr$ange","tr$ange","ur$ange",
+ "vr$ange","xzeroa$xis","x2zeroa$xis","yzeroa$xis","y2zeroa$xis",
+ "zzeroa$xis","zeroa$xis","z$ero"), Name.Builtin, '#pop'),
+ ],
+ 'bind': [
+ ('!', Keyword, '#pop'),
+ (_shortened('all$windows'), Name.Builtin),
+ include('genericargs'),
+ ],
+ 'quit': [
+ (r'gnuplot\b', Keyword),
+ include('noargs'),
+ ],
+ 'fit': [
+ (r'via\b', Name.Builtin),
+ include('plot'),
+ ],
+ 'if': [
+ (r'\)', Punctuation, '#pop'),
+ include('genericargs'),
+ ],
+ 'pause': [
+ (r'(mouse|any|button1|button2|button3)\b', Name.Builtin),
+ (_shortened('key$press'), Name.Builtin),
+ include('genericargs'),
+ ],
+ 'plot': [
+ (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex',
+ 'mat$rix', 's$mooth', 'thru$', 't$itle',
+ 'not$itle', 'u$sing', 'w$ith'),
+ Name.Builtin),
+ include('genericargs'),
+ ],
+ 'save': [
+ (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'),
+ Name.Builtin),
+ include('genericargs'),
+ ],
+ }
+
+
+class PovrayLexer(RegexLexer):
+ """
+    For `Persistence of Vision Raytracer <http://www.povray.org/>`_ files.
+
+ *New in Pygments 0.11.*
+ """
+ name = 'POVRay'
+ aliases = ['pov']
+ filenames = ['*.pov', '*.inc']
+ mimetypes = ['text/x-povray']
+
+ tokens = {
+ 'root': [
+ (r'/\*[\w\W]*?\*/', Comment.Multiline),
+ (r'//.*\n', Comment.Single),
+ (r'"(?:\\.|[^"])+"', String.Double),
+ (r'#(debug|default|else|end|error|fclose|fopen|if|ifdef|ifndef|'
+ r'include|range|read|render|statistics|switch|undef|version|'
+ r'warning|while|write|define|macro|local|declare)',
+ Comment.Preproc),
+ (r'\b(aa_level|aa_threshold|abs|acos|acosh|adaptive|adc_bailout|'
+ r'agate|agate_turb|all|alpha|ambient|ambient_light|angle|'
+ r'aperture|arc_angle|area_light|asc|asin|asinh|assumed_gamma|'
+ r'atan|atan2|atanh|atmosphere|atmospheric_attenuation|'
+ r'attenuating|average|background|black_hole|blue|blur_samples|'
+ r'bounded_by|box_mapping|bozo|break|brick|brick_size|'
+ r'brightness|brilliance|bumps|bumpy1|bumpy2|bumpy3|bump_map|'
+ r'bump_size|case|caustics|ceil|checker|chr|clipped_by|clock|'
+ r'color|color_map|colour|colour_map|component|composite|concat|'
+ r'confidence|conic_sweep|constant|control0|control1|cos|cosh|'
+ r'count|crackle|crand|cube|cubic_spline|cylindrical_mapping|'
+ r'debug|declare|default|degrees|dents|diffuse|direction|'
+ r'distance|distance_maximum|div|dust|dust_type|eccentricity|'
+ r'else|emitting|end|error|error_bound|exp|exponent|'
+ r'fade_distance|fade_power|falloff|falloff_angle|false|'
+ r'file_exists|filter|finish|fisheye|flatness|flip|floor|'
+ r'focal_point|fog|fog_alt|fog_offset|fog_type|frequency|gif|'
+ r'global_settings|glowing|gradient|granite|gray_threshold|'
+ r'green|halo|hexagon|hf_gray_16|hierarchy|hollow|hypercomplex|'
+ r'if|ifdef|iff|image_map|incidence|include|int|interpolate|'
+ r'inverse|ior|irid|irid_wavelength|jitter|lambda|leopard|'
+ r'linear|linear_spline|linear_sweep|location|log|looks_like|'
+ r'look_at|low_error_factor|mandel|map_type|marble|material_map|'
+ r'matrix|max|max_intersections|max_iteration|max_trace_level|'
+ r'max_value|metallic|min|minimum_reuse|mod|mortar|'
+ r'nearest_count|no|normal|normal_map|no_shadow|number_of_waves|'
+ r'octaves|off|offset|omega|omnimax|on|once|onion|open|'
+ r'orthographic|panoramic|pattern1|pattern2|pattern3|'
+ r'perspective|pgm|phase|phong|phong_size|pi|pigment|'
+ r'pigment_map|planar_mapping|png|point_at|pot|pow|ppm|'
+ r'precision|pwr|quadratic_spline|quaternion|quick_color|'
+ r'quick_colour|quilted|radial|radians|radiosity|radius|rainbow|'
+ r'ramp_wave|rand|range|reciprocal|recursion_limit|red|'
+ r'reflection|refraction|render|repeat|rgb|rgbf|rgbft|rgbt|'
+ r'right|ripples|rotate|roughness|samples|scale|scallop_wave|'
+ r'scattering|seed|shadowless|sin|sine_wave|sinh|sky|sky_sphere|'
+ r'slice|slope_map|smooth|specular|spherical_mapping|spiral|'
+ r'spiral1|spiral2|spotlight|spotted|sqr|sqrt|statistics|str|'
+ r'strcmp|strength|strlen|strlwr|strupr|sturm|substr|switch|sys|'
+ r't|tan|tanh|test_camera_1|test_camera_2|test_camera_3|'
+ r'test_camera_4|texture|texture_map|tga|thickness|threshold|'
+ r'tightness|tile2|tiles|track|transform|translate|transmit|'
+ r'triangle_wave|true|ttf|turbulence|turb_depth|type|'
+ r'ultra_wide_angle|up|use_color|use_colour|use_index|u_steps|'
+ r'val|variance|vaxis_rotate|vcross|vdot|version|vlength|'
+ r'vnormalize|volume_object|volume_rendered|vol_with_light|'
+ r'vrotate|v_steps|warning|warp|water_level|waves|while|width|'
+ r'wood|wrinkles|yes)\b', Keyword),
+ (r'bicubic_patch|blob|box|camera|cone|cubic|cylinder|difference|'
+ r'disc|height_field|intersection|julia_fractal|lathe|'
+ r'light_source|merge|mesh|object|plane|poly|polygon|prism|'
+ r'quadric|quartic|smooth_triangle|sor|sphere|superellipsoid|'
+ r'text|torus|triangle|union', Name.Builtin),
+ #TODO: <=, etc
+ (r'[\[\](){}<>;,]', Punctuation),
+ (r'[-+*/=]', Operator),
+ (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo),
+ (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
+ (r'[0-9]+\.[0-9]*', Number.Float),
+ (r'\.[0-9]+', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\s+', Text),
+ ]
+ }
diff --git a/pygments/lexers/special.py b/pygments/lexers/special.py
index 8d66fc67..b7b5843e 100644
--- a/pygments/lexers/special.py
+++ b/pygments/lexers/special.py
@@ -39,7 +39,9 @@ line_re = re.compile('.*?\n')
class RawTokenLexer(Lexer):
"""
- Recreate a token stream formatted with the `RawTokenFormatter`.
+ Recreate a token stream formatted with the `RawTokenFormatter`. This
+ lexer raises exceptions during parsing if the token stream in the
+ file is malformed.
Additional options accepted:
@@ -49,7 +51,7 @@ class RawTokenLexer(Lexer):
"""
name = 'Raw token data'
aliases = ['raw']
- filenames = ['*.raw']
+ filenames = []
mimetypes = ['application/x-pygments-tokens']
def __init__(self, **options):
@@ -86,6 +88,8 @@ class RawTokenLexer(Lexer):
ttype = Token
ttypes = ttypestr.split('.')[1:]
for ttype_ in ttypes:
+ if not ttype_ or not ttype_[0].isupper():
+ raise ValueError('malformed token name')
ttype = getattr(ttype, ttype_)
_ttype_cache[ttypestr] = ttype
val = val[2:-2].decode('unicode-escape')
diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py
index 3a18dac7..fd84073f 100644
--- a/pygments/lexers/templates.py
+++ b/pygments/lexers/templates.py
@@ -23,7 +23,7 @@ from pygments.lexers.compiled import JavaLexer
from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
include, using, this
from pygments.token import Error, Punctuation, \
- Text, Comment, Operator, Keyword, Name, String, Number, Other
+ Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
from pygments.util import html_doctype_matches, looks_like_xml
__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
@@ -37,7 +37,8 @@ __all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MakoLexer',
'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
- 'MakoCssLexer', 'JspLexer']
+ 'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
+ 'CheetahXmlLexer', 'CheetahJavascriptLexer']
class ErbLexer(Lexer):
@@ -523,7 +524,115 @@ class MakoCssLexer(DelegatingLexer):
**options)
-# Genshi lexers courtesy of Matt Good.
+# Genshi and Cheetah lexers courtesy of Matt Good.
+
+class CheetahPythonLexer(Lexer):
+ """
+ Lexer for handling Cheetah's special $ tokens in Python syntax.
+ """
+
+ def get_tokens_unprocessed(self, text):
+ pylexer = PythonLexer(**self.options)
+ for pos, type_, value in pylexer.get_tokens_unprocessed(text):
+ if type_ == Token.Error and value == '$':
+ type_ = Comment.Preproc
+ yield pos, type_, value
+
+
+class CheetahLexer(RegexLexer):
+ """
+ Generic `cheetah templates`_ lexer. Code that isn't Cheetah
+ markup is yielded as `Token.Other`. This also works for
+ `spitfire templates`_ which use the same syntax.
+
+ .. _cheetah templates: http://www.cheetahtemplate.org/
+ .. _spitfire templates: http://code.google.com/p/spitfire/
+ """
+
+ name = 'Cheetah'
+ aliases = ['cheetah', 'spitfire']
+ filenames = ['*.tmpl', '*.spt']
+ mimetypes = ['application/x-cheetah', 'application/x-spitfire']
+
+ tokens = {
+ 'root': [
+ (r'(##[^\n]*)$',
+ (bygroups(Comment))),
+ (r'#[*](.|\n)*?[*]#', Comment),
+ (r'#end[^#\n]*(?:#|$)', Comment.Preproc),
+ (r'#slurp$', Comment.Preproc),
+ (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
+ (bygroups(Comment.Preproc, using(CheetahPythonLexer),
+ Comment.Preproc))),
+ # TODO support other Python syntax like $foo['bar']
+ (r'(\$)([a-zA-Z_][a-zA-Z0-9_\.]*[a-zA-Z0-9_])',
+ bygroups(Comment.Preproc, using(CheetahPythonLexer))),
+ (r'(\$\{!?)(.*?)(\})(?s)',
+ bygroups(Comment.Preproc, using(CheetahPythonLexer),
+ Comment.Preproc)),
+ (r'''(?sx)
+ (.+?) # anything, followed by:
+ (?:
+ (?=[#][#a-zA-Z]*) | # an eval comment
+ (?=\$[a-zA-Z_{]) | # a substitution
+ \Z # end of string
+ )
+ ''', Other),
+ (r'\s+', Text),
+ ],
+ }
+
+
+class CheetahHtmlLexer(DelegatingLexer):
+ """
+    Subclass of the `CheetahLexer` that highlights unlexed data
+ with the `HtmlLexer`.
+ """
+
+ name = 'HTML+Cheetah'
+ aliases = ['html+cheetah', 'html+spitfire']
+ mimetypes = ['text/html+cheetah', 'text/html+spitfire']
+
+ def __init__(self, **options):
+ super(CheetahHtmlLexer, self).__init__(HtmlLexer, CheetahLexer,
+ **options)
+
+
+class CheetahXmlLexer(DelegatingLexer):
+ """
+    Subclass of the `CheetahLexer` that highlights unlexed data
+ with the `XmlLexer`.
+ """
+
+ name = 'XML+Cheetah'
+ aliases = ['xml+cheetah', 'xml+spitfire']
+ mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
+
+ def __init__(self, **options):
+ super(CheetahXmlLexer, self).__init__(XmlLexer, CheetahLexer,
+ **options)
+
+
+class CheetahJavascriptLexer(DelegatingLexer):
+ """
+    Subclass of the `CheetahLexer` that highlights unlexed data
+ with the `JavascriptLexer`.
+ """
+
+ name = 'JavaScript+Cheetah'
+ aliases = ['js+cheetah', 'javascript+cheetah',
+ 'js+spitfire', 'javascript+spitfire']
+ mimetypes = ['application/x-javascript+cheetah',
+ 'text/x-javascript+cheetah',
+ 'text/javascript+cheetah',
+ 'application/x-javascript+spitfire',
+ 'text/x-javascript+spitfire',
+ 'text/javascript+spitfire']
+
+ def __init__(self, **options):
+ super(CheetahJavascriptLexer, self).__init__(JavascriptLexer,
+ CheetahLexer, **options)
+
class GenshiTextLexer(RegexLexer):
"""
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index 460f8757..b696ef27 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -35,7 +35,8 @@ __all__ = ['IniLexer', 'SourcesListLexer', 'BaseMakefileLexer',
'MakefileLexer', 'DiffLexer', 'IrcLogsLexer', 'TexLexer',
'GroffLexer', 'ApacheConfLexer', 'BBCodeLexer', 'MoinWikiLexer',
'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer',
- 'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer']
+ 'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer',
+ 'LighttpdConfLexer', 'NginxConfLexer']
class IniLexer(RegexLexer):
@@ -218,7 +219,7 @@ class DiffLexer(RegexLexer):
(r'-.*\n', Generic.Deleted),
(r'!.*\n', Generic.Strong),
(r'@.*\n', Generic.Subheading),
- (r'Index.*\n', Generic.Heading),
+ (r'(Index|diff).*\n', Generic.Heading),
(r'=.*\n', Generic.Heading),
(r'.*\n', Text),
]
@@ -250,8 +251,9 @@ class DarcsPatchLexer(RegexLexer):
(r'<', Operator),
(r'>', Operator),
(r'{', Operator, 'patch'),
- (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)', bygroups(Operator, Keyword, Name, Text,
- Name, Operator, Literal.Date, Text), 'comment'),
+ (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
+ bygroups(Operator, Keyword, Name, Text, Name, Operator,
+ Literal.Date, Text), 'comment'),
(r'New patches:', Generic.Heading),
(r'Context:', Generic.Heading),
(r'Patch bundle hash:', Generic.Heading),
@@ -296,6 +298,10 @@ class IrcLogsLexer(RegexLexer):
# weechat
\d{4}\s\w{3}\s\d{2}\s # Date
\d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
+ |
+ # xchat
+ \w{3}\s\d{2}\s # Date
+ \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
)?
"""
tokens = {
@@ -303,7 +309,7 @@ class IrcLogsLexer(RegexLexer):
# log start/end
(r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
# hack
- ("^" + timestamp + r'(\s*<.*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
+ ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
# normal msgs
("^" + timestamp + r"""
(\s*<.*?>\s*) # Nick """,
@@ -685,6 +691,16 @@ class RstLexer(RegexLexer):
self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
RegexLexer.__init__(self, **options)
+ def analyse_text(text):
+ if text[:2] == '..' and text[2:3] != '.':
+ return 0.3
+ p1 = text.find("\n")
+ p2 = text.find("\n", p1 + 1)
+ if (p2 > -1 and # has two lines
+ p1 * 2 + 1 == p2 and # they are the same length
+ text[p1+1] in '-=' and # the next line both starts and ends with
+ text[p1+1] == text[p2-1]): # ...a sufficiently high header
+ return 0.5
class VimLexer(RegexLexer):
"""
@@ -710,10 +726,12 @@ class VimLexer(RegexLexer):
(r'"(\\\\|\\"|[^\n"])*"', String.Double),
(r"'(\\\\|\\'|[^\n'])*'", String.Single),
(r'-?\d+', Number),
+ (r'#[0-9a-f]{6}', Number.Hex),
(r'^:', Punctuation),
(r'[()<>+=!|,~-]', Punctuation), # Inexact list. Looks decent.
(r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b',
Keyword),
+ (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin),
(r'\b\w+\b', Name.Other), # These are postprocessed below
(r'.', Text),
],
@@ -994,7 +1012,8 @@ class YamlLexerContext(LexerContext):
class YamlLexer(ExtendedRegexLexer):
"""
- Lexer for YAML, a human-friendly data serialization language (http://yaml.org/).
+ Lexer for `YAML <http://yaml.org/>`_, a human-friendly data serialization
+ language.
*New in Pygments 0.11.*
"""
@@ -1390,3 +1409,74 @@ class YamlLexer(ExtendedRegexLexer):
if context is None:
context = YamlLexerContext(text, 0)
return super(YamlLexer, self).get_tokens_unprocessed(text, context)
+
+class LighttpdConfLexer(RegexLexer):
+ """
+ Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.
+
+ *New in Pygments 0.11.*
+ """
+ name = 'Lighttpd configuration file'
+ aliases = ['lighty', 'lighttpd']
+ filenames = []
+ mimetypes = ['text/x-lighttpd-conf']
+
+ tokens = {
+ 'root': [
+ (r'#.*\n', Comment.Single),
+ (r'/\S*', Name), # pathname
+ (r'[a-zA-Z._-]+', Keyword),
+ (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
+ (r'[0-9]+', Number),
+ (r'=>|=~|\+=|==|=|\+', Operator),
+ (r'\$[A-Z]+', Name.Builtin),
+ (r'[(){}\[\],]', Punctuation),
+ (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
+ (r'\s+', Text),
+ ],
+
+ }
+
+class NginxConfLexer(RegexLexer):
+ """
+ Lexer for `Nginx <http://nginx.net/>`_ configuration files.
+
+ *New in Pygments 0.11.*
+ """
+ name = 'Nginx configuration file'
+ aliases = ['nginx']
+ filenames = []
+ mimetypes = ['text/x-nginx-conf']
+
+ tokens = {
+ 'root': [
+ (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)),
+ (r'[^\s;#]+', Keyword, 'stmt'),
+ include('base'),
+ ],
+ 'block': [
+ (r'}', Punctuation, '#pop:2'),
+ (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
+ include('base'),
+ ],
+ 'stmt': [
+ (r'{', Punctuation, 'block'),
+ (r';', Punctuation, '#pop'),
+ include('base'),
+ ],
+ 'base': [
+ (r'#.*\n', Comment.Single),
+ (r'on|off', Name.Constant),
+ (r'\$[^\s;#]+', Name.Variable),
+ (r'([a-z0-9.-]+)(:)([0-9]+)',
+ bygroups(Name, Punctuation, Number.Integer)),
+ (r'[a-z-]+/[a-z-]+', Name), # mimetype
+ #(r'[a-zA-Z._-]+', Keyword),
+ (r'[0-9]+[km]?\b', Number.Integer),
+ (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)),
+ (r'[:=~]', Punctuation),
+ (r'[^\s;#{}$]+', String), # catch all
+ (r'/[^\s;#]*', Name), # pathname
+ (r'\s+', Text),
+ ],
+ }
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
index a61020cf..c766373c 100644
--- a/pygments/lexers/web.py
+++ b/pygments/lexers/web.py
@@ -48,8 +48,8 @@ class JavascriptLexer(RegexLexer):
(r'[~\^\*!%&<>\|+=:;,/?\\-]+', Operator),
(r'[{}\[\]();.]+', Punctuation),
(r'(for|in|while|do|break|return|continue|if|else|throw|try|'
- r'catch|var|with|const|label|function|new|typeof|'
- r'instanceof|this)\b', Keyword),
+ r'catch|new|typeof|instanceof|this)\b', Keyword),
+ (r'(var|with|const|label|function)\b', Keyword.Declaration),
(r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
(r'(Array|Boolean|Date|Error|Function|Math|netscape|'
r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
@@ -93,7 +93,8 @@ class ActionScriptLexer(RegexLexer):
r'switch)\b', Keyword),
(r'(class|public|final|internal|native|override|private|protected|'
r'static|import|extends|implements|interface|intrinsic|return|super|'
- r'dynamic|function|const|get|namespace|package|set)\b', Keyword.Declaration),
+ r'dynamic|function|const|get|namespace|package|set)\b',
+ Keyword.Declaration),
(r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
Keyword.Constant),
(r'(Accessibility|AccessibilityProperties|ActionScriptVersion|'
@@ -549,9 +550,10 @@ class PhpLexer(RegexLexer):
def analyse_text(text):
rv = 0.0
- for tag in '<?php', '?>':
- if tag in text:
- rv += 0.2
+ if re.search(r'<\?(?!xml)', text):
+ rv += 0.3
+ if '?>' in text:
+ rv += 0.1
return rv
@@ -633,3 +635,7 @@ class XsltLexer(XmlLexer):
yield index, Keyword, value
else:
yield index, token, value
+
+ def analyse_text(text):
+ if looks_like_xml(text) and '<xsl' in text:
+ return 0.8
diff --git a/pygments/style.py b/pygments/style.py
index e1847e7a..e0963d32 100644
--- a/pygments/style.py
+++ b/pygments/style.py
@@ -40,7 +40,7 @@ class StyleMeta(type):
ndef = _styles.get(token.parent, None)
styledefs = obj.styles.get(token, '').split()
if not ndef or token is None:
- ndef = ['', 0, 0, 0, '', '']
+ ndef = ['', 0, 0, 0, '', '', 0, 0, 0]
elif 'noinherit' in styledefs and token is not Token:
ndef = _styles[Token][:]
else:
@@ -65,6 +65,12 @@ class StyleMeta(type):
ndef[4] = colorformat(styledef[3:])
elif styledef[:7] == 'border:':
ndef[5] = colorformat(styledef[7:])
+ elif styledef == 'roman':
+ ndef[6] = 1
+ elif styledef == 'sans':
+ ndef[7] = 1
+ elif styledef == 'mono':
+ ndef[8] = 1
else:
ndef[0] = colorformat(styledef)
@@ -78,7 +84,10 @@ class StyleMeta(type):
'italic': bool(t[2]),
'underline': bool(t[3]),
'bgcolor': t[4] or None,
- 'border': t[5] or None
+ 'border': t[5] or None,
+ 'roman': bool(t[6]) or None,
+ 'sans': bool(t[7]) or None,
+ 'mono': bool(t[8]) or None,
}
def list_styles(cls):
diff --git a/pygments/styles/vim.py b/pygments/styles/vim.py
index d941d136..43d1771f 100644
--- a/pygments/styles/vim.py
+++ b/pygments/styles/vim.py
@@ -25,11 +25,13 @@ class VimStyle(Style):
styles = {
Token: "#cccccc",
Whitespace: "",
- Comment: "#00cdcd",
+ Comment: "#000080",
Comment.Preproc: "",
Comment.Special: "bold #cd0000",
Keyword: "#cdcd00",
+ Keyword.Declaration: "#00cd00",
+ Keyword.Namespace: "#cd00cd",
Keyword.Pseudo: "",
Keyword.Type: "#00cd00",
@@ -37,9 +39,8 @@ class VimStyle(Style):
Operator.Word: "#cdcd00",
Name: "",
- Name.Class: "#cd00cd",
+ Name.Class: "#00cdcd",
Name.Builtin: "#cd00cd",
- Name.Namespace: "bold #5c5cff",
Name.Exception: "bold #666699",
Name.Variable: "#00cdcd",
@@ -48,8 +49,8 @@ class VimStyle(Style):
Generic.Heading: "bold #000080",
Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
+ Generic.Deleted: "#cd0000",
+ Generic.Inserted: "#00cd00",
Generic.Error: "#FF0000",
Generic.Emph: "italic",
Generic.Strong: "bold",
diff --git a/pygments/token.py b/pygments/token.py
index f45ac284..6db41a56 100644
--- a/pygments/token.py
+++ b/pygments/token.py
@@ -130,6 +130,7 @@ STANDARD_TYPES = {
Keyword: 'k',
Keyword.Constant: 'kc',
Keyword.Declaration: 'kd',
+ Keyword.Namespace: 'kn',
Keyword.Pseudo: 'kp',
Keyword.Reserved: 'kr',
Keyword.Type: 'kt',
diff --git a/pygments/util.py b/pygments/util.py
index 8232964c..0e8c952a 100644
--- a/pygments/util.py
+++ b/pygments/util.py
@@ -179,11 +179,18 @@ def html_doctype_matches(text):
return doctype_matches(text, r'html\s+PUBLIC\s+"-//W3C//DTD X?HTML.*')
+_looks_like_xml_cache = {}
def looks_like_xml(text):
"""
Check if a doctype exists or if we have some tags.
"""
- m = doctype_lookup_re.match(text)
- if m is not None:
- return True
- return tag_re.search(text) is not None
+ key = hash(text)
+ try:
+ return _looks_like_xml_cache[key]
+ except KeyError:
+ m = doctype_lookup_re.match(text)
+ if m is not None:
+ return True
+ rv = tag_re.search(text[:1000]) is not None
+ _looks_like_xml_cache[key] = rv
+ return rv
diff --git a/tests/examplefiles/cheetah_example.html b/tests/examplefiles/cheetah_example.html
new file mode 100644
index 00000000..e2a0f477
--- /dev/null
+++ b/tests/examplefiles/cheetah_example.html
@@ -0,0 +1,13 @@
+<html>
+ <head><title>$title</title></head>
+ <body>
+ <table>
+ #for $client in $clients
+ <tr>
+ <td>$client.surname, $client.firstname</td>
+ <td><a href="mailto:$client.email">$client.email</a></td>
+ </tr>
+ #end for
+ </table>
+ </body>
+</html>
diff --git a/tests/examplefiles/genclass.clj b/tests/examplefiles/genclass.clj
new file mode 100644
index 00000000..c63da8fd
--- /dev/null
+++ b/tests/examplefiles/genclass.clj
@@ -0,0 +1,510 @@
+; Copyright (c) Rich Hickey. All rights reserved.
+; The use and distribution terms for this software are covered by the
+; Common Public License 1.0 (http://opensource.org/licenses/cpl.php)
+; which can be found in the file CPL.TXT at the root of this distribution.
+; By using this software in any fashion, you are agreeing to be bound by
+; the terms of this license.
+; You must not remove this notice, or any other, from this software.
+
+(in-ns 'clojure)
+
+(import '(java.lang.reflect Modifier Constructor)
+ '(clojure.asm ClassWriter ClassVisitor Opcodes Type)
+ '(clojure.asm.commons Method GeneratorAdapter)
+ '(clojure.lang IPersistentMap))
+
+;(defn method-sig [#^java.lang.reflect.Method meth]
+; [(. meth (getName)) (seq (. meth (getParameterTypes)))])
+
+(defn- non-private-methods [#^Class c]
+ (loop [mm {}
+ considered #{}
+ c c]
+ (if c
+ (let [[mm considered]
+ (loop [mm mm
+ considered considered
+ meths (concat
+ (seq (. c (getDeclaredMethods)))
+ (seq (. c (getMethods))))]
+ (if meths
+ (let [#^Method meth (first meths)
+ mods (. meth (getModifiers))
+ mk (method-sig meth)]
+ (if (or (considered mk)
+ (. Modifier (isPrivate mods))
+ (. Modifier (isStatic mods))
+ (. Modifier (isFinal mods)))
+ (recur mm (conj considered mk) (rest meths))
+ (recur (assoc mm mk meth) (conj considered mk) (rest meths))))
+ [mm considered]))]
+ (recur mm considered (. c (getSuperclass))))
+ mm)))
+
+(defn- ctor-sigs [super]
+ (for [#^Constructor ctor (. super (getDeclaredConstructors))
+ :when (not (. Modifier (isPrivate (. ctor (getModifiers)))))]
+ (apply vector (. ctor (getParameterTypes)))))
+
+(defn- escape-class-name [c]
+ (.. (.getSimpleName c)
+ (replace "[]" "<>")))
+
+(defn- overload-name [mname pclasses]
+ (if (seq pclasses)
+ (apply str mname (interleave (repeat \-)
+ (map escape-class-name pclasses)))
+ (str mname "-void")))
+
+;(distinct (map first(keys (mapcat non-private-methods [Object IPersistentMap]))))
+
+(defn gen-class
+ "Generates compiled bytecode for a class with the given
+ package-qualified cname (which, as all names in these parameters, can
+ be a string or symbol). The gen-class construct contains no
+ implementation, as the implementation will be dynamically sought by
+ the generated class in functions in a corresponding Clojure
+ namespace. Given a generated class org.mydomain.MyClass, methods
+ will be implemented that look for same-named functions in a Clojure
+  namespace called org.mydomain.MyClass. The init and main
+ functions (see below) will be found similarly. The static
+ initializer for the generated class will attempt to load the Clojure
+  support code for the class as a resource from the classpath, e.g. in
+ the example case, org/mydomain/MyClass.clj
+
+ Returns a map containing :name and :bytecode. Most uses will be
+ satisfied by the higher-level gen-and-load-class and
+ gen-and-store-class functions, which generate and immediately load,
+ or generate and store to disk, respectively.
+
+ Options should be a set of key/value pairs, all of which are optional:
+
+ :extends aclass
+
+ Specifies the superclass, the non-private methods of which will be
+ overridden by the class. If not provided, defaults to Object.
+
+ :implements [interface ...]
+
+ One or more interfaces, the methods of which will be implemented by the class.
+
+ :init name
+
+ If supplied, names a function that will be called with the arguments
+ to the constructor. Must return [[superclass-constructor-args] state]
+ If not supplied, the constructor args are passed directly to
+ the superclass constructor and the state will be nil
+
+ :constructors {[param-types] [super-param-types], ...}
+
+ By default, constructors are created for the generated class which
+ match the signature(s) of the constructors for the superclass. This
+ parameter may be used to explicitly specify constructors, each entry
+ providing a mapping from a constructor signature to a superclass
+ constructor signature. When you supply this, you must supply an :init
+ specifier.
+
+ :methods [[name [param-types] return-type], ...]
+
+ The generated class automatically defines all of the non-private
+ methods of its superclasses/interfaces. This parameter can be used
+ to specify the signatures of additional methods of the generated
+ class. Do not repeat superclass/interface signatures here.
+
+ :main boolean
+
+ If supplied and true, a static public main function will be
+ generated. It will pass each string of the String[] argument as a
+ separate argument to a function called 'main.
+
+ :factory name
+
+ If supplied, a (set of) public static factory function(s) will be
+ created with the given name, and the same signature(s) as the
+ constructor(s).
+
+ :state name
+
+ If supplied, a public final instance field with the given name will be
+ created. You must supply an :init function in order to provide a
+ value for the state. Note that, though final, the state can be a ref
+ or agent, supporting the creation of Java objects with transactional
+ or asynchronous mutation semantics.
+
+ :exposes {protected-field-name {:get name :set name}, ...}
+
+ Since the implementations of the methods of the generated class
+ occur in Clojure functions, they have no access to the inherited
+ protected fields of the superclass. This parameter can be used to
+ generate public getter/setter methods exposing the protected field(s)
+ for use in the implementation."
+
+ [cname & options]
+ (let [name (str cname)
+ {:keys [extends implements constructors methods main factory state init exposes]} (apply hash-map options)
+ super (or extends Object)
+ interfaces implements
+ supers (cons super (seq interfaces))
+ ctor-sig-map (or constructors (zipmap (ctor-sigs super) (ctor-sigs super)))
+ cv (new ClassWriter (. ClassWriter COMPUTE_MAXS))
+ cname (. name (replace "." "/"))
+ ctype (. Type (getObjectType cname))
+ iname (fn [c] (.. Type (getType c) (getInternalName)))
+ totype (fn [c] (. Type (getType c)))
+ to-types (fn [cs] (if (pos? (count cs))
+ (into-array (map totype cs))
+ (make-array Type 0)))
+ obj-type (totype Object)
+ arg-types (fn [n] (if (pos? n)
+ (into-array (replicate n obj-type))
+ (make-array Type 0)))
+ super-type (totype super)
+ init-name (str init)
+ factory-name (str factory)
+ state-name (str state)
+ main-name "main"
+ var-name (fn [s] (str s "__var"))
+ rt-type (totype clojure.lang.RT)
+ var-type (totype clojure.lang.Var)
+ ifn-type (totype clojure.lang.IFn)
+ iseq-type (totype clojure.lang.ISeq)
+ ex-type (totype java.lang.UnsupportedOperationException)
+ all-sigs (distinct (concat (map #(let[[m p] (key %)] {m [p]}) (mapcat non-private-methods supers))
+ (map (fn [[m p]] {(str m) [p]}) methods)))
+ sigs-by-name (apply merge-with concat {} all-sigs)
+ overloads (into {} (filter (fn [[m s]] (rest s)) sigs-by-name))
+ var-fields (concat (and init [init-name])
+ (and main [main-name])
+ (distinct (concat (keys sigs-by-name)
+ (mapcat (fn [[m s]] (map #(overload-name m %) s)) overloads)
+ (mapcat (comp (partial map str) vals val) exposes))))
+ emit-get-var (fn [gen v]
+ (let [false-label (. gen newLabel)
+ end-label (. gen newLabel)]
+ (. gen getStatic ctype (var-name v) var-type)
+ (. gen dup)
+ (. gen invokeVirtual var-type (. Method (getMethod "boolean isBound()")))
+ (. gen ifZCmp (. GeneratorAdapter EQ) false-label)
+ (. gen invokeVirtual var-type (. Method (getMethod "Object get()")))
+ (. gen goTo end-label)
+ (. gen mark false-label)
+ (. gen pop)
+ (. gen visitInsn (. Opcodes ACONST_NULL))
+ (. gen mark end-label)))
+ emit-forwarding-method
+ (fn [mname pclasses rclass else-gen]
+ (let [ptypes (to-types pclasses)
+ rtype (totype rclass)
+ m (new Method mname rtype ptypes)
+ is-overload (overloads mname)
+ gen (new GeneratorAdapter (. Opcodes ACC_PUBLIC) m nil nil cv)
+ found-label (. gen (newLabel))
+ else-label (. gen (newLabel))
+ end-label (. gen (newLabel))]
+ (. gen (visitCode))
+ (when is-overload
+ (emit-get-var gen (overload-name mname pclasses))
+ (. gen (dup))
+ (. gen (ifNonNull found-label))
+ (. gen (pop)))
+ (emit-get-var gen mname)
+ (. gen (dup))
+ (. gen (ifNull else-label))
+ (when is-overload
+ (. gen (mark found-label)))
+ ;if found
+ (. gen (loadThis))
+ ;box args
+ (dotimes i (count ptypes)
+ (. gen (loadArg i))
+ (. clojure.lang.Compiler$HostExpr (emitBoxReturn nil gen (nth pclasses i))))
+ ;call fn
+ (. gen (invokeInterface ifn-type (new Method "invoke" obj-type
+ (into-array (cons obj-type
+ (replicate (count ptypes) obj-type))))))
+ ;unbox return
+ (. gen (unbox rtype))
+ (when (= (. rtype (getSort)) (. Type VOID))
+ (. gen (pop)))
+ (. gen (goTo end-label))
+
+ ;else call supplied alternative generator
+ (. gen (mark else-label))
+ (. gen (pop))
+
+ (else-gen gen m)
+
+ (. gen (mark end-label))
+ (. gen (returnValue))
+ (. gen (endMethod))))
+ ]
+ ;start class definition
+ (. cv (visit (. Opcodes V1_5) (. Opcodes ACC_PUBLIC)
+ cname nil (iname super)
+ (when interfaces
+ (into-array (map iname interfaces)))))
+
+ ;static fields for vars
+ (doseq v var-fields
+ (. cv (visitField (+ (. Opcodes ACC_PUBLIC) (. Opcodes ACC_FINAL) (. Opcodes ACC_STATIC))
+ (var-name v)
+ (. var-type getDescriptor)
+ nil nil)))
+
+ ;instance field for state
+ (when state
+ (. cv (visitField (+ (. Opcodes ACC_PUBLIC) (. Opcodes ACC_FINAL))
+ state-name
+ (. obj-type getDescriptor)
+ nil nil)))
+
+ ;static init to set up var fields and load clj
+ (let [gen (new GeneratorAdapter (+ (. Opcodes ACC_PUBLIC) (. Opcodes ACC_STATIC))
+ (. Method getMethod "void <clinit> ()")
+ nil nil cv)]
+ (. gen (visitCode))
+ (doseq v var-fields
+ (. gen push name)
+ (. gen push v)
+ (. gen (invokeStatic rt-type (. Method (getMethod "clojure.lang.Var var(String,String)"))))
+ (. gen putStatic ctype (var-name v) var-type))
+
+ (. gen push ctype)
+ (. gen push (str (. name replace \. (. java.io.File separatorChar)) ".clj"))
+ (. gen (invokeStatic rt-type (. Method (getMethod "void loadResourceScript(Class,String)"))))
+
+ (. gen (returnValue))
+ (. gen (endMethod)))
+
+ ;ctors
+ (doseq [pclasses super-pclasses] ctor-sig-map
+ (let [ptypes (to-types pclasses)
+ super-ptypes (to-types super-pclasses)
+ m (new Method "<init>" (. Type VOID_TYPE) ptypes)
+ super-m (new Method "<init>" (. Type VOID_TYPE) super-ptypes)
+ gen (new GeneratorAdapter (. Opcodes ACC_PUBLIC) m nil nil cv)
+ no-init-label (. gen newLabel)
+ end-label (. gen newLabel)
+ nth-method (. Method (getMethod "Object nth(Object,int)"))
+ local (. gen newLocal obj-type)]
+ (. gen (visitCode))
+
+ (if init
+ (do
+ (emit-get-var gen init-name)
+ (. gen dup)
+ (. gen ifNull no-init-label)
+ ;box init args
+ (dotimes i (count pclasses)
+ (. gen (loadArg i))
+ (. clojure.lang.Compiler$HostExpr (emitBoxReturn nil gen (nth pclasses i))))
+ ;call init fn
+ (. gen (invokeInterface ifn-type (new Method "invoke" obj-type
+ (arg-types (count ptypes)))))
+ ;expecting [[super-ctor-args] state] returned
+ (. gen dup)
+ (. gen push 0)
+ (. gen (invokeStatic rt-type nth-method))
+ (. gen storeLocal local)
+
+ (. gen (loadThis))
+ (. gen dupX1)
+ (dotimes i (count super-pclasses)
+ (. gen loadLocal local)
+ (. gen push i)
+ (. gen (invokeStatic rt-type nth-method))
+ (. clojure.lang.Compiler$HostExpr (emitUnboxArg nil gen (nth super-pclasses i))))
+ (. gen (invokeConstructor super-type super-m))
+
+ (if state
+ (do
+ (. gen push 1)
+ (. gen (invokeStatic rt-type nth-method))
+ (. gen (putField ctype state-name obj-type)))
+ (. gen pop))
+
+ (. gen goTo end-label)
+ ;no init found
+ (. gen mark no-init-label)
+ (. gen (throwException ex-type (str init-name " not defined")))
+ (. gen mark end-label))
+ (if (= pclasses super-pclasses)
+ (do
+ (. gen (loadThis))
+ (. gen (loadArgs))
+ (. gen (invokeConstructor super-type super-m)))
+ (throw (new Exception ":init not specified, but ctor and super ctor args differ"))))
+
+ (. gen (returnValue))
+ (. gen (endMethod))
+ ;factory
+ (when factory
+ (let [fm (new Method factory-name ctype ptypes)
+ gen (new GeneratorAdapter (+ (. Opcodes ACC_PUBLIC) (. Opcodes ACC_STATIC))
+ fm nil nil cv)]
+ (. gen (visitCode))
+ (. gen newInstance ctype)
+ (. gen dup)
+ (. gen (loadArgs))
+ (. gen (invokeConstructor ctype m))
+ (. gen (returnValue))
+ (. gen (endMethod))))))
+
+ ;add methods matching supers', if no fn -> call super
+ (let [mm (non-private-methods super)]
+ (doseq #^java.lang.reflect.Method meth (vals mm)
+ (emit-forwarding-method (.getName meth) (.getParameterTypes meth) (.getReturnType meth)
+ (fn [gen m]
+ (. gen (loadThis))
+ ;push args
+ (. gen (loadArgs))
+ ;call super
+ (. gen (visitMethodInsn (. Opcodes INVOKESPECIAL)
+ (. super-type (getInternalName))
+ (. m (getName))
+ (. m (getDescriptor)))))))
+ ;add methods matching interfaces', if no fn -> throw
+ (doseq #^Class iface interfaces
+ (doseq #^java.lang.reflect.Method meth (. iface (getMethods))
+ (when-not (contains? mm (method-sig meth))
+ (emit-forwarding-method (.getName meth) (.getParameterTypes meth) (.getReturnType meth)
+ (fn [gen m]
+ (. gen (throwException ex-type (. m (getName)))))))))
+ ;extra methods
+ (doseq [mname pclasses rclass :as msig] methods
+ (emit-forwarding-method (str mname) pclasses rclass
+ (fn [gen m]
+ (. gen (throwException ex-type (. m (getName))))))))
+
+ ;main
+ (when main
+ (let [m (. Method getMethod "void main (String[])")
+ gen (new GeneratorAdapter (+ (. Opcodes ACC_PUBLIC) (. Opcodes ACC_STATIC))
+ m nil nil cv)
+ no-main-label (. gen newLabel)
+ end-label (. gen newLabel)]
+ (. gen (visitCode))
+
+ (emit-get-var gen main-name)
+ (. gen dup)
+ (. gen ifNull no-main-label)
+ (. gen loadArgs)
+ (. gen (invokeStatic rt-type (. Method (getMethod "clojure.lang.ISeq seq(Object)"))))
+ (. gen (invokeInterface ifn-type (new Method "applyTo" obj-type
+ (into-array [iseq-type]))))
+ (. gen pop)
+ (. gen goTo end-label)
+ ;no main found
+ (. gen mark no-main-label)
+ (. gen (throwException ex-type (str main-name " not defined")))
+ (. gen mark end-label)
+ (. gen (returnValue))
+ (. gen (endMethod))))
+ ;field exposers
+ (doseq [f {getter :get setter :set}] exposes
+ (let [fld (.getField super (str f))
+ ftype (totype (.getType fld))]
+ (when getter
+ (let [m (new Method (str getter) ftype (to-types []))
+ gen (new GeneratorAdapter (. Opcodes ACC_PUBLIC) m nil nil cv)]
+ (. gen (visitCode))
+ (. gen loadThis)
+ (. gen getField ctype (str f) ftype)
+ (. gen (returnValue))
+ (. gen (endMethod))))
+ (when setter
+ (let [m (new Method (str setter) (. Type VOID_TYPE) (into-array [ftype]))
+ gen (new GeneratorAdapter (. Opcodes ACC_PUBLIC) m nil nil cv)]
+ (. gen (visitCode))
+ (. gen loadThis)
+ (. gen loadArgs)
+ (. gen putField ctype (str f) ftype)
+ (. gen (returnValue))
+ (. gen (endMethod))))))
+ ;finish class def
+ (. cv (visitEnd))
+ {:name name :bytecode (. cv (toByteArray))}))
+
+(defn gen-and-load-class
+ "Generates and immediately loads the bytecode for the specified
+ class. Note that a class generated this way can be loaded only once
+ - the JVM supports only one class with a given name per
+ classloader. Subsequent to generation you can import it into any
+ desired namespaces just like any other class. See gen-class for a
+ description of the options."
+
+ [name & options]
+ (let [{:keys [name bytecode]}
+ (apply gen-class (str name) options)]
+ (.. clojure.lang.RT ROOT_CLASSLOADER (defineClass (str name) bytecode))))
+
+(defn gen-and-save-class
+ "Generates the bytecode for the named class and stores in a .class
+ file in a subpath of the supplied path, the directories for which
+ must already exist. See gen-class for a description of the options"
+
+ [path name & options]
+ (let [{:keys [name bytecode]} (apply gen-class (str name) options)
+ file (java.io.File. path (str (. name replace \. (. java.io.File separatorChar)) ".class"))]
+ (.createNewFile file)
+ (with-open f (java.io.FileOutputStream. file)
+ (.write f bytecode))))
+
+(comment
+;usage
+(gen-class
+ package-qualified-name
+ ;all below are optional
+ :extends aclass
+ :implements [interface ...]
+ :constructors {[param-types] [super-param-types], }
+ :methods [[name [param-types] return-type], ]
+ :main boolean
+ :factory name
+ :state name
+ :init name
+ :exposes {protected-field {:get name :set name}, })
+
+;(gen-and-load-class
+(clojure/gen-and-save-class
+ "/Users/rich/Downloads"
+ 'fred.lucy.Ethel
+ :extends clojure.lang.Box ;APersistentMap
+ :implements [clojure.lang.IPersistentMap]
+ :state 'state
+ ;:constructors {[Object] [Object]}
+ ;:init 'init
+ :main true
+ :factory 'create
+ :methods [['foo [Object] Object]
+ ['foo [] Object]]
+ :exposes {'val {:get 'getVal :set 'setVal}})
+
+(in-ns 'fred.lucy.Ethel__2276)
+(clojure/refer 'clojure :exclude '(assoc seq count cons))
+(defn init [n] [[] n])
+(defn foo
+ ([this] :foo)
+ ([this x] x))
+(defn main [x y] (println x y))
+(in-ns 'user)
+(def ethel (new fred.lucy.Ethel__2276 42))
+(def ethel (fred.lucy.Ethel__2276.create 21))
+(fred.lucy.Ethel__2276.main (into-array ["lucy" "ricky"]))
+(.state ethel)
+(.foo ethel 7)
+(.foo ethel)
+(.getVal ethel)
+(.setVal ethel 12)
+
+(gen-class org.clojure.MyComparator :implements [Comparator])
+(in-ns 'org.clojure.MyComparator)
+(defn compare [this x y] ...)
+
+(load-file "/Users/rich/dev/clojure/src/genclass.clj")
+
+(clojure/gen-and-save-class "/Users/rich/dev/clojure/gen/"
+ 'org.clojure.ClojureServlet
+ :extends javax.servlet.http.HttpServlet)
+
+)
diff --git a/tests/examplefiles/lighttpd_config.conf b/tests/examplefiles/lighttpd_config.conf
new file mode 100644
index 00000000..8475f378
--- /dev/null
+++ b/tests/examplefiles/lighttpd_config.conf
@@ -0,0 +1,13 @@
+fastcgi.server = ( ".php" => ((
+ "bin-path" => "/path/to/php-cgi",
+ "socket" => "/tmp/php.socket",
+ "max-procs" => 2,
+ "bin-environment" => (
+ "PHP_FCGI_CHILDREN" => "16",
+ "PHP_FCGI_MAX_REQUESTS" => "10000"
+ ),
+ "bin-copy-environment" => (
+ "PATH", "SHELL", "USER"
+ ),
+ "broken-scriptfilename" => "enable"
+ )))
diff --git a/tests/examplefiles/nasm_aoutso.asm b/tests/examplefiles/nasm_aoutso.asm
new file mode 100644
index 00000000..9fd9727e
--- /dev/null
+++ b/tests/examplefiles/nasm_aoutso.asm
@@ -0,0 +1,96 @@
+; test source file for assembling to NetBSD/FreeBSD a.out shared library
+; build with:
+; nasm -f aoutb aoutso.asm
+; ld -Bshareable -o aoutso.so aoutso.o
+; test with:
+; cc -o aoutso aouttest.c aoutso.so
+; ./aoutso
+
+; This file should test the following:
+; [1] Define and export a global text-section symbol
+; [2] Define and export a global data-section symbol
+; [3] Define and export a global BSS-section symbol
+; [4] Define a non-global text-section symbol
+; [5] Define a non-global data-section symbol
+; [6] Define a non-global BSS-section symbol
+; [7] Define a COMMON symbol
+; [8] Define a NASM local label
+; [9] Reference a NASM local label
+; [10] Import an external symbol
+; [11] Make a PC-relative call to an external symbol
+; [12] Reference a text-section symbol in the text section
+; [13] Reference a data-section symbol in the text section
+; [14] Reference a BSS-section symbol in the text section
+; [15] Reference a text-section symbol in the data section
+; [16] Reference a data-section symbol in the data section
+; [17] Reference a BSS-section symbol in the data section
+
+ BITS 32
+ EXTERN __GLOBAL_OFFSET_TABLE_
+ GLOBAL _lrotate:function ; [1]
+ GLOBAL _greet:function ; [1]
+ GLOBAL _asmstr:data _asmstr.end-_asmstr ; [2]
+ GLOBAL _textptr:data 4 ; [2]
+ GLOBAL _selfptr:data 4 ; [2]
+ GLOBAL _integer:data 4 ; [3]
+ EXTERN _printf ; [10]
+ COMMON _commvar 4 ; [7]
+
+ SECTION .text
+
+; prototype: long lrotate(long x, int num);
+_lrotate: ; [1]
+ push ebp
+ mov ebp,esp
+ mov eax,[ebp+8]
+ mov ecx,[ebp+12]
+.label rol eax,1 ; [4] [8]
+ loop .label ; [9] [12]
+ mov esp,ebp
+ pop ebp
+ ret
+
+; prototype: void greet(void);
+_greet push ebx ; we'll use EBX for GOT, so save it
+ call .getgot
+.getgot: pop ebx
+ add ebx,__GLOBAL_OFFSET_TABLE_ + $$ - .getgot wrt ..gotpc
+ mov eax,[ebx+_integer wrt ..got] ; [14]
+ mov eax,[eax]
+ inc eax
+ mov [ebx+localint wrt ..gotoff],eax ; [14]
+ mov eax,[ebx+_commvar wrt ..got]
+ push dword [eax]
+ mov eax,[ebx+localptr wrt ..gotoff] ; [13]
+ push dword [eax]
+ mov eax,[ebx+_integer wrt ..got] ; [1] [14]
+ push dword [eax]
+ lea eax,[ebx+_printfstr wrt ..gotoff]
+ push eax ; [13]
+ call _printf wrt ..plt ; [11]
+ add esp,16
+ pop ebx
+ ret
+
+ SECTION .data
+
+; a string
+_asmstr db 'hello, world', 0 ; [2]
+.end
+
+; a string for Printf
+_printfstr db "integer==%d, localint==%d, commvar=%d"
+ db 10, 0
+
+; some pointers
+localptr dd localint ; [5] [17]
+_textptr dd _greet wrt ..sym ; [15]
+_selfptr dd _selfptr wrt ..sym ; [16]
+
+ SECTION .bss
+
+; an integer
+_integer resd 1 ; [3]
+
+; a local integer
+localint resd 1 ; [6]
diff --git a/tests/examplefiles/nasm_objexe.asm b/tests/examplefiles/nasm_objexe.asm
new file mode 100644
index 00000000..dcae5eed
--- /dev/null
+++ b/tests/examplefiles/nasm_objexe.asm
@@ -0,0 +1,30 @@
+; Demonstration of how to write an entire .EXE format program as a .OBJ
+; file to be linked. Tested with the VAL free linker.
+; To build:
+; nasm -fobj objexe.asm
+; val objexe.obj,objexe.exe;
+; To test:
+; objexe
+; (should print `hello, world')
+
+ segment code
+
+..start: mov ax,data
+ mov ds,ax
+ mov ax,stack
+ mov ss,ax
+ mov sp,stacktop
+
+ mov dx,hello
+ mov ah,9
+ int 0x21
+
+ mov ax,0x4c00
+ int 0x21
+
+ segment data
+hello: db 'hello, world', 13, 10, '$'
+
+ segment stack stack
+ resb 64
+stacktop:
diff --git a/tests/examplefiles/nginx_nginx.conf b/tests/examplefiles/nginx_nginx.conf
new file mode 100644
index 00000000..9dcdc8ab
--- /dev/null
+++ b/tests/examplefiles/nginx_nginx.conf
@@ -0,0 +1,118 @@
+
+#user nobody;
+worker_processes 1;
+
+#error_log logs/error.log;
+#error_log logs/error.log notice;
+#error_log logs/error.log info;
+
+#pid logs/nginx.pid;
+
+
+events {
+ worker_connections 1024;
+}
+
+
+http {
+ include mime.types;
+ default_type application/octet-stream;
+
+ log_format main '$remote_addr - $remote_user [$time_local] $request '
+ '"$status" $body_bytes_sent "$http_referer" '
+ '"$http_user_agent" "$http_x_forwarded_for"';
+
+ #access_log logs/access.log main;
+
+ sendfile on;
+ #tcp_nopush on;
+
+ #keepalive_timeout 0;
+ keepalive_timeout 65;
+
+ #gzip on;
+
+ server {
+ listen 80;
+ server_name localhost;
+
+ charset koi8-r;
+
+ #access_log logs/host.access.log main;
+
+ location / {
+ root html;
+ index index.html index.htm;
+ }
+
+ #error_page 404 /404.html;
+
+ # redirect server error pages to the static page /50x.html
+ #
+ error_page 500 502 503 504 /50x.html;
+ location = /50x.html {
+ root html;
+ }
+
+ # proxy the PHP scripts to Apache listening on 127.0.0.1:80
+ #
+ location ~ \.php$ {
+ proxy_pass http://127.0.0.1;
+ }
+
+ # pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000
+ #
+ location ~ \.php$ {
+ root html;
+ fastcgi_pass 127.0.0.1:9000;
+ fastcgi_index index.php;
+ fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name;
+ include fastcgi_params;
+ }
+
+ # deny access to .htaccess files, if Apache's document root
+ # concurs with nginx's one
+ #
+ location ~ /\.ht {
+ deny all;
+ }
+ }
+
+
+ # another virtual host using mix of IP-, name-, and port-based configuration
+ #
+ server {
+ listen 8000;
+ listen somename:8080;
+ server_name somename alias another.alias;
+
+ location / {
+ root html;
+ index index.html index.htm;
+ }
+ }
+
+
+ # HTTPS server
+ #
+ server {
+ listen 443;
+ server_name localhost;
+
+ ssl on;
+ ssl_certificate cert.pem;
+ ssl_certificate_key cert.key;
+
+ ssl_session_timeout 5m;
+
+ ssl_protocols SSLv2 SSLv3 TLSv1;
+ ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv2:+EXP;
+ ssl_prefer_server_ciphers on;
+
+ location / {
+ root html;
+ index index.html index.htm;
+ }
+ }
+
+}
diff --git a/tests/examplefiles/objc_example.m b/tests/examplefiles/objc_example.m
new file mode 100644
index 00000000..c2a1c414
--- /dev/null
+++ b/tests/examplefiles/objc_example.m
@@ -0,0 +1,11 @@
+#import "Somefile.h"
+
+NSDictionary *dictionary = [NSDictionary dictionaryWithObjectsAndKeys:
+ @"quattuor", @"four", @"quinque", @"five", @"sex", @"six", nil];
+
+
+NSString *key;
+for (key in dictionary) {
+ NSLog(@"English: %@, Latin: %@", key, [dictionary valueForKey:key]);
+}
+
diff --git a/tests/examplefiles/objc_example2.m b/tests/examplefiles/objc_example2.m
new file mode 100644
index 00000000..8cd9b060
--- /dev/null
+++ b/tests/examplefiles/objc_example2.m
@@ -0,0 +1,24 @@
+// MyClass.h
+@interface MyClass : NSObject
+{
+ NSString *value;
+ NSTextField *textField;
+@private
+ NSDate *lastModifiedDate;
+}
+@property(copy, readwrite) NSString *value;
+@property(retain) IBOutlet NSTextField *textField;
+@end
+
+// MyClass.m
+// Class extension to declare private property
+@interface MyClass ()
+@property(retain) NSDate *lastModifiedDate;
+@end
+
+@implementation MyClass
+@synthesize value;
+@synthesize textField;
+@synthesize lastModifiedDate;
+// implementation continues
+@end
diff --git a/tests/examplefiles/regex-delims.pl b/tests/examplefiles/regex-delims.pl
new file mode 100644
index 00000000..6da5298d
--- /dev/null
+++ b/tests/examplefiles/regex-delims.pl
@@ -0,0 +1,120 @@
+#! /usr/bin/env perl
+
+use strict;
+use warnings;
+
+# common delimiters
+print "a: ";
+my $a = "foo";
+print $a, " - ";
+$a =~ s/foo/bar/;
+print $a, "\n";
+
+print "b: ";
+my $b = "foo";
+print $b, " - ";
+$b =~ s!foo!bar!;
+print $b, "\n";
+
+print "c: ";
+my $c = "foo";
+print $c, " - ";
+$c =~ s@foo@bar@;
+print $c, "\n";
+
+print "d: ";
+my $d = "foo";
+print $d, " - ";
+$d =~ s\foo\bar\;
+print $d, "\n";
+
+print "\n";
+
+# balanced delimiters
+print "e: ";
+my $e = "foo";
+print $e, " - ";
+$e =~ s{foo}{bar};
+print $e, "\n";
+
+print "f: ";
+my $f = "foo";
+print $f, " - ";
+$f =~ s(foo)(bar);
+print $f, "\n";
+
+print "g: ";
+my $g = "foo";
+print $g, " - ";
+$g =~ s<foo><bar>;
+print $g, "\n";
+
+print "h: ";
+my $h = "foo";
+print $h, " - ";
+$h =~ s[foo][bar];
+print $h, "\n";
+
+print "\n";
+
+# balanced delimiters with whitespace
+print "i: ";
+my $i = "foo";
+print $i, " - ";
+$i =~ s{foo} {bar};
+print $i, "\n";
+
+print "j: ";
+my $j = "foo";
+print $j, " - ";
+$j =~ s<foo> <bar>;
+print $j, "\n";
+
+print "k: ";
+my $k = "foo";
+print $k, " - ";
+$k =~
+ s(foo)
+
+ (bar);
+print $k, "\n";
+
+print "\n";
+
+# mixed delimiters
+print "l: ";
+my $l = "foo";
+print $l, " - ";
+$l =~ s{foo} <bar>;
+print $l, "\n";
+
+print "m: ";
+my $m = "foo";
+print $m, " - ";
+$m =~ s(foo) !bar!;
+print $m, "\n";
+
+print "n: ";
+my $n = "foo";
+print $n, " - ";
+$n =~ s[foo] $bar$;
+print $n, "\n";
+
+print "\n";
+
+# /x modifier
+print "o: ";
+my $o = "foo";
+print $o, " - ";
+$o =~ s{
+ foo
+ } {bar}x;
+print $o, "\n";
+
+print "p: ";
+my $p = "foo";
+print $p, " - ";
+$p =~ s%
+ foo
+ %bar%x;
+print $p, "\n";
diff --git a/tests/examplefiles/sphere.pov b/tests/examplefiles/sphere.pov
new file mode 100644
index 00000000..847ed451
--- /dev/null
+++ b/tests/examplefiles/sphere.pov
@@ -0,0 +1,18 @@
+#include "colors.inc"
+
+background { color Cyan }
+
+camera {
+ location <0, 2, -3>
+ look_at <0, 1, 2>
+}
+
+sphere {
+ <0, 1, 2>, 2
+ texture {
+ pigment { color Yellow }
+ }
+}
+
+light_source { <2, 4, -3> color White}
+
diff --git a/tests/examplefiles/sqlite3.sqlite3-console b/tests/examplefiles/sqlite3.sqlite3-console
new file mode 100644
index 00000000..3ec27135
--- /dev/null
+++ b/tests/examplefiles/sqlite3.sqlite3-console
@@ -0,0 +1,27 @@
+SQLite version 3.4.2
+Enter ".help" for instructions
+sqlite> .schema
+CREATE TABLE paste (paste_id integer, code text, parsed_code text, pub_date
+varchar(24), language varchar(64), parent_id integer, url varchar(128));
+CREATE TABLE vars (key varchar(24), value varchar(128));
+sqlite> a '
+ ...> '
+ ...> ;
+SQL error: near "a": syntax error
+sqlite> %;
+SQL error: near "%": syntax error
+sqlite> select count(language), language from paste group by language order
+ ...> by count(language) desc;
+144|python
+76|text
+22|pycon
+9|ruby
+7|c
+7|js
+6|html+django
+4|html
+4|tex
+2|html+php
+1|cpp
+1|scheme
+sqlite>
diff --git a/tests/examplefiles/test.plot b/tests/examplefiles/test.plot
new file mode 100644
index 00000000..cef0f908
--- /dev/null
+++ b/tests/examplefiles/test.plot
@@ -0,0 +1,333 @@
+#
+# $Id: prob2.dem,v 1.9 2006/06/14 03:24:09 sfeam Exp $
+#
+# Demo Statistical Approximations version 1.1
+#
+# Copyright (c) 1991, Jos van der Woude, jvdwoude@hut.nl
+
+# History:
+# -- --- 1991 Jos van der Woude: 1st version
+# 06 Jun 2006 Dan Sebald: Added plot methods for better visual effect.
+
+print ""
+print ""
+print ""
+print ""
+print ""
+print ""
+print " Statistical Approximations, version 1.1"
+print ""
+print " Copyright (c) 1991, 1992, Jos van de Woude, jvdwoude@hut.nl"
+print ""
+print ""
+print ""
+print ""
+print ""
+print ""
+print ""
+print ""
+print ""
+print ""
+print ""
+print " NOTE: contains 10 plots and consequently takes some time to run"
+print " Press Ctrl-C to exit right now"
+print ""
+pause -1 " Press Return to start demo ..."
+
+load "stat.inc"
+rnd(x) = floor(x+0.5)
+r_xmin = -1
+r_sigma = 4.0
+
+# Binomial PDF using normal approximation
+n = 25; p = 0.15
+mu = n * p
+sigma = sqrt(n * p * (1.0 - p))
+xmin = floor(mu - r_sigma * sigma)
+xmin = xmin < r_xmin ? r_xmin : xmin
+xmax = ceil(mu + r_sigma * sigma)
+ymax = 1.1 * binom(floor((n+1)*p), n, p) #mode of binomial PDF used
+set key box
+unset zeroaxis
+set xrange [xmin - 1 : xmax + 1]
+set yrange [0 : ymax]
+set xlabel "k, x ->"
+set ylabel "probability density ->"
+set ytics 0, ymax / 10.0, ymax
+set format x "%2.0f"
+set format y "%3.2f"
+set sample 200
+set title "binomial PDF using normal approximation"
+set arrow from mu, 0 to mu, normal(mu, mu, sigma) nohead
+set arrow from mu, normal(mu + sigma, mu, sigma) \
+ to mu + sigma, normal(mu + sigma, mu, sigma) nohead
+set label "mu" at mu + 0.5, ymax / 10
+set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma)
+plot binom(rnd(x), n, p) with histeps, normal(x, mu, sigma)
+pause -1 "Hit return to continue"
+unset arrow
+unset label
+
+# Binomial PDF using poisson approximation
+n = 50; p = 0.1
+mu = n * p
+sigma = sqrt(mu)
+xmin = floor(mu - r_sigma * sigma)
+xmin = xmin < r_xmin ? r_xmin : xmin
+xmax = ceil(mu + r_sigma * sigma)
+ymax = 1.1 * binom(floor((n+1)*p), n, p) #mode of binomial PDF used
+set key box
+unset zeroaxis
+set xrange [xmin - 1 : xmax + 1]
+set yrange [0 : ymax]
+set xlabel "k ->"
+set ylabel "probability density ->"
+set ytics 0, ymax / 10.0, ymax
+set format x "%2.0f"
+set format y "%3.2f"
+set sample (xmax - xmin + 3)
+set title "binomial PDF using poisson approximation"
+set arrow from mu, 0 to mu, normal(mu, mu, sigma) nohead
+set arrow from mu, normal(mu + sigma, mu, sigma) \
+ to mu + sigma, normal(mu + sigma, mu, sigma) nohead
+set label "mu" at mu + 0.5, ymax / 10
+set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma)
+plot binom(x, n, p) with histeps, poisson(x, mu) with histeps
+pause -1 "Hit return to continue"
+unset arrow
+unset label
+
+# Geometric PDF using gamma approximation
+p = 0.3
+mu = (1.0 - p) / p
+sigma = sqrt(mu / p)
+lambda = p
+rho = 1.0 - p
+xmin = floor(mu - r_sigma * sigma)
+xmin = xmin < r_xmin ? r_xmin : xmin
+xmax = ceil(mu + r_sigma * sigma)
+ymax = 1.1 * p
+set key box
+unset zeroaxis
+set xrange [xmin - 1 : xmax + 1]
+set yrange [0 : ymax]
+set xlabel "k, x ->"
+set ylabel "probability density ->"
+set ytics 0, ymax / 10.0, ymax
+set format x "%2.0f"
+set format y "%3.2f"
+set sample 200
+set title "geometric PDF using gamma approximation"
+set arrow from mu, 0 to mu, gmm(mu, rho, lambda) nohead
+set arrow from mu, gmm(mu + sigma, rho, lambda) \
+ to mu + sigma, gmm(mu + sigma, rho, lambda) nohead
+set label "mu" at mu + 0.5, ymax / 10
+set label "sigma" at mu + 0.5 + sigma, gmm(mu + sigma, rho, lambda)
+plot geometric(rnd(x),p) with histeps, gmm(x, rho, lambda)
+pause -1 "Hit return to continue"
+unset arrow
+unset label
+
+# Geometric PDF using normal approximation
+p = 0.3
+mu = (1.0 - p) / p
+sigma = sqrt(mu / p)
+xmin = floor(mu - r_sigma * sigma)
+xmin = xmin < r_xmin ? r_xmin : xmin
+xmax = ceil(mu + r_sigma * sigma)
+ymax = 1.1 * p
+set key box
+unset zeroaxis
+set xrange [xmin - 1 : xmax + 1]
+set yrange [0 : ymax]
+set xlabel "k, x ->"
+set ylabel "probability density ->"
+set ytics 0, ymax / 10.0, ymax
+set format x "%2.0f"
+set format y "%3.2f"
+set sample 200
+set title "geometric PDF using normal approximation"
+set arrow from mu, 0 to mu, normal(mu, mu, sigma) nohead
+set arrow from mu, normal(mu + sigma, mu, sigma) \
+ to mu + sigma, normal(mu + sigma, mu, sigma) nohead
+set label "mu" at mu + 0.5, ymax / 10
+set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma)
+plot geometric(rnd(x),p) with histeps, normal(x, mu, sigma)
+pause -1 "Hit return to continue"
+unset arrow
+unset label
+
+# Hypergeometric PDF using binomial approximation
+nn = 75; mm = 25; n = 10
+p = real(mm) / nn
+mu = n * p
+sigma = sqrt(real(nn - n) / (nn - 1.0) * n * p * (1.0 - p))
+xmin = floor(mu - r_sigma * sigma)
+xmin = xmin < r_xmin ? r_xmin : xmin
+xmax = ceil(mu + r_sigma * sigma)
+ymax = 1.1 * hypgeo(floor(mu), nn, mm, n) #mode of binom PDF used
+set key box
+unset zeroaxis
+set xrange [xmin - 1 : xmax + 1]
+set yrange [0 : ymax]
+set xlabel "k ->"
+set ylabel "probability density ->"
+set ytics 0, ymax / 10.0, ymax
+set format x "%2.0f"
+set format y "%3.2f"
+set sample (xmax - xmin + 3)
+set title "hypergeometric PDF using binomial approximation"
+set arrow from mu, 0 to mu, binom(floor(mu), n, p) nohead
+set arrow from mu, binom(floor(mu + sigma), n, p) \
+ to mu + sigma, binom(floor(mu + sigma), n, p) nohead
+set label "mu" at mu + 0.5, ymax / 10
+set label "sigma" at mu + 0.5 + sigma, binom(floor(mu + sigma), n, p)
+plot hypgeo(x, nn, mm, n) with histeps, binom(x, n, p) with histeps
+pause -1 "Hit return to continue"
+unset arrow
+unset label
+
+# Hypergeometric PDF using normal approximation
+nn = 75; mm = 25; n = 10
+p = real(mm) / nn
+mu = n * p
+sigma = sqrt(real(nn - n) / (nn - 1.0) * n * p * (1.0 - p))
+xmin = floor(mu - r_sigma * sigma)
+xmin = xmin < r_xmin ? r_xmin : xmin
+xmax = ceil(mu + r_sigma * sigma)
+ymax = 1.1 * hypgeo(floor(mu), nn, mm, n) #mode of binom PDF used
+set key box
+unset zeroaxis
+set xrange [xmin - 1 : xmax + 1]
+set yrange [0 : ymax]
+set xlabel "k, x ->"
+set ylabel "probability density ->"
+set ytics 0, ymax / 10.0, ymax
+set format x "%2.0f"
+set format y "%3.2f"
+set sample 200
+set title "hypergeometric PDF using normal approximation"
+set arrow from mu, 0 to mu, normal(mu, mu, sigma) nohead
+set arrow from mu, normal(mu + sigma, mu, sigma) \
+ to mu + sigma, normal(mu + sigma, mu, sigma) nohead
+set label "mu" at mu + 0.5, ymax / 10
+set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma)
+plot hypgeo(rnd(x), nn, mm, n) with histeps, normal(x, mu, sigma)
+pause -1 "Hit return to continue"
+unset arrow
+unset label
+
+# Negative binomial PDF using gamma approximation
+r = 8; p = 0.6
+mu = r * (1.0 - p) / p
+sigma = sqrt(mu / p)
+lambda = p
+rho = r * (1.0 - p)
+xmin = floor(mu - r_sigma * sigma)
+xmin = xmin < r_xmin ? r_xmin : xmin
+xmax = ceil(mu + r_sigma * sigma)
+ymax = 1.1 * gmm((rho - 1) / lambda, rho, lambda) #mode of gamma PDF used
+set key box
+unset zeroaxis
+set xrange [xmin - 1 : xmax + 1]
+set yrange [0 : ymax]
+set xlabel "k, x ->"
+set ylabel "probability density ->"
+set ytics 0, ymax / 10.0, ymax
+set format x "%2.0f"
+set format y "%3.2f"
+set sample 200
+set title "negative binomial PDF using gamma approximation"
+set arrow from mu, 0 to mu, gmm(mu, rho, lambda) nohead
+set arrow from mu, gmm(mu + sigma, rho, lambda) \
+ to mu + sigma, gmm(mu + sigma, rho, lambda) nohead
+set label "mu" at mu + 0.5, ymax / 10
+set label "sigma" at mu + 0.5 + sigma, gmm(mu + sigma, rho, lambda)
+plot negbin(rnd(x), r, p) with histeps, gmm(x, rho, lambda)
+pause -1 "Hit return to continue"
+unset arrow
+unset label
+
+# Negative binomial PDF using normal approximation
+r = 8; p = 0.4
+mu = r * (1.0 - p) / p
+sigma = sqrt(mu / p)
+xmin = floor(mu - r_sigma * sigma)
+xmin = xmin < r_xmin ? r_xmin : xmin
+xmax = ceil(mu + r_sigma * sigma)
+ymax = 1.1 * negbin(floor((r-1)*(1-p)/p), r, p) #mode of gamma PDF used
+set key box
+unset zeroaxis
+set xrange [xmin - 1 : xmax + 1]
+set yrange [0 : ymax]
+set xlabel "k, x ->"
+set ylabel "probability density ->"
+set ytics 0, ymax / 10.0, ymax
+set format x "%2.0f"
+set format y "%3.2f"
+set sample 200
+set title "negative binomial PDF using normal approximation"
+set arrow from mu, 0 to mu, normal(mu, mu, sigma) nohead
+set arrow from mu, normal(mu + sigma, mu, sigma) \
+ to mu + sigma, normal(mu + sigma, mu, sigma) nohead
+set label "mu" at mu + 0.5, ymax / 10
+set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma)
+plot negbin(rnd(x), r, p) with histeps, normal(x, mu, sigma)
+pause -1 "Hit return to continue"
+unset arrow
+unset label
+
+# Normal PDF using logistic approximation
+mu = 1.0; sigma = 1.5
+a = mu
+lambda = pi / (sqrt(3.0) * sigma)
+xmin = mu - r_sigma * sigma
+xmax = mu + r_sigma * sigma
+ymax = 1.1 * logistic(mu, a, lambda) #mode of logistic PDF used
+set key box
+unset zeroaxis
+set xrange [xmin: xmax]
+set yrange [0 : ymax]
+set xlabel "x ->"
+set ylabel "probability density ->"
+set ytics 0, ymax / 10.0, ymax
+set format x "%.1f"
+set format y "%.2f"
+set sample 200
+set title "normal PDF using logistic approximation"
+set arrow from mu,0 to mu, normal(mu, mu, sigma) nohead
+set arrow from mu, normal(mu + sigma, mu, sigma) \
+ to mu + sigma, normal(mu + sigma, mu, sigma) nohead
+set label "mu" at mu + 0.5, ymax / 10
+set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma)
+plot logistic(x, a, lambda), normal(x, mu, sigma)
+pause -1 "Hit return to continue"
+unset arrow
+unset label
+
+# Poisson PDF using normal approximation
+mu = 5.0
+sigma = sqrt(mu)
+xmin = floor(mu - r_sigma * sigma)
+xmin = xmin < r_xmin ? r_xmin : xmin
+xmax = ceil(mu + r_sigma * sigma)
+ymax = 1.1 * poisson(mu, mu) #mode of poisson PDF used
+set key box
+unset zeroaxis
+set xrange [xmin - 1 : xmax + 1]
+set yrange [0 : ymax]
+set xlabel "k, x ->"
+set ylabel "probability density ->"
+set ytics 0, ymax / 10.0, ymax
+set format x "%2.0f"
+set format y "%3.2f"
+set sample 200
+set title "poisson PDF using normal approximation"
+set arrow from mu, 0 to mu, normal(mu, mu, sigma) nohead
+set arrow from mu, normal(mu + sigma, mu, sigma) \
+ to mu + sigma, normal(mu + sigma, mu, sigma) nohead
+set label "mu" at mu + 0.5, ymax / 10
+set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma)
+plot poisson(rnd(x), mu) with histeps, normal(x, mu, sigma)
+pause -1 "Hit return to continue"
+reset
diff --git a/tests/examplefiles/while.pov b/tests/examplefiles/while.pov
new file mode 100644
index 00000000..fb182454
--- /dev/null
+++ b/tests/examplefiles/while.pov
@@ -0,0 +1,13 @@
+#declare Index1 = 0;
+#while(Index1 <= 9)
+
+ #declare Index2 = 0;
+ #while(Index2 <= 19)
+
+ sphere { <Index1, Index2, 0>, .5 }
+
+ #declare Index2 = Index2 + 1;
+ #end
+
+ #declare Index1 = Index1 + 1;
+#end
diff --git a/tests/test_basic_api.py b/tests/test_basic_api.py
index ab348cbc..50dbd943 100644
--- a/tests/test_basic_api.py
+++ b/tests/test_basic_api.py
@@ -15,6 +15,7 @@ import random
from pygments import lexers, formatters, filters, format
from pygments.token import _TokenType, Text
from pygments.lexer import RegexLexer
+from pygments.formatters.img import FontNotFound
test_content = [chr(i) for i in xrange(33, 128)] * 5
random.shuffle(test_content)
@@ -133,7 +134,7 @@ class FormattersTest(unittest.TestCase):
try:
inst = formatter(opt1="val1")
- except ImportError:
+ except (ImportError, FontNotFound):
continue
inst.get_style_defs()
inst.format(ts, out)
@@ -168,8 +169,8 @@ class FormattersTest(unittest.TestCase):
for formatter, info in formatters.FORMATTERS.iteritems():
try:
inst = formatter(encoding=None)
- except ImportError:
- # some dependency not installed
+ except (ImportError, FontNotFound):
+ # some dependency or font not installed
continue
out = format(tokens, inst)
if formatter.unicodeoutput:
diff --git a/tests/test_examplefiles.py b/tests/test_examplefiles.py
index ee9c78af..ee9af608 100644
--- a/tests/test_examplefiles.py
+++ b/tests/test_examplefiles.py
@@ -3,7 +3,7 @@
Pygments tests with example files
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: 2006-2007 by Georg Brandl.
+ :copyright: 2006-2008 by Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
@@ -13,6 +13,7 @@ import unittest
from pygments import highlight
from pygments.lexers import get_lexer_for_filename, get_lexer_by_name
from pygments.token import Error
+from pygments.util import ClassNotFound
class ExampleFileTest(unittest.TestCase):
@@ -28,11 +29,11 @@ for fn in os.listdir(os.path.join(testdir, 'examplefiles')):
try:
lx = get_lexer_for_filename(absfn)
- except ValueError:
+ except ClassNotFound:
try:
name, rest = fn.split("_", 1)
lx = get_lexer_by_name(name)
- except ValueError:
+ except ClassNotFound:
raise AssertionError('no lexer found for file %r' % fn)
def test(self, lx=lx, absfn=absfn):