path: root/pygments
author     gbrandl <devnull@localhost>   2006-10-19 20:27:28 +0200
committer  gbrandl <devnull@localhost>   2006-10-19 20:27:28 +0200
commit     f4d019954468db777760d21f9243eca8b852c184 (patch)
tree       328b8f8fac25338306b0e7b827686dcc7597df23 /pygments
download   pygments-f4d019954468db777760d21f9243eca8b852c184.tar.gz
[svn] Name change, round 4 (rename SVN root folder).
Diffstat (limited to 'pygments')
-rw-r--r--  pygments/__init__.py  270
-rw-r--r--  pygments/console.py  74
-rw-r--r--  pygments/formatter.py  64
-rw-r--r--  pygments/formatters/__init__.py  73
-rw-r--r--  pygments/formatters/bbcode.py  99
-rw-r--r--  pygments/formatters/html.py  280
-rw-r--r--  pygments/formatters/latex.py  191
-rw-r--r--  pygments/formatters/other.py  73
-rw-r--r--  pygments/formatters/terminal.py  98
-rw-r--r--  pygments/lexer.py  486
-rw-r--r--  pygments/lexers/__init__.py  77
-rw-r--r--  pygments/lexers/_luabuiltins.py  256
-rw-r--r--  pygments/lexers/_mapping.py  102
-rw-r--r--  pygments/lexers/_phpbuiltins.py  3391
-rw-r--r--  pygments/lexers/agile.py  773
-rw-r--r--  pygments/lexers/compiled.py  314
-rw-r--r--  pygments/lexers/dotnet.py  221
-rw-r--r--  pygments/lexers/other.py  137
-rw-r--r--  pygments/lexers/special.py  75
-rw-r--r--  pygments/lexers/templates.py  323
-rw-r--r--  pygments/lexers/text.py  183
-rw-r--r--  pygments/lexers/web.py  332
-rw-r--r--  pygments/style.py  102
-rw-r--r--  pygments/styles/__init__.py  36
-rw-r--r--  pygments/styles/autumn.py  59
-rw-r--r--  pygments/styles/borland.py  44
-rw-r--r--  pygments/styles/colorful.py  75
-rw-r--r--  pygments/styles/default.py  70
-rw-r--r--  pygments/styles/friendly.py  67
-rw-r--r--  pygments/styles/manni.py  76
-rw-r--r--  pygments/styles/murphy.py  75
-rw-r--r--  pygments/styles/native.py  58
-rw-r--r--  pygments/styles/pastie.py  68
-rw-r--r--  pygments/styles/perldoc.py  64
-rw-r--r--  pygments/styles/trac.py  56
-rw-r--r--  pygments/token.py  156
-rw-r--r--  pygments/util.py  50
37 files changed, 8948 insertions, 0 deletions
diff --git a/pygments/__init__.py b/pygments/__init__.py
new file mode 100644
index 00000000..23bdbccb
--- /dev/null
+++ b/pygments/__init__.py
@@ -0,0 +1,270 @@
+# -*- coding: utf-8 -*-
+"""
+ Pygments
+ ~~~~~~~
+
+ Pygments is a syntax highlighting package written in Python.
+
+ It aims to be a generic syntax highlighter for general use in all
+ kinds of software such as forum systems, wikis or other applications
+ that need to prettify source code. Highlights are:
+
+ * a wide range of common languages and markup formats is supported
+ * special attention is paid to details, increasing quality by a fair amount
+    * support for new languages and formats can be added easily
+ * a number of output formats, presently HTML, LaTeX and ANSI sequences
+ * it is usable as a command-line tool and as a library
+ * ... and it highlights even Brainfuck!
+
+
+ :copyright: 2006 by Georg Brandl, Armin Ronacher, Lukas Meuser and others.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+__version__ = '0.1'
+__docformat__ = 'restructuredtext'
+__license__ = 'GNU Lesser General Public License (LGPL)'
+__author__ = 'Georg Brandl <g.brandl@gmx.net>'
+__url__ = 'http://pygments.pocoo.org/'
+
+
+import sys, os
+# using StringIO because it can handle Unicode strings
+from StringIO import StringIO
+
+from pygments.util import OptionError
+from pygments.lexers import LEXERS, get_lexer_by_name, get_lexer_for_filename
+
+from pygments.formatters import FORMATTERS, get_formatter_by_name, \
+ get_formatter_for_filename, TerminalFormatter
+
+
+def lex(code, lexer):
+ """
+ Lex ``code`` with ``lexer`` and return an iterable of tokens.
+ """
+ return lexer.get_tokens(code)
+
+
+def format(tokens, formatter, outfile=None):
+ """
+ Format a tokenlist ``tokens`` with the formatter ``formatter``.
+
+ If ``outfile`` is given and a valid file object (an object
+ with a ``write`` method), the result will be written to it, otherwise
+ it is returned as a string.
+ """
+ realoutfile = outfile or StringIO()
+ formatter.format(tokens, realoutfile)
+ if not outfile:
+ return realoutfile.getvalue()
+
+
+def highlight(code, lexer, formatter, outfile=None):
+ """
+ Lex ``code`` with ``lexer`` and format it with the formatter
+ ``formatter``.
+
+ If ``outfile`` is given and a valid file object (an object
+ with a ``write`` method), the result will be written to it, otherwise
+ it is returned as a string.
+ """
+ return format(lex(code, lexer), formatter, outfile)
+
+
+def cmdline_main(args):
+ """
+ Make pygments usable as a command line utility.
+ """
+ import getopt
+
+ USAGE = """\
+Usage: %s [-l <lexer>] [-f <formatter>] [-O <options>] [-o <outfile>] [<infile>]
+ %s -S <style> -f <formatter> [-a <arg>] [-O <options>]
+ %s -L | -h | -V
+
+Highlight the input file and write the result to <outfile>.
+
+If no input file is given, stdin is used; if -o is not given, stdout is used.
+
+<lexer> is a lexer name (query all lexer names with -L). If -l is not
+given, the lexer is guessed from the extension of the input file name
+(this obviously doesn't work if the input is stdin).
+
+Likewise, <formatter> is a formatter name, and will be guessed from
+the extension of the output file name. If no output file is given,
+the terminal formatter will be used by default.
+
+With the -O option, you can give the lexer and formatter a comma-
+separated list of options, e.g. ``-O bg=light,python=cool``.
+
+With the -S option, print out style definitions for style <style>
+for formatter <formatter>. The argument given by -a is formatter
+dependent.
+
+The -L option lists all available lexers and formatters.
+The -h option prints this help.
+The -V option prints the package version.
+""" % ((args[0],)*3)
+
+ try:
+ opts, args = getopt.getopt(args[1:], "l:f:o:O:LhVS:a:")
+ except getopt.GetoptError:
+ print >>sys.stderr, USAGE
+ return 2
+ opts = dict(opts)
+
+ if not opts and not args:
+ print USAGE
+ return 0
+
+ if opts.pop('-h', None) is not None:
+ print USAGE
+ return 0
+
+ if opts.pop('-V', None) is not None:
+ print 'Pygments version %s, (c) 2006 by %s.' % (__version__, __author__)
+ return 0
+
+ L_opt = opts.pop('-L', None)
+ if L_opt is not None:
+ if opts or args:
+ print >>sys.stderr, USAGE
+ return 2
+
+ # print version
+ cmdline_main(['', '-V'])
+ print
+ print "Lexers:"
+ print "~~~~~~~"
+
+ info = []
+ maxlen = 0
+ for mod, fullname, names, exts in LEXERS.itervalues():
+ tup = (', '.join(names)+':', fullname,
+ exts and '(extensions ' + ', '.join(exts) + ')' or '')
+ info.append(tup)
+ if len(tup[0]) > maxlen: maxlen = len(tup[0])
+ info.sort()
+ for i in info:
+ print ('%-'+str(maxlen)+'s %s %s') % i
+
+ print
+ print "Formatters:"
+ print "~~~~~~~~~~~"
+
+ info = []
+ maxlen = 0
+ for fullname, names, exts, doc in FORMATTERS.itervalues():
+ tup = (', '.join(names)+':', doc,
+ exts and '(extensions ' + ', '.join(exts) + ')' or '')
+ info.append(tup)
+ if len(tup[0]) > maxlen: maxlen = len(tup[0])
+ info.sort()
+ for i in info:
+ print ('%-'+str(maxlen)+'s %s %s') % i
+ return 0
+
+ O_opts = {}
+ o_str = opts.pop('-O', None)
+ if o_str:
+ try:
+ o_args = o_str.split(',')
+ for o_arg in o_args:
+ o_key, o_val = o_arg.split('=')
+ O_opts[o_key] = o_val
+ except ValueError:
+ print >>sys.stderr, 'Error in -O specification.'
+ return 2
+
+ S_opt = opts.pop('-S', None)
+ a_opt = opts.pop('-a', None)
+ if S_opt is not None:
+ f_opt = opts.pop('-f', None)
+ if not f_opt:
+ print >>sys.stderr, USAGE
+ return 2
+ if opts or args:
+ print >>sys.stderr, USAGE
+ return 2
+
+ try:
+ O_opts['style'] = S_opt
+ fmter = get_formatter_by_name(f_opt, **O_opts)
+ except ValueError, err:
+ print >>sys.stderr, err
+ return 1
+
+ arg = a_opt or ''
+ print fmter.get_style_defs(arg)
+ return 0
+
+ if a_opt is not None:
+ print >>sys.stderr, USAGE
+ return 2
+
+ outfn = opts.pop('-o', None)
+ fmter = opts.pop('-f', None)
+ if fmter:
+ try:
+ fmter = get_formatter_by_name(fmter, **O_opts)
+ except (OptionError, ValueError), err:
+ print >>sys.stderr, 'Error:', err
+ return 1
+
+ if outfn:
+ if not fmter:
+ try:
+ fmter = get_formatter_for_filename(outfn, **O_opts)
+ except (OptionError, ValueError), err:
+ print >>sys.stderr, 'Error:', err
+ return 1
+ try:
+ outfile = file(outfn, 'wb')
+ except Exception, err:
+ print >>sys.stderr, 'Error: cannot open outfile:', err
+ return 1
+ else:
+ if not fmter:
+ fmter = TerminalFormatter(**O_opts)
+ outfile = sys.stdout
+
+ lexer = opts.pop('-l', None)
+ if lexer:
+ try:
+ lexer = get_lexer_by_name(lexer, **O_opts)
+ except (OptionError, ValueError), err:
+ print >>sys.stderr, 'Error:', err
+ return 1
+
+ if args:
+ infn = args[0]
+ if not lexer:
+ try:
+ lexer = get_lexer_for_filename(infn, **O_opts)
+ except (OptionError, ValueError), err:
+ print >>sys.stderr, 'Error:', err
+ return 1
+
+ try:
+ code = file(infn).read()
+ except Exception, err:
+ print >>sys.stderr, 'Error: cannot read infile:', err
+ return 1
+ else:
+ if not lexer:
+ print >>sys.stderr, 'Error: no lexer name given and reading from stdin'
+ return 2
+ code = sys.stdin.read()
+
+ try:
+ highlight(code, lexer, fmter, outfile)
+ except Exception, err:
+ print >>sys.stderr, 'Error while highlighting:', err
+ return 1
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(cmdline_main(sys.argv))
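For reference, a minimal usage sketch of the library API defined above; the sample source string and option values are illustrative, not part of the commit:

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    # highlight() returns a string when no output file object is given
    code = 'print "Hello, world!"\n'
    print highlight(code, PythonLexer(), HtmlFormatter(full=True, title='hello.py'))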
diff --git a/pygments/console.py b/pygments/console.py
new file mode 100644
index 00000000..b7653172
--- /dev/null
+++ b/pygments/console.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.console
+ ~~~~~~~~~~~~~~~
+
+ Format colored console output.
+
+ :copyright: 2006 by Georg Brandl, Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+esc = "\x1b["
+
+codes = {}
+codes[""] = ""
+codes["reset"] = esc + "39;49;00m"
+
+codes["bold"] = esc + "01m"
+codes["faint"] = esc + "02m"
+codes["standout"] = esc + "03m"
+codes["underline"] = esc + "04m"
+codes["blink"] = esc + "05m"
+codes["overline"] = esc + "06m"
+
+dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue",
+ "purple", "teal", "lightgray"]
+light_colors = ["darkgray", "red", "green", "yellow", "blue",
+ "fuchsia", "turquoise", "white"]
+
+x = 30
+for d, l in zip(dark_colors, light_colors):
+ codes[d] = esc + "%im" % x
+ codes[l] = esc + "%i;01m" % x
+ x += 1
+
+del d, l, x
+
+codes["darkteal"] = codes["turquoise"]
+codes["darkyellow"] = codes["brown"]
+codes["fuscia"] = codes["fuchsia"]
+codes["white"] = codes["bold"]
+
+
+def reset_color():
+ return codes["reset"]
+
+
+def colorize(color_key, text):
+ return codes[color_key] + text + codes["reset"]
+
+
+def ansiformat(attr, text):
+ """
+ Format ``text`` with a color and/or some attributes::
+
+ color normal color
+ *color* bold color
+ _color_ underlined color
+ +color+ blinking color
+ """
+ result = []
+ if attr[:1] == attr[-1:] == '+':
+ result.append(codes['blink'])
+ attr = attr[1:-1]
+ if attr[:1] == attr[-1:] == '*':
+ result.append(codes['bold'])
+ attr = attr[1:-1]
+ if attr[:1] == attr[-1:] == '_':
+ result.append(codes['underline'])
+ attr = attr[1:-1]
+ result.append(codes[attr])
+ result.append(text)
+ result.append(codes['reset'])
+ return ''.join(result)
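A short sketch of how the console helpers above are meant to be used (color and attribute names come from the ``codes`` table; the sample strings are illustrative):

    from pygments.console import colorize, ansiformat

    print colorize('darkgreen', 'plain dark green text')
    print ansiformat('*red*', 'bold red text')             # *...* -> bold
    print ansiformat('_blue_', 'underlined blue text')     # _..._ -> underlined
    print ansiformat('+yellow+', 'blinking yellow text')   # +...+ -> blinking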
diff --git a/pygments/formatter.py b/pygments/formatter.py
new file mode 100644
index 00000000..168401e3
--- /dev/null
+++ b/pygments/formatter.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatter
+ ~~~~~~~~~~~~~~~~~
+
+ Base formatter class.
+
+ :copyright: 2006 by Georg Brandl, Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.util import get_bool_opt
+from pygments.style import Style
+from pygments.styles import get_style_by_name
+
+__all__ = ['Formatter']
+
+
+def _lookup_style(style):
+ if isinstance(style, basestring):
+ return get_style_by_name(style)
+ return style
+
+
+class Formatter(object):
+ """
+ Converts a token stream to text.
+
+ Options accepted:
+
+ ``style``
+ The style to use, can be a string or a Style subclass
+ (default: "default"). Not used by e.g. the
+ TerminalFormatter.
+ ``full``
+ Tells the formatter to output a "full" document, i.e.
+ a complete self-contained document. This doesn't have
+ any effect for some formatters (default: false).
+ ``title``
+ If ``full`` is true, the title that should be used to
+ caption the document (default: '').
+ """
+
+ def __init__(self, **options):
+ self.style = _lookup_style(options.get('style', 'default'))
+ self.full = get_bool_opt(options, 'full', False)
+ self.title = options.get('title', '')
+ self.options = options
+
+ def get_style_defs(self, arg=''):
+ """
+ Return the style definitions for the current style as a string.
+
+ ``arg`` is an additional argument whose meaning depends on the
+ formatter used.
+ """
+ return ''
+
+ def format(self, tokensource, outfile):
+ """
+ Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
+ tuples and write it into ``outfile``.
+ """
+ raise NotImplementedError()
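To make the base class contract above concrete, a toy subclass (hypothetical, not part of this commit) that only implements ``format()``:

    from pygments.formatter import Formatter

    class UpperCaseFormatter(Formatter):
        """Writes every token value in upper case, ignoring token types."""
        def format(self, tokensource, outfile):
            for ttype, value in tokensource:
                outfile.write(value.upper())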
diff --git a/pygments/formatters/__init__.py b/pygments/formatters/__init__.py
new file mode 100644
index 00000000..b701efb4
--- /dev/null
+++ b/pygments/formatters/__init__.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters
+ ~~~~~~~~~~~~~~~~~~
+
+ Pygments formatters.
+
+ :copyright: 2006 by Georg Brandl, Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+import os.path
+from pygments.formatters.html import HtmlFormatter
+from pygments.formatters.terminal import TerminalFormatter
+from pygments.formatters.latex import LatexFormatter
+from pygments.formatters.bbcode import BBCodeFormatter
+from pygments.formatters.other import NullFormatter, RawTokenFormatter
+
+
+def _doc_desc(obj):
+ res = ''
+ for line in obj.__doc__.strip().splitlines():
+ if line.strip(): res += line.strip() + " "
+ else: break
+ return res
+
+
+#: Map formatter classes to ``(longname, names, file extensions, descr)``.
+FORMATTERS = {
+ HtmlFormatter: ('HTML', ('html',), ('.htm', '.html'),
+ _doc_desc(HtmlFormatter)),
+ TerminalFormatter: ('Terminal', ('terminal', 'console'), (),
+ _doc_desc(TerminalFormatter)),
+ LatexFormatter: ('LaTeX', ('latex', 'tex'), ('.tex',),
+ _doc_desc(LatexFormatter)),
+ RawTokenFormatter: ('Raw tokens', ('raw', 'tokens'), ('.raw',),
+ _doc_desc(RawTokenFormatter)),
+ NullFormatter: ('Text only', ('text', 'null'), ('.txt',),
+ _doc_desc(NullFormatter)),
+ BBCodeFormatter: ('BBcode', ('bbcode', 'bb'), (),
+ _doc_desc(BBCodeFormatter))
+}
+
+
+_formatter_cache = {}
+
+def _init_formatter_cache():
+ if _formatter_cache: return
+ for cls, info in FORMATTERS.iteritems():
+ for alias in info[1]:
+ _formatter_cache[alias] = cls
+ for ext in info[2]:
+ _formatter_cache["/"+ext] = cls
+
+
+def get_formatter_by_name(name, **options):
+ _init_formatter_cache()
+ cls = _formatter_cache.get(name, None)
+ if not cls:
+ raise ValueError("No formatter found for name %r" % name)
+ return cls(**options)
+
+
+def get_formatter_for_filename(fn, **options):
+ _init_formatter_cache()
+ # try by filename extension
+ cls = _formatter_cache.get("/"+os.path.splitext(fn)[1], None)
+ if cls:
+ return cls(**options)
+ # try by whole file name
+ cls = _formatter_cache.get("/"+os.path.basename(fn), None)
+ if not cls:
+ raise ValueError("No formatter found for file name %r" % fn)
+ return cls(**options)
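The two lookup helpers above can be used like this; aliases and extensions come from the FORMATTERS map, the file name is illustrative:

    from pygments.formatters import get_formatter_by_name, get_formatter_for_filename

    fmter = get_formatter_by_name('html', linenos=True)   # lookup by alias
    fmter = get_formatter_for_filename('report.tex')      # '.tex' -> LatexFormatter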
diff --git a/pygments/formatters/bbcode.py b/pygments/formatters/bbcode.py
new file mode 100644
index 00000000..61384c7f
--- /dev/null
+++ b/pygments/formatters/bbcode.py
@@ -0,0 +1,99 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.bbcode
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ BBcode formatter.
+
+ :copyright: 2006 by Lukas Meuser.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+
+from pygments.formatter import Formatter
+from pygments.util import get_bool_opt
+
+__all__ = ['BBCodeFormatter']
+
+
+class BBCodeFormatter(Formatter):
+ """
+    Output BBCode tags with appropriate colors and formatting.
+
+ This formatter doesn't support background colors and borders, as there are
+ no common BBcodes for that.
+
+ Some board systems (e.g. phpBB) don't support colors in their [code] tag,
+ so you can't use the highlighting together with that tag.
+    Text in a [code] tag is usually shown with a monospace font (which this
+    formatter can also do via the ``monofont`` option), and spaces (which you
+    need for indentation) are not removed.
+
+ Additional options accepted:
+
+ ``codetag``
+ If set to true, put the output into [code] tags (default: false).
+
+ ``monofont``
+ If set to true, add a tag to show the code with a monospace font
+ (default: false).
+ """
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self._make_styles()
+ self._code = get_bool_opt(options, 'codetag', False)
+ self._mono = get_bool_opt(options, 'monofont', False)
+
+ def _make_styles(self):
+ self.styles = {}
+ for token, style in self.style._styles.iteritems():
+ start = end = ''
+ color, bold, italic, underline, bg, border = style
+ if color:
+ start += '[color=#%s]' % color
+ end = '[/color]' + end
+ if bold:
+ start += '[b]'
+ end = '[/b]' + end
+ if italic:
+ start += '[i]'
+ end = '[/i]' + end
+ if underline:
+ start += '[u]'
+ end = '[/u]' + end
+ # there are no common BBcodes for background-color and border
+
+ self.styles[token] = start, end
+
+ def format(self, tokensource, outfile):
+ if self._code:
+ outfile.write('[code]')
+ if self._mono:
+ outfile.write('[font=monospace]')
+
+ lastval = ''
+ lasttype = None
+
+ for ttype, value in tokensource:
+ while ttype not in self.styles:
+ ttype = ttype.parent
+ if ttype == lasttype:
+ lastval += value
+ else:
+ if lastval:
+ start, end = self.styles[lasttype]
+ outfile.write(''.join((start, lastval, end)))
+ lastval = value
+ lasttype = ttype
+
+ if lastval:
+ start, end = self.styles[lasttype]
+ outfile.write(''.join((start, lastval, end)))
+
+ if self._mono:
+ outfile.write('[/font]')
+ if self._code:
+ outfile.write('[/code]')
+ if self._code or self._mono:
+ outfile.write('\n')
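A brief sketch of the BBCode formatter in use (sample snippet and options are illustrative):

    from pygments import highlight
    from pygments.lexers import LuaLexer
    from pygments.formatters import BBCodeFormatter

    print highlight('print("hello")\n', LuaLexer(),
                    BBCodeFormatter(codetag=True, monofont=True))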
diff --git a/pygments/formatters/html.py b/pygments/formatters/html.py
new file mode 100644
index 00000000..6223aba6
--- /dev/null
+++ b/pygments/formatters/html.py
@@ -0,0 +1,280 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.html
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for HTML output.
+
+ :copyright: 2006 by Georg Brandl, Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+import StringIO
+
+from pygments.formatter import Formatter
+from pygments.token import Token, Text, STANDARD_TYPES
+from pygments.util import get_bool_opt, get_int_opt, get_list_opt
+
+
+__all__ = ['HtmlFormatter']
+
+
+def escape_html(text):
+ """Escape &, <, > as well as single and double quotes for HTML."""
+ return text.replace('&', '&amp;'). \
+ replace('<', '&lt;'). \
+ replace('>', '&gt;'). \
+ replace('"', '&quot;'). \
+ replace("'", '&#39;')
+
+
+def get_random_id():
+ """Return a random id for javascript fields."""
+ from random import random
+ from time import time
+ try:
+ from hashlib import sha1 as sha
+ except ImportError:
+ import sha
+ sha = sha.new
+ return sha('%s|%s' % (random(), time())).hexdigest()
+
+
+def _get_ttype_class(ttype):
+ fname = STANDARD_TYPES.get(ttype)
+ if fname: return fname
+ aname = ''
+ while fname is None:
+ aname = '-' + ttype[-1] + aname
+ ttype = ttype.parent
+ fname = STANDARD_TYPES.get(ttype)
+ return fname + aname
+
+
+DOC_TEMPLATE = '''\
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
+ "http://www.w3.org/TR/html4/strict.dtd">
+
+<html>
+<head>
+ <title>%(title)s</title>
+ <style type="text/css">
+td.linenos { background-color: #f0f0f0; padding-right: 10px; }
+%(styledefs)s
+ </style>
+</head>
+<body>
+<h2>%(title)s</h2>
+
+%(code)s
+
+</body>
+</html>
+'''
+
+
+class HtmlFormatter(Formatter):
+ """
+ Output HTML <span> tags with appropriate classes.
+
+ Additional options accepted:
+
+ ``nowrap``
+ If set to true, don't wrap the tokens at all. This disables
+ all other options (default: False).
+ ``noclasses``
+ If set to true, token <span>s will not use CSS classes, but
+ inline styles.
+ ``classprefix``
+ Prefix for token CSS classes, is prepended to all token style
+ classes (e.g. class="o" -> class="_o" if classprefix == '_')
+ (default: '').
+ ``cssclass``
+ CSS class for the wrapping <div> (default: 'highlight').
+ ``cssstyles``
+ Inline CSS styles for the wrapping <div>. (default: '').
+ ``linenos``
+ If set to ``True``, output line numbers (default: False).
+ ``linenostart``
+ The line number for the first line (default: 1).
+ ``linenostep``
+ If set to a number n > 1, only every nth line number is printed
+ (default: 1).
+ ``linenospecial``
+ If set to a number n > 0, every nth line number is given a special
+ CSS class ``special`` (default: 0).
+ ``nobackground``
+ If set to ``True`` the formatter won't output the background color
+ for the overall element (this automatically defaults to ``False``
+ when there is no overall element [eg: no argument for the
+ `get_syntax_defs` method given]) (default: ``False``)
+ """
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self.nowrap = get_bool_opt(options, 'nowrap', False)
+ self.noclasses = get_bool_opt(options, 'noclasses', False)
+ self.classprefix = options.get('classprefix', '')
+ self.cssclass = options.get('cssclass', 'highlight')
+ self.cssstyles = options.get('cssstyles', '')
+ self.linenos = get_bool_opt(options, 'linenos', False)
+ self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
+ self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
+ self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0))
+ self.nobackground = get_bool_opt(options, 'nobackground', False)
+
+ self._class_cache = {}
+ self._create_stylesheet()
+
+ def _get_css_class(self, ttype):
+ """Return the css class of this token type prefixed with
+ the classprefix option."""
+ if ttype in self._class_cache:
+ return self._class_cache[ttype]
+
+        return self.classprefix + (STANDARD_TYPES.get(ttype) or _get_ttype_class(ttype))
+
+ def _create_stylesheet(self):
+ t2c = self.ttype2class = {Token: ''}
+ c2s = self.class2style = {}
+ cp = self.classprefix
+ for ttype, ndef in self.style:
+ name = cp + _get_ttype_class(ttype)
+ style = ''
+ if ndef['color']:
+ style += 'color: #%s; ' % ndef['color']
+ if ndef['bold']:
+ style += 'font-weight: bold; '
+ if ndef['italic']:
+ style += 'font-style: italic; '
+ if ndef['underline']:
+ style += 'text-decoration: underline; '
+ if ndef['bgcolor']:
+ style += 'background-color: #%s; ' % ndef['bgcolor']
+ if ndef['border']:
+ style += 'border: 1px solid #%s; ' % ndef['border']
+ if style:
+ t2c[ttype] = name
+ # save len(ttype) to enable ordering the styles by
+ # hierarchy (necessary for CSS cascading rules!)
+ c2s[name] = (style[:-2], ttype, len(ttype))
+
+ def get_style_defs(self, arg=''):
+ """
+ Return CSS style definitions for the classes produced by the
+ current highlighting style. ``arg`` can be a string of selectors
+ to insert before the token type classes.
+ """
+ if arg:
+ arg += ' '
+ styles = [(level, ttype, cls, style)
+ for cls, (style, ttype, level) in self.class2style.iteritems()
+ if cls and style]
+ styles.sort()
+ lines = ['%s.%s { %s } /* %s */' % (arg, cls, style, repr(ttype)[6:])
+ for level, ttype, cls, style in styles]
+ if arg and not self.nobackground and \
+ self.style.background_color is not None:
+ text_style = ''
+ if Text in self.ttype2class:
+ text_style = ' ' + self.class2style[self.ttype2class[Text]][0]
+ lines.insert(0, '%s{ background: %s;%s }' %
+ (arg, self.style.background_color, text_style))
+ return '\n'.join(lines)
+
+ def _format_nowrap(self, tokensource, outfile, lnos=False):
+ lncount = 0
+ nocls = self.noclasses
+ # for <span style=""> lookup only
+ getcls = self.ttype2class.get
+ c2s = self.class2style
+
+ write = outfile.write
+ lspan = ''
+ for ttype, value in tokensource:
+ htmlvalue = escape_html(value)
+ if lnos:
+ lncount += value.count("\n")
+
+ if nocls:
+ cclass = getcls(ttype)
+ while cclass is None:
+ ttype = ttype.parent
+ cclass = getcls(ttype)
+ cspan = cclass and '<span style="%s">' % c2s[cclass][0]
+ else:
+ cls = self._get_css_class(ttype)
+ cspan = cls and '<span class="%s">' % cls
+
+ if cspan == lspan:
+ if not cspan:
+ write(htmlvalue)
+ else:
+ write(htmlvalue.replace('\n', '</span>\n' + cspan))
+ elif htmlvalue: # if no value, leave old span open
+ if lspan:
+ write('</span>')
+ lspan = cspan
+ if cspan:
+ htmlvalue = htmlvalue.replace('\n', '</span>\n' + cspan)
+ write(cspan + htmlvalue)
+ else:
+ write(htmlvalue)
+ if lspan:
+ write('</span>')
+ return lncount
+
+ def format(self, tokensource, outfile):
+ if self.nowrap:
+ self._format_nowrap(tokensource, outfile)
+ return
+
+ realoutfile = outfile
+ lnos = self.linenos
+ full = self.full
+
+ div = ('<div' + (self.cssclass and ' class="%s" ' % self.cssclass)
+ + (self.cssstyles and ' style="%s"' % self.cssstyles) + '>')
+ if full or lnos:
+ outfile = StringIO.StringIO()
+ else:
+ outfile.write(div)
+
+ outfile.write('<pre>')
+ lncount = self._format_nowrap(tokensource, outfile, lnos)
+ outfile.write('</pre>')
+
+ ret = ''
+ if lnos:
+ fl = self.linenostart
+ mw = len(str(lncount + fl - 1))
+ sp = self.linenospecial
+ st = self.linenostep
+ if sp:
+ ls = '\n'.join([(i%st == 0 and
+ (i%sp == 0 and '<span class="special">%*d</span>'
+ or '%*d') % (mw, i)
+ or '')
+ for i in range(fl, fl + lncount)])
+ else:
+ ls = '\n'.join([(i%st == 0 and ('%*d' % (mw, i)) or '')
+ for i in range(fl, fl + lncount)])
+
+ ret = div + ('<table><tr>'
+ '<td class="linenos" title="click to toggle" '
+ 'onclick="with (this.firstChild.style) { display = '
+ '''(display == '') ? 'none' : '' }"><pre>'''
+ + ls + '</pre></td><td class="code">')
+ ret += outfile.getvalue()
+ ret += '</td></tr></table>'
+
+ if full:
+ if not ret:
+ ret = div + outfile.getvalue() + '</div>\n'
+ realoutfile.write(DOC_TEMPLATE %
+ dict(title = self.title,
+ styledefs = self.get_style_defs('body'),
+ code = ret))
+ elif lnos:
+ realoutfile.write(ret + '</div>\n')
+ else:
+ realoutfile.write('</div>\n')
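A sketch of typical HtmlFormatter use as documented above, first emitting the CSS rules and then the highlighted markup (the selector and sample code are illustrative):

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    fmter = HtmlFormatter(linenos=True, cssclass='source')
    print fmter.get_style_defs('.source')            # CSS for the chosen style
    print highlight('x = 42\n', PythonLexer(), fmter)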
diff --git a/pygments/formatters/latex.py b/pygments/formatters/latex.py
new file mode 100644
index 00000000..84fe5d12
--- /dev/null
+++ b/pygments/formatters/latex.py
@@ -0,0 +1,191 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.latex
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for LaTeX fancyvrb output.
+
+ :copyright: 2006 by Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+import StringIO
+
+from pygments.formatter import Formatter
+from pygments.token import Token
+from pygments.util import get_bool_opt, get_int_opt
+
+
+__all__ = ['LatexFormatter']
+
+
+def escape_tex(text):
+ return text.replace('@', '\x00'). \
+ replace('[', '\x01'). \
+ replace(']', '\x02'). \
+ replace('\x00', '@at[]').\
+ replace('\x01', '@lb[]').\
+ replace('\x02', '@rb[]')
+
+
+DOC_TEMPLATE = r'''
+\documentclass{%(docclass)s}
+\usepackage{fancyvrb}
+\usepackage{color}
+%(preamble)s
+
+%(styledefs)s
+
+\begin{document}
+
+\section*{%(title)s}
+
+%(code)s
+\end{document}
+'''
+
+
+class LatexFormatter(Formatter):
+ """
+ Output LaTeX "color" and "fancyvrb" control sequences.
+ """
+
+ def __init__(self, **options):
+ """
+ Additional options accepted:
+
+ ``docclass``
+ If ``full`` is true, this is the document class to use (default: 'article').
+ ``preamble``
+ If ``full`` is true, this can be further preamble commands (default: '').
+ ``linenos``
+ If true, output line numbers (default: False).
+ ``linenostart``
+ The line number for the first line (default: 1).
+ ``linenostep``
+ If set to a number n > 1, only every nth line number is printed (default: 1).
+ ``verboptions``
+ Additional options given to the Verbatim environment (default: '').
+ ``nobackground``
+ If set to ``True`` the formatter won't output the background color
+ for the overall element (default: ``False``)
+            Note that with this option disabled, light colors on a dark
+            background won't be very readable.
+ """
+ Formatter.__init__(self, **options)
+ self.docclass = options.get('docclass', 'article')
+ self.preamble = options.get('preamble', '')
+ self.linenos = get_bool_opt(options, 'linenos', False)
+ self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
+ self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
+ self.verboptions = options.get('verboptions', '')
+ self.nobackground = get_bool_opt(options, 'nobackground', False)
+
+ self._create_stylecmds()
+
+
+ def _create_stylecmds(self):
+ t2c = self.ttype2cmd = {Token: ''}
+ c2d = self.cmd2def = {}
+
+ letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ first = iter(letters)
+ second = iter(letters)
+ firstl = first.next()
+
+ def rgbcolor(col):
+ if col:
+ return ','.join(['%.2f' %(int(col[i] + col[i + 1], 16) / 255.0)
+ for i in (0, 2, 4)])
+ else:
+ return '1,1,1'
+
+ for ttype, ndef in self.style:
+ cmndef = '#1'
+ if ndef['bold']:
+ cmndef = r'\textbf{' + cmndef + '}'
+ if ndef['italic']:
+ cmndef = r'\textit{' + cmndef + '}'
+ if ndef['underline']:
+ cmndef = r'\underline{' + cmndef + '}'
+ if ndef['color']:
+ cmndef = r'\textcolor[rgb]{%s}{%s}' % (
+ rgbcolor(ndef['color']),
+ cmndef
+ )
+ if ndef['border']:
+ cmndef = r'\fcolorbox[rgb]{%s}{%s}{%s}' % (
+ rgbcolor(ndef['border']),
+ rgbcolor(ndef['bgcolor']),
+ cmndef
+ )
+ elif ndef['bgcolor']:
+ cmndef = r'\colorbox[rgb]{%s}{%s}' % (
+ rgbcolor(ndef['bgcolor']),
+ cmndef
+ )
+ if cmndef == '#1':
+ continue
+ try:
+ alias = 'C' + firstl + second.next()
+ except StopIteration:
+ firstl = first.next()
+ second = iter(letters)
+ alias = 'C' + firstl + second.next()
+ t2c[ttype] = alias
+ c2d[alias] = cmndef
+
+ def get_style_defs(self, arg=''):
+ """
+ Return the \\newcommand sequences needed to define the commands
+ used to format text in the verbatim environment. If ``arg`` is
+ given and true, use \\renewcommand instead.
+ """
+ nc = (arg and r'\renewcommand' or r'\newcommand')
+ return '%s\\at{@}\n%s\\lb{[}\n%s\\rb{]}\n' % (nc, nc, nc) + \
+ '\n'.join(['%s\\%s[1]{%s}' % (nc, alias, cmndef)
+ for alias, cmndef in self.cmd2def.iteritems()
+ if cmndef != '#1'])
+
+ def format(self, tokensource, outfile):
+ #XXX: add support for background colors!!!!!!!111!1
+
+ if self.full:
+ realoutfile = outfile
+ outfile = StringIO.StringIO()
+
+ outfile.write(r'\begin{Verbatim}[commandchars=@\[\]')
+ if self.linenos:
+ start, step = self.linenostart, self.linenostep
+ outfile.write(',numbers=left' +
+ (start and ',firstnumber=%d' % start or '') +
+ (step and ',stepnumber=%d' % step or ''))
+ if self.verboptions:
+ outfile.write(',' + self.verboptions)
+ outfile.write(']\n')
+
+ for ttype, value in tokensource:
+ value = escape_tex(value)
+ cmd = self.ttype2cmd.get(ttype)
+ while cmd is None:
+ ttype = ttype.parent
+ cmd = self.ttype2cmd.get(ttype)
+ if cmd:
+ spl = value.split('\n')
+ for line in spl[:-1]:
+ if line:
+ outfile.write("@%s[%s]" % (cmd, line))
+ outfile.write('\n')
+ if spl[-1]:
+ outfile.write("@%s[%s]" % (cmd, spl[-1]))
+ else:
+ outfile.write(value)
+
+ outfile.write('\n\\end{Verbatim}\n')
+
+ if self.full:
+ realoutfile.write(DOC_TEMPLATE %
+ dict(docclass = self.docclass,
+ preamble = self.preamble,
+ title = self.title,
+ styledefs = self.get_style_defs(),
+ code = outfile.getvalue()))
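A sketch of the LaTeX formatter producing a complete document via the ``full`` option inherited from Formatter (sample code and title are illustrative):

    from pygments import highlight
    from pygments.lexers import CLexer
    from pygments.formatters import LatexFormatter

    print highlight('int main(void) { return 0; }\n', CLexer(),
                    LatexFormatter(full=True, title='main.c', linenos=True))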
diff --git a/pygments/formatters/other.py b/pygments/formatters/other.py
new file mode 100644
index 00000000..eb8d72fc
--- /dev/null
+++ b/pygments/formatters/other.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.other
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Other formatters.
+
+ :copyright: 2006 by Georg Brandl, Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.formatter import Formatter
+import StringIO
+
+__all__ = ['NullFormatter', 'RawTokenFormatter']
+
+
+class NullFormatter(Formatter):
+ """
+ Output the text unchanged without any formatting.
+ """
+ def format(self, tokensource, outfile):
+ for ttype, value in tokensource:
+ outfile.write(value)
+
+
+class RawTokenFormatter(Formatter):
+ """
+ Output a raw token representation for storing token streams.
+
+ The format is ``tokentype<TAB>repr(tokenstring)``
+
+ Additional options accepted:
+
+ ``compress``
+ If set to "gz" or "bz2", compress the token stream with
+ the given compression algorithm (default: '').
+ """
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self.compress = options.get('compress', '')
+
+ def format(self, tokensource, outfile):
+ if self.compress == 'gz':
+ import gzip
+ outfile = gzip.GzipFile('', 'wb', 9, outfile)
+ write = outfile.write
+ flush = outfile.flush
+ elif self.compress == 'bz2':
+ import bz2
+ compressor = bz2.BZ2Compressor(9)
+ def write(text):
+ outfile.write(compressor.compress(text))
+ def flush():
+ outfile.write(compressor.flush())
+ outfile.flush()
+ else:
+ write = outfile.write
+ flush = outfile.flush
+
+ lasttype = None
+ lastval = ''
+ for ttype, value in tokensource:
+ if ttype is lasttype:
+ lastval += value
+ else:
+ if lasttype:
+ write("%s\t%r\n" % (lasttype, lastval))
+ lastval = value
+ lasttype = ttype
+ write("%s\t%r\n" % (lasttype, lastval))
+ flush()
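A sketch of dumping a compressed raw token stream with the formatter above (the output file name is illustrative):

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import RawTokenFormatter

    out = file('tokens.raw', 'wb')
    highlight('x = 42\n', PythonLexer(), RawTokenFormatter(compress='gz'), out)
    out.close()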
diff --git a/pygments/formatters/terminal.py b/pygments/formatters/terminal.py
new file mode 100644
index 00000000..b4b0071e
--- /dev/null
+++ b/pygments/formatters/terminal.py
@@ -0,0 +1,98 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.terminal
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for terminal output with ANSI sequences.
+
+ :copyright: 2006 by Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.formatter import Formatter
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Token
+from pygments.console import ansiformat
+from pygments.util import get_bool_opt
+
+
+__all__ = ['TerminalFormatter']
+
+
+#: Map token types to a tuple of color values for light and dark
+#: backgrounds.
+TERMINAL_COLORS = {
+ Token: ('', ''),
+
+ Comment: ('lightgray', 'darkgray'),
+ Keyword: ('darkblue', 'blue'),
+ Keyword.Type: ('teal', 'turquoise'),
+ Operator.Word: ('purple', 'fuchsia'),
+ Name.Builtin: ('teal', 'turquoise'),
+ Name.Function: ('darkgreen', 'green'),
+ Name.Namespace: ('_teal_', '_turquoise_'),
+ Name.Class: ('_darkgreen_', '_green_'),
+ Name.Exception: ('teal', 'turquoise'),
+ Name.Decorator: ('darkgray', 'lightgray'),
+ Name.Variable: ('darkred', 'red'),
+ Name.Constant: ('darkred', 'red'),
+ Name.Attribute: ('teal', 'turquoise'),
+ Name.Tag: ('blue', 'blue'),
+ String: ('brown', 'brown'),
+ Number: ('darkblue', 'blue'),
+
+ Generic.Deleted: ('red', 'red'),
+ Generic.Inserted: ('darkgreen', 'green'),
+ Generic.Heading: ('**', '**'),
+ Generic.Subheading: ('*purple*', '*fuchsia*'),
+ Generic.Error: ('red', 'red'),
+
+ Error: ('_red_', '_red_'),
+}
+
+
+class TerminalFormatter(Formatter):
+ """
+ Output plain text with coloring ANSI sequences.
+ """
+
+ def __init__(self, **options):
+ """
+ Accepted options:
+
+ ``bg``
+ Set to ``'light'`` or ``'dark'`` depending on the
+ terminal's background.
+
+ ``colorscheme``
+ ``None`` or a dictionary mapping token types to
+ ``(lightbg, darkbg)`` color names.
+
+ ``debug``
+ If true, output "<<ERROR>>" after each error token.
+ """
+ Formatter.__init__(self, **options)
+ self.darkbg = options.get('bg', 'light') == 'dark'
+ self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
+ self.debug = get_bool_opt(options, 'debug', False)
+
+ def format(self, tokensource, outfile):
+ dbg = self.debug
+ for ttype, value in tokensource:
+ color = self.colorscheme.get(ttype)
+ while color is None:
+ ttype = ttype[:-1]
+ color = self.colorscheme.get(ttype)
+ if color:
+ color = color[self.darkbg]
+ spl = value.split('\n')
+ for line in spl[:-1]:
+ if line:
+ outfile.write(ansiformat(color, line))
+ outfile.write('\n')
+ if spl[-1]:
+ outfile.write(ansiformat(color, spl[-1]))
+ else:
+ outfile.write(value)
+ if dbg and ttype is Error:
+ outfile.write('<<ERROR>>')
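A sketch of terminal output on a dark background, using the ``bg`` option described above (the sample diff text is illustrative):

    import sys
    from pygments import highlight
    from pygments.lexers import DiffLexer
    from pygments.formatters import TerminalFormatter

    highlight('-old line\n+new line\n', DiffLexer(),
              TerminalFormatter(bg='dark'), sys.stdout)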
diff --git a/pygments/lexer.py b/pygments/lexer.py
new file mode 100644
index 00000000..3d3ee865
--- /dev/null
+++ b/pygments/lexer.py
@@ -0,0 +1,486 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexer
+ ~~~~~~~~~~~~~
+
+ Base lexer classes.
+
+ :copyright: 2006 by Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+import re
+
+from pygments.token import Error, Text, Other, _TokenType
+from pygments.util import get_bool_opt, get_int_opt
+
+
+__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
+ 'LexerContext', 'include', 'flags', 'bygroups', 'using', 'this']
+
+
+class Lexer(object):
+ """
+ Lexer for a specific language.
+
+ Basic options recognized:
+ ``stripnl``
+ Strip leading and trailing newlines from the input (default: True).
+ ``stripall``
+ Strip all leading and trailing whitespace from the input
+ (default: False).
+ ``tabsize``
+ If given and greater than 0, expand tabs in the input (default: 0).
+ """
+
+ #: Name of the lexer
+ name = None
+
+ #: Shortcuts for the lexer
+ aliases = []
+
+ #: fn match rules
+ filenames = []
+
+ def __init__(self, **options):
+ self.options = options
+ self.stripnl = get_bool_opt(options, 'stripnl', True)
+ self.stripall = get_bool_opt(options, 'stripall', False)
+ self.tabsize = get_int_opt(options, 'tabsize', 0)
+
+ def get_tokens(self, text):
+ """
+ Return an iterable of (tokentype, value) pairs generated from ``text``.
+
+ Also preprocess the text, i.e. expand tabs and strip it if wanted.
+ """
+ text = type(text)('\n').join(text.splitlines())
+ if self.stripall:
+ text = text.strip()
+ elif self.stripnl:
+ text = text.strip('\n')
+ if self.tabsize > 0:
+ text = text.expandtabs(self.tabsize)
+ if not text.endswith('\n'):
+ text += '\n'
+
+ for i, t, v in self.get_tokens_unprocessed(text):
+ yield t, v
+
+ def get_tokens_unprocessed(self, text):
+ """
+ Return an iterable of (tokentype, value) pairs.
+ In subclasses, implement this method as a generator to
+ maximize effectiveness.
+ """
+ raise NotImplementedError
+
+
+class DelegatingLexer(Lexer):
+ """
+    This lexer takes two lexers as arguments: a root lexer and
+    a language lexer. First everything is scanned using the language
+    lexer, then all ``Other`` tokens are lexed using the root
+    lexer.
+
+ The lexers from the ``template`` lexer package use this base lexer.
+ """
+
+ def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options):
+ self.root_lexer = _root_lexer(**options)
+ self.language_lexer = _language_lexer(**options)
+ self.needle = _needle
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ buffered = ''
+ insertions = []
+ lng_buffer = []
+ for i, t, v in self.language_lexer.get_tokens_unprocessed(text):
+ if t is self.needle:
+ if lng_buffer:
+ insertions.append((len(buffered), lng_buffer))
+ lng_buffer = []
+ buffered += v
+ else:
+ lng_buffer.append((i, t, v))
+ # the last "\n" should always be lexed by the root lexer
+ return do_insertions(insertions,
+ self.root_lexer.get_tokens_unprocessed(buffered))
+
+
+#-------------------------------------------------------------------------------
+# RegexLexer and ExtendedRegexLexer
+#
+
+
+class include(str):
+ """
+ Indicates that a state should include rules from another state.
+ """
+ pass
+
+
+class combined(tuple):
+ """
+ Indicates a state combined from multiple states.
+ """
+
+ def __new__(cls, *args):
+ return tuple.__new__(cls, args)
+
+ def __init__(self, *args):
+ tuple.__init__(self, args)
+
+
+class _PseudoMatch(object):
+ """
+ A pseudo match object constructed from a string.
+ """
+
+ def __init__(self, start, text):
+ self._text = text
+ self._start = start
+
+ def start(self, arg=None):
+ return self._start
+
+ def end(self, arg=None):
+ return self._start + len(self._text)
+
+ def group(self, arg=None):
+ if arg:
+ raise IndexError('No such group')
+ return self._text
+
+ def groups(self):
+ return (self._text,)
+
+ def groupdict(self):
+ return {}
+
+
+def bygroups(*args):
+ """
+ Callback that yields multiple actions for each group in the match.
+ """
+ def callback(lexer, match, ctx=None):
+ for i, action in enumerate(args):
+ if type(action) is _TokenType:
+ data = match.group(i + 1)
+ if data:
+ yield match.start(i + 1), action, data
+ else:
+ if ctx:
+ ctx.pos = match.start(i+1)
+ for item in action(lexer, _PseudoMatch(match.start(i + 1),
+ match.group(i + 1)), ctx):
+ if item:
+ yield item
+ if ctx:
+ ctx.pos = match.end()
+ return callback
+
+
+class _This(object):
+ """
+ Special singleton used for indicating the caller class.
+ Used by ``using``.
+ """
+this = _This()
+
+
+def using(_other, **kwargs):
+ """
+ Callback that processes the match with a different lexer.
+
+ The keyword arguments are forwarded to the lexer.
+ """
+ if _other is this:
+ def callback(lexer, match, ctx=None):
+ s = match.start()
+ for i, t, v in lexer.get_tokens_unprocessed(match.group()):
+ yield i + s, t, v
+ if ctx:
+ ctx.pos = match.end()
+ else:
+ def callback(lexer, match, ctx=None):
+ # XXX: cache that somehow
+ kwargs.update(lexer.options)
+ lx = _other(**kwargs)
+
+ s = match.start()
+ for i, t, v in lx.get_tokens_unprocessed(match.group()):
+ yield i + s, t, v
+ if ctx:
+ ctx.pos = match.end()
+ return callback
+
+
+class RegexLexerMeta(type):
+ """
+ Metaclass for RegexLexer, creates the self._tokens attribute from
+ self.tokens on the first instantiation.
+ """
+
+ def _process_state(cls, state):
+ assert type(state) is str, "wrong state name %r" % state
+ assert state[0] != '#', "invalid state name %r" % state
+ if state in cls._tokens:
+ return cls._tokens[state]
+ tokens = cls._tokens[state] = []
+ rflags = cls.flags
+ for tdef in cls.tokens[state]:
+ if isinstance(tdef, include):
+ # it's a state reference
+ assert tdef != state, "circular state reference %r" % state
+ tokens.extend(cls._process_state(str(tdef)))
+ continue
+
+ assert type(tdef) is tuple, "wrong rule def %r" % tdef
+
+ rex = re.compile(tdef[0], rflags)
+
+ assert type(tdef[1]) is _TokenType or callable(tdef[1]), \
+ 'token type must be simple type or callable, not %r' % tdef[1]
+
+ if len(tdef) == 2:
+ new_state = None
+ else:
+ tdef2 = tdef[2]
+ if isinstance(tdef2, str):
+ # an existing state
+ if tdef2 == '#pop':
+ new_state = -1
+ elif tdef2 in cls.tokens:
+ new_state = (tdef2,)
+ elif tdef2 == '#push':
+ new_state = tdef2
+ elif tdef2[:5] == '#pop:':
+ new_state = -int(tdef2[5:])
+ else:
+ assert False, 'unknown new state %r' % tdef2
+ elif isinstance(tdef2, combined):
+ # combine a new state from existing ones
+ new_state = '_tmp_%d' % cls._tmpname
+ cls._tmpname += 1
+ itokens = []
+ for istate in tdef2:
+ assert istate != state, 'circular state ref %r' % istate
+ itokens.extend(cls._process_state(istate))
+ cls._tokens[new_state] = itokens
+ new_state = (new_state,)
+ elif isinstance(tdef2, tuple):
+ # push more than one state
+ for state in tdef2:
+ assert state in cls.tokens, \
+ 'unknown new state ' + state
+ new_state = tdef2
+ else:
+ assert False, 'unknown new state def %r' % tdef2
+ tokens.append((rex, tdef[1], new_state))
+ return tokens
+
+ def __call__(cls, *args, **kwds):
+ if not hasattr(cls, '_tokens'):
+ cls._tokens = {}
+ cls._tmpname = 0
+ for state in cls.tokens.keys():
+ cls._process_state(state)
+
+ return type.__call__(cls, *args, **kwds)
+
+
+class RegexLexer(Lexer):
+ """
+ Base for simple stateful regular expression-based lexers.
+ Simplifies the lexing process so that you need only
+ provide a list of states and regular expressions.
+ """
+ __metaclass__ = RegexLexerMeta
+
+ #: Flags for compiling the regular expressions.
+ #: Defaults to MULTILINE.
+ flags = re.MULTILINE
+
+ #: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}``
+ #:
+ #: The initial state is 'root'.
+ #: ``new_state`` can be omitted to signify no state transition.
+ #: If it is a string, the state is pushed on the stack and changed.
+ #: If it is a tuple of strings, all states are pushed on the stack and
+ #: the current state will be the topmost.
+ #: It can also be ``combined('state1', 'state2', ...)``
+ #: to signify a new, anonymous state combined from the rules of two
+ #: or more existing ones.
+ #: Furthermore, it can be '#pop' to signify going back one step in
+ #: the state stack, or '#push' to push the current state on the stack
+ #: again.
+ #:
+ #: The tuple can also be replaced with ``include('state')``, in which
+ #: case the rules from the state named by the string are included in the
+ #: current one.
+ tokens = {}
+
+ def get_tokens_unprocessed(self, text, stack=['root']):
+ """
+ Split ``text`` into (tokentype, text) pairs.
+
+        ``stack`` is the initial stack (default: ``['root']``)
+ """
+ pos = 0
+ statestack = stack[:]
+ statetokens = self._tokens[statestack[-1]]
+ while 1:
+ for rex, action, new_state in statetokens:
+ m = rex.match(text, pos)
+ if m:
+ if type(action) is _TokenType:
+ yield pos, action, m.group()
+ else:
+ for item in action(self, m):
+ yield item
+ pos = m.end()
+ if new_state is not None:
+ # state transition
+ if isinstance(new_state, tuple):
+ statestack.extend(new_state)
+ elif isinstance(new_state, int):
+ # pop
+ del statestack[new_state:]
+ elif new_state == '#push':
+ statestack.append(statestack[-1])
+ else:
+ assert False, "wrong state def: %r" % new_state
+ statetokens = self._tokens[statestack[-1]]
+ break
+ else:
+ try:
+ if text[pos] == '\n':
+ # at EOL, reset state to "root"
+ pos += 1
+ statestack = ['root']
+ statetokens = self._tokens['root']
+ yield pos, Text, '\n'
+ continue
+ yield pos, Error, text[pos]
+ pos += 1
+ except IndexError:
+ break
+
+
+class LexerContext(object):
+ """
+ A helper object that holds lexer position data.
+ """
+
+ def __init__(self, text, pos, stack=None, end=None):
+ self.text = text
+ self.pos = pos
+ self.end = end or len(text) # end=0 not supported ;-)
+ self.stack = stack or ['root']
+
+ def __repr__(self):
+ return 'LexerContext(%r, %r, %r)' % (
+ self.text, self.pos, self.stack)
+
+
+class ExtendedRegexLexer(RegexLexer):
+ """
+ A RegexLexer which additionally allows functions to be
+ specified as "token types", in which case the function will
+ be called.
+ """
+
+ def get_tokens_unprocessed(self, text=None, context=None):
+ """
+ Split ``text`` into (tokentype, text) pairs.
+ If ``context`` is given, use this lexer context instead.
+ """
+ if not context:
+ ctx = LexerContext(text, 0)
+ statetokens = self._tokens['root']
+ else:
+ ctx = context
+ statetokens = self._tokens[ctx.stack[-1]]
+ text = ctx.text
+ while 1:
+ for rex, action, new_state in statetokens:
+ m = rex.match(text, ctx.pos, ctx.end)
+ if m:
+ if type(action) is _TokenType:
+ yield ctx.pos, action, m.group()
+ ctx.pos = m.end()
+ else:
+ for item in action(self, m, ctx):
+ yield item
+ if not new_state:
+ # altered the state stack?
+ statetokens = self._tokens[ctx.stack[-1]]
+ # CAUTION: callback must set ctx.pos!
+ if new_state is not None:
+ # state transition
+ if isinstance(new_state, tuple):
+ ctx.stack.extend(new_state)
+ elif isinstance(new_state, int):
+ # pop
+ del ctx.stack[new_state:]
+ elif new_state == '#push':
+ ctx.stack.append(ctx.stack[-1])
+ else:
+ assert False, "wrong state def: %r" % new_state
+ statetokens = self._tokens[ctx.stack[-1]]
+ break
+ else:
+ try:
+ if ctx.pos >= ctx.end:
+ break
+ if text[ctx.pos] == '\n':
+ # at EOL, reset state to "root"
+ ctx.pos += 1
+ ctx.stack = ['root']
+ statetokens = self._tokens['root']
+ yield ctx.pos, Text, '\n'
+ continue
+ yield ctx.pos, Error, text[ctx.pos]
+ ctx.pos += 1
+ except IndexError:
+ break
+
+
+def do_insertions(insertions, tokens):
+ """
+ Helper for lexers which must combine the results of several
+ sublexers.
+
+ ``insertions`` is a list of ``(index, itokens)`` pairs.
+ Each ``itokens`` iterable should be inserted at position
+ ``index`` into the token stream given by the ``tokens``
+ argument.
+
+ The result is a combined token stream.
+
+ XXX: The indices yielded by this function are not correct!
+ """
+ insertions = iter(insertions)
+ try:
+ index, itokens = insertions.next()
+ except StopIteration:
+ # no insertions
+ for item in tokens:
+ yield item
+ return
+
+ insleft = True
+ for i, t, v in tokens:
+ oldi = 0
+ while insleft and i + len(v) >= index:
+ yield i, t, v[oldi:index-i]
+ for item in itokens:
+ yield item
+ oldi = index-i
+ try:
+ index, itokens = insertions.next()
+ except StopIteration:
+ insleft = False
+ break # not strictly necessary
+ yield i, t, v[oldi:]
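To make the ``tokens`` convention documented in RegexLexer concrete, a toy lexer sketch (hypothetical, not part of this commit) for a simple key = value format:

    from pygments.lexer import RegexLexer, bygroups
    from pygments.token import Text, Comment, Keyword, Name, Operator, String

    class ToyIniLexer(RegexLexer):
        """Illustrative lexer for an INI-like 'key = value' format."""
        name = 'Toy INI'
        aliases = ['toyini']
        filenames = ['*.toyini']

        tokens = {
            'root': [
                (r'\s+', Text),
                (r';.*?$', Comment),
                (r'\[.*?\]$', Keyword),
                (r'(\w+)(\s*)(=)(\s*)(.*?)$',
                 bygroups(Name.Attribute, Text, Operator, Text, String)),
                (r'.+', Text),   # fallback: anything else on the line
            ]
        }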
diff --git a/pygments/lexers/__init__.py b/pygments/lexers/__init__.py
new file mode 100644
index 00000000..0ef200db
--- /dev/null
+++ b/pygments/lexers/__init__.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers
+ ~~~~~~~~~~~~~~
+
+ Pygments lexers.
+
+ :copyright: 2006 by Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+import fnmatch
+import types
+from os.path import basename
+
+from pygments.lexers._mapping import LEXERS
+
+
+__all__ = ['get_lexer_by_name', 'get_lexer_for_filename'] + LEXERS.keys()
+
+_lexer_cache = {}
+
+
+def _load_lexers(module_name):
+ """
+ Loads a lexer (and all others in the module too)
+ """
+ mod = __import__(module_name, None, None, ['__all__'])
+ for lexer_name in mod.__all__:
+ cls = getattr(mod, lexer_name)
+ _lexer_cache[cls.name] = cls
+
+
+def get_lexer_by_name(alias, **options):
+ """
+ Get a lexer by an alias
+ """
+ for module_name, name, aliases, _ in LEXERS.itervalues():
+ if alias in aliases:
+ if name not in _lexer_cache:
+ _load_lexers(module_name)
+ return _lexer_cache[name](**options)
+ raise ValueError('no lexer for alias %r found' % alias)
+
+
+def get_lexer_for_filename(fn, **options):
+ """
+ Guess a lexer by a filename
+ """
+ fn = basename(fn)
+ for module_name, name, _, filenames in LEXERS.itervalues():
+ for filename in filenames:
+ if fnmatch.fnmatch(fn, filename):
+ if name not in _lexer_cache:
+ _load_lexers(module_name)
+ return _lexer_cache[name](**options)
+ raise ValueError('no lexer for filename %r found' % fn)
+
+
+class _automodule(types.ModuleType):
+
+ def __getattr__(self, name):
+ """Automatically import lexers."""
+ info = LEXERS.get(name)
+ if info:
+ _load_lexers(info[0])
+ cls = _lexer_cache[info[1]]
+ setattr(self, name, cls)
+ return cls
+ raise AttributeError(name)
+
+
+import sys
+oldmod = sys.modules['pygments.lexers']
+newmod = _automodule('pygments.lexers')
+newmod.__dict__.update(oldmod.__dict__)
+sys.modules['pygments.lexers'] = newmod
+del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
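The lexer lookup helpers above mirror the formatter ones; aliases and file name patterns come from the LEXERS map:

    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    lexer = get_lexer_by_name('python', stripall=True)   # lookup by alias
    lexer = get_lexer_for_filename('Makefile')           # matched via fnmatch patterns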
diff --git a/pygments/lexers/_luabuiltins.py b/pygments/lexers/_luabuiltins.py
new file mode 100644
index 00000000..69a87cfc
--- /dev/null
+++ b/pygments/lexers/_luabuiltins.py
@@ -0,0 +1,256 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._luabuiltins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    This file contains the names and modules of Lua functions.
+    It is able to re-generate itself, but for adding new functions you
+    probably have to add some callbacks (see the function module_callbacks).
+
+ Do not edit the list by hand.
+
+ :copyright: 2006 by Lukas Meuser.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+MODULES = {'basic': ['_G',
+ '_VERSION',
+ 'assert',
+ 'collectgarbage',
+ 'dofile',
+ 'error',
+ 'getfenv',
+ 'getmetatable',
+ 'ipairs',
+ 'load',
+ 'loadfile',
+ 'loadstring',
+ 'next',
+ 'pairs',
+ 'pcall',
+ 'print',
+ 'rawequal',
+ 'rawget',
+ 'rawset',
+ 'select',
+ 'setfenv',
+ 'setmetatable',
+ 'tonumber',
+ 'tostring',
+ 'type',
+ 'unpack',
+ 'xpcall'],
+ 'coroutine': ['coroutine.create',
+ 'coroutine.resume',
+ 'coroutine.running',
+ 'coroutine.status',
+ 'coroutine.wrap',
+ 'coroutine.yield'],
+ 'debug': ['debug.debug',
+ 'debug.getfenv',
+ 'debug.gethook',
+ 'debug.getinfo',
+ 'debug.getlocal',
+ 'debug.getmetatable',
+ 'debug.getregistry',
+ 'debug.getupvalue',
+ 'debug.setfenv',
+ 'debug.sethook',
+ 'debug.setlocal',
+ 'debug.setmetatable',
+ 'debug.setupvalue',
+ 'debug.traceback'],
+ 'io': ['file:close',
+ 'file:flush',
+ 'file:lines',
+ 'file:read',
+ 'file:seek',
+ 'file:setvbuf',
+ 'file:write',
+ 'io.close',
+ 'io.flush',
+ 'io.input',
+ 'io.lines',
+ 'io.open',
+ 'io.output',
+ 'io.popen',
+ 'io.read',
+ 'io.tmpfile',
+ 'io.type',
+ 'io.write'],
+ 'math': ['math.abs',
+ 'math.acos',
+ 'math.asin',
+ 'math.atan2',
+ 'math.atan',
+ 'math.ceil',
+ 'math.cosh',
+ 'math.cos',
+ 'math.deg',
+ 'math.exp',
+ 'math.floor',
+ 'math.fmod',
+ 'math.frexp',
+ 'math.huge',
+ 'math.ldexp',
+ 'math.log10',
+ 'math.log',
+ 'math.max',
+ 'math.min',
+ 'math.modf',
+ 'math.pi',
+ 'math.pow',
+ 'math.rad',
+ 'math.random',
+ 'math.randomseed',
+ 'math.sinh',
+ 'math.sin',
+ 'math.sqrt',
+ 'math.tanh',
+ 'math.tan'],
+ 'modules': ['module',
+ 'require',
+ 'package.cpath',
+ 'package.loaded',
+ 'package.loadlib',
+ 'package.path',
+ 'package.preload',
+ 'package.seeall'],
+ 'os': ['os.clock',
+ 'os.date',
+ 'os.difftime',
+ 'os.execute',
+ 'os.exit',
+ 'os.getenv',
+ 'os.remove',
+ 'os.rename',
+ 'os.setlocale',
+ 'os.time',
+ 'os.tmpname'],
+ 'string': ['string.byte',
+ 'string.char',
+ 'string.dump',
+ 'string.find',
+ 'string.format',
+ 'string.gmatch',
+ 'string.gsub',
+ 'string.len',
+ 'string.lower',
+ 'string.match',
+ 'string.rep',
+ 'string.reverse',
+ 'string.sub',
+ 'string.upper'],
+ 'table': ['table.concat',
+ 'table.insert',
+ 'table.maxn',
+ 'table.remove',
+ 'table.sort']}
+
+if __name__ == '__main__':
+ import re
+ import urllib
+ import pprint
+
+ # you can't generally find out what module a function belongs to if you
+    # have only its name. Because of this, here are some callback functions
+    # that recognize if a given function belongs to a specific module.
+ def module_callbacks():
+ def is_in_coroutine_module(name):
+ return name.startswith('coroutine.')
+
+ def is_in_modules_module(name):
+ if name in ['require', 'module'] or name.startswith('package'):
+ return True
+ else:
+ return False
+
+ def is_in_string_module(name):
+ return name.startswith('string.')
+
+ def is_in_table_module(name):
+ return name.startswith('table.')
+
+ def is_in_math_module(name):
+ return name.startswith('math')
+
+ def is_in_io_module(name):
+ return name.startswith('io.') or name.startswith('file:')
+
+ def is_in_os_module(name):
+ return name.startswith('os.')
+
+ def is_in_debug_module(name):
+ return name.startswith('debug.')
+
+ return {'coroutine': is_in_coroutine_module,
+ 'modules': is_in_modules_module,
+ 'string': is_in_string_module,
+ 'table': is_in_table_module,
+ 'math': is_in_math_module,
+ 'io': is_in_io_module,
+ 'os': is_in_os_module,
+ 'debug': is_in_debug_module}
+
+
+
+ def get_newest_version():
+ f = urllib.urlopen('http://www.lua.org/manual/')
+ r = re.compile(r'^<A HREF="(\d\.\d)/">Lua \1</A>')
+ for line in f:
+ m = r.match(line)
+ if m is not None:
+ return m.groups()[0]
+
+ def get_lua_functions(version):
+ f = urllib.urlopen('http://www.lua.org/manual/%s/' % version)
+ r = re.compile(r'^<A HREF="manual.html#pdf-(.+)">\1</A>')
+ functions = []
+ for line in f:
+ m = r.match(line)
+ if m is not None:
+ functions.append(m.groups()[0])
+ return functions
+
+ def get_function_module(name):
+ for mod, cb in module_callbacks().iteritems():
+ if cb(name):
+ return mod
+ if '.' in name:
+ return name.split('.')[0]
+ else:
+ return 'basic'
+
+ def regenerate(filename, modules):
+ f = file(filename)
+ try:
+ content = f.read()
+ finally:
+ f.close()
+
+ header = content[:content.find('MODULES = {')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+
+ f = file(filename, 'w')
+ f.write(header)
+ f.write('MODULES = %s\n\n' % pprint.pformat(modules))
+ f.write(footer)
+ f.close()
+
+ def run():
+ version = get_newest_version()
+ print '> Downloading function index for Lua %s' % version
+ functions = get_lua_functions(version)
+ print '> %d functions found:' % len(functions)
+
+ modules = {}
+ for full_function_name in functions:
+ print '>> %s' % full_function_name
+ m = get_function_module(full_function_name)
+ modules.setdefault(m, []).append(full_function_name)
+
+ regenerate(__file__, modules)
+
+
+ run()
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
new file mode 100644
index 00000000..ef80adc6
--- /dev/null
+++ b/pygments/lexers/_mapping.py
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._mapping
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer mapping definitions. This file is generated by itself. Every time
+    you change something in a builtin lexer definition, run this script from
+    the lexers folder to update it.
+
+ Do not alter this file by hand!
+
+ :copyright: 2006 by Armin Ronacher, Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+LEXERS = {
+ 'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',)),
+ 'BrainfuckLexer': ('pygments.lexers.other', 'Brainfuck', ('brainfuck',), ('*.bf', '*.b')),
+ 'CLexer': ('pygments.lexers.compiled', 'C', ('c',), ('*.c', '*.h')),
+ 'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',)),
+ 'CppLexer': ('pygments.lexers.compiled', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++')),
+ 'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django', ('css+django',), ()),
+ 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), ()),
+ 'CssLexer': ('pygments.lexers.web', 'CSS', ('css',), ('*.css',)),
+ 'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), ()),
+ 'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), ()),
+ 'DelphiLexer': ('pygments.lexers.compiled', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',)),
+ 'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff',), ('*.diff', '*.patch')),
+ 'DjangoLexer': ('pygments.lexers.templates', 'django template', ('django',), ()),
+ 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), ()),
+ 'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django', ('html+django',), ()),
+ 'HtmlLexer': ('pygments.lexers.web', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml')),
+ 'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',)),
+ 'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), ()),
+ 'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg'), ('*.ini', '*.cfg')),
+ 'IrcLogsLexer': ('pygments.lexers.text', 'IRC logs', ('irc',), ()),
+ 'JavaLexer': ('pygments.lexers.compiled', 'Java', ('java',), ('*.java',)),
+ 'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django', ('js+django', 'javascript+django'), ()),
+ 'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), ()),
+ 'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js',)),
+ 'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), ()),
+ 'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), ()),
+ 'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua',)),
+ 'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf'), ('*.mak', 'Makefile', 'makefile')),
+ 'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm')),
+ 'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]')),
+ 'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), ()),
+ 'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py'), ('*.py', '*.pyw')),
+ 'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), ('*.raw',)),
+ 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',)),
+ 'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), ()),
+ 'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx')),
+ 'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ()),
+ 'SqlLexer': ('pygments.lexers.other', 'SQL', ('sql',), ('*.sql',)),
+ 'TexLexer': ('pygments.lexers.text', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc')),
+ 'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',)),
+ 'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas')),
+ 'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django', ('xml+django',), ()),
+ 'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), ()),
+ 'XmlLexer': ('pygments.lexers.web', 'XML', ('xml',), ('*.xml',)),
+ 'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), ()),
+ 'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), ())
+}
+
+if __name__ == '__main__':
+ import sys
+ import os
+
+ # look up lexers
+ found_lexers = []
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
+ for filename in os.listdir('.'):
+ if filename.endswith('.py') and not filename.startswith('_'):
+ module_name = 'pygments.lexers.%s' % filename[:-3]
+ print module_name
+ module = __import__(module_name, None, None, [''])
+ for lexer_name in module.__all__:
+ lexer = getattr(module, lexer_name)
+ found_lexers.append(
+ '%r: %r' % (lexer_name,
+ (module_name,
+ lexer.name,
+ tuple(lexer.aliases),
+ tuple(lexer.filenames))))
+ # sort them; that should make the diff files for SVN smaller
+ found_lexers.sort()
+
+ # extract useful source code from this file
+ f = file(__file__)
+ try:
+ content = f.read()
+ finally:
+ f.close()
+ header = content[:content.find('LEXERS = {')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+ # write new file
+ f = file(__file__, 'w')
+ f.write(header)
+ f.write('LEXERS = {\n %s\n}\n\n' % ',\n '.join(found_lexers))
+ f.write(footer)
+ f.close()
diff --git a/pygments/lexers/_phpbuiltins.py b/pygments/lexers/_phpbuiltins.py
new file mode 100644
index 00000000..ca6076c5
--- /dev/null
+++ b/pygments/lexers/_phpbuiltins.py
@@ -0,0 +1,3391 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._phpbuiltins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file loads the function names and their modules from the
+ PHP website and generates itself.
+
+ Do not alter this file by hand!
+
+ WARNING: regenerating this file transfers quite a lot of data over
+ your internet connection. Don't run it at home, use a server ;-)
+
+ :copyright: 2006 by Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+
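+# Illustrative note (an assumption about how this data is consumed, with a
+# hypothetical name; not generated content): a lexer can flatten this
+# mapping into a single set of builtin function names, e.g.
+#
+#     PHP_BUILTINS = set()
+#     for functions in MODULES.values():
+#         PHP_BUILTINS.update(functions)
+#
+# so calls to known builtins can be highlighted differently from calls to
+# user-defined functions.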
+MODULES = {'.NET': ['dotnet_load'],
+ 'APD': ['apd_breakpoint',
+ 'apd_callstack',
+ 'apd_clunk',
+ 'apd_continue',
+ 'apd_croak',
+ 'apd_dump_function_table',
+ 'apd_dump_persistent_resources',
+ 'apd_dump_regular_resources',
+ 'apd_echo',
+ 'apd_get_active_symbols',
+ 'apd_set_pprof_trace',
+ 'apd_set_session',
+ 'apd_set_session_trace',
+ 'apd_set_socket_session_trace',
+ 'override_function',
+ 'rename_function'],
+ 'Apache': ['apache_child_terminate',
+ 'apache_get_modules',
+ 'apache_get_version',
+ 'apache_getenv',
+ 'apache_lookup_uri',
+ 'apache_note',
+ 'apache_request_headers',
+ 'apache_reset_timeout',
+ 'apache_response_headers',
+ 'apache_setenv',
+ 'ascii2ebcdic',
+ 'ebcdic2ascii',
+ 'getallheaders',
+ 'virtual'],
+ 'Arrays': ['array',
+ 'array_change_key_case',
+ 'array_chunk',
+ 'array_combine',
+ 'array_count_values',
+ 'array_diff',
+ 'array_diff_assoc',
+ 'array_diff_key',
+ 'array_diff_uassoc',
+ 'array_diff_ukey',
+ 'array_fill',
+ 'array_filter',
+ 'array_flip',
+ 'array_intersect',
+ 'array_intersect_assoc',
+ 'array_intersect_key',
+ 'array_intersect_uassoc',
+ 'array_intersect_ukey',
+ 'array_key_exists',
+ 'array_keys',
+ 'array_map',
+ 'array_merge',
+ 'array_merge_recursive',
+ 'array_multisort',
+ 'array_pad',
+ 'array_pop',
+ 'array_push',
+ 'array_rand',
+ 'array_reduce',
+ 'array_reverse',
+ 'array_search',
+ 'array_shift',
+ 'array_slice',
+ 'array_splice',
+ 'array_sum',
+ 'array_udiff',
+ 'array_udiff_assoc',
+ 'array_udiff_uassoc',
+ 'array_uintersect',
+ 'array_uintersect_assoc',
+ 'array_uintersect_uassoc',
+ 'array_unique',
+ 'array_unshift',
+ 'array_values',
+ 'array_walk',
+ 'array_walk_recursive',
+ 'arsort',
+ 'asort',
+ 'compact',
+ 'count',
+ 'current',
+ 'each',
+ 'end',
+ 'extract',
+ 'in_array',
+ 'key',
+ 'krsort',
+ 'ksort',
+ 'list',
+ 'natcasesort',
+ 'natsort',
+ 'next',
+ 'pos',
+ 'prev',
+ 'range',
+ 'reset',
+ 'rsort',
+ 'shuffle',
+ 'sizeof',
+ 'sort',
+ 'uasort',
+ 'uksort',
+ 'usort'],
+ 'Aspell': ['aspell_check',
+ 'aspell_check_raw',
+ 'aspell_new',
+ 'aspell_suggest'],
+ 'BC math': ['bcadd',
+ 'bccomp',
+ 'bcdiv',
+ 'bcmod',
+ 'bcmul',
+ 'bcpow',
+ 'bcpowmod',
+ 'bcscale',
+ 'bcsqrt',
+ 'bcsub'],
+ 'Bzip2': ['bzclose',
+ 'bzcompress',
+ 'bzdecompress',
+ 'bzerrno',
+ 'bzerror',
+ 'bzerrstr',
+ 'bzflush',
+ 'bzopen',
+ 'bzread',
+ 'bzwrite'],
+ 'CCVS': ['ccvs_add',
+ 'ccvs_auth',
+ 'ccvs_command',
+ 'ccvs_count',
+ 'ccvs_delete',
+ 'ccvs_done',
+ 'ccvs_init',
+ 'ccvs_lookup',
+ 'ccvs_new',
+ 'ccvs_report',
+ 'ccvs_return',
+ 'ccvs_reverse',
+ 'ccvs_sale',
+ 'ccvs_status',
+ 'ccvs_textvalue',
+ 'ccvs_void'],
+ 'COM': ['com_addref',
+ 'com_create_guid',
+ 'com_event_sink',
+ 'com_get',
+ 'com_get_active_object',
+ 'com_invoke',
+ 'com_isenum',
+ 'com_load',
+ 'com_load_typelib',
+ 'com_message_pump',
+ 'com_print_typeinfo',
+ 'com_propget',
+ 'com_propput',
+ 'com_propset',
+ 'com_release',
+ 'com_set',
+ 'variant_abs',
+ 'variant_add',
+ 'variant_and',
+ 'variant_cast',
+ 'variant_cat',
+ 'variant_cmp',
+ 'variant_date_from_timestamp',
+ 'variant_date_to_timestamp',
+ 'variant_div',
+ 'variant_eqv',
+ 'variant_fix',
+ 'variant_get_type',
+ 'variant_idiv',
+ 'variant_imp',
+ 'variant_int',
+ 'variant_mod',
+ 'variant_mul',
+ 'variant_neg',
+ 'variant_not',
+ 'variant_or',
+ 'variant_pow',
+ 'variant_round',
+ 'variant_set',
+ 'variant_set_type',
+ 'variant_sub',
+ 'variant_xor'],
+ 'CURL': ['curl_close',
+ 'curl_copy_handle',
+ 'curl_errno',
+ 'curl_error',
+ 'curl_exec',
+ 'curl_getinfo',
+ 'curl_init',
+ 'curl_multi_add_handle',
+ 'curl_multi_close',
+ 'curl_multi_exec',
+ 'curl_multi_getcontent',
+ 'curl_multi_info_read',
+ 'curl_multi_init',
+ 'curl_multi_remove_handle',
+ 'curl_multi_select',
+ 'curl_setopt',
+ 'curl_version'],
+ 'Calendar': ['cal_days_in_month',
+ 'cal_from_jd',
+ 'cal_info',
+ 'cal_to_jd',
+ 'easter_date',
+ 'easter_days',
+ 'frenchtojd',
+ 'gregoriantojd',
+ 'jddayofweek',
+ 'jdmonthname',
+ 'jdtofrench',
+ 'jdtogregorian',
+ 'jdtojewish',
+ 'jdtojulian',
+ 'jdtounix',
+ 'jewishtojd',
+ 'juliantojd',
+ 'unixtojd'],
+ 'Classes/Objects': ['call_user_method',
+ 'call_user_method_array',
+ 'class_exists',
+ 'get_class',
+ 'get_class_methods',
+ 'get_class_vars',
+ 'get_declared_classes',
+ 'get_declared_interfaces',
+ 'get_object_vars',
+ 'get_parent_class',
+ 'interface_exists',
+ 'is_a',
+ 'is_subclass_of',
+ 'method_exists'],
+ 'Classkit': ['classkit_import',
+ 'classkit_method_add',
+ 'classkit_method_copy',
+ 'classkit_method_redefine',
+ 'classkit_method_remove',
+ 'classkit_method_rename'],
+ 'ClibPDF': ['cpdf_add_annotation',
+ 'cpdf_add_outline',
+ 'cpdf_arc',
+ 'cpdf_begin_text',
+ 'cpdf_circle',
+ 'cpdf_clip',
+ 'cpdf_close',
+ 'cpdf_closepath',
+ 'cpdf_closepath_fill_stroke',
+ 'cpdf_closepath_stroke',
+ 'cpdf_continue_text',
+ 'cpdf_curveto',
+ 'cpdf_end_text',
+ 'cpdf_fill',
+ 'cpdf_fill_stroke',
+ 'cpdf_finalize',
+ 'cpdf_finalize_page',
+ 'cpdf_global_set_document_limits',
+ 'cpdf_import_jpeg',
+ 'cpdf_lineto',
+ 'cpdf_moveto',
+ 'cpdf_newpath',
+ 'cpdf_open',
+ 'cpdf_output_buffer',
+ 'cpdf_page_init',
+ 'cpdf_place_inline_image',
+ 'cpdf_rect',
+ 'cpdf_restore',
+ 'cpdf_rlineto',
+ 'cpdf_rmoveto',
+ 'cpdf_rotate',
+ 'cpdf_rotate_text',
+ 'cpdf_save',
+ 'cpdf_save_to_file',
+ 'cpdf_scale',
+ 'cpdf_set_action_url',
+ 'cpdf_set_char_spacing',
+ 'cpdf_set_creator',
+ 'cpdf_set_current_page',
+ 'cpdf_set_font',
+ 'cpdf_set_font_directories',
+ 'cpdf_set_font_map_file',
+ 'cpdf_set_horiz_scaling',
+ 'cpdf_set_keywords',
+ 'cpdf_set_leading',
+ 'cpdf_set_page_animation',
+ 'cpdf_set_subject',
+ 'cpdf_set_text_matrix',
+ 'cpdf_set_text_pos',
+ 'cpdf_set_text_rendering',
+ 'cpdf_set_text_rise',
+ 'cpdf_set_title',
+ 'cpdf_set_viewer_preferences',
+ 'cpdf_set_word_spacing',
+ 'cpdf_setdash',
+ 'cpdf_setflat',
+ 'cpdf_setgray',
+ 'cpdf_setgray_fill',
+ 'cpdf_setgray_stroke',
+ 'cpdf_setlinecap',
+ 'cpdf_setlinejoin',
+ 'cpdf_setlinewidth',
+ 'cpdf_setmiterlimit',
+ 'cpdf_setrgbcolor',
+ 'cpdf_setrgbcolor_fill',
+ 'cpdf_setrgbcolor_stroke',
+ 'cpdf_show',
+ 'cpdf_show_xy',
+ 'cpdf_stringwidth',
+ 'cpdf_stroke',
+ 'cpdf_text',
+ 'cpdf_translate'],
+ 'Crack': ['crack_check',
+ 'crack_closedict',
+ 'crack_getlastmessage',
+ 'crack_opendict'],
+ 'Cybercash': ['cybercash_base64_decode',
+ 'cybercash_base64_encode',
+ 'cybercash_decr',
+ 'cybercash_encr'],
+ 'Cyrus IMAP': ['cyrus_authenticate',
+ 'cyrus_bind',
+ 'cyrus_close',
+ 'cyrus_connect',
+ 'cyrus_query',
+ 'cyrus_unbind'],
+ 'DB++': ['dbplus_add',
+ 'dbplus_aql',
+ 'dbplus_chdir',
+ 'dbplus_close',
+ 'dbplus_curr',
+ 'dbplus_errcode',
+ 'dbplus_errno',
+ 'dbplus_find',
+ 'dbplus_first',
+ 'dbplus_flush',
+ 'dbplus_freealllocks',
+ 'dbplus_freelock',
+ 'dbplus_freerlocks',
+ 'dbplus_getlock',
+ 'dbplus_getunique',
+ 'dbplus_info',
+ 'dbplus_last',
+ 'dbplus_lockrel',
+ 'dbplus_next',
+ 'dbplus_open',
+ 'dbplus_prev',
+ 'dbplus_rchperm',
+ 'dbplus_rcreate',
+ 'dbplus_rcrtexact',
+ 'dbplus_rcrtlike',
+ 'dbplus_resolve',
+ 'dbplus_restorepos',
+ 'dbplus_rkeys',
+ 'dbplus_ropen',
+ 'dbplus_rquery',
+ 'dbplus_rrename',
+ 'dbplus_rsecindex',
+ 'dbplus_runlink',
+ 'dbplus_rzap',
+ 'dbplus_savepos',
+ 'dbplus_setindex',
+ 'dbplus_setindexbynumber',
+ 'dbplus_sql',
+ 'dbplus_tcl',
+ 'dbplus_tremove',
+ 'dbplus_undo',
+ 'dbplus_undoprepare',
+ 'dbplus_unlockrel',
+ 'dbplus_unselect',
+ 'dbplus_update',
+ 'dbplus_xlockrel',
+ 'dbplus_xunlockrel'],
+ 'DBM': ['dblist',
+ 'dbmclose',
+ 'dbmdelete',
+ 'dbmexists',
+ 'dbmfetch',
+ 'dbmfirstkey',
+ 'dbminsert',
+ 'dbmnextkey',
+ 'dbmopen',
+ 'dbmreplace'],
+ 'DOM': ['dom_import_simplexml'],
+ 'DOM XML': ['domxml_new_doc',
+ 'domxml_open_file',
+ 'domxml_open_mem',
+ 'domxml_version',
+ 'domxml_xmltree',
+ 'domxml_xslt_stylesheet',
+ 'domxml_xslt_stylesheet_doc',
+ 'domxml_xslt_stylesheet_file',
+ 'xpath_eval',
+ 'xpath_eval_expression',
+ 'xpath_new_context',
+ 'xptr_eval',
+ 'xptr_new_context'],
+ 'Date/Time': ['checkdate',
+ 'date',
+ 'date_sunrise',
+ 'date_sunset',
+ 'getdate',
+ 'gettimeofday',
+ 'gmdate',
+ 'gmmktime',
+ 'gmstrftime',
+ 'idate',
+ 'localtime',
+ 'microtime',
+ 'mktime',
+ 'strftime',
+ 'strptime',
+ 'strtotime',
+ 'time'],
+ 'Direct IO': ['dio_close',
+ 'dio_fcntl',
+ 'dio_open',
+ 'dio_read',
+ 'dio_seek',
+ 'dio_stat',
+ 'dio_tcsetattr',
+ 'dio_truncate',
+ 'dio_write'],
+ 'Directories': ['chdir',
+ 'chroot',
+ 'closedir',
+ 'getcwd',
+ 'opendir',
+ 'readdir',
+ 'rewinddir',
+ 'scandir'],
+ 'Errors and Logging': ['debug_backtrace',
+ 'debug_print_backtrace',
+ 'error_log',
+ 'error_reporting',
+ 'restore_error_handler',
+ 'restore_exception_handler',
+ 'set_error_handler',
+ 'set_exception_handler',
+ 'trigger_error',
+ 'user_error'],
+ 'Exif': ['exif_imagetype',
+ 'exif_read_data',
+ 'exif_tagname',
+ 'exif_thumbnail',
+ 'read_exif_data'],
+ 'FDF': ['fdf_add_doc_javascript',
+ 'fdf_add_template',
+ 'fdf_close',
+ 'fdf_create',
+ 'fdf_enum_values',
+ 'fdf_errno',
+ 'fdf_error',
+ 'fdf_get_ap',
+ 'fdf_get_attachment',
+ 'fdf_get_encoding',
+ 'fdf_get_file',
+ 'fdf_get_flags',
+ 'fdf_get_opt',
+ 'fdf_get_status',
+ 'fdf_get_value',
+ 'fdf_get_version',
+ 'fdf_header',
+ 'fdf_next_field_name',
+ 'fdf_open',
+ 'fdf_open_string',
+ 'fdf_remove_item',
+ 'fdf_save',
+ 'fdf_save_string',
+ 'fdf_set_ap',
+ 'fdf_set_encoding',
+ 'fdf_set_file',
+ 'fdf_set_flags',
+ 'fdf_set_javascript_action',
+ 'fdf_set_on_import_javascript',
+ 'fdf_set_opt',
+ 'fdf_set_status',
+ 'fdf_set_submit_form_action',
+ 'fdf_set_target_frame',
+ 'fdf_set_value',
+ 'fdf_set_version'],
+ 'FTP': ['ftp_alloc',
+ 'ftp_cdup',
+ 'ftp_chdir',
+ 'ftp_chmod',
+ 'ftp_close',
+ 'ftp_connect',
+ 'ftp_delete',
+ 'ftp_exec',
+ 'ftp_fget',
+ 'ftp_fput',
+ 'ftp_get',
+ 'ftp_get_option',
+ 'ftp_login',
+ 'ftp_mdtm',
+ 'ftp_mkdir',
+ 'ftp_nb_continue',
+ 'ftp_nb_fget',
+ 'ftp_nb_fput',
+ 'ftp_nb_get',
+ 'ftp_nb_put',
+ 'ftp_nlist',
+ 'ftp_pasv',
+ 'ftp_put',
+ 'ftp_pwd',
+ 'ftp_quit',
+ 'ftp_raw',
+ 'ftp_rawlist',
+ 'ftp_rename',
+ 'ftp_rmdir',
+ 'ftp_set_option',
+ 'ftp_site',
+ 'ftp_size',
+ 'ftp_ssl_connect',
+ 'ftp_systype'],
+ 'Filesystem': ['basename',
+ 'chgrp',
+ 'chmod',
+ 'chown',
+ 'clearstatcache',
+ 'copy',
+ 'delete',
+ 'dirname',
+ 'disk_free_space',
+ 'disk_total_space',
+ 'diskfreespace',
+ 'fclose',
+ 'feof',
+ 'fflush',
+ 'fgetc',
+ 'fgetcsv',
+ 'fgets',
+ 'fgetss',
+ 'file',
+ 'file_exists',
+ 'file_get_contents',
+ 'file_put_contents',
+ 'fileatime',
+ 'filectime',
+ 'filegroup',
+ 'fileinode',
+ 'filemtime',
+ 'fileowner',
+ 'fileperms',
+ 'filesize',
+ 'filetype',
+ 'flock',
+ 'fnmatch',
+ 'fopen',
+ 'fpassthru',
+ 'fputcsv',
+ 'fputs',
+ 'fread',
+ 'fscanf',
+ 'fseek',
+ 'fstat',
+ 'ftell',
+ 'ftruncate',
+ 'fwrite',
+ 'glob',
+ 'is_dir',
+ 'is_executable',
+ 'is_file',
+ 'is_link',
+ 'is_readable',
+ 'is_uploaded_file',
+ 'is_writable',
+ 'is_writeable',
+ 'link',
+ 'linkinfo',
+ 'lstat',
+ 'mkdir',
+ 'move_uploaded_file',
+ 'parse_ini_file',
+ 'pathinfo',
+ 'pclose',
+ 'popen',
+ 'readfile',
+ 'readlink',
+ 'realpath',
+ 'rename',
+ 'rewind',
+ 'rmdir',
+ 'set_file_buffer',
+ 'stat',
+ 'symlink',
+ 'tempnam',
+ 'tmpfile',
+ 'touch',
+ 'umask',
+ 'unlink'],
+ 'Firebird/InterBase': ['ibase_add_user',
+ 'ibase_affected_rows',
+ 'ibase_backup',
+ 'ibase_blob_add',
+ 'ibase_blob_cancel',
+ 'ibase_blob_close',
+ 'ibase_blob_create',
+ 'ibase_blob_echo',
+ 'ibase_blob_get',
+ 'ibase_blob_import',
+ 'ibase_blob_info',
+ 'ibase_blob_open',
+ 'ibase_close',
+ 'ibase_commit',
+ 'ibase_commit_ret',
+ 'ibase_connect',
+ 'ibase_db_info',
+ 'ibase_delete_user',
+ 'ibase_drop_db',
+ 'ibase_errcode',
+ 'ibase_errmsg',
+ 'ibase_execute',
+ 'ibase_fetch_assoc',
+ 'ibase_fetch_object',
+ 'ibase_fetch_row',
+ 'ibase_field_info',
+ 'ibase_free_event_handler',
+ 'ibase_free_query',
+ 'ibase_free_result',
+ 'ibase_gen_id',
+ 'ibase_maintain_db',
+ 'ibase_modify_user',
+ 'ibase_name_result',
+ 'ibase_num_fields',
+ 'ibase_num_params',
+ 'ibase_param_info',
+ 'ibase_pconnect',
+ 'ibase_prepare',
+ 'ibase_query',
+ 'ibase_restore',
+ 'ibase_rollback',
+ 'ibase_rollback_ret',
+ 'ibase_server_info',
+ 'ibase_service_attach',
+ 'ibase_service_detach',
+ 'ibase_set_event_handler',
+ 'ibase_timefmt',
+ 'ibase_trans',
+ 'ibase_wait_event'],
+ 'FriBiDi': ['fribidi_log2vis'],
+ 'FrontBase': ['fbsql_affected_rows',
+ 'fbsql_autocommit',
+ 'fbsql_blob_size',
+ 'fbsql_change_user',
+ 'fbsql_clob_size',
+ 'fbsql_close',
+ 'fbsql_commit',
+ 'fbsql_connect',
+ 'fbsql_create_blob',
+ 'fbsql_create_clob',
+ 'fbsql_create_db',
+ 'fbsql_data_seek',
+ 'fbsql_database',
+ 'fbsql_database_password',
+ 'fbsql_db_query',
+ 'fbsql_db_status',
+ 'fbsql_drop_db',
+ 'fbsql_errno',
+ 'fbsql_error',
+ 'fbsql_fetch_array',
+ 'fbsql_fetch_assoc',
+ 'fbsql_fetch_field',
+ 'fbsql_fetch_lengths',
+ 'fbsql_fetch_object',
+ 'fbsql_fetch_row',
+ 'fbsql_field_flags',
+ 'fbsql_field_len',
+ 'fbsql_field_name',
+ 'fbsql_field_seek',
+ 'fbsql_field_table',
+ 'fbsql_field_type',
+ 'fbsql_free_result',
+ 'fbsql_get_autostart_info',
+ 'fbsql_hostname',
+ 'fbsql_insert_id',
+ 'fbsql_list_dbs',
+ 'fbsql_list_fields',
+ 'fbsql_list_tables',
+ 'fbsql_next_result',
+ 'fbsql_num_fields',
+ 'fbsql_num_rows',
+ 'fbsql_password',
+ 'fbsql_pconnect',
+ 'fbsql_query',
+ 'fbsql_read_blob',
+ 'fbsql_read_clob',
+ 'fbsql_result',
+ 'fbsql_rollback',
+ 'fbsql_select_db',
+ 'fbsql_set_lob_mode',
+ 'fbsql_set_password',
+ 'fbsql_set_transaction',
+ 'fbsql_start_db',
+ 'fbsql_stop_db',
+ 'fbsql_tablename',
+ 'fbsql_username',
+ 'fbsql_warnings'],
+ 'Function handling': ['call_user_func',
+ 'call_user_func_array',
+ 'create_function',
+ 'func_get_arg',
+ 'func_get_args',
+ 'func_num_args',
+ 'function_exists',
+ 'get_defined_functions',
+ 'register_shutdown_function',
+ 'register_tick_function',
+ 'unregister_tick_function'],
+ 'GMP': ['gmp_abs',
+ 'gmp_add',
+ 'gmp_and',
+ 'gmp_clrbit',
+ 'gmp_cmp',
+ 'gmp_com',
+ 'gmp_div',
+ 'gmp_div_q',
+ 'gmp_div_qr',
+ 'gmp_div_r',
+ 'gmp_divexact',
+ 'gmp_fact',
+ 'gmp_gcd',
+ 'gmp_gcdext',
+ 'gmp_hamdist',
+ 'gmp_init',
+ 'gmp_intval',
+ 'gmp_invert',
+ 'gmp_jacobi',
+ 'gmp_legendre',
+ 'gmp_mod',
+ 'gmp_mul',
+ 'gmp_neg',
+ 'gmp_or',
+ 'gmp_perfect_square',
+ 'gmp_popcount',
+ 'gmp_pow',
+ 'gmp_powm',
+ 'gmp_prob_prime',
+ 'gmp_random',
+ 'gmp_scan0',
+ 'gmp_scan1',
+ 'gmp_setbit',
+ 'gmp_sign',
+ 'gmp_sqrt',
+ 'gmp_sqrtrem',
+ 'gmp_strval',
+ 'gmp_sub',
+ 'gmp_xor'],
+ 'Hyperwave': ['hw_array2objrec',
+ 'hw_changeobject',
+ 'hw_children',
+ 'hw_childrenobj',
+ 'hw_close',
+ 'hw_connect',
+ 'hw_connection_info',
+ 'hw_cp',
+ 'hw_deleteobject',
+ 'hw_docbyanchor',
+ 'hw_docbyanchorobj',
+ 'hw_document_attributes',
+ 'hw_document_bodytag',
+ 'hw_document_content',
+ 'hw_document_setcontent',
+ 'hw_document_size',
+ 'hw_dummy',
+ 'hw_edittext',
+ 'hw_error',
+ 'hw_errormsg',
+ 'hw_free_document',
+ 'hw_getanchors',
+ 'hw_getanchorsobj',
+ 'hw_getandlock',
+ 'hw_getchildcoll',
+ 'hw_getchildcollobj',
+ 'hw_getchilddoccoll',
+ 'hw_getchilddoccollobj',
+ 'hw_getobject',
+ 'hw_getobjectbyquery',
+ 'hw_getobjectbyquerycoll',
+ 'hw_getobjectbyquerycollobj',
+ 'hw_getobjectbyqueryobj',
+ 'hw_getparents',
+ 'hw_getparentsobj',
+ 'hw_getrellink',
+ 'hw_getremote',
+ 'hw_getremotechildren',
+ 'hw_getsrcbydestobj',
+ 'hw_gettext',
+ 'hw_getusername',
+ 'hw_identify',
+ 'hw_incollections',
+ 'hw_info',
+ 'hw_inscoll',
+ 'hw_insdoc',
+ 'hw_insertanchors',
+ 'hw_insertdocument',
+ 'hw_insertobject',
+ 'hw_mapid',
+ 'hw_modifyobject',
+ 'hw_mv',
+ 'hw_new_document',
+ 'hw_objrec2array',
+ 'hw_output_document',
+ 'hw_pconnect',
+ 'hw_pipedocument',
+ 'hw_root',
+ 'hw_setlinkroot',
+ 'hw_stat',
+ 'hw_unlock',
+ 'hw_who'],
+ 'Hyperwave API': ['hwapi_hgcsp'],
+ 'IMAP': ['imap_8bit',
+ 'imap_alerts',
+ 'imap_append',
+ 'imap_base64',
+ 'imap_binary',
+ 'imap_body',
+ 'imap_bodystruct',
+ 'imap_check',
+ 'imap_clearflag_full',
+ 'imap_close',
+ 'imap_createmailbox',
+ 'imap_delete',
+ 'imap_deletemailbox',
+ 'imap_errors',
+ 'imap_expunge',
+ 'imap_fetch_overview',
+ 'imap_fetchbody',
+ 'imap_fetchheader',
+ 'imap_fetchstructure',
+ 'imap_get_quota',
+ 'imap_get_quotaroot',
+ 'imap_getacl',
+ 'imap_getmailboxes',
+ 'imap_getsubscribed',
+ 'imap_header',
+ 'imap_headerinfo',
+ 'imap_headers',
+ 'imap_last_error',
+ 'imap_list',
+ 'imap_listmailbox',
+ 'imap_listscan',
+ 'imap_listsubscribed',
+ 'imap_lsub',
+ 'imap_mail',
+ 'imap_mail_compose',
+ 'imap_mail_copy',
+ 'imap_mail_move',
+ 'imap_mailboxmsginfo',
+ 'imap_mime_header_decode',
+ 'imap_msgno',
+ 'imap_num_msg',
+ 'imap_num_recent',
+ 'imap_open',
+ 'imap_ping',
+ 'imap_qprint',
+ 'imap_renamemailbox',
+ 'imap_reopen',
+ 'imap_rfc822_parse_adrlist',
+ 'imap_rfc822_parse_headers',
+ 'imap_rfc822_write_address',
+ 'imap_scanmailbox',
+ 'imap_search',
+ 'imap_set_quota',
+ 'imap_setacl',
+ 'imap_setflag_full',
+ 'imap_sort',
+ 'imap_status',
+ 'imap_subscribe',
+ 'imap_thread',
+ 'imap_timeout',
+ 'imap_uid',
+ 'imap_undelete',
+ 'imap_unsubscribe',
+ 'imap_utf7_decode',
+ 'imap_utf7_encode',
+ 'imap_utf8'],
+ 'IRC Gateway': ['ircg_channel_mode',
+ 'ircg_disconnect',
+ 'ircg_eval_ecmascript_params',
+ 'ircg_fetch_error_msg',
+ 'ircg_get_username',
+ 'ircg_html_encode',
+ 'ircg_ignore_add',
+ 'ircg_ignore_del',
+ 'ircg_invite',
+ 'ircg_is_conn_alive',
+ 'ircg_join',
+ 'ircg_kick',
+ 'ircg_list',
+ 'ircg_lookup_format_messages',
+ 'ircg_lusers',
+ 'ircg_msg',
+ 'ircg_names',
+ 'ircg_nick',
+ 'ircg_nickname_escape',
+ 'ircg_nickname_unescape',
+ 'ircg_notice',
+ 'ircg_oper',
+ 'ircg_part',
+ 'ircg_pconnect',
+ 'ircg_register_format_messages',
+ 'ircg_set_current',
+ 'ircg_set_file',
+ 'ircg_set_on_die',
+ 'ircg_topic',
+ 'ircg_who',
+ 'ircg_whois'],
+ 'Image': ['gd_info',
+ 'getimagesize',
+ 'image2wbmp',
+ 'image_type_to_extension',
+ 'image_type_to_mime_type',
+ 'imagealphablending',
+ 'imageantialias',
+ 'imagearc',
+ 'imagechar',
+ 'imagecharup',
+ 'imagecolorallocate',
+ 'imagecolorallocatealpha',
+ 'imagecolorat',
+ 'imagecolorclosest',
+ 'imagecolorclosestalpha',
+ 'imagecolorclosesthwb',
+ 'imagecolordeallocate',
+ 'imagecolorexact',
+ 'imagecolorexactalpha',
+ 'imagecolormatch',
+ 'imagecolorresolve',
+ 'imagecolorresolvealpha',
+ 'imagecolorset',
+ 'imagecolorsforindex',
+ 'imagecolorstotal',
+ 'imagecolortransparent',
+ 'imagecopy',
+ 'imagecopymerge',
+ 'imagecopymergegray',
+ 'imagecopyresampled',
+ 'imagecopyresized',
+ 'imagecreate',
+ 'imagecreatefromgd',
+ 'imagecreatefromgd2',
+ 'imagecreatefromgd2part',
+ 'imagecreatefromgif',
+ 'imagecreatefromjpeg',
+ 'imagecreatefrompng',
+ 'imagecreatefromstring',
+ 'imagecreatefromwbmp',
+ 'imagecreatefromxbm',
+ 'imagecreatefromxpm',
+ 'imagecreatetruecolor',
+ 'imagedashedline',
+ 'imagedestroy',
+ 'imageellipse',
+ 'imagefill',
+ 'imagefilledarc',
+ 'imagefilledellipse',
+ 'imagefilledpolygon',
+ 'imagefilledrectangle',
+ 'imagefilltoborder',
+ 'imagefilter',
+ 'imagefontheight',
+ 'imagefontwidth',
+ 'imageftbbox',
+ 'imagefttext',
+ 'imagegammacorrect',
+ 'imagegd',
+ 'imagegd2',
+ 'imagegif',
+ 'imageinterlace',
+ 'imageistruecolor',
+ 'imagejpeg',
+ 'imagelayereffect',
+ 'imageline',
+ 'imageloadfont',
+ 'imagepalettecopy',
+ 'imagepng',
+ 'imagepolygon',
+ 'imagepsbbox',
+ 'imagepsencodefont',
+ 'imagepsextendfont',
+ 'imagepsfreefont',
+ 'imagepsloadfont',
+ 'imagepsslantfont',
+ 'imagepstext',
+ 'imagerectangle',
+ 'imagerotate',
+ 'imagesavealpha',
+ 'imagesetbrush',
+ 'imagesetpixel',
+ 'imagesetstyle',
+ 'imagesetthickness',
+ 'imagesettile',
+ 'imagestring',
+ 'imagestringup',
+ 'imagesx',
+ 'imagesy',
+ 'imagetruecolortopalette',
+ 'imagettfbbox',
+ 'imagettftext',
+ 'imagetypes',
+ 'imagewbmp',
+ 'imagexbm',
+ 'iptcembed',
+ 'iptcparse',
+ 'jpeg2wbmp',
+ 'png2wbmp'],
+ 'Informix': ['ifx_affected_rows',
+ 'ifx_blobinfile_mode',
+ 'ifx_byteasvarchar',
+ 'ifx_close',
+ 'ifx_connect',
+ 'ifx_copy_blob',
+ 'ifx_create_blob',
+ 'ifx_create_char',
+ 'ifx_do',
+ 'ifx_error',
+ 'ifx_errormsg',
+ 'ifx_fetch_row',
+ 'ifx_fieldproperties',
+ 'ifx_fieldtypes',
+ 'ifx_free_blob',
+ 'ifx_free_char',
+ 'ifx_free_result',
+ 'ifx_get_blob',
+ 'ifx_get_char',
+ 'ifx_getsqlca',
+ 'ifx_htmltbl_result',
+ 'ifx_nullformat',
+ 'ifx_num_fields',
+ 'ifx_num_rows',
+ 'ifx_pconnect',
+ 'ifx_prepare',
+ 'ifx_query',
+ 'ifx_textasvarchar',
+ 'ifx_update_blob',
+ 'ifx_update_char',
+ 'ifxus_close_slob',
+ 'ifxus_create_slob',
+ 'ifxus_free_slob',
+ 'ifxus_open_slob',
+ 'ifxus_read_slob',
+ 'ifxus_seek_slob',
+ 'ifxus_tell_slob',
+ 'ifxus_write_slob'],
+ 'Ingres II': ['ingres_autocommit',
+ 'ingres_close',
+ 'ingres_commit',
+ 'ingres_connect',
+ 'ingres_fetch_array',
+ 'ingres_fetch_object',
+ 'ingres_fetch_row',
+ 'ingres_field_length',
+ 'ingres_field_name',
+ 'ingres_field_nullable',
+ 'ingres_field_precision',
+ 'ingres_field_scale',
+ 'ingres_field_type',
+ 'ingres_num_fields',
+ 'ingres_num_rows',
+ 'ingres_pconnect',
+ 'ingres_query',
+ 'ingres_rollback'],
+ 'Java': ['java_last_exception_clear', 'java_last_exception_get'],
+ 'LDAP': ['ldap_8859_to_t61',
+ 'ldap_add',
+ 'ldap_bind',
+ 'ldap_close',
+ 'ldap_compare',
+ 'ldap_connect',
+ 'ldap_count_entries',
+ 'ldap_delete',
+ 'ldap_dn2ufn',
+ 'ldap_err2str',
+ 'ldap_errno',
+ 'ldap_error',
+ 'ldap_explode_dn',
+ 'ldap_first_attribute',
+ 'ldap_first_entry',
+ 'ldap_first_reference',
+ 'ldap_free_result',
+ 'ldap_get_attributes',
+ 'ldap_get_dn',
+ 'ldap_get_entries',
+ 'ldap_get_option',
+ 'ldap_get_values',
+ 'ldap_get_values_len',
+ 'ldap_list',
+ 'ldap_mod_add',
+ 'ldap_mod_del',
+ 'ldap_mod_replace',
+ 'ldap_modify',
+ 'ldap_next_attribute',
+ 'ldap_next_entry',
+ 'ldap_next_reference',
+ 'ldap_parse_reference',
+ 'ldap_parse_result',
+ 'ldap_read',
+ 'ldap_rename',
+ 'ldap_sasl_bind',
+ 'ldap_search',
+ 'ldap_set_option',
+ 'ldap_set_rebind_proc',
+ 'ldap_sort',
+ 'ldap_start_tls',
+ 'ldap_t61_to_8859',
+ 'ldap_unbind'],
+ 'LZF': ['lzf_compress', 'lzf_decompress', 'lzf_optimized_for'],
+ 'Lotus Notes': ['notes_body',
+ 'notes_copy_db',
+ 'notes_create_db',
+ 'notes_create_note',
+ 'notes_drop_db',
+ 'notes_find_note',
+ 'notes_header_info',
+ 'notes_list_msgs',
+ 'notes_mark_read',
+ 'notes_mark_unread',
+ 'notes_nav_create',
+ 'notes_search',
+ 'notes_unread',
+ 'notes_version'],
+ 'MCAL': ['mcal_append_event',
+ 'mcal_close',
+ 'mcal_create_calendar',
+ 'mcal_date_compare',
+ 'mcal_date_valid',
+ 'mcal_day_of_week',
+ 'mcal_day_of_year',
+ 'mcal_days_in_month',
+ 'mcal_delete_calendar',
+ 'mcal_delete_event',
+ 'mcal_event_add_attribute',
+ 'mcal_event_init',
+ 'mcal_event_set_alarm',
+ 'mcal_event_set_category',
+ 'mcal_event_set_class',
+ 'mcal_event_set_description',
+ 'mcal_event_set_end',
+ 'mcal_event_set_recur_daily',
+ 'mcal_event_set_recur_monthly_mday',
+ 'mcal_event_set_recur_monthly_wday',
+ 'mcal_event_set_recur_none',
+ 'mcal_event_set_recur_weekly',
+ 'mcal_event_set_recur_yearly',
+ 'mcal_event_set_start',
+ 'mcal_event_set_title',
+ 'mcal_expunge',
+ 'mcal_fetch_current_stream_event',
+ 'mcal_fetch_event',
+ 'mcal_is_leap_year',
+ 'mcal_list_alarms',
+ 'mcal_list_events',
+ 'mcal_next_recurrence',
+ 'mcal_open',
+ 'mcal_popen',
+ 'mcal_rename_calendar',
+ 'mcal_reopen',
+ 'mcal_snooze',
+ 'mcal_store_event',
+ 'mcal_time_valid',
+ 'mcal_week_of_year'],
+ 'MS SQL Server': ['mssql_bind',
+ 'mssql_close',
+ 'mssql_connect',
+ 'mssql_data_seek',
+ 'mssql_execute',
+ 'mssql_fetch_array',
+ 'mssql_fetch_assoc',
+ 'mssql_fetch_batch',
+ 'mssql_fetch_field',
+ 'mssql_fetch_object',
+ 'mssql_fetch_row',
+ 'mssql_field_length',
+ 'mssql_field_name',
+ 'mssql_field_seek',
+ 'mssql_field_type',
+ 'mssql_free_result',
+ 'mssql_free_statement',
+ 'mssql_get_last_message',
+ 'mssql_guid_string',
+ 'mssql_init',
+ 'mssql_min_error_severity',
+ 'mssql_min_message_severity',
+ 'mssql_next_result',
+ 'mssql_num_fields',
+ 'mssql_num_rows',
+ 'mssql_pconnect',
+ 'mssql_query',
+ 'mssql_result',
+ 'mssql_rows_affected',
+ 'mssql_select_db'],
+ 'Mail': ['ezmlm_hash', 'mail'],
+ 'Math': ['abs',
+ 'acos',
+ 'acosh',
+ 'asin',
+ 'asinh',
+ 'atan',
+ 'atan2',
+ 'atanh',
+ 'base_convert',
+ 'bindec',
+ 'ceil',
+ 'cos',
+ 'cosh',
+ 'decbin',
+ 'dechex',
+ 'decoct',
+ 'deg2rad',
+ 'exp',
+ 'expm1',
+ 'floor',
+ 'fmod',
+ 'getrandmax',
+ 'hexdec',
+ 'hypot',
+ 'is_finite',
+ 'is_infinite',
+ 'is_nan',
+ 'lcg_value',
+ 'log',
+ 'log10',
+ 'log1p',
+ 'max',
+ 'min',
+ 'mt_getrandmax',
+ 'mt_rand',
+ 'mt_srand',
+ 'octdec',
+ 'pi',
+ 'pow',
+ 'rad2deg',
+ 'rand',
+ 'round',
+ 'sin',
+ 'sinh',
+ 'sqrt',
+ 'srand',
+ 'tan',
+ 'tanh'],
+ 'Memcache': ['memcache_debug'],
+ 'Mimetype': ['mime_content_type'],
+ 'Ming (flash)': ['ming_setcubicthreshold',
+ 'ming_setscale',
+ 'ming_useswfversion',
+ 'swfaction',
+ 'swfbitmap',
+ 'swfbutton',
+ 'swffill',
+ 'swffont',
+ 'swfgradient',
+ 'swfmorph',
+ 'swfmovie',
+ 'swfshape',
+ 'swfsprite',
+ 'swftext',
+ 'swftextfield'],
+ 'Misc.': ['connection_aborted',
+ 'connection_status',
+ 'connection_timeout',
+ 'constant',
+ 'define',
+ 'defined',
+ 'die',
+ 'eval',
+ 'exit',
+ 'get_browser',
+ 'highlight_file',
+ 'highlight_string',
+ 'ignore_user_abort',
+ 'pack',
+ 'php_check_syntax',
+ 'php_strip_whitespace',
+ 'show_source',
+ 'sleep',
+ 'time_nanosleep',
+ 'uniqid',
+ 'unpack',
+ 'usleep'],
+ 'Msession': ['msession_connect',
+ 'msession_count',
+ 'msession_create',
+ 'msession_destroy',
+ 'msession_disconnect',
+ 'msession_find',
+ 'msession_get',
+ 'msession_get_array',
+ 'msession_get_data',
+ 'msession_inc',
+ 'msession_list',
+ 'msession_listvar',
+ 'msession_lock',
+ 'msession_plugin',
+ 'msession_randstr',
+ 'msession_set',
+ 'msession_set_array',
+ 'msession_set_data',
+ 'msession_timeout',
+ 'msession_uniq',
+ 'msession_unlock'],
+ 'Multibyte String': ['mb_convert_case',
+ 'mb_convert_encoding',
+ 'mb_convert_kana',
+ 'mb_convert_variables',
+ 'mb_decode_mimeheader',
+ 'mb_decode_numericentity',
+ 'mb_detect_encoding',
+ 'mb_detect_order',
+ 'mb_encode_mimeheader',
+ 'mb_encode_numericentity',
+ 'mb_ereg',
+ 'mb_ereg_match',
+ 'mb_ereg_replace',
+ 'mb_ereg_search',
+ 'mb_ereg_search_getpos',
+ 'mb_ereg_search_getregs',
+ 'mb_ereg_search_init',
+ 'mb_ereg_search_pos',
+ 'mb_ereg_search_regs',
+ 'mb_ereg_search_setpos',
+ 'mb_eregi',
+ 'mb_eregi_replace',
+ 'mb_get_info',
+ 'mb_http_input',
+ 'mb_http_output',
+ 'mb_internal_encoding',
+ 'mb_language',
+ 'mb_list_encodings',
+ 'mb_output_handler',
+ 'mb_parse_str',
+ 'mb_preferred_mime_name',
+ 'mb_regex_encoding',
+ 'mb_regex_set_options',
+ 'mb_send_mail',
+ 'mb_split',
+ 'mb_strcut',
+ 'mb_strimwidth',
+ 'mb_strlen',
+ 'mb_strpos',
+ 'mb_strrpos',
+ 'mb_strtolower',
+ 'mb_strtoupper',
+ 'mb_strwidth',
+ 'mb_substitute_character',
+ 'mb_substr',
+ 'mb_substr_count'],
+ 'MySQL': ['mysql_affected_rows',
+ 'mysql_change_user',
+ 'mysql_client_encoding',
+ 'mysql_close',
+ 'mysql_connect',
+ 'mysql_create_db',
+ 'mysql_data_seek',
+ 'mysql_db_name',
+ 'mysql_db_query',
+ 'mysql_drop_db',
+ 'mysql_errno',
+ 'mysql_error',
+ 'mysql_escape_string',
+ 'mysql_fetch_array',
+ 'mysql_fetch_assoc',
+ 'mysql_fetch_field',
+ 'mysql_fetch_lengths',
+ 'mysql_fetch_object',
+ 'mysql_fetch_row',
+ 'mysql_field_flags',
+ 'mysql_field_len',
+ 'mysql_field_name',
+ 'mysql_field_seek',
+ 'mysql_field_table',
+ 'mysql_field_type',
+ 'mysql_free_result',
+ 'mysql_get_client_info',
+ 'mysql_get_host_info',
+ 'mysql_get_proto_info',
+ 'mysql_get_server_info',
+ 'mysql_info',
+ 'mysql_insert_id',
+ 'mysql_list_dbs',
+ 'mysql_list_fields',
+ 'mysql_list_processes',
+ 'mysql_list_tables',
+ 'mysql_num_fields',
+ 'mysql_num_rows',
+ 'mysql_pconnect',
+ 'mysql_ping',
+ 'mysql_query',
+ 'mysql_real_escape_string',
+ 'mysql_result',
+ 'mysql_select_db',
+ 'mysql_stat',
+ 'mysql_tablename',
+ 'mysql_thread_id',
+ 'mysql_unbuffered_query'],
+ 'NSAPI': ['nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'],
+ 'Ncurses': ['ncurses_addch',
+ 'ncurses_addchnstr',
+ 'ncurses_addchstr',
+ 'ncurses_addnstr',
+ 'ncurses_addstr',
+ 'ncurses_assume_default_colors',
+ 'ncurses_attroff',
+ 'ncurses_attron',
+ 'ncurses_attrset',
+ 'ncurses_baudrate',
+ 'ncurses_beep',
+ 'ncurses_bkgd',
+ 'ncurses_bkgdset',
+ 'ncurses_border',
+ 'ncurses_bottom_panel',
+ 'ncurses_can_change_color',
+ 'ncurses_cbreak',
+ 'ncurses_clear',
+ 'ncurses_clrtobot',
+ 'ncurses_clrtoeol',
+ 'ncurses_color_content',
+ 'ncurses_color_set',
+ 'ncurses_curs_set',
+ 'ncurses_def_prog_mode',
+ 'ncurses_def_shell_mode',
+ 'ncurses_define_key',
+ 'ncurses_del_panel',
+ 'ncurses_delay_output',
+ 'ncurses_delch',
+ 'ncurses_deleteln',
+ 'ncurses_delwin',
+ 'ncurses_doupdate',
+ 'ncurses_echo',
+ 'ncurses_echochar',
+ 'ncurses_end',
+ 'ncurses_erase',
+ 'ncurses_erasechar',
+ 'ncurses_filter',
+ 'ncurses_flash',
+ 'ncurses_flushinp',
+ 'ncurses_getch',
+ 'ncurses_getmaxyx',
+ 'ncurses_getmouse',
+ 'ncurses_getyx',
+ 'ncurses_halfdelay',
+ 'ncurses_has_colors',
+ 'ncurses_has_ic',
+ 'ncurses_has_il',
+ 'ncurses_has_key',
+ 'ncurses_hide_panel',
+ 'ncurses_hline',
+ 'ncurses_inch',
+ 'ncurses_init',
+ 'ncurses_init_color',
+ 'ncurses_init_pair',
+ 'ncurses_insch',
+ 'ncurses_insdelln',
+ 'ncurses_insertln',
+ 'ncurses_insstr',
+ 'ncurses_instr',
+ 'ncurses_isendwin',
+ 'ncurses_keyok',
+ 'ncurses_keypad',
+ 'ncurses_killchar',
+ 'ncurses_longname',
+ 'ncurses_meta',
+ 'ncurses_mouse_trafo',
+ 'ncurses_mouseinterval',
+ 'ncurses_mousemask',
+ 'ncurses_move',
+ 'ncurses_move_panel',
+ 'ncurses_mvaddch',
+ 'ncurses_mvaddchnstr',
+ 'ncurses_mvaddchstr',
+ 'ncurses_mvaddnstr',
+ 'ncurses_mvaddstr',
+ 'ncurses_mvcur',
+ 'ncurses_mvdelch',
+ 'ncurses_mvgetch',
+ 'ncurses_mvhline',
+ 'ncurses_mvinch',
+ 'ncurses_mvvline',
+ 'ncurses_mvwaddstr',
+ 'ncurses_napms',
+ 'ncurses_new_panel',
+ 'ncurses_newpad',
+ 'ncurses_newwin',
+ 'ncurses_nl',
+ 'ncurses_nocbreak',
+ 'ncurses_noecho',
+ 'ncurses_nonl',
+ 'ncurses_noqiflush',
+ 'ncurses_noraw',
+ 'ncurses_pair_content',
+ 'ncurses_panel_above',
+ 'ncurses_panel_below',
+ 'ncurses_panel_window',
+ 'ncurses_pnoutrefresh',
+ 'ncurses_prefresh',
+ 'ncurses_putp',
+ 'ncurses_qiflush',
+ 'ncurses_raw',
+ 'ncurses_refresh',
+ 'ncurses_replace_panel',
+ 'ncurses_reset_prog_mode',
+ 'ncurses_reset_shell_mode',
+ 'ncurses_resetty',
+ 'ncurses_savetty',
+ 'ncurses_scr_dump',
+ 'ncurses_scr_init',
+ 'ncurses_scr_restore',
+ 'ncurses_scr_set',
+ 'ncurses_scrl',
+ 'ncurses_show_panel',
+ 'ncurses_slk_attr',
+ 'ncurses_slk_attroff',
+ 'ncurses_slk_attron',
+ 'ncurses_slk_attrset',
+ 'ncurses_slk_clear',
+ 'ncurses_slk_color',
+ 'ncurses_slk_init',
+ 'ncurses_slk_noutrefresh',
+ 'ncurses_slk_refresh',
+ 'ncurses_slk_restore',
+ 'ncurses_slk_set',
+ 'ncurses_slk_touch',
+ 'ncurses_standend',
+ 'ncurses_standout',
+ 'ncurses_start_color',
+ 'ncurses_termattrs',
+ 'ncurses_termname',
+ 'ncurses_timeout',
+ 'ncurses_top_panel',
+ 'ncurses_typeahead',
+ 'ncurses_ungetch',
+ 'ncurses_ungetmouse',
+ 'ncurses_update_panels',
+ 'ncurses_use_default_colors',
+ 'ncurses_use_env',
+ 'ncurses_use_extended_names',
+ 'ncurses_vidattr',
+ 'ncurses_vline',
+ 'ncurses_waddch',
+ 'ncurses_waddstr',
+ 'ncurses_wattroff',
+ 'ncurses_wattron',
+ 'ncurses_wattrset',
+ 'ncurses_wborder',
+ 'ncurses_wclear',
+ 'ncurses_wcolor_set',
+ 'ncurses_werase',
+ 'ncurses_wgetch',
+ 'ncurses_whline',
+ 'ncurses_wmouse_trafo',
+ 'ncurses_wmove',
+ 'ncurses_wnoutrefresh',
+ 'ncurses_wrefresh',
+ 'ncurses_wstandend',
+ 'ncurses_wstandout',
+ 'ncurses_wvline'],
+ 'Network': ['checkdnsrr',
+ 'closelog',
+ 'debugger_off',
+ 'debugger_on',
+ 'define_syslog_variables',
+ 'dns_check_record',
+ 'dns_get_mx',
+ 'dns_get_record',
+ 'fsockopen',
+ 'gethostbyaddr',
+ 'gethostbyname',
+ 'gethostbynamel',
+ 'getmxrr',
+ 'getprotobyname',
+ 'getprotobynumber',
+ 'getservbyname',
+ 'getservbyport',
+ 'header',
+ 'headers_list',
+ 'headers_sent',
+ 'inet_ntop',
+ 'inet_pton',
+ 'ip2long',
+ 'long2ip',
+ 'openlog',
+ 'pfsockopen',
+ 'setcookie',
+ 'setrawcookie',
+ 'socket_get_status',
+ 'socket_set_blocking',
+ 'socket_set_timeout',
+ 'syslog'],
+ 'OCI8': ['oci_bind_by_name',
+ 'oci_cancel',
+ 'oci_close',
+ 'oci_commit',
+ 'oci_connect',
+ 'oci_define_by_name',
+ 'oci_error',
+ 'oci_execute',
+ 'oci_fetch',
+ 'oci_fetch_all',
+ 'oci_fetch_array',
+ 'oci_fetch_assoc',
+ 'oci_fetch_object',
+ 'oci_fetch_row',
+ 'oci_field_is_null',
+ 'oci_field_name',
+ 'oci_field_precision',
+ 'oci_field_scale',
+ 'oci_field_size',
+ 'oci_field_type',
+ 'oci_field_type_raw',
+ 'oci_free_statement',
+ 'oci_internal_debug',
+ 'oci_lob_copy',
+ 'oci_lob_is_equal',
+ 'oci_new_collection',
+ 'oci_new_connect',
+ 'oci_new_cursor',
+ 'oci_new_descriptor',
+ 'oci_num_fields',
+ 'oci_num_rows',
+ 'oci_parse',
+ 'oci_password_change',
+ 'oci_pconnect',
+ 'oci_result',
+ 'oci_rollback',
+ 'oci_server_version',
+ 'oci_set_prefetch',
+ 'oci_statement_type',
+ 'ocibindbyname',
+ 'ocicancel',
+ 'ocicloselob',
+ 'ocicollappend',
+ 'ocicollassign',
+ 'ocicollassignelem',
+ 'ocicollgetelem',
+ 'ocicollmax',
+ 'ocicollsize',
+ 'ocicolltrim',
+ 'ocicolumnisnull',
+ 'ocicolumnname',
+ 'ocicolumnprecision',
+ 'ocicolumnscale',
+ 'ocicolumnsize',
+ 'ocicolumntype',
+ 'ocicolumntyperaw',
+ 'ocicommit',
+ 'ocidefinebyname',
+ 'ocierror',
+ 'ociexecute',
+ 'ocifetch',
+ 'ocifetchinto',
+ 'ocifetchstatement',
+ 'ocifreecollection',
+ 'ocifreecursor',
+ 'ocifreedesc',
+ 'ocifreestatement',
+ 'ociinternaldebug',
+ 'ociloadlob',
+ 'ocilogoff',
+ 'ocilogon',
+ 'ocinewcollection',
+ 'ocinewcursor',
+ 'ocinewdescriptor',
+ 'ocinlogon',
+ 'ocinumcols',
+ 'ociparse',
+ 'ociplogon',
+ 'ociresult',
+ 'ocirollback',
+ 'ocirowcount',
+ 'ocisavelob',
+ 'ocisavelobfile',
+ 'ociserverversion',
+ 'ocisetprefetch',
+ 'ocistatementtype',
+ 'ociwritelobtofile',
+ 'ociwritetemporarylob'],
+ 'ODBC': ['odbc_autocommit',
+ 'odbc_binmode',
+ 'odbc_close',
+ 'odbc_close_all',
+ 'odbc_columnprivileges',
+ 'odbc_columns',
+ 'odbc_commit',
+ 'odbc_connect',
+ 'odbc_cursor',
+ 'odbc_data_source',
+ 'odbc_do',
+ 'odbc_error',
+ 'odbc_errormsg',
+ 'odbc_exec',
+ 'odbc_execute',
+ 'odbc_fetch_array',
+ 'odbc_fetch_into',
+ 'odbc_fetch_object',
+ 'odbc_fetch_row',
+ 'odbc_field_len',
+ 'odbc_field_name',
+ 'odbc_field_num',
+ 'odbc_field_precision',
+ 'odbc_field_scale',
+ 'odbc_field_type',
+ 'odbc_foreignkeys',
+ 'odbc_free_result',
+ 'odbc_gettypeinfo',
+ 'odbc_longreadlen',
+ 'odbc_next_result',
+ 'odbc_num_fields',
+ 'odbc_num_rows',
+ 'odbc_pconnect',
+ 'odbc_prepare',
+ 'odbc_primarykeys',
+ 'odbc_procedurecolumns',
+ 'odbc_procedures',
+ 'odbc_result',
+ 'odbc_result_all',
+ 'odbc_rollback',
+ 'odbc_setoption',
+ 'odbc_specialcolumns',
+ 'odbc_statistics',
+ 'odbc_tableprivileges',
+ 'odbc_tables'],
+ 'Object Aggregation': ['aggregate',
+ 'aggregate_info',
+ 'aggregate_methods',
+ 'aggregate_methods_by_list',
+ 'aggregate_methods_by_regexp',
+ 'aggregate_properties',
+ 'aggregate_properties_by_list',
+ 'aggregate_properties_by_regexp',
+ 'aggregation_info',
+ 'deaggregate'],
+ 'Object overloading': ['overload'],
+ 'OpenSSL': ['openssl_csr_export',
+ 'openssl_csr_export_to_file',
+ 'openssl_csr_new',
+ 'openssl_csr_sign',
+ 'openssl_error_string',
+ 'openssl_free_key',
+ 'openssl_get_privatekey',
+ 'openssl_get_publickey',
+ 'openssl_open',
+ 'openssl_pkcs7_decrypt',
+ 'openssl_pkcs7_encrypt',
+ 'openssl_pkcs7_sign',
+ 'openssl_pkcs7_verify',
+ 'openssl_pkey_export',
+ 'openssl_pkey_export_to_file',
+ 'openssl_pkey_get_private',
+ 'openssl_pkey_get_public',
+ 'openssl_pkey_new',
+ 'openssl_private_decrypt',
+ 'openssl_private_encrypt',
+ 'openssl_public_decrypt',
+ 'openssl_public_encrypt',
+ 'openssl_seal',
+ 'openssl_sign',
+ 'openssl_verify',
+ 'openssl_x509_check_private_key',
+ 'openssl_x509_checkpurpose',
+ 'openssl_x509_export',
+ 'openssl_x509_export_to_file',
+ 'openssl_x509_free',
+ 'openssl_x509_parse',
+ 'openssl_x509_read'],
+ 'Oracle': ['ora_bind',
+ 'ora_close',
+ 'ora_columnname',
+ 'ora_columnsize',
+ 'ora_columntype',
+ 'ora_commit',
+ 'ora_commitoff',
+ 'ora_commiton',
+ 'ora_do',
+ 'ora_error',
+ 'ora_errorcode',
+ 'ora_exec',
+ 'ora_fetch',
+ 'ora_fetch_into',
+ 'ora_getcolumn',
+ 'ora_logoff',
+ 'ora_logon',
+ 'ora_numcols',
+ 'ora_numrows',
+ 'ora_open',
+ 'ora_parse',
+ 'ora_plogon',
+ 'ora_rollback'],
+ 'Output Control': ['flush',
+ 'ob_clean',
+ 'ob_end_clean',
+ 'ob_end_flush',
+ 'ob_flush',
+ 'ob_get_clean',
+ 'ob_get_contents',
+ 'ob_get_flush',
+ 'ob_get_length',
+ 'ob_get_level',
+ 'ob_get_status',
+ 'ob_gzhandler',
+ 'ob_implicit_flush',
+ 'ob_list_handlers',
+ 'ob_start',
+ 'output_add_rewrite_var',
+ 'output_reset_rewrite_vars'],
+ 'OvrimosSQL': ['ovrimos_close',
+ 'ovrimos_commit',
+ 'ovrimos_connect',
+ 'ovrimos_cursor',
+ 'ovrimos_exec',
+ 'ovrimos_execute',
+ 'ovrimos_fetch_into',
+ 'ovrimos_fetch_row',
+ 'ovrimos_field_len',
+ 'ovrimos_field_name',
+ 'ovrimos_field_num',
+ 'ovrimos_field_type',
+ 'ovrimos_free_result',
+ 'ovrimos_longreadlen',
+ 'ovrimos_num_fields',
+ 'ovrimos_num_rows',
+ 'ovrimos_prepare',
+ 'ovrimos_result',
+ 'ovrimos_result_all',
+ 'ovrimos_rollback'],
+ 'PCNTL': ['pcntl_alarm',
+ 'pcntl_exec',
+ 'pcntl_fork',
+ 'pcntl_getpriority',
+ 'pcntl_setpriority',
+ 'pcntl_signal',
+ 'pcntl_wait',
+ 'pcntl_waitpid',
+ 'pcntl_wexitstatus',
+ 'pcntl_wifexited',
+ 'pcntl_wifsignaled',
+ 'pcntl_wifstopped',
+ 'pcntl_wstopsig',
+ 'pcntl_wtermsig'],
+ 'PCRE': ['preg_grep',
+ 'preg_match',
+ 'preg_match_all',
+ 'preg_quote',
+ 'preg_replace',
+ 'preg_replace_callback',
+ 'preg_split'],
+ 'PDF': ['pdf_add_annotation',
+ 'pdf_add_bookmark',
+ 'pdf_add_launchlink',
+ 'pdf_add_locallink',
+ 'pdf_add_note',
+ 'pdf_add_outline',
+ 'pdf_add_pdflink',
+ 'pdf_add_thumbnail',
+ 'pdf_add_weblink',
+ 'pdf_arc',
+ 'pdf_arcn',
+ 'pdf_attach_file',
+ 'pdf_begin_page',
+ 'pdf_begin_pattern',
+ 'pdf_begin_template',
+ 'pdf_circle',
+ 'pdf_clip',
+ 'pdf_close',
+ 'pdf_close_image',
+ 'pdf_close_pdi',
+ 'pdf_close_pdi_page',
+ 'pdf_closepath',
+ 'pdf_closepath_fill_stroke',
+ 'pdf_closepath_stroke',
+ 'pdf_concat',
+ 'pdf_continue_text',
+ 'pdf_curveto',
+ 'pdf_delete',
+ 'pdf_end_page',
+ 'pdf_end_pattern',
+ 'pdf_end_template',
+ 'pdf_endpath',
+ 'pdf_fill',
+ 'pdf_fill_stroke',
+ 'pdf_findfont',
+ 'pdf_get_buffer',
+ 'pdf_get_font',
+ 'pdf_get_fontname',
+ 'pdf_get_fontsize',
+ 'pdf_get_image_height',
+ 'pdf_get_image_width',
+ 'pdf_get_majorversion',
+ 'pdf_get_minorversion',
+ 'pdf_get_parameter',
+ 'pdf_get_pdi_parameter',
+ 'pdf_get_pdi_value',
+ 'pdf_get_value',
+ 'pdf_initgraphics',
+ 'pdf_lineto',
+ 'pdf_makespotcolor',
+ 'pdf_moveto',
+ 'pdf_new',
+ 'pdf_open_ccitt',
+ 'pdf_open_file',
+ 'pdf_open_gif',
+ 'pdf_open_image',
+ 'pdf_open_image_file',
+ 'pdf_open_jpeg',
+ 'pdf_open_memory_image',
+ 'pdf_open_pdi',
+ 'pdf_open_pdi_page',
+ 'pdf_open_tiff',
+ 'pdf_place_image',
+ 'pdf_place_pdi_page',
+ 'pdf_rect',
+ 'pdf_restore',
+ 'pdf_rotate',
+ 'pdf_save',
+ 'pdf_scale',
+ 'pdf_set_border_color',
+ 'pdf_set_border_dash',
+ 'pdf_set_border_style',
+ 'pdf_set_char_spacing',
+ 'pdf_set_duration',
+ 'pdf_set_horiz_scaling',
+ 'pdf_set_info',
+ 'pdf_set_info_author',
+ 'pdf_set_info_creator',
+ 'pdf_set_info_keywords',
+ 'pdf_set_info_subject',
+ 'pdf_set_info_title',
+ 'pdf_set_leading',
+ 'pdf_set_parameter',
+ 'pdf_set_text_matrix',
+ 'pdf_set_text_pos',
+ 'pdf_set_text_rendering',
+ 'pdf_set_text_rise',
+ 'pdf_set_value',
+ 'pdf_set_word_spacing',
+ 'pdf_setcolor',
+ 'pdf_setdash',
+ 'pdf_setflat',
+ 'pdf_setfont',
+ 'pdf_setgray',
+ 'pdf_setgray_fill',
+ 'pdf_setgray_stroke',
+ 'pdf_setlinecap',
+ 'pdf_setlinejoin',
+ 'pdf_setlinewidth',
+ 'pdf_setmatrix',
+ 'pdf_setmiterlimit',
+ 'pdf_setpolydash',
+ 'pdf_setrgbcolor',
+ 'pdf_setrgbcolor_fill',
+ 'pdf_setrgbcolor_stroke',
+ 'pdf_show',
+ 'pdf_show_boxed',
+ 'pdf_show_xy',
+ 'pdf_skew',
+ 'pdf_stringwidth',
+ 'pdf_stroke',
+ 'pdf_translate'],
+ 'PHP Options/Info': ['assert',
+ 'assert_options',
+ 'dl',
+ 'extension_loaded',
+ 'get_cfg_var',
+ 'get_current_user',
+ 'get_defined_constants',
+ 'get_extension_funcs',
+ 'get_include_path',
+ 'get_included_files',
+ 'get_loaded_extensions',
+ 'get_magic_quotes_gpc',
+ 'get_magic_quotes_runtime',
+ 'get_required_files',
+ 'getenv',
+ 'getlastmod',
+ 'getmygid',
+ 'getmyinode',
+ 'getmypid',
+ 'getmyuid',
+ 'getopt',
+ 'getrusage',
+ 'ini_alter',
+ 'ini_get',
+ 'ini_get_all',
+ 'ini_restore',
+ 'ini_set',
+ 'main',
+ 'memory_get_usage',
+ 'php_ini_scanned_files',
+ 'php_logo_guid',
+ 'php_sapi_name',
+ 'php_uname',
+ 'phpcredits',
+ 'phpinfo',
+ 'phpversion',
+ 'putenv',
+ 'restore_include_path',
+ 'set_include_path',
+ 'set_magic_quotes_runtime',
+ 'set_time_limit',
+ 'version_compare',
+ 'zend_logo_guid',
+ 'zend_version'],
+ 'POSIX': ['posix_ctermid',
+ 'posix_get_last_error',
+ 'posix_getcwd',
+ 'posix_getegid',
+ 'posix_geteuid',
+ 'posix_getgid',
+ 'posix_getgrgid',
+ 'posix_getgrnam',
+ 'posix_getgroups',
+ 'posix_getlogin',
+ 'posix_getpgid',
+ 'posix_getpgrp',
+ 'posix_getpid',
+ 'posix_getppid',
+ 'posix_getpwnam',
+ 'posix_getpwuid',
+ 'posix_getrlimit',
+ 'posix_getsid',
+ 'posix_getuid',
+ 'posix_isatty',
+ 'posix_kill',
+ 'posix_mkfifo',
+ 'posix_setegid',
+ 'posix_seteuid',
+ 'posix_setgid',
+ 'posix_setpgid',
+ 'posix_setsid',
+ 'posix_setuid',
+ 'posix_strerror',
+ 'posix_times',
+ 'posix_ttyname',
+ 'posix_uname'],
+ 'POSIX Regex': ['ereg',
+ 'ereg_replace',
+ 'eregi',
+ 'eregi_replace',
+ 'split',
+ 'spliti',
+ 'sql_regcase'],
+ 'Parsekit': ['parsekit_compile_file',
+ 'parsekit_compile_string',
+ 'parsekit_func_arginfo'],
+ 'PostgreSQL': ['pg_affected_rows',
+ 'pg_cancel_query',
+ 'pg_client_encoding',
+ 'pg_close',
+ 'pg_connect',
+ 'pg_connection_busy',
+ 'pg_connection_reset',
+ 'pg_connection_status',
+ 'pg_convert',
+ 'pg_copy_from',
+ 'pg_copy_to',
+ 'pg_dbname',
+ 'pg_delete',
+ 'pg_end_copy',
+ 'pg_escape_bytea',
+ 'pg_escape_string',
+ 'pg_fetch_all',
+ 'pg_fetch_array',
+ 'pg_fetch_assoc',
+ 'pg_fetch_object',
+ 'pg_fetch_result',
+ 'pg_fetch_row',
+ 'pg_field_is_null',
+ 'pg_field_name',
+ 'pg_field_num',
+ 'pg_field_prtlen',
+ 'pg_field_size',
+ 'pg_field_type',
+ 'pg_free_result',
+ 'pg_get_notify',
+ 'pg_get_pid',
+ 'pg_get_result',
+ 'pg_host',
+ 'pg_insert',
+ 'pg_last_error',
+ 'pg_last_notice',
+ 'pg_last_oid',
+ 'pg_lo_close',
+ 'pg_lo_create',
+ 'pg_lo_export',
+ 'pg_lo_import',
+ 'pg_lo_open',
+ 'pg_lo_read',
+ 'pg_lo_read_all',
+ 'pg_lo_seek',
+ 'pg_lo_tell',
+ 'pg_lo_unlink',
+ 'pg_lo_write',
+ 'pg_meta_data',
+ 'pg_num_fields',
+ 'pg_num_rows',
+ 'pg_options',
+ 'pg_parameter_status',
+ 'pg_pconnect',
+ 'pg_ping',
+ 'pg_port',
+ 'pg_put_line',
+ 'pg_query',
+ 'pg_result_error',
+ 'pg_result_seek',
+ 'pg_result_status',
+ 'pg_select',
+ 'pg_send_query',
+ 'pg_set_client_encoding',
+ 'pg_trace',
+ 'pg_tty',
+ 'pg_unescape_bytea',
+ 'pg_untrace',
+ 'pg_update',
+ 'pg_version'],
+ 'Printer': ['printer_abort',
+ 'printer_close',
+ 'printer_create_brush',
+ 'printer_create_dc',
+ 'printer_create_font',
+ 'printer_create_pen',
+ 'printer_delete_brush',
+ 'printer_delete_dc',
+ 'printer_delete_font',
+ 'printer_delete_pen',
+ 'printer_draw_bmp',
+ 'printer_draw_chord',
+ 'printer_draw_elipse',
+ 'printer_draw_line',
+ 'printer_draw_pie',
+ 'printer_draw_rectangle',
+ 'printer_draw_roundrect',
+ 'printer_draw_text',
+ 'printer_end_doc',
+ 'printer_end_page',
+ 'printer_get_option',
+ 'printer_list',
+ 'printer_logical_fontheight',
+ 'printer_open',
+ 'printer_select_brush',
+ 'printer_select_font',
+ 'printer_select_pen',
+ 'printer_set_option',
+ 'printer_start_doc',
+ 'printer_start_page',
+ 'printer_write'],
+ 'Program Execution': ['escapeshellarg',
+ 'escapeshellcmd',
+ 'exec',
+ 'passthru',
+ 'proc_close',
+ 'proc_get_status',
+ 'proc_nice',
+ 'proc_open',
+ 'proc_terminate',
+ 'shell_exec',
+ 'system'],
+ 'Pspell': ['pspell_add_to_personal',
+ 'pspell_add_to_session',
+ 'pspell_check',
+ 'pspell_clear_session',
+ 'pspell_config_create',
+ 'pspell_config_data_dir',
+ 'pspell_config_dict_dir',
+ 'pspell_config_ignore',
+ 'pspell_config_mode',
+ 'pspell_config_personal',
+ 'pspell_config_repl',
+ 'pspell_config_runtogether',
+ 'pspell_config_save_repl',
+ 'pspell_new',
+ 'pspell_new_config',
+ 'pspell_new_personal',
+ 'pspell_save_wordlist',
+ 'pspell_store_replacement',
+ 'pspell_suggest'],
+ 'Rar': ['rar_close', 'rar_entry_get', 'rar_list', 'rar_open'],
+ 'Readline': ['readline',
+ 'readline_add_history',
+ 'readline_callback_handler_install',
+ 'readline_callback_handler_remove',
+ 'readline_callback_read_char',
+ 'readline_clear_history',
+ 'readline_completion_function',
+ 'readline_info',
+ 'readline_list_history',
+ 'readline_on_new_line',
+ 'readline_read_history',
+ 'readline_redisplay',
+ 'readline_write_history'],
+ 'Recode': ['recode', 'recode_file', 'recode_string'],
+ 'SESAM': ['sesam_affected_rows',
+ 'sesam_commit',
+ 'sesam_connect',
+ 'sesam_diagnostic',
+ 'sesam_disconnect',
+ 'sesam_errormsg',
+ 'sesam_execimm',
+ 'sesam_fetch_array',
+ 'sesam_fetch_result',
+ 'sesam_fetch_row',
+ 'sesam_field_array',
+ 'sesam_field_name',
+ 'sesam_free_result',
+ 'sesam_num_fields',
+ 'sesam_query',
+ 'sesam_rollback',
+ 'sesam_seek_row',
+ 'sesam_settransaction'],
+ 'SNMP': ['snmp_get_quick_print',
+ 'snmp_get_valueretrieval',
+ 'snmp_read_mib',
+ 'snmp_set_enum_print',
+ 'snmp_set_oid_numeric_print',
+ 'snmp_set_quick_print',
+ 'snmp_set_valueretrieval',
+ 'snmpget',
+ 'snmpgetnext',
+ 'snmprealwalk',
+ 'snmpset',
+ 'snmpwalk',
+ 'snmpwalkoid'],
+ 'SOAP': ['is_soap_fault'],
+ 'SQLite': ['sqlite_array_query',
+ 'sqlite_busy_timeout',
+ 'sqlite_changes',
+ 'sqlite_close',
+ 'sqlite_column',
+ 'sqlite_create_aggregate',
+ 'sqlite_create_function',
+ 'sqlite_current',
+ 'sqlite_error_string',
+ 'sqlite_escape_string',
+ 'sqlite_exec',
+ 'sqlite_factory',
+ 'sqlite_fetch_all',
+ 'sqlite_fetch_array',
+ 'sqlite_fetch_column_types',
+ 'sqlite_fetch_object',
+ 'sqlite_fetch_single',
+ 'sqlite_fetch_string',
+ 'sqlite_field_name',
+ 'sqlite_has_more',
+ 'sqlite_has_prev',
+ 'sqlite_last_error',
+ 'sqlite_last_insert_rowid',
+ 'sqlite_libencoding',
+ 'sqlite_libversion',
+ 'sqlite_next',
+ 'sqlite_num_fields',
+ 'sqlite_num_rows',
+ 'sqlite_open',
+ 'sqlite_popen',
+ 'sqlite_prev',
+ 'sqlite_query',
+ 'sqlite_rewind',
+ 'sqlite_seek',
+ 'sqlite_single_query',
+ 'sqlite_udf_decode_binary',
+ 'sqlite_udf_encode_binary',
+ 'sqlite_unbuffered_query'],
+ 'SWF': ['swf_actiongeturl',
+ 'swf_actiongotoframe',
+ 'swf_actiongotolabel',
+ 'swf_actionnextframe',
+ 'swf_actionplay',
+ 'swf_actionprevframe',
+ 'swf_actionsettarget',
+ 'swf_actionstop',
+ 'swf_actiontogglequality',
+ 'swf_actionwaitforframe',
+ 'swf_addbuttonrecord',
+ 'swf_addcolor',
+ 'swf_closefile',
+ 'swf_definebitmap',
+ 'swf_definefont',
+ 'swf_defineline',
+ 'swf_definepoly',
+ 'swf_definerect',
+ 'swf_definetext',
+ 'swf_endbutton',
+ 'swf_enddoaction',
+ 'swf_endshape',
+ 'swf_endsymbol',
+ 'swf_fontsize',
+ 'swf_fontslant',
+ 'swf_fonttracking',
+ 'swf_getbitmapinfo',
+ 'swf_getfontinfo',
+ 'swf_getframe',
+ 'swf_labelframe',
+ 'swf_lookat',
+ 'swf_modifyobject',
+ 'swf_mulcolor',
+ 'swf_nextid',
+ 'swf_oncondition',
+ 'swf_openfile',
+ 'swf_ortho',
+ 'swf_ortho2',
+ 'swf_perspective',
+ 'swf_placeobject',
+ 'swf_polarview',
+ 'swf_popmatrix',
+ 'swf_posround',
+ 'swf_pushmatrix',
+ 'swf_removeobject',
+ 'swf_rotate',
+ 'swf_scale',
+ 'swf_setfont',
+ 'swf_setframe',
+ 'swf_shapearc',
+ 'swf_shapecurveto',
+ 'swf_shapecurveto3',
+ 'swf_shapefillbitmapclip',
+ 'swf_shapefillbitmaptile',
+ 'swf_shapefilloff',
+ 'swf_shapefillsolid',
+ 'swf_shapelinesolid',
+ 'swf_shapelineto',
+ 'swf_shapemoveto',
+ 'swf_showframe',
+ 'swf_startbutton',
+ 'swf_startdoaction',
+ 'swf_startshape',
+ 'swf_startsymbol',
+ 'swf_textwidth',
+ 'swf_translate',
+ 'swf_viewport'],
+ 'Semaphore': ['ftok',
+ 'msg_get_queue',
+ 'msg_receive',
+ 'msg_remove_queue',
+ 'msg_send',
+ 'msg_set_queue',
+ 'msg_stat_queue',
+ 'sem_acquire',
+ 'sem_get',
+ 'sem_release',
+ 'sem_remove',
+ 'shm_attach',
+ 'shm_detach',
+ 'shm_get_var',
+ 'shm_put_var',
+ 'shm_remove',
+ 'shm_remove_var'],
+ 'Sessions': ['session_cache_expire',
+ 'session_cache_limiter',
+ 'session_commit',
+ 'session_decode',
+ 'session_destroy',
+ 'session_encode',
+ 'session_get_cookie_params',
+ 'session_id',
+ 'session_is_registered',
+ 'session_module_name',
+ 'session_name',
+ 'session_regenerate_id',
+ 'session_register',
+ 'session_save_path',
+ 'session_set_cookie_params',
+ 'session_set_save_handler',
+ 'session_start',
+ 'session_unregister',
+ 'session_unset',
+ 'session_write_close'],
+ 'SimpleXML': ['simplexml_import_dom',
+ 'simplexml_load_file',
+ 'simplexml_load_string'],
+ 'Sockets': ['socket_accept',
+ 'socket_bind',
+ 'socket_clear_error',
+ 'socket_close',
+ 'socket_connect',
+ 'socket_create',
+ 'socket_create_listen',
+ 'socket_create_pair',
+ 'socket_get_option',
+ 'socket_getpeername',
+ 'socket_getsockname',
+ 'socket_last_error',
+ 'socket_listen',
+ 'socket_read',
+ 'socket_recv',
+ 'socket_recvfrom',
+ 'socket_select',
+ 'socket_send',
+ 'socket_sendto',
+ 'socket_set_block',
+ 'socket_set_nonblock',
+ 'socket_set_option',
+ 'socket_shutdown',
+ 'socket_strerror',
+ 'socket_write'],
+ 'Streams': ['stream_context_create',
+ 'stream_context_get_default',
+ 'stream_context_get_options',
+ 'stream_context_set_option',
+ 'stream_context_set_params',
+ 'stream_copy_to_stream',
+ 'stream_filter_append',
+ 'stream_filter_prepend',
+ 'stream_filter_register',
+ 'stream_filter_remove',
+ 'stream_get_contents',
+ 'stream_get_filters',
+ 'stream_get_line',
+ 'stream_get_meta_data',
+ 'stream_get_transports',
+ 'stream_get_wrappers',
+ 'stream_register_wrapper',
+ 'stream_select',
+ 'stream_set_blocking',
+ 'stream_set_timeout',
+ 'stream_set_write_buffer',
+ 'stream_socket_accept',
+ 'stream_socket_client',
+ 'stream_socket_enable_crypto',
+ 'stream_socket_get_name',
+ 'stream_socket_pair',
+ 'stream_socket_recvfrom',
+ 'stream_socket_sendto',
+ 'stream_socket_server',
+ 'stream_wrapper_register',
+ 'stream_wrapper_restore',
+ 'stream_wrapper_unregister'],
+ 'Strings': ['addcslashes',
+ 'addslashes',
+ 'bin2hex',
+ 'chop',
+ 'chr',
+ 'chunk_split',
+ 'convert_cyr_string',
+ 'convert_uudecode',
+ 'convert_uuencode',
+ 'count_chars',
+ 'crc32',
+ 'crypt',
+ 'echo',
+ 'explode',
+ 'fprintf',
+ 'get_html_translation_table',
+ 'hebrev',
+ 'hebrevc',
+ 'html_entity_decode',
+ 'htmlentities',
+ 'htmlspecialchars',
+ 'implode',
+ 'join',
+ 'levenshtein',
+ 'localeconv',
+ 'ltrim',
+ 'md5',
+ 'md5_file',
+ 'metaphone',
+ 'money_format',
+ 'nl2br',
+ 'nl_langinfo',
+ 'number_format',
+ 'ord',
+ 'parse_str',
+ 'print',
+ 'printf',
+ 'quoted_printable_decode',
+ 'quotemeta',
+ 'rtrim',
+ 'setlocale',
+ 'sha1',
+ 'sha1_file',
+ 'similar_text',
+ 'soundex',
+ 'sprintf',
+ 'sscanf',
+ 'str_ireplace',
+ 'str_pad',
+ 'str_repeat',
+ 'str_replace',
+ 'str_rot13',
+ 'str_shuffle',
+ 'str_split',
+ 'str_word_count',
+ 'strcasecmp',
+ 'strchr',
+ 'strcmp',
+ 'strcoll',
+ 'strcspn',
+ 'strip_tags',
+ 'stripcslashes',
+ 'stripos',
+ 'stripslashes',
+ 'stristr',
+ 'strlen',
+ 'strnatcasecmp',
+ 'strnatcmp',
+ 'strncasecmp',
+ 'strncmp',
+ 'strpbrk',
+ 'strpos',
+ 'strrchr',
+ 'strrev',
+ 'strripos',
+ 'strrpos',
+ 'strspn',
+ 'strstr',
+ 'strtok',
+ 'strtolower',
+ 'strtoupper',
+ 'strtr',
+ 'substr',
+ 'substr_compare',
+ 'substr_count',
+ 'substr_replace',
+ 'trim',
+ 'ucfirst',
+ 'ucwords',
+ 'vfprintf',
+ 'vprintf',
+ 'vsprintf',
+ 'wordwrap'],
+ 'Sybase': ['sybase_affected_rows',
+ 'sybase_close',
+ 'sybase_connect',
+ 'sybase_data_seek',
+ 'sybase_deadlock_retry_count',
+ 'sybase_fetch_array',
+ 'sybase_fetch_assoc',
+ 'sybase_fetch_field',
+ 'sybase_fetch_object',
+ 'sybase_fetch_row',
+ 'sybase_field_seek',
+ 'sybase_free_result',
+ 'sybase_get_last_message',
+ 'sybase_min_client_severity',
+ 'sybase_min_error_severity',
+ 'sybase_min_message_severity',
+ 'sybase_min_server_severity',
+ 'sybase_num_fields',
+ 'sybase_num_rows',
+ 'sybase_pconnect',
+ 'sybase_query',
+ 'sybase_result',
+ 'sybase_select_db',
+ 'sybase_set_message_handler',
+ 'sybase_unbuffered_query'],
+ 'TCP Wrappers': ['tcpwrap_check'],
+ 'Tokenizer': ['token_get_all', 'token_name'],
+ 'URLs': ['base64_decode',
+ 'base64_encode',
+ 'get_headers',
+ 'get_meta_tags',
+ 'http_build_query',
+ 'parse_url',
+ 'rawurldecode',
+ 'rawurlencode',
+ 'urldecode',
+ 'urlencode'],
+ 'Variables handling': ['debug_zval_dump',
+ 'doubleval',
+ 'empty',
+ 'floatval',
+ 'get_defined_vars',
+ 'get_resource_type',
+ 'gettype',
+ 'import_request_variables',
+ 'intval',
+ 'is_array',
+ 'is_bool',
+ 'is_callable',
+ 'is_double',
+ 'is_float',
+ 'is_int',
+ 'is_integer',
+ 'is_long',
+ 'is_null',
+ 'is_numeric',
+ 'is_object',
+ 'is_real',
+ 'is_resource',
+ 'is_scalar',
+ 'is_string',
+ 'isset',
+ 'print_r',
+ 'serialize',
+ 'settype',
+ 'strval',
+ 'unserialize',
+ 'unset',
+ 'var_dump',
+ 'var_export'],
+ 'Verisign Payflow Pro': ['pfpro_cleanup',
+ 'pfpro_init',
+ 'pfpro_process',
+ 'pfpro_process_raw',
+ 'pfpro_version'],
+ 'W32api': ['w32api_deftype',
+ 'w32api_init_dtype',
+ 'w32api_invoke_function',
+ 'w32api_register_function',
+ 'w32api_set_call_method'],
+ 'WDDX': ['wddx_add_vars',
+ 'wddx_deserialize',
+ 'wddx_packet_end',
+ 'wddx_packet_start',
+ 'wddx_serialize_value',
+ 'wddx_serialize_vars'],
+ 'XML': ['utf8_decode',
+ 'utf8_encode',
+ 'xml_error_string',
+ 'xml_get_current_byte_index',
+ 'xml_get_current_column_number',
+ 'xml_get_current_line_number',
+ 'xml_get_error_code',
+ 'xml_parse',
+ 'xml_parse_into_struct',
+ 'xml_parser_create',
+ 'xml_parser_create_ns',
+ 'xml_parser_free',
+ 'xml_parser_get_option',
+ 'xml_parser_set_option',
+ 'xml_set_character_data_handler',
+ 'xml_set_default_handler',
+ 'xml_set_element_handler',
+ 'xml_set_end_namespace_decl_handler',
+ 'xml_set_external_entity_ref_handler',
+ 'xml_set_notation_decl_handler',
+ 'xml_set_object',
+ 'xml_set_processing_instruction_handler',
+ 'xml_set_start_namespace_decl_handler',
+ 'xml_set_unparsed_entity_decl_handler'],
+ 'XML-RPC': ['xmlrpc_decode',
+ 'xmlrpc_decode_request',
+ 'xmlrpc_encode',
+ 'xmlrpc_encode_request',
+ 'xmlrpc_get_type',
+ 'xmlrpc_is_fault',
+ 'xmlrpc_parse_method_descriptions',
+ 'xmlrpc_server_add_introspection_data',
+ 'xmlrpc_server_call_method',
+ 'xmlrpc_server_create',
+ 'xmlrpc_server_destroy',
+ 'xmlrpc_server_register_introspection_callback',
+ 'xmlrpc_server_register_method',
+ 'xmlrpc_set_type'],
+ 'XSL': ['xsl_xsltprocessor_get_parameter',
+ 'xsl_xsltprocessor_has_exslt_support',
+ 'xsl_xsltprocessor_import_stylesheet',
+ 'xsl_xsltprocessor_register_php_functions',
+ 'xsl_xsltprocessor_remove_parameter',
+ 'xsl_xsltprocessor_set_parameter',
+ 'xsl_xsltprocessor_transform_to_doc',
+ 'xsl_xsltprocessor_transform_to_uri',
+ 'xsl_xsltprocessor_transform_to_xml'],
+ 'XSLT': ['xslt_backend_info',
+ 'xslt_backend_name',
+ 'xslt_backend_version',
+ 'xslt_create',
+ 'xslt_errno',
+ 'xslt_error',
+ 'xslt_free',
+ 'xslt_getopt',
+ 'xslt_process',
+ 'xslt_set_base',
+ 'xslt_set_encoding',
+ 'xslt_set_error_handler',
+ 'xslt_set_log',
+ 'xslt_set_object',
+ 'xslt_set_sax_handler',
+ 'xslt_set_sax_handlers',
+ 'xslt_set_scheme_handler',
+ 'xslt_set_scheme_handlers',
+ 'xslt_setopt'],
+ 'YAZ': ['yaz_addinfo',
+ 'yaz_ccl_conf',
+ 'yaz_ccl_parse',
+ 'yaz_close',
+ 'yaz_connect',
+ 'yaz_database',
+ 'yaz_element',
+ 'yaz_errno',
+ 'yaz_error',
+ 'yaz_es_result',
+ 'yaz_get_option',
+ 'yaz_hits',
+ 'yaz_itemorder',
+ 'yaz_present',
+ 'yaz_range',
+ 'yaz_record',
+ 'yaz_scan',
+ 'yaz_scan_result',
+ 'yaz_schema',
+ 'yaz_search',
+ 'yaz_set_option',
+ 'yaz_sort',
+ 'yaz_syntax',
+ 'yaz_wait'],
+ 'YP/NIS': ['yp_all',
+ 'yp_cat',
+ 'yp_err_string',
+ 'yp_errno',
+ 'yp_first',
+ 'yp_get_default_domain',
+ 'yp_master',
+ 'yp_match',
+ 'yp_next',
+ 'yp_order'],
+ 'Zip': ['zip_close',
+ 'zip_entry_close',
+ 'zip_entry_compressedsize',
+ 'zip_entry_compressionmethod',
+ 'zip_entry_filesize',
+ 'zip_entry_name',
+ 'zip_entry_open',
+ 'zip_entry_read',
+ 'zip_open',
+ 'zip_read'],
+ 'Zlib': ['gzclose',
+ 'gzcompress',
+ 'gzdeflate',
+ 'gzencode',
+ 'gzeof',
+ 'gzfile',
+ 'gzgetc',
+ 'gzgets',
+ 'gzgetss',
+ 'gzinflate',
+ 'gzopen',
+ 'gzpassthru',
+ 'gzputs',
+ 'gzread',
+ 'gzrewind',
+ 'gzseek',
+ 'gztell',
+ 'gzuncompress',
+ 'gzwrite',
+ 'readgzfile',
+ 'zlib_get_coding_type'],
+ 'bcompiler': ['bcompiler_load',
+ 'bcompiler_load_exe',
+ 'bcompiler_parse_class',
+ 'bcompiler_read',
+ 'bcompiler_write_class',
+ 'bcompiler_write_constant',
+ 'bcompiler_write_exe_footer',
+ 'bcompiler_write_footer',
+ 'bcompiler_write_function',
+ 'bcompiler_write_functions_from_file',
+ 'bcompiler_write_header'],
+ 'ctype': ['ctype_alnum',
+ 'ctype_alpha',
+ 'ctype_cntrl',
+ 'ctype_digit',
+ 'ctype_graph',
+ 'ctype_lower',
+ 'ctype_print',
+ 'ctype_punct',
+ 'ctype_space',
+ 'ctype_upper',
+ 'ctype_xdigit'],
+ 'dBase': ['dbase_add_record',
+ 'dbase_close',
+ 'dbase_create',
+ 'dbase_delete_record',
+ 'dbase_get_header_info',
+ 'dbase_get_record',
+ 'dbase_get_record_with_names',
+ 'dbase_numfields',
+ 'dbase_numrecords',
+ 'dbase_open',
+ 'dbase_pack',
+ 'dbase_replace_record'],
+ 'dba': ['dba_close',
+ 'dba_delete',
+ 'dba_exists',
+ 'dba_fetch',
+ 'dba_firstkey',
+ 'dba_handlers',
+ 'dba_insert',
+ 'dba_key_split',
+ 'dba_list',
+ 'dba_nextkey',
+ 'dba_open',
+ 'dba_optimize',
+ 'dba_popen',
+ 'dba_replace',
+ 'dba_sync'],
+ 'dbx': ['dbx_close',
+ 'dbx_compare',
+ 'dbx_connect',
+ 'dbx_error',
+ 'dbx_escape_string',
+ 'dbx_fetch_row',
+ 'dbx_query',
+ 'dbx_sort'],
+ 'fam': ['fam_cancel_monitor',
+ 'fam_close',
+ 'fam_monitor_collection',
+ 'fam_monitor_directory',
+ 'fam_monitor_file',
+ 'fam_next_event',
+ 'fam_open',
+ 'fam_pending',
+ 'fam_resume_monitor',
+ 'fam_suspend_monitor'],
+ 'filePro': ['filepro',
+ 'filepro_fieldcount',
+ 'filepro_fieldname',
+ 'filepro_fieldtype',
+ 'filepro_fieldwidth',
+ 'filepro_retrieve',
+ 'filepro_rowcount'],
+ 'gettext': ['bind_textdomain_codeset',
+ 'bindtextdomain',
+ 'dcgettext',
+ 'dcngettext',
+ 'dgettext',
+ 'dngettext',
+ 'gettext',
+ 'ngettext',
+ 'textdomain'],
+ 'iconv': ['iconv',
+ 'iconv_get_encoding',
+ 'iconv_mime_decode',
+ 'iconv_mime_decode_headers',
+ 'iconv_mime_encode',
+ 'iconv_set_encoding',
+ 'iconv_strlen',
+ 'iconv_strpos',
+ 'iconv_strrpos',
+ 'iconv_substr',
+ 'ob_iconv_handler'],
+ 'id3': ['id3_get_frame_long_name',
+ 'id3_get_frame_short_name',
+ 'id3_get_genre_id',
+ 'id3_get_genre_list',
+ 'id3_get_genre_name',
+ 'id3_get_tag',
+ 'id3_get_version',
+ 'id3_remove_tag',
+ 'id3_set_tag'],
+ 'mSQL': ['msql',
+ 'msql_affected_rows',
+ 'msql_close',
+ 'msql_connect',
+ 'msql_create_db',
+ 'msql_createdb',
+ 'msql_data_seek',
+ 'msql_db_query',
+ 'msql_dbname',
+ 'msql_drop_db',
+ 'msql_error',
+ 'msql_fetch_array',
+ 'msql_fetch_field',
+ 'msql_fetch_object',
+ 'msql_fetch_row',
+ 'msql_field_flags',
+ 'msql_field_len',
+ 'msql_field_name',
+ 'msql_field_seek',
+ 'msql_field_table',
+ 'msql_field_type',
+ 'msql_fieldflags',
+ 'msql_fieldlen',
+ 'msql_fieldname',
+ 'msql_fieldtable',
+ 'msql_fieldtype',
+ 'msql_free_result',
+ 'msql_list_dbs',
+ 'msql_list_fields',
+ 'msql_list_tables',
+ 'msql_num_fields',
+ 'msql_num_rows',
+ 'msql_numfields',
+ 'msql_numrows',
+ 'msql_pconnect',
+ 'msql_query',
+ 'msql_regcase',
+ 'msql_result',
+ 'msql_select_db',
+ 'msql_tablename'],
+ 'mailparse': ['mailparse_determine_best_xfer_encoding',
+ 'mailparse_msg_create',
+ 'mailparse_msg_extract_part',
+ 'mailparse_msg_extract_part_file',
+ 'mailparse_msg_free',
+ 'mailparse_msg_get_part',
+ 'mailparse_msg_get_part_data',
+ 'mailparse_msg_get_structure',
+ 'mailparse_msg_parse',
+ 'mailparse_msg_parse_file',
+ 'mailparse_rfc822_parse_addresses',
+ 'mailparse_stream_encode',
+ 'mailparse_uudecode_all'],
+ 'mcrypt': ['mcrypt_cbc',
+ 'mcrypt_cfb',
+ 'mcrypt_create_iv',
+ 'mcrypt_decrypt',
+ 'mcrypt_ecb',
+ 'mcrypt_enc_get_algorithms_name',
+ 'mcrypt_enc_get_block_size',
+ 'mcrypt_enc_get_iv_size',
+ 'mcrypt_enc_get_key_size',
+ 'mcrypt_enc_get_modes_name',
+ 'mcrypt_enc_get_supported_key_sizes',
+ 'mcrypt_enc_is_block_algorithm',
+ 'mcrypt_enc_is_block_algorithm_mode',
+ 'mcrypt_enc_is_block_mode',
+ 'mcrypt_enc_self_test',
+ 'mcrypt_encrypt',
+ 'mcrypt_generic',
+ 'mcrypt_generic_deinit',
+ 'mcrypt_generic_end',
+ 'mcrypt_generic_init',
+ 'mcrypt_get_block_size',
+ 'mcrypt_get_cipher_name',
+ 'mcrypt_get_iv_size',
+ 'mcrypt_get_key_size',
+ 'mcrypt_list_algorithms',
+ 'mcrypt_list_modes',
+ 'mcrypt_module_close',
+ 'mcrypt_module_get_algo_block_size',
+ 'mcrypt_module_get_algo_key_size',
+ 'mcrypt_module_get_supported_key_sizes',
+ 'mcrypt_module_is_block_algorithm',
+ 'mcrypt_module_is_block_algorithm_mode',
+ 'mcrypt_module_is_block_mode',
+ 'mcrypt_module_open',
+ 'mcrypt_module_self_test',
+ 'mcrypt_ofb',
+ 'mdecrypt_generic'],
+ 'mhash': ['mhash',
+ 'mhash_count',
+ 'mhash_get_block_size',
+ 'mhash_get_hash_name',
+ 'mhash_keygen_s2k'],
+ 'mnoGoSearch': ['udm_add_search_limit',
+ 'udm_alloc_agent',
+ 'udm_alloc_agent_array',
+ 'udm_api_version',
+ 'udm_cat_list',
+ 'udm_cat_path',
+ 'udm_check_charset',
+ 'udm_check_stored',
+ 'udm_clear_search_limits',
+ 'udm_close_stored',
+ 'udm_crc32',
+ 'udm_errno',
+ 'udm_error',
+ 'udm_find',
+ 'udm_free_agent',
+ 'udm_free_ispell_data',
+ 'udm_free_res',
+ 'udm_get_doc_count',
+ 'udm_get_res_field',
+ 'udm_get_res_param',
+ 'udm_hash32',
+ 'udm_load_ispell_data',
+ 'udm_open_stored',
+ 'udm_set_agent_param'],
+ 'muscat': ['muscat_close',
+ 'muscat_get',
+ 'muscat_give',
+ 'muscat_setup',
+ 'muscat_setup_net'],
+ 'mysqli': ['mysqli_affected_rows',
+ 'mysqli_autocommit',
+ 'mysqli_bind_param',
+ 'mysqli_bind_result',
+ 'mysqli_change_user',
+ 'mysqli_character_set_name',
+ 'mysqli_client_encoding',
+ 'mysqli_close',
+ 'mysqli_commit',
+ 'mysqli_connect',
+ 'mysqli_connect_errno',
+ 'mysqli_connect_error',
+ 'mysqli_data_seek',
+ 'mysqli_debug',
+ 'mysqli_disable_reads_from_master',
+ 'mysqli_disable_rpl_parse',
+ 'mysqli_dump_debug_info',
+ 'mysqli_embedded_connect',
+ 'mysqli_enable_reads_from_master',
+ 'mysqli_enable_rpl_parse',
+ 'mysqli_errno',
+ 'mysqli_error',
+ 'mysqli_escape_string',
+ 'mysqli_execute',
+ 'mysqli_fetch',
+ 'mysqli_fetch_array',
+ 'mysqli_fetch_assoc',
+ 'mysqli_fetch_field',
+ 'mysqli_fetch_field_direct',
+ 'mysqli_fetch_fields',
+ 'mysqli_fetch_lengths',
+ 'mysqli_fetch_object',
+ 'mysqli_fetch_row',
+ 'mysqli_field_count',
+ 'mysqli_field_seek',
+ 'mysqli_field_tell',
+ 'mysqli_free_result',
+ 'mysqli_get_client_info',
+ 'mysqli_get_client_version',
+ 'mysqli_get_host_info',
+ 'mysqli_get_metadata',
+ 'mysqli_get_proto_info',
+ 'mysqli_get_server_info',
+ 'mysqli_get_server_version',
+ 'mysqli_info',
+ 'mysqli_init',
+ 'mysqli_insert_id',
+ 'mysqli_kill',
+ 'mysqli_master_query',
+ 'mysqli_more_results',
+ 'mysqli_multi_query',
+ 'mysqli_next_result',
+ 'mysqli_num_fields',
+ 'mysqli_num_rows',
+ 'mysqli_options',
+ 'mysqli_param_count',
+ 'mysqli_ping',
+ 'mysqli_prepare',
+ 'mysqli_query',
+ 'mysqli_real_connect',
+ 'mysqli_real_escape_string',
+ 'mysqli_real_query',
+ 'mysqli_report',
+ 'mysqli_rollback',
+ 'mysqli_rpl_parse_enabled',
+ 'mysqli_rpl_probe',
+ 'mysqli_rpl_query_type',
+ 'mysqli_select_db',
+ 'mysqli_send_long_data',
+ 'mysqli_send_query',
+ 'mysqli_server_end',
+ 'mysqli_server_init',
+ 'mysqli_set_opt',
+ 'mysqli_sqlstate',
+ 'mysqli_ssl_set',
+ 'mysqli_stat',
+ 'mysqli_stmt_affected_rows',
+ 'mysqli_stmt_bind_param',
+ 'mysqli_stmt_bind_result',
+ 'mysqli_stmt_close',
+ 'mysqli_stmt_data_seek',
+ 'mysqli_stmt_errno',
+ 'mysqli_stmt_error',
+ 'mysqli_stmt_execute',
+ 'mysqli_stmt_fetch',
+ 'mysqli_stmt_free_result',
+ 'mysqli_stmt_init',
+ 'mysqli_stmt_num_rows',
+ 'mysqli_stmt_param_count',
+ 'mysqli_stmt_prepare',
+ 'mysqli_stmt_reset',
+ 'mysqli_stmt_result_metadata',
+ 'mysqli_stmt_send_long_data',
+ 'mysqli_stmt_sqlstate',
+ 'mysqli_stmt_store_result',
+ 'mysqli_store_result',
+ 'mysqli_thread_id',
+ 'mysqli_thread_safe',
+ 'mysqli_use_result',
+ 'mysqli_warning_count'],
+ 'openal': ['openal_buffer_create',
+ 'openal_buffer_data',
+ 'openal_buffer_destroy',
+ 'openal_buffer_get',
+ 'openal_buffer_loadwav',
+ 'openal_context_create',
+ 'openal_context_current',
+ 'openal_context_destroy',
+ 'openal_context_process',
+ 'openal_context_suspend',
+ 'openal_device_close',
+ 'openal_device_open',
+ 'openal_listener_get',
+ 'openal_listener_set',
+ 'openal_source_create',
+ 'openal_source_destroy',
+ 'openal_source_get',
+ 'openal_source_pause',
+ 'openal_source_play',
+ 'openal_source_rewind',
+ 'openal_source_set',
+ 'openal_source_stop',
+ 'openal_stream'],
+ 'qtdom': ['qdom_error', 'qdom_tree'],
+ 'shmop': ['shmop_close',
+ 'shmop_delete',
+ 'shmop_open',
+ 'shmop_read',
+ 'shmop_size',
+ 'shmop_write'],
+ 'spl': ['class_implements',
+ 'class_parents',
+ 'iterator-to-array',
+ 'iterator_count',
+ 'spl_classes'],
+ 'ssh2': ['ssh2_auth_none',
+ 'ssh2_auth_password',
+ 'ssh2_auth_pubkey_file',
+ 'ssh2_connect',
+ 'ssh2_exec',
+ 'ssh2_fetch_stream',
+ 'ssh2_fingerprint',
+ 'ssh2_methods_negotiated',
+ 'ssh2_scp_recv',
+ 'ssh2_scp_send',
+ 'ssh2_sftp',
+ 'ssh2_sftp_lstat',
+ 'ssh2_sftp_mkdir',
+ 'ssh2_sftp_readlink',
+ 'ssh2_sftp_realpath',
+ 'ssh2_sftp_rename',
+ 'ssh2_sftp_rmdir',
+ 'ssh2_sftp_stat',
+ 'ssh2_sftp_symlink',
+ 'ssh2_sftp_unlink',
+ 'ssh2_shell',
+ 'ssh2_tunnel'],
+ 'tidy': ['ob_tidyhandler',
+ 'tidy_access_count',
+ 'tidy_clean_repair',
+ 'tidy_config_count',
+ 'tidy_diagnose',
+ 'tidy_error_count',
+ 'tidy_get_body',
+ 'tidy_get_config',
+ 'tidy_get_error_buffer',
+ 'tidy_get_head',
+ 'tidy_get_html',
+ 'tidy_get_html_ver',
+ 'tidy_get_output',
+ 'tidy_get_release',
+ 'tidy_get_root',
+ 'tidy_get_status',
+ 'tidy_getopt',
+ 'tidy_is_xhtml',
+ 'tidy_is_xml',
+ 'tidy_load_config',
+ 'tidy_parse_file',
+ 'tidy_parse_string',
+ 'tidy_repair_file',
+ 'tidy_repair_string',
+ 'tidy_reset_config',
+ 'tidy_save_config',
+ 'tidy_set_encoding',
+ 'tidy_setopt',
+ 'tidy_warning_count'],
+ 'unknown': ['bcompile_write_file',
+ 'com',
+ 'dir',
+ 'dotnet',
+ 'hw_api_attribute',
+ 'hw_api_content',
+ 'hw_api_object',
+ 'imagepscopyfont',
+ 'mcve_adduser',
+ 'mcve_adduserarg',
+ 'mcve_bt',
+ 'mcve_checkstatus',
+ 'mcve_chkpwd',
+ 'mcve_chngpwd',
+ 'mcve_completeauthorizations',
+ 'mcve_connect',
+ 'mcve_connectionerror',
+ 'mcve_deleteresponse',
+ 'mcve_deletetrans',
+ 'mcve_deleteusersetup',
+ 'mcve_deluser',
+ 'mcve_destroyconn',
+ 'mcve_destroyengine',
+ 'mcve_disableuser',
+ 'mcve_edituser',
+ 'mcve_enableuser',
+ 'mcve_force',
+ 'mcve_getcell',
+ 'mcve_getcellbynum',
+ 'mcve_getcommadelimited',
+ 'mcve_getheader',
+ 'mcve_getuserarg',
+ 'mcve_getuserparam',
+ 'mcve_gft',
+ 'mcve_gl',
+ 'mcve_gut',
+ 'mcve_initconn',
+ 'mcve_initengine',
+ 'mcve_initusersetup',
+ 'mcve_iscommadelimited',
+ 'mcve_liststats',
+ 'mcve_listusers',
+ 'mcve_maxconntimeout',
+ 'mcve_monitor',
+ 'mcve_numcolumns',
+ 'mcve_numrows',
+ 'mcve_override',
+ 'mcve_parsecommadelimited',
+ 'mcve_ping',
+ 'mcve_preauth',
+ 'mcve_preauthcompletion',
+ 'mcve_qc',
+ 'mcve_responseparam',
+ 'mcve_return',
+ 'mcve_returncode',
+ 'mcve_returnstatus',
+ 'mcve_sale',
+ 'mcve_setblocking',
+ 'mcve_setdropfile',
+ 'mcve_setip',
+ 'mcve_setssl',
+ 'mcve_setssl_files',
+ 'mcve_settimeout',
+ 'mcve_settle',
+ 'mcve_text_avs',
+ 'mcve_text_code',
+ 'mcve_text_cv',
+ 'mcve_transactionauth',
+ 'mcve_transactionavs',
+ 'mcve_transactionbatch',
+ 'mcve_transactioncv',
+ 'mcve_transactionid',
+ 'mcve_transactionitem',
+ 'mcve_transactionssent',
+ 'mcve_transactiontext',
+ 'mcve_transinqueue',
+ 'mcve_transnew',
+ 'mcve_transparam',
+ 'mcve_transsend',
+ 'mcve_ub',
+ 'mcve_uwait',
+ 'mcve_verifyconnection',
+ 'mcve_verifysslcert',
+ 'mcve_void',
+ 'mysqli()',
+ 'pdf_open',
+ 'pdf_open_png',
+ 'pdf_set_font',
+ 'php_register_url_stream_wrapper',
+ 'php_stream_can_cast',
+ 'php_stream_cast',
+ 'php_stream_close',
+ 'php_stream_closedir',
+ 'php_stream_copy_to_mem',
+ 'php_stream_copy_to_stream',
+ 'php_stream_eof',
+ 'php_stream_filter_register_factory',
+ 'php_stream_filter_unregister_factory',
+ 'php_stream_flush',
+ 'php_stream_fopen_from_file',
+ 'php_stream_fopen_temporary_file',
+ 'php_stream_fopen_tmpfile',
+ 'php_stream_getc',
+ 'php_stream_gets',
+ 'php_stream_is',
+ 'php_stream_is_persistent',
+ 'php_stream_make_seekable',
+ 'php_stream_open_wrapper',
+ 'php_stream_open_wrapper_as_file',
+ 'php_stream_open_wrapper_ex',
+ 'php_stream_opendir',
+ 'php_stream_passthru',
+ 'php_stream_read',
+ 'php_stream_readdir',
+ 'php_stream_rewinddir',
+ 'php_stream_seek',
+ 'php_stream_sock_open_from_socket',
+ 'php_stream_sock_open_host',
+ 'php_stream_sock_open_unix',
+ 'php_stream_stat',
+ 'php_stream_stat_path',
+ 'php_stream_tell',
+ 'php_stream_write',
+ 'php_unregister_url_stream_wrapper',
+ 'swfbutton_keypress',
+ 'swfdisplayitem',
+ 'variant'],
+ 'vpopmail': ['vpopmail_add_alias_domain',
+ 'vpopmail_add_alias_domain_ex',
+ 'vpopmail_add_domain',
+ 'vpopmail_add_domain_ex',
+ 'vpopmail_add_user',
+ 'vpopmail_alias_add',
+ 'vpopmail_alias_del',
+ 'vpopmail_alias_del_domain',
+ 'vpopmail_alias_get',
+ 'vpopmail_alias_get_all',
+ 'vpopmail_auth_user',
+ 'vpopmail_del_domain',
+ 'vpopmail_del_domain_ex',
+ 'vpopmail_del_user',
+ 'vpopmail_error',
+ 'vpopmail_passwd',
+ 'vpopmail_set_user_quota'],
+ 'xattr': ['xattr_get',
+ 'xattr_list',
+ 'xattr_remove',
+ 'xattr_set',
+ 'xattr_supported'],
+ 'xdiff': ['xdiff_file_diff',
+ 'xdiff_file_diff_binary',
+ 'xdiff_file_merge3',
+ 'xdiff_file_patch',
+ 'xdiff_file_patch_binary',
+ 'xdiff_string_diff',
+ 'xdiff_string_diff_binary',
+ 'xdiff_string_merge3',
+ 'xdiff_string_patch',
+ 'xdiff_string_patch_binary']}
+
+
+if __name__ == '__main__':
+ import pprint
+ import sys
+ import os
+ import re
+ import urllib
+ _function_re = re.compile('<B\s+CLASS="function"\s*>(.*?)\(\)</B\s*>(?uism)')
+
+ def get_php_functions():
+ f = urllib.urlopen('http://de.php.net/manual/en/index.functions.php')
+ data = f.read()
+ f.close()
+ results = set()
+ for match in _function_re.finditer(data):
+ fn = match.group(1)
+ if '-&#62;' not in fn and '::' not in fn:
+ results.add(fn)
+ # PY24: use sorted()
+ results = list(results)
+ results.sort()
+ return results
+
+ def get_function_module(function_name):
+ fn = function_name.replace('_', '-')
+ f = urllib.urlopen('http://de.php.net/manual/en/function.%s.php' % fn)
+ regex = re.compile('<li class="header up">'
+ '<a href="ref\..*?\.php">([a-zA-Z0-9\s]+)</a></li>')
+ for line in f:
+ m = regex.search(line)
+ if m is not None:
+ return m.group(1)
+
+ print '>> Downloading Function Index'
+ functions = get_php_functions()
+ total = len(functions)
+ print '%d functions found' % total
+ modules = {}
+ idx = 1
+    for function_name in functions:
+ print '>> %r (%d/%d)' % (function_name, idx, total)
+ m = get_function_module(function_name)
+ if m is None:
+ print 'NOT_FOUND'
+ m = 'unknown'
+ else:
+ print repr(m)
+ modules.setdefault(m, []).append(function_name)
+ idx += 1
+
+ # extract useful sourcecode from this file
+ f = file(__file__)
+ try:
+ content = f.read()
+ finally:
+ f.close()
+ header = content[:content.find('MODULES = {')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+ # write new file
+ f = file(__file__, 'w')
+ f.write(header)
+ f.write('MODULES = %s\n\n' % pprint.pformat(modules))
+ f.write(footer)
+ f.close()
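+
+# Editorial sketch, not part of the generated table: a consumer of MODULES
+# (e.g. a PHP lexer) usually only needs a flat lookup set of builtin names.
+# Assuming nothing beyond the MODULES dict above, it could be collapsed like
+# this (hypothetical helper, kept in a comment so the module API stays
+# unchanged):
+#
+#     def _builtin_set(disabled=()):
+#         result = set()
+#         for module, functions in MODULES.iteritems():
+#             if module not in disabled:
+#                 result.update(functions)
+#         return result
+#
+#     PHP_BUILTINS = _builtin_set(disabled=('Sybase',))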
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
new file mode 100644
index 00000000..a5d370a1
--- /dev/null
+++ b/pygments/lexers/agile.py
@@ -0,0 +1,773 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.agile
+ ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for agile languages: Python, Ruby, Perl and Lua.
+
+ :copyright: 2006 by Georg Brandl, Armin Ronacher, Lukas Meuser.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+import re
+try:
+ set
+except NameError:
+ from sets import Set as set
+
+from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, \
+ LexerContext, include, combined, do_insertions, bygroups
+from pygments.token import Error, Text, \
+ Comment, Operator, Keyword, Name, String, Number, Generic
+from pygments.util import get_bool_opt, get_list_opt
+
+
+__all__ = ['PythonLexer', 'PythonConsoleLexer', 'RubyLexer',
+ 'RubyConsoleLexer', 'PerlLexer', 'LuaLexer']
+
+line_re = re.compile('.*?\n')
+
+
+class PythonLexer(RegexLexer):
+ name = 'Python'
+ aliases = ['python', 'py']
+ filenames = ['*.py', '*.pyw']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'^\s*"""(.|\n)*?"""', String.Doc),
+ (r"^\s*'''(.|\n)*?'''", String.Doc),
+ (r'[^\S\n]+', Text),
+ (r'#.*$', Comment),
+ (r'[]{}:(),.;[]', Text),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
+ (r'(assert|break|continue|del|elif|else|except|exec|'
+ r'finally|for|global|if|lambda|pass|print|raise|'
+ r'return|try|while|yield)\b', Keyword),
+ (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
+ (r'(import)(\s+)', bygroups(Keyword, Text), 'import'),
+ (r'@[a-zA-Z0-9.]+', Name.Decorator),
+ (r'(?<!\.)(__import__|abs|apply|basestring|bool|buffer|callable|'
+ r'chr|classmethod|cmp|coerce|compile|complex|delattr|dict|dir|'
+ r'divmod|enumerate|eval|execfile|exit|file|filter|float|getattr|'
+ r'globals|hasattr|hash|hex|id|input|int|intern|isinstance|'
+ r'issubclass|iter|len|list|locals|long|map|max|min|object|oct|'
+ r'open|ord|pow|property|range|raw_input|reduce|reload|repr|'
+ r'round|setattr|slice|staticmethod|str|sum|super|tuple|type|'
+ r'unichr|unicode|vars|xrange|zip)\b', Name.Builtin),
+ (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True'
+ r')\b', Name.Builtin.Pseudo),
+ (r'(?<!\.)(ArithmeticError|AssertionError|AttributeError|'
+ r'DeprecationWarning|EOFError|EnvironmentError|'
+ r'Exception|FloatingPointError|FutureWarning|IOError|'
+ r'ImportError|IndentationError|IndexError|KeyError|'
+ r'KeyboardInterrupt|LookupError|MemoryError|NameError|'
+ r'NotImplemented|NotImplementedError|OSError|OverflowError|'
+ r'OverflowWarning|PendingDeprecationWarning|ReferenceError|'
+ r'RuntimeError|RuntimeWarning|StandardError|StopIteration|'
+ r'SyntaxError|SyntaxWarning|SystemError|SystemExit|TabError|'
+ r'TypeError|UnboundLocalError|UnicodeDecodeError|'
+ r'UnicodeEncodeError|UnicodeError|UnicodeTranslateError|'
+ r'UserWarning|ValueError|Warning|ZeroDivisionError'
+ r')\b', Name.Exception),
+ ('`.*?`', String.Backtick),
+ ('r"""', String, 'tdqs'),
+ ("r'''", String, 'tsqs'),
+ ('r"', String, 'dqs'),
+ ("r'", String, 'sqs'),
+ ('"""', String, combined('stringescape', 'tdqs')),
+ ("'''", String, combined('stringescape', 'tsqs')),
+ ('"', String, combined('stringescape', 'dqs')),
+ ("'", String, combined('stringescape', 'sqs')),
+ ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'0\d+', Number.Oct),
+ (r'0x[a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer)
+ ],
+ 'funcname': [
+ ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
+ ],
+ 'classname': [
+ ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'(\s*)(as)(\s*)', bygroups(Text, Keyword, Text)),
+ (r'[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace),
+ (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
+ (r'', Text, '#pop') # all else: go back
+ ],
+ 'fromimport': [
+ (r'(\s+)(import)\b', bygroups(Text, Keyword), '#pop'),
+ (r'[a-zA-Z_.][a-zA-Z0-9_.]*', Name.Namespace),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|N{.*?}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings': [
+ (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ ('[\'"\\\\]', String),
+ # unhandled string formatting sign
+ (r'%', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ }
+
+
+class PythonConsoleLexer(Lexer):
+ """
+ Parses Python console output or doctests, like::
+
+ >>> a = 1
+ >>> print a
+ 1
+ """
+ name = 'Python console session'
+ aliases = ['pycon']
+
+ def get_tokens_unprocessed(self, text):
+ pylexer = PythonLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ tb = 0
+ for match in line_re.finditer(text):
+ line = match.group()
+ if line.startswith('>>> ') or line.startswith('... '):
+ tb = 0
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:4])]))
+ curcode += line[4:]
+ else:
+ if curcode:
+ for item in do_insertions(insertions,
+ pylexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ if line.startswith('Traceback (most recent call last):'):
+ tb = 1
+ yield match.start(), Generic.Traceback, line
+ elif tb:
+ if not line.startswith(' '):
+ tb = 0
+ yield match.start(), Generic.Traceback, line
+ else:
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(insertions,
+ pylexer.get_tokens_unprocessed(curcode)):
+ yield item
+
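+# Usage sketch for PythonConsoleLexer above (editorial note, not part of the
+# module; assumes the top-level ``pygments.highlight`` helper):
+#
+#     from pygments import highlight
+#     from pygments.formatters import TerminalFormatter
+#
+#     doctest = '>>> a = 1\n>>> print a\n1\n'
+#     print highlight(doctest, PythonConsoleLexer(), TerminalFormatter())
+#
+# Prompts come out as Generic.Prompt, the echoed source is re-lexed with
+# PythonLexer via do_insertions(), plain output is Generic.Output and
+# traceback lines are Generic.Traceback.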
+
+class RubyLexer(ExtendedRegexLexer):
+ name = 'Ruby'
+ aliases = ['rb', 'ruby']
+ filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ def heredoc_callback(self, match, ctx):
+ # okay, this is the hardest part of parsing Ruby...
+ # match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
+
+ start = match.start(1)
+ yield start, Operator, match.group(1) # <<-?
+ yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
+ yield match.start(3), Name.Constant, match.group(3) # heredoc name
+ yield match.start(4), String.Heredoc, match.group(4) # quote again
+
+ heredocstack = ctx.__dict__.setdefault('heredocstack', [])
+ outermost = not bool(heredocstack)
+ heredocstack.append((match.group(1) == '<<-', match.group(3)))
+
+ ctx.pos = match.start(5)
+ ctx.end = match.end(5)
+ # this may find other heredocs
+ for i, t, v in self.get_tokens_unprocessed(context=ctx):
+ yield i+start, t, v
+ ctx.pos = match.end()
+
+ if outermost:
+ # this is the outer heredoc again, now we can process them all
+ for tolerant, hdname in heredocstack:
+ lines = []
+ for match in line_re.finditer(ctx.text, ctx.pos):
+ if tolerant:
+ check = match.group().strip()
+ else:
+ check = match.group().rstrip()
+ if check == hdname:
+ for amatch in lines:
+ yield amatch.start(), String.Heredoc, amatch.group()
+ yield match.start(), Name.Constant, match.group()
+ ctx.pos = match.end()
+ break
+ else:
+ lines.append(match)
+ else:
+ # end of heredoc not found -- error!
+ for amatch in lines:
+ yield amatch.start(), Error, amatch.group()
+ ctx.end = len(ctx.text)
+ del heredocstack[:]
+
+
+ def gen_rubystrings_rules():
+ def intp_regex_callback(self, match, ctx):
+ yield match.start(1), String.Regex, match.group(1) # begin
+ nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
+ for i, t, v in self.get_tokens_unprocessed(context=nctx):
+ yield match.start(3)+i, t, v
+ yield match.start(4), String.Regex, match.group(4) # end[mixounse]*
+ ctx.pos = match.end()
+
+ def intp_string_callback(self, match, ctx):
+ yield match.start(1), String.Other, match.group(1)
+ nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
+ for i, t, v in self.get_tokens_unprocessed(context=nctx):
+ yield match.start(3)+i, t, v
+ yield match.start(4), String.Other, match.group(4) # end
+ ctx.pos = match.end()
+
+ states = {}
+ states['strings'] = [
+ # easy ones
+ (r'\:([a-zA-Z_][\w_]*[\!\?]?|\*\*?|[-+]@?|'
+ r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)', String.Symbol),
+ (r":'(\\\\|\\'|[^'])*'", String.Symbol),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r':"', String.Symbol, 'simple-sym'),
+ (r'"', String.Double, 'simple-string'),
+ (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
+ ]
+ # double-quoted string and symbol
+
+ for name, ttype, end in ('string', String.Double, '"'), \
+ ('sym', String.Symbol, '"'), \
+ ('backtick', String.Backtick, '`'):
+ states['simple-'+name] = [
+ include('string-intp-escaped'),
+ (r'[^\\%s#]+' % end, ttype),
+ (r'[\\#]', ttype),
+ (end, ttype, '#pop'),
+ ]
+
+ # braced quoted strings
+
+ for lbrace, rbrace, name in ('\\{', '\\}', 'cb'), \
+ ('\\[', '\\]', 'sb'), \
+ ('\\(', '\\)', 'pa'), \
+ ('<', '>', 'ab'):
+ states[name+'-intp-string'] = [
+ (r'\\[\\' + lbrace + rbrace + ']', String.Other),
+ (r'(?<!\\)' + lbrace, String.Other, '#push'),
+ (r'(?<!\\)' + rbrace, String.Other, '#pop'),
+ include('string-intp-escaped'),
+ (r'[\\#' + lbrace + rbrace + ']', String.Other),
+ (r'[^\\#' + lbrace + rbrace + ']+', String.Other),
+ ]
+ states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
+ name+'-intp-string'))
+ states[name+'-string'] = [
+ (r'\\[\\' + lbrace + rbrace + ']', String.Other),
+ (r'(?<!\\)' + lbrace, String.Other, '#push'),
+ (r'(?<!\\)' + rbrace, String.Other, '#pop'),
+ (r'[\\#' + lbrace + rbrace + ']', String.Other),
+ (r'[^\\#' + lbrace + rbrace + ']+', String.Other),
+ ]
+ states['strings'].append((r'%[qsw]' + lbrace, String.Other,
+ name+'-string'))
+ states[name+'-regex'] = [
+ (r'\\[\\' + lbrace + rbrace + ']', String.Regex),
+ (r'(?<!\\)' + lbrace, String.Regex, '#push'),
+ (r'(?<!\\)' + rbrace + '[mixounse]*', String.Regex, '#pop'),
+ include('string-intp'),
+ (r'[\\#' + lbrace + rbrace + ']', String.Regex),
+ (r'[^\\#' + lbrace + rbrace + ']+', String.Regex),
+ ]
+ states['strings'].append((r'%r' + lbrace, String.Regex,
+ name+'-regex'))
+
+ # these must come after %<brace>!
+ states['strings'] += [
+ # %r regex
+ (r'(%r(.))(.*?)(\2[mixounse]*)', intp_regex_callback),
+ # regular fancy strings
+ (r'%[qsw](.).*?\1', String.Other),
+ (r'(%[QWx](.))(.*?)(\2)', intp_string_callback),
+ # special forms of fancy strings after operators or
+ # in method calls with braces
+            # we need two regexes here for " " and "\t" because of bygroups()
+ (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(% .*? )',
+ bygroups(Text, String.Other)),
+ (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%\t.*?\t)',
+ bygroups(Text, String.Other)),
+            # and because of fixed-width lookbehinds, the whole thing a
+            # second time for line beginnings...
+ (r'^(\s*)(% .*? )',
+ bygroups(Text, String.Other)),
+ (r'^(\s*)(%\t.*?\t)',
+ bygroups(Text, String.Other)),
+ # all regular fancy strings
+ (r'(%([^a-zA-Z0-9\s]))(.*?)(\2)', intp_string_callback),
+ ]
+
+ return states
+
+ tokens = {
+ 'root': [
+ (r'#.*?$', Comment.Single),
+ (r'=begin\n.*?\n=end', Comment.Multiline),
+ (r'(BEGIN|END|alias|begin|break|case|defined\?|'
+ r'do|else|elsif|end|ensure|for|if|in|next|redo|'
+ r'rescue|raise|retry|return|super|then|undef|unless|until|when|'
+ r'while|yield)\b', Keyword),
+ (r'(initialize|new|loop|include|extend|raise|attr_reader|'
+ r'attr_writer|attr_accessor|attr|catch|throw|private|'
+ r'module_function|public|protected|true|false|nil)\b', Keyword.Pseudo),
+ (r'(not|and|or)\b', Operator.Word),
+ (r'(autoload|block_given|const_defined|eql|equal|frozen|include|'
+ r'instance_of|is_a|iterator|kind_of|method_defined|nil|'
+ r'private_method_defined|protected_method_defined|'
+ r'public_method_defined|respond_to|tainted)\?', Name.Builtin),
+ (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
+ (r'(?<!\.)(Array|Float|Integer|String|__id__|__send__|abort|ancestors|'
+ r'at_exit|autoload|binding|callcc|caller|'
+ r'catch|chomp|chop|class_eval|class_variables|'
+ r'clone|const_defined\?|const_get|const_missing|const_set|constants|'
+ r'display|dup|eval|exec|exit|extend|fail|fork|'
+ r'format|freeze|getc|gets|global_variables|gsub|'
+ r'hash|id|included_modules|inspect|instance_eval|'
+ r'instance_method|instance_methods|'
+ r'instance_variable_get|instance_variable_set|instance_variables|'
+ r'lambda|load|local_variables|loop|'
+ r'method|method_missing|methods|module_eval|name|'
+ r'object_id|open|p|print|printf|private_class_method|'
+ r'private_instance_methods|'
+ r'private_methods|proc|protected_instance_methods|'
+ r'protected_methods|public_class_method|'
+ r'public_instance_methods|public_methods|'
+ r'putc|puts|raise|rand|readline|readlines|require|'
+ r'scan|select|self|send|set_trace_func|singleton_methods|sleep|'
+ r'split|sprintf|srand|sub|syscall|system|taint|'
+ r'test|throw|to_a|to_s|trace_var|trap|type|untaint|untrace_var|'
+ r'warn)\b', Name.Builtin),
+ (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
+ # normal heredocs
+ (r'(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)', heredoc_callback),
+ # empty string heredocs
+ (r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
+ (r'__END__', Comment.Preproc, 'end-part'),
+            # multiline regex (after keywords or assignments)
+ (r'(?:^|(?<=[=<>~!])|'
+ r'(?<=(?:\s|;)when\s)|'
+ r'(?<=(?:\s|;)or\s)|'
+ r'(?<=(?:\s|;)and\s)|'
+ r'(?<=(?:\s|;|\.)index\s)|'
+ r'(?<=(?:\s|;|\.)scan\s)|'
+ r'(?<=(?:\s|;|\.)sub\s)|'
+ r'(?<=(?:\s|;|\.)sub!\s)|'
+ r'(?<=(?:\s|;|\.)gsub\s)|'
+ r'(?<=(?:\s|;|\.)gsub!\s)|'
+ r'(?<=(?:\s|;|\.)match\s)|'
+ r'(?<=(?:\s|;)if\s)|'
+ r'(?<=(?:\s|;)elsif\s)|'
+ r'(?<=^when\s)|'
+ r'(?<=^index\s)|'
+ r'(?<=^scan\s)|'
+ r'(?<=^sub\s)|'
+ r'(?<=^gsub\s)|'
+ r'(?<=^sub!\s)|'
+ r'(?<=^gsub!\s)|'
+ r'(?<=^match\s)|'
+ r'(?<=^if\s)|'
+ r'(?<=^elsif\s)'
+ r')(\s*)(/)(?!=)', bygroups(Text, String.Regex), 'multiline-regex'),
+ # multiline regex (in method calls)
+ (r'(?<=\(|,)/', String.Regex, 'multiline-regex'),
+ # multiline regex (this time the funny no whitespace rule)
+ (r'(\s+)(/[^\s=])', String.Regex, 'multiline-regex'),
+ # lex numbers and ignore following regular expressions which
+ # are division operators in fact (grrrr. i hate that. any
+ # better ideas?)
+ (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)(/)?',
+ bygroups(Number.Oct, Text, Operator)),
+ (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)(/)?',
+ bygroups(Number.Hex, Text, Operator)),
+ (r'(0b[01]+(?:_[01]+)*)(\s*)(/)?',
+ bygroups(Number.Bin, Text, Operator)),
+ (r'([\d]+(?:_\d+)*)(\s*)(/)?',
+ bygroups(Number.Integer, Text, Operator)),
+ # Names
+ (r'@@[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable.Class),
+ (r'@[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable.Instance),
+ (r'\$[a-zA-Z0-9_]+', Name.Variable.Global),
+ (r'\$[!@&`\'+~=/\\,;.<>_*$?:"]', Name.Variable.Global),
+ (r'\$-[0adFiIlpvw]', Name.Variable.Global),
+ (r'::', Operator),
+ include('strings'),
+ # chars
+ (r'\?(\\[MC]-)*' # modifiers
+ r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)',
+ String.Char),
+ (r'[A-Z][a-zA-Z0-9_]+', Name.Constant),
+ # this is needed because ruby attributes can look
+ # like keywords (class) or like this: ` ?!?
+ (r'(?<=\.)([a-zA-Z_]\w*[\!\?]?|[*%&^`~+-/\[<>=])', Name),
+ # module name
+ (r'(module)(\s+)([a-zA-Z_]\w*)', bygroups(Keyword, Text, Name.Namespace)),
+ # start of function name, a bit tricky
+ (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'[a-zA-Z_][\w_]*[\!\?]?', Name),
+ (r'(\[\]|\*\*|<<|>>|>=|<=|<=>|=~|={3}|'
+ r'!~|&&?|\|\||\.{1,3})', Operator),
+ (r'[-+/*%=<>&!^|~]=?', Operator),
+ (r'[\[\](){}:;,<>/?\\]', Text),
+ (r'\s+', Text)
+ ],
+ 'funcname': [
+ (r'([a-zA-Z_][\w_]*[\!\?]?|\*\*?|[-+]@?|'
+ r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)', Name.Function, '#pop')
+ ],
+ 'classname': [
+ (r'<<', Operator, '#pop'),
+ (r'[a-zA-Z_][\w_]*', Name.Class, '#pop')
+ ],
+ 'in-intp': [
+ ('}', String.Interpol, '#pop'),
+ include('root'),
+ ],
+ 'string-intp': [
+ (r'#{', String.Interpol, 'in-intp'),
+ (r'#@@?[a-zA-Z_][a-zA-Z0-9_]*', String.Interpol),
+ (r'#\$[a-zA-Z_][a-zA-Z0-9_]*', String.Interpol)
+ ],
+ 'string-intp-escaped': [
+ include('string-intp'),
+ (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'interpolated-regex': [
+ include('string-intp'),
+ (r'[\\#]', String.Regex),
+ (r'[^\\#]+', String.Regex),
+ ],
+ 'interpolated-string': [
+ include('string-intp'),
+ (r'[\\#]', String.Other),
+ (r'[^\\#]+', String.Other),
+ ],
+ 'multiline-regex': [
+ include('string-intp'),
+ (r'\\/', String.Regex),
+ (r'[\\#]', String.Regex),
+ (r'[^\\/#]+', String.Regex),
+ (r'/[mixounse]*', String.Regex, '#pop'),
+ ],
+ 'end-part': [
+ (r'.+', Comment.Preproc, '#pop')
+ ]
+ }
+ tokens.update(gen_rubystrings_rules())
+
+
+class RubyConsoleLexer(Lexer):
+ """
+ Parses Ruby console output like::
+
+ irb(main):001:0> a = 1
+ => 1
+ irb(main):002:0> puts a
+ 1
+ => nil
+ """
+ name = 'Ruby irb session'
+ aliases = ['rbcon', 'irb']
+
+ _prompt_re = re.compile('irb\([a-zA-Z_][a-zA-Z0-9_]*\):\d{3}:\d+[>*] ')
+
+ def get_tokens_unprocessed(self, text):
+ rblexer = RubyLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = self._prompt_re.match(line)
+ if m is not None:
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
+ for item in do_insertions(insertions,
+ rblexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(insertions,
+ rblexer.get_tokens_unprocessed(curcode)):
+ yield item
+
+
+class PerlLexer(RegexLexer):
+ name = 'Perl'
+ aliases = ['perl', 'pl']
+ filenames = ['*.pl', '*.pm']
+
+ flags = re.DOTALL | re.MULTILINE
+    # TODO: give this to a perl guy who knows how to parse perl...
+ tokens = {
+ 'root': [
+ (r'\#.*?$', Comment.Single),
+ (r'=[a-zA-Z0-9]+\s+.*\n[.\n]*?\n\s*=cut', Comment.Multiline),
+ (r'(case|continue|do|else|elsif|for|foreach|if|last|my|'
+ r'next|our|redo|reset|then|unless|until|while|use|'
+ r'print|new|BEGIN|END|return)\b', Keyword),
+ (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
+ (r's/(\\\\|\\/|[^/])*/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex),
+ (r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex),
+ (r'((?<==~)|(?<=\())\s*/(\\\\|\\/|[^/])*/[gcimosx]*', String.Regex),
+ (r'\s+', Text),
+ (r'(abs|accept|alarm|atan2|bind|binmode|bless|caller|chdir|'
+ r'chmod|chomp|chop|chown|chr|chroot|close|closedir|connect|'
+ r'continue|cos|crypt|dbmclose|dbmopen|defined|delete|die|'
+ r'dump|each|endgrent|endhostent|endnetent|endprotoent|'
+ r'endpwent|endservent|eof|eval|exec|exists|exit|exp|fcntl|'
+ r'fileno|flock|fork|format|formline|getc|getgrent|getgrgid|'
+ r'getgrnam|gethostbyaddr|gethostbyname|gethostent|getlogin|'
+ r'getnetbyaddr|getnetbyname|getnetent|getpeername|getpgrp|'
+ r'getppid|getpriority|getprotobyname|getprotobynumber|'
+ r'getprotoent|getpwent|getpwnam|getpwuid|getservbyname|'
+ r'getservbyport|getservent|getsockname|getsockopt|glob|gmtime|'
+ r'goto|grep|hex|import|index|int|ioctl|join|keys|kill|last|'
+ r'lc|lcfirst|length|link|listen|local|localtime|log|lstat|'
+ r'map|mkdir|msgctl|msgget|msgrcv|msgsnd|my|next|no|oct|open|'
+ r'opendir|ord|our|pack|package|pipe|pop|pos|printf|'
+ r'prototype|push|quotemeta|rand|read|readdir|'
+ r'readline|readlink|readpipe|recv|redo|ref|rename|require|'
+ r'reverse|rewinddir|rindex|rmdir|scalar|seek|seekdir|'
+ r'select|semctl|semget|semop|send|setgrent|sethostent|setnetent|'
+ r'setpgrp|setpriority|setprotoent|setpwent|setservent|'
+ r'setsockopt|shift|shmctl|shmget|shmread|shmwrite|shutdown|'
+ r'sin|sleep|socket|socketpair|sort|splice|split|sprintf|sqrt|'
+ r'srand|stat|study|substr|symlink|syscall|sysopen|sysread|'
+ r'sysseek|system|syswrite|tell|telldir|tie|tied|time|times|tr|'
+ r'truncate|uc|ucfirst|umask|undef|unlink|unpack|unshift|untie|'
+ r'utime|values|vec|wait|waitpid|wantarray|warn|write'
+ r'|y)\b', Name.Builtin),
+ (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo),
+ (r'<<([a-zA-Z_][a-zA-Z0-9_]*)\n.*?\n\1\n', String),
+ (r'__END__', Comment.Preproc, 'end-part'),
+ (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global),
+ (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global),
+ (r'[$@%#]+', Name.Variable, 'varname'),
+ (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
+ (r'\d+', Number.Integer),
+ (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
+ (r'0b[01]+(_[01]+)*', Number.Bin),
+ (r"'(\\\\|\\'|[^'])*'", String),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r'`(\\\\|\\`|[^`])*`', String.Backtick),
+ (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
+ (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
+ (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
+ (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
+            (r'(q|qq|qw|qr|qx)(.).*?\2', String.Other),
+ (r'package\s+', Keyword, 'modulename'),
+ (r'sub\s+', Keyword, 'funcname'),
+ (r'(\[\]|\*\*|::|<<|>>|>=|<=|<=>|={3}|!=|=~|'
+ r'!~|&&?|\|\||\.{1,3})', Operator),
+ (r'[-+/*%=<>&^|!\\~]=?', Operator),
+ (r'[\(\)\[\]:;,<>/\?\{\}]', Text),
+ (r'(?=\w)', Name, 'name'),
+ ],
+ 'varname': [
+ (r'\s+', Text),
+ (r'\{', Text, '#pop'), # hash syntax?
+ (r'\)|,', Text, '#pop'), # argument specifier
+ (r'[a-zA-Z0-9_]+::', Name.Namespace),
+ (r'[a-zA-Z0-9_:]+', Name.Variable, '#pop'),
+ ],
+ 'name': [
+ (r'[a-zA-Z0-9_]+::', Name.Namespace),
+ (r'[a-zA-Z0-9_:]+', Name, '#pop'),
+ (r'[A-Z_]+(?=[^a-zA-Z0-9_])', Name.Constant, '#pop'),
+ (r'(?=[^a-zA-Z0-9_])', Text, '#pop'),
+ ],
+ 'modulename': [
+ (r'[a-zA-Z_][\w_]*', Name.Namespace, '#pop')
+ ],
+ 'funcname': [
+ (r'[a-zA-Z_][\w_]*[\!\?]?', Name.Function),
+ (r'\s+', Text),
+ # argument declaration
+ (r'\([$@%]*\)\s*', Text),
+ (r'.*?{', Text, '#pop'),
+ ],
+ 'cb-string': [
+ (r'\\[\{\}\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\{', String.Other, 'cb-string'),
+ (r'\}', String.Other, '#pop'),
+ (r'[^\{\}\\]+', String.Other)
+ ],
+ 'rb-string': [
+ (r'\\[\(\)\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\(', String.Other, 'rb-string'),
+ (r'\)', String.Other, '#pop'),
+ (r'[^\(\)]+', String.Other)
+ ],
+ 'sb-string': [
+ (r'\\[\[\]\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\[', String.Other, 'sb-string'),
+ (r'\]', String.Other, '#pop'),
+ (r'[^\[\]]+', String.Other)
+ ],
+ 'lt-string': [
+ (r'\\[\<\>\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\<', String.Other, 'lt-string'),
+ (r'\>', String.Other, '#pop'),
+            (r'[^\<\>]+', String.Other)
+ ],
+ 'end-part': [
+ (r'.+', Comment.Preproc, '#pop')
+ ]
+ }
+
+
+class LuaLexer(RegexLexer):
+ name = 'Lua'
+ aliases = ['lua']
+ filenames = ['*.lua']
+
+ tokens = {
+ 'root': [
+ ('--.*$', Comment.Single),
+
+ (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
+ (r'(?i)\d+e[+-]?\d+', Number.Float),
+ ('(?i)0x[0-9a-f]*', Number.Hex),
+ (r'\d+', Number.Integer),
+
+ (r'\n', Text),
+ (r'[^\S\n]', Text),
+ (r'[\[\]\{\}\(\)\.,:;]', Text),
+
+ (r'(==|~=|<=|>=|\.\.|\.\.\.|[=+\-*/%^<>#])', Operator),
+ (r'(and|or|not)\b', Operator.Word),
+
+ ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
+ r'while)\b', Keyword),
+ (r'(local)\b', Keyword.Declaration),
+ (r'(true|false|nil)\b', Keyword.Constant),
+
+ (r'(function)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+
+ (r'[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)?', Name),
+
+ # multiline strings
+ (r'(?s)\[(=*)\[(.*?)\]\1\]', String),
+ ("'", String.Single, combined('stringescape', 'sqs')),
+ ('"', String.Double, combined('stringescape', 'dqs'))
+ ],
+
+ 'funcname': [
+ ('[A-Za-z_][A-Za-z0-9_]*', Name.Function, '#pop'),
+ # inline function
+ ('\(', Text, '#pop'),
+ ],
+
+ 'classname': [
+ ('[A-Za-z_][A-Za-z0-9_]*', Name.Class, '#pop')
+ ],
+
+ # if I understand correctly, every character is valid in a lua string,
+ # so this state is only for later corrections
+ 'string': [
+ ('.', String)
+ ],
+
+ 'stringescape': [
+ (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
+ ],
+
+ 'sqs': [
+ ("'", String, '#pop'),
+ include('string')
+ ],
+
+ 'dqs': [
+ ('"', String, '#pop'),
+ include('string')
+ ]
+ }
+
+ def __init__(self, **options):
+ self.func_name_highlighting = get_bool_opt(
+ options, 'func_name_highlighting', True)
+        self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
+
+ self._functions = set()
+ if self.func_name_highlighting:
+ from pygments.lexers._luabuiltins import MODULES
+ for mod, func in MODULES.iteritems():
+ if mod not in self.disabled_modules:
+ self._functions.update(func)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self._functions:
+ yield index, Name.Function, value
+ continue
+ elif '.' in value:
+ a, b = value.split('.')
+ yield index, Name, a
+ yield index + len(a), Text, '.'
+ yield index + len(a) + 1, Name, b
+ continue
+ yield index, token, value
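+
+
+if __name__ == '__main__':
+    # Editorial demo, not part of the library: show how LuaLexer promotes
+    # names listed in pygments.lexers._luabuiltins.MODULES to Name.Function
+    # and splits dotted names such as ``string.rep``.  Assumes only the
+    # classes above plus the top-level pygments.highlight helper.
+    from pygments import highlight
+    from pygments.formatters import TerminalFormatter
+
+    sample = 'print(string.rep("ab", 3))\n'
+    print highlight(sample, LuaLexer(), TerminalFormatter())
+    # with builtin highlighting switched off, the same names stay plain Names
+    print highlight(sample, LuaLexer(func_name_highlighting=False),
+                    TerminalFormatter())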
diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
new file mode 100644
index 00000000..2655c255
--- /dev/null
+++ b/pygments/lexers/compiled.py
@@ -0,0 +1,314 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.compiled
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for compiled languages: C/C++, Delphi, Java.
+
+ :copyright: 2006 by Georg Brandl, Armin Ronacher, Christoph Hack.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this
+from pygments.token import \
+ Text, Comment, Operator, Keyword, Name, String, Number
+
+
+__all__ = ['CLexer', 'CppLexer', 'DelphiLexer', 'JavaLexer']
+
+
+class CLexer(RegexLexer):
+ name = 'C'
+ aliases = ['c']
+ filenames = ['*.c', '*.h']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ tokens = {
+ 'whitespace': [
+ (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
+ (r'^\s*#', Comment.Preproc, 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//.*?\n', Comment),
+ (r'/[*](.|\n)*?[*]/', Comment),
+ ],
+ 'statements': [
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(0x[0-9a-fA-F]|0[0-7]+|(\d+\.\d*|\.\d+)|\d+)'
+ r'e[+-]\d+[lL]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'(\d+\.\d*|\.\d+)', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'[~!%^&*()+=|\[\]:,.<>/?-]', Text),
+ (r'(auto|break|case|const|continue|default|do|else|enum|extern|'
+ r'for|goto|if|register|return|sizeof|static|struct|switch|typedef|'
+ r'union|volatile|virtual|while)\b', Keyword),
+ (r'(int|long|float|short|double|char|unsigned|signed|void)\b',
+ Keyword.Type),
+ (r'(_{0,2}inline|naked|restrict|thread|typename)\b', Keyword.Reserved),
+ (r'__(asm|int8|based|except|int16|stdcall|cdecl|fastcall|int32|'
+ r'declspec|finally|int64|try|leave)\b', Keyword.Reserved),
+ (r'(true|false|NULL)\b', Keyword.Constant),
+ ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ ],
+ 'root': [
+ include('whitespace'),
+ # functions
+ (r'((?:[a-zA-Z0-9_*\s])+?(?:\s|[*]))' # return arguments
+ r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')({)',
+ bygroups(using(this), Name.Function, using(this), Text, Keyword),
+ 'function'),
+ # function declarations
+ (r'((?:[a-zA-Z0-9_*\s])+?(?:\s|[*]))' # return arguments
+ r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')(;)',
+ bygroups(using(this), Name.Function, using(this), Text, Text)),
+ ('', Text, 'statement'),
+ ],
+ 'statement' : [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Keyword),
+ (';', Text, '#pop'),
+ ],
+ 'function': [
+ include('whitespace'),
+ include('statements'),
+ (';', Text),
+ ('{', Keyword, '#push'),
+ ('}', Keyword, '#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment),
+ (r'//.*?\n', Comment, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
+
+
+class CppLexer(RegexLexer):
+ name = 'C++'
+ aliases = ['cpp', 'c++']
+ filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++']
+
+ tokens = {
+ 'root': [
+ (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
+ (r'^\s*#', Comment.Preproc, 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//.*?\n', Comment),
+ (r'/[*](.|\n)*?[*]/', Comment),
+ (r'[{}]', Keyword),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(0x[0-9a-fA-F]|0[0-7]+|(\d+\.\d*|\.\d+)|\d+)'
+ r'e[+-]\d+[lL]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'(\d+\.\d*|\.\d+)', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Text),
+ (r'(asm|auto|break|case|catch|const|const_cast|continue|'
+ r'default|delete|do|dynamic_cast|else|enum|explicit|export|'
+ r'extern|for|friend|goto|if|mutable|namespace|new|operator|'
+ r'private|protected|public|register|reinterpret_cast|return|'
+ r'sizeof|static|static_cast|struct|switch|template|this|throw|'
+ r'throws|try|typedef|typeid|typename|union|using|volatile|'
+ r'virtual|while)\b', Keyword),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(bool|int|long|float|short|double|char|unsigned|signed|'
+ r'void|wchar_t)\b', Keyword.Type),
+ (r'(_{0,2}inline|naked|thread)\b', Keyword.Reserved),
+ (r'__(asm|int8|based|except|int16|stdcall|cdecl|fastcall|int32|'
+ r'declspec|finally|int64|try|leave|wchar_t|w64|virtual_inheritance|'
+ r'uuidof|unaligned|super|single_inheritance|raise|noop|'
+ r'multiple_inheritance|m128i|m128d|m128|m64|interface|'
+ r'identifier|forceinline|event|assume)\b', Keyword.Reserved),
+ (r'(true|false|NULL)\b', Keyword.Constant),
+ ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ ],
+ 'classname': [
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment),
+ (r'//.*?\n', Comment, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
+
+
+class DelphiLexer(RegexLexer):
+ name = 'Delphi'
+ aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
+ filenames = ['*.pas']
+
+ flags = re.IGNORECASE | re.MULTILINE | re.DOTALL
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'asm\b', Keyword, 'asm'),
+ (r'(uses)(\s+)', bygroups(Keyword, Text), 'uses'),
+ (r'(procedure|function)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(abstract|and|array|as|assembler|at|begin|case|cdecl|'
+ r'class|const|constructor|contains|destructor|dispinterface|'
+ r'div|do|downto|else|end|except|false|far|file|finalization|'
+ r'finally|for|goto|if|implementation|in|inherited|'
+ r'initialization|inline|interface|is|label|mod|near|nil|not|'
+ r'object|of|on|or|overload|override|package|packed|pascal|'
+ r'private|program|property|protected|public|'
+ r'published|raise|record|register|repeat|requires|resourcestring|'
+ r'safecall|self|set|shl|shr|stdcall|then|threadvar|to|true|try|'
+ r'type|unit|until|uses|var|varargs|virtual|while|with|xor|'
+ r'break|assert|dec|inc)\b', Keyword),
+ (r'(AnsiString|Boolean|Byte|ByteBool|Cardinal|Char|Comp|'
+ r'Currency|Double|Extended|Int64|Integer|LongBool|LongInt|Real|'
+ r'Real48|ShortInt|ShortString|Single|SmallInt|String|WideChar|'
+ r'WideString|Word|WordBool)\b', Keyword.Type),
+ (r'\{.*?\}', Comment),
+ (r'\(\*.*?\*\)', Comment),
+ (r'//.*?\n', Comment),
+ (r"'(''|[^']*)'", String),
+ (r'\$[0-9a-fA-F]+', Number),
+ (r'\#\$?[0-9]{1,3}', Number),
+ (r'[0-9]', Number),
+ (r'[@~!%^&*()+=|\[\]:;,.<>/?-]', Text),
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name)
+ ],
+ 'uses': [
+ (r'[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace),
+ (r'\s*,\s*', Text),
+ (r';', Text, '#pop')
+ ],
+ 'funcname': [
+ (r'[a-zA-Z_][a-zA-Z0-9_.]*', Name.Function, '#pop')
+ ],
+ 'asm': [
+ (r'end', Keyword, '#pop'),
+ (r'\s+', Text),
+ (r'\{.*?\}', Comment),
+ (r'\(\*.*?\*\)', Comment),
+ (r'//.*?\n', Comment),
+ (r'(AAA|AAD|AAM|AAS|ADC|ADD|AND|ARPL|BOUND|BSF|BSR|BSWAP|BT|'
+ r'BTC|BTR|BTS|CALL|CBW|CDQ|CLC|CLD|CLI|CLTS|CMC|CMP|CMPSB|'
+ r'CMPSD|CMPSW|CMPXCHG|CMPXCHG486|CMPXCHG8B|CPUID|CWD|CWDE|'
+ r'DAA|DAS|DEC|DIV|EMMS|ENTER|HLT|IBTS|ICEBP|IDIV|IMUL|IN|INC|'
+ r'INSB|INSD|INSW|INT|INT01|INT03|INT1|INT3|INTO|INVD|INVLPG|'
+ r'IRET|IRETD|IRETW|JCXZ|JECXZ|JMP|LAHF|LAR|LCALL|LDS|LEA|LEAVE|'
+ r'LES|LFS|LGDT|LGS|LIDT|LJMP|LLDT|LMSW|LOADALL|LOADALL286|LOCK|'
+ r'LODSB|LODSD|LODSW|LOOP|LOOPE|LOOPNE|LOOPNZ|LOOPZ|LSL|LSS|LTR|'
+ r'MOV|MOVD|MOVQ|MOVSB|MOVSD|MOVSW|MOVSX|MOVZX|MUL|NEG|NOP|NOT|'
+ r'OR|OUT|OUTSB|OUTSD|OUTSW|POP|POPA|POPAD|POPAW|POPF|POPFD|'
+ r'POPFW|PUSH|PUSHA|PUSHAD|PUSHAW|PUSHF|PUSHFD|PUSHFW|RCL|RCR|'
+ r'RDMSR|RDPMC|RDSHR|RDTSC|REP|REPE|REPNE|REPNZ|REPZ|RET|RETF|'
+ r'RETN|ROL|ROR|RSDC|RSLDT|RSM|SAHF|SAL|SALC|SAR|SBB|SCASB|SCASD|'
+ r'SCASW|SGDT|SHL|SHLD|SHR|SHRD|SIDT|SLDT|SMI|SMINT|SMINTOLD|'
+ r'SMSW|STC|STD|STI|STOSB|STOSD|STOSW|STR|SUB|SVDC|SVLDT|SVTS|'
+ r'SYSCALL|SYSENTER|SYSEXIT|SYSRET|TEST|UD1|UD2|UMOV|VERR|VERW|'
+ r'WAIT|WBINVD|WRMSR|WRSHR|XADD|XBTS|XCHG|XLAT|XLATB|XOR|cmova|'
+ r'cmovae|cmovb|cmovbe|cmovc|cmovcxz|cmove|cmovg|cmovge|cmovl|'
+ r'cmovle|cmovna|cmovnae|cmovnb|cmovnbe|cmovnc|cmovne|cmovng|'
+ r'cmovnge|cmovnl|cmovnle|cmovno|cmovnp|cmovns|cmovnz|cmovo|'
+ r'cmovp|cmovpe|cmovpo|cmovs|cmovz|ja|jae|jb|jbe|jc|jcxz|je|jg|'
+ r'jge|jl|jle|jna|jnae|jnb|jnbe|jnc|jne|jng|jnge|jnl|jnle|jno|'
+ r'jnp|jns|jnz|jo|jp|jpe|jpo|js|jz|seta|setae|setb|setbe|setc|'
+ r'setcxz|sete|setg|setge|setl|setle|setna|setnae|setnb|setnbe|'
+ r'setnc|setne|setng|setnge|setnl|setnle|setno|setnp|setns|setnz|'
+ r'seto|setp|setpe|setpo|sets|setz)\b', Keyword),
+ ('[a-zA-Z_@][a-zA-Z0-9_]*', Name),
+ (r'\$[0-9]+', Number),
+ (r"'(''|[^']+)'", String),
+ (r'.', Text)
+ ]
+ }
+
+
+class JavaLexer(RegexLexer):
+ name = 'Java'
+ aliases = ['java']
+ filenames = ['*.java']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.]*\s+)+?)' # return arguments
+ r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(?=' + _ws + # exception declaration
+ r'(?:throws\s+(?:[a-zA-Z_][a-zA-Z0-9_]*,?\s*)+)?' +
+ _ws + r'\{)',
+ bygroups(using(this), Name.Function, using(this))),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment),
+ (r'/\*.*?\*/', Comment),
+ (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
+ (r'(abstract|assert|break|case|catch|'
+ r'const|continue|default|do|else|enum|extends|final|'
+ r'finally|for|if|goto|implements|import|instanceof|'
+ r'interface|native|new|package|private|protected|public|'
+ r'return|static|strictfp|super|switch|synchronized|this|'
+ r'throw|throws|transient|try|volatile|while)\b', Keyword),
+ (r'(boolean|byte|char|double|float|int|long|short|void)\b',
+ Keyword.Type),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'class'),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+ (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
+ (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number),
+ (r'[0-9]+L?', Number),
+ (r'0x[0-9a-f]+', Number),
+ (r'\n', Text)
+ ],
+ 'class': [
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ ]
+ }
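+
+
+if __name__ == '__main__':
+    # Editorial demo, not part of the library: the CLexer 'root' rules match
+    # a whole function definition up to its opening brace and re-lex the
+    # return type and the signature with using(this), so only the name itself
+    # becomes Name.Function.  Assumes the top-level pygments.highlight helper.
+    from pygments import highlight
+    from pygments.formatters import TerminalFormatter
+
+    sample = 'static int add(int a, int b)\n{\n    return a + b;\n}\n'
+    print highlight(sample, CLexer(), TerminalFormatter())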
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
new file mode 100644
index 00000000..664af07e
--- /dev/null
+++ b/pygments/lexers/dotnet.py
@@ -0,0 +1,221 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.dotnet
+ ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for .NET languages: C#, Boo, VB.NET.
+
+ :copyright: 2006 by Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+import re
+
+from pygments.lexer import RegexLexer, bygroups, using, this
+from pygments.token import \
+ Text, Comment, Operator, Keyword, Name, String, Number, Literal
+
+__all__ = ['CSharpLexer', 'BooLexer', 'VbNetLexer']
+
+
+class CSharpLexer(RegexLexer):
+ name = 'C#'
+ aliases = ['csharp', 'c#']
+ filenames = ['*.cs']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^([ \t]*(?:[a-zA-Z_][a-zA-Z0-9_\.]*\s+)+?)' # return arguments
+ r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(?=' + _ws + '\{)', # lookahead for {
+ bygroups(using(this), Name.Function, using(this))),
+ # properties
+ (r'^([ \t]*(?:[a-zA-Z_][a-zA-Z0-9_\.]*\s+)+?)' # return arguments
+ r'([a-zA-Z_][a-zA-Z0-9_]*)' # property name
+ r'(?=' + _ws + r'\{' + _ws + # lookahead for
+ r'(?:get|set)' + _ws + r'\{)', # get/set
+ bygroups(using(this), Name.Function)),
+ (r'^\s*\[.*?\]', Name.Attribute),
+ (r'[^\S\n]+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//.*?\n', Comment),
+ (r'/[*](.|\n)*?[*]/', Comment),
+ (r'\n', Text),
+ (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Text),
+ (r'[{}]', Keyword),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'\\.'|'[^\\]'", String.Char),
+ (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
+ r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r'#\s+(if|endif|else|elif|define|undef|'
+ r'line|error|warning|region|endregion)', Comment.Preproc),
+ (r'(abstract|as|base|break|case|catch|'
+ r'checked|const|continue|default|delegate|'
+ r'do|else|enum|event|explicit|extern|false|finally|'
+ r'fixed|for|foreach|goto|if|implicit|in|interface|'
+ r'internal|is|lock|null|operator|'
+ r'out|override|params|private|protected|public|readonly|'
+ r'ref|return|sealed|sizeof|stackalloc|static|'
+ r'switch|this|throw|true|try|typeof|'
+ r'unchecked|unsafe|virtual|void|while|'
+ r'get|set|new)\b', Keyword),
+ (r'(bool|byte|char|decimal|double|float|int|long|object|sbyte|'
+ r'short|string|uint|ulong|ushort)\b', Keyword.Type),
+ (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'class'),
+ (r'(namespace|using)(\s+)', bygroups(Keyword, Text), 'namespace'),
+ ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ ],
+ 'class': [
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ ],
+ 'namespace': [
+ (r'[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace, '#pop')
+ ]
+ }
+
+
+class BooLexer(RegexLexer):
+ name = 'Boo'
+ aliases = ['boo']
+ filenames = ['*.boo']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'(#|//).*$', Comment),
+ (r'/[*]', Comment, 'comment'),
+ (r'[]{}:(),.;[]', Text),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'/(\\\\|\\/|[^/\s])/', String.Regex),
+ (r'@/(\\\\|\\/|[^/])*/', String.Regex),
+ (r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
+ (r'(as|abstract|callable|constructor|destructor|do|import|'
+ r'enum|event|final|get|interface|internal|of|override|'
+ r'partial|private|protected|public|return|set|static|'
+ r'struct|transient|virtual|yield|super|and|break|cast|'
+ r'continue|elif|else|ensure|except|for|given|goto|if|in|'
+ r'is|isa|not|or|otherwise|pass|raise|ref|try|unless|when|'
+ r'while|from|as)\b', Keyword),
+ (r'def(?=\s+\(.*?\))', Keyword),
+ (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(namespace)(\s+)', bygroups(Keyword, Text), 'namespace'),
+ (r'(?<!\.)(true|false|null|self|__eval__|__switch__|array|'
+ r'assert|checked|enumerate|filter|getter|len|lock|map|'
+ r'matrix|max|min|normalArrayIndexing|print|property|range|'
+ r'rawArrayIndexing|required|typeof|unchecked|using|'
+ r'yieldAll|zip)\b', Name.Builtin),
+ ('"""(\\\\|\\"|.*?)"""', String.Double),
+ ('"(\\\\|\\"|[^"]*?)"', String.Double),
+ ("'(\\\\|\\'|[^']*?)'", String.Single),
+ ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
+ (r'[0-9][0-9\.]*(m|ms|d|h|s)', Number),
+ (r'0\d+', Number.Oct),
+ (r'0x[a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer),
+ ],
+ 'comment': [
+ ('/[*]', Comment.Multiline, '#push'),
+ ('[*]/', Comment.Multiline, '#pop'),
+ ('[^/*]', Comment.Multiline),
+ ('[*/]', Comment.Multiline)
+ ],
+ 'funcname': [
+ ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
+ ],
+ 'classname': [
+ ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ ],
+ 'namespace': [
+ ('[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace, '#pop')
+ ]
+ }
+
+
+class VbNetLexer(RegexLexer):
+ name = 'VB.net'
+ aliases = ['vb.net', 'vbnet']
+ filenames = ['*.vb', '*.bas']
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'^\s*<.*?>', Name.Attribute),
+ (r'\s+', Text),
+ (r'\n', Text),
+ (r'rem\b.*?\n', Comment),
+ (r"'.*?\n", Comment),
+ (r'[\(\){}!#,.:]', Text),
+ (r'#If\s.*?\sThen|#ElseIf\s.*?\sThen|#End\s+If|#Const|'
+ r'#ExternalSource.*?\n|#End\s+ExternalSource|'
+ r'#Region.*?\n|#End\s+Region|#ExternalChecksum',
+ Comment.Preproc),
+ (r'Option\s+(Strict|Explicit|Compare)\s+'
+ r'(On|Off|Binary|Text)', Keyword.Declaration),
+ (r'(?<!\.)(AddHandler|Alias|'
+ r'ByRef|ByVal|Call|Case|Catch|CBool|CByte|CChar|CDate|'
+ r'CDec|CDbl|CInt|CLng|CObj|Const|Continue|CSByte|CShort|'
+ r'CSng|CStr|CType|CUInt|CULng|CUShort|Declare|'
+ r'Default|Delegate|Dim|DirectCast|Do|Each|Else|ElseIf|'
+ r'End|EndIf|Enum|Erase|Error|Event|Exit|False|Finally|For|'
+ r'Friend|Function|Get|Global|GoSub|GoTo|Handles|If|'
+ r'Implements|Imports|Inherits|Interface|'
+ r'Let|Lib|Loop|Me|Module|MustInherit|'
+ r'MustOverride|MyBase|MyClass|Namespace|Narrowing|New|Next|'
+ r'Not|Nothing|NotInheritable|NotOverridable|Of|On|'
+ r'Operator|Option|Optional|Overloads|Overridable|'
+ r'Overrides|ParamArray|Partial|Private|Property|Protected|'
+ r'Public|RaiseEvent|ReadOnly|ReDim|RemoveHandler|Resume|'
+ r'Return|Select|Set|Shadows|Shared|Single|'
+ r'Static|Step|Stop|Structure|Sub|SyncLock|Then|'
+ r'Throw|To|True|Try|TryCast|Wend|'
+ r'Using|When|While|Widening|With|WithEvents|'
+ r'WriteOnly)\b', Keyword),
+ (r'(?<!\.)(Function|Sub|Property)(\s+)',
+ bygroups(Keyword, Text), 'funcname'),
+ (r'(?<!\.)(Class|Structure|Enum)(\s+)',
+ bygroups(Keyword, Text), 'classname'),
+ (r'(?<!\.)(Namespace|Imports)(\s+)',
+ bygroups(Keyword, Text), 'namespace'),
+ (r'(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|'
+ r'Object|SByte|Short|Single|String|Variant|UInteger|ULong|'
+ r'UShort)\b', Keyword.Type),
+ (r'(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|'
+ r'Or|OrElse|TypeOf|Xor)\b', Operator.Word),
+ (r'&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|'
+ r'<=|>=|<>|[-&*/\\^+=<>]',
+ Operator),
+ ('"', String, 'string'),
+ ('[a-zA-Z_][a-zA-Z0-9_]*[%&@!#$]?', Name),
+ ('#.*?#', Literal.Date),
+ (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
+ (r'\d+([SILDFR]|US|UI|UL)?', Number.Integer),
+ (r'&H[0-9a-f]+([SILDFR]|US|UI|UL)?', Number.Integer),
+ (r'&O[0-7]+([SILDFR]|US|UI|UL)?', Number.Integer),
+ (r'_\n', Text), # Line continuation
+ ],
+ 'string': [
+ (r'""', String),
+ (r'"C?', String, '#pop'),
+ (r'[^"]+', String),
+ ],
+ 'funcname': [
+ (r'[a-z_][a-z0-9_]*', Name.Function, '#pop')
+ ],
+ 'classname': [
+ (r'[a-z_][a-z0-9_]*', Name.Class, '#pop')
+ ],
+ 'namespace': [
+ (r'[a-z_][a-z0-9_.]*', Name.Namespace, '#pop')
+ ],
+ }
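
The three lexers above register the aliases 'csharp'/'c#', 'boo' and 'vb.net'/'vbnet'. A hedged sketch of driving them through the public helpers, assuming the aliases are also registered in pygments/lexers/_mapping.py as the diffstat suggests:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    source = 'class Program { static void Main() { } }'
    # resolve the lexer through one of its aliases instead of importing it
    lexer = get_lexer_by_name('csharp')
    print highlight(source, lexer, TerminalFormatter())
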
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
new file mode 100644
index 00000000..3c0f0b13
--- /dev/null
+++ b/pygments/lexers/other.py
@@ -0,0 +1,137 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.other
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for other languages: SQL, Brainfuck.
+
+ :copyright: 2006 by Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer
+from pygments.token import Token, \
+ Text, Comment, Operator, Keyword, Name, String, Number
+
+
+__all__ = ['SqlLexer', 'BrainfuckLexer']
+
+
+class SqlLexer(RegexLexer):
+ name = 'SQL'
+ aliases = ['sql']
+ filenames = ['*.sql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'--.*?\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'(ABORT|ABS|ABSOLUTE|ACCESS|ADA|ADD|ADMIN|AFTER|AGGREGATE|'
+ r'ALIAS|ALL|ALLOCATE|ALTER|ANALYSE|ANALYZE|AND|ANY|ARE|AS|'
+ r'ASC|ASENSITIVE|ASSERTION|ASSIGNMENT|ASYMMETRIC|AT|ATOMIC|'
+ r'AUTHORIZATION|AVG|BACKWARD|BEFORE|BEGIN|BETWEEN|BITVAR|'
+ r'BIT_LENGTH|BOTH|BREADTH|BY|C|CACHE|CALL|CALLED|CARDINALITY|'
+ r'CASCADE|CASCADED|CASE|CAST|CATALOG|CATALOG_NAME|CHAIN|'
+ r'CHARACTERISTICS|CHARACTER_LENGTH|CHARACTER_SET_CATALOG|'
+ r'CHARACTER_SET_NAME|CHARACTER_SET_SCHEMA|CHAR_LENGTH|CHECK|'
+ r'CHECKED|CHECKPOINT|CLASS|CLASS_ORIGIN|CLOB|CLOSE|CLUSTER|'
+ r'COALESCE|COBOL|COLLATE|COLLATION|COLLATION_CATALOG|'
+ r'COLLATION_NAME|COLLATION_SCHEMA|COLUMN|COLUMN_NAME|'
+ r'COMMAND_FUNCTION|COMMAND_FUNCTION_CODE|COMMENT|COMMIT|'
+ r'COMMITTED|COMPLETION|CONDITION_NUMBER|CONNECT|CONNECTION|'
+ r'CONNECTION_NAME|CONSTRAINT|CONSTRAINTS|CONSTRAINT_CATALOG|'
+ r'CONSTRAINT_NAME|CONSTRAINT_SCHEMA|CONSTRUCTOR|CONTAINS|'
+ r'CONTINUE|CONVERSION|CONVERT|COPY|CORRESPONDING|COUNT|'
+ r'CREATE|CREATEDB|CREATEUSER|CROSS|CUBE|CURRENT|CURRENT_DATE|'
+ r'CURRENT_PATH|CURRENT_ROLE|CURRENT_TIME|CURRENT_TIMESTAMP|'
+ r'CURRENT_USER|CURSOR|CURSOR_NAME|CYCLE|DATA|DATABASE|'
+ r'DATETIME_INTERVAL_CODE|DATETIME_INTERVAL_PRECISION|DAY|'
+ r'DEALLOCATE|DECLARE|DEFAULT|DEFAULTS|DEFERRABLE|DEFERRED|'
+ r'DEFINED|DEFINER|DELETE|DELIMITER|DELIMITERS|DEREF|DESC|'
+ r'DESCRIBE|DESCRIPTOR|DESTROY|DESTRUCTOR|DETERMINISTIC|'
+ r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|'
+ r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|'
+ r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|'
+ r'EXCEPT|EXCEPTION|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
+ r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|'
+ r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|'
+ r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|'
+ r'GROUP|GROUPING|HANDLER|HAVING|HIERARCHY|HOLD|HOST|IDENTITY|'
+ r'IGNORE|ILIKE|IMMEDIATE|IMMUTABLE|IMPLEMENTATION|IMPLICIT|IN|'
+ r'INCLUDING|INCREMENT|INDEX|INDICATOR|INFIX|INHERITS|INITIALIZE|'
+ r'INITIALLY|INNER|INOUT|INPUT|INSENSITIVE|INSERT|INSTANTIABLE|'
+ r'INSTEAD|INTERSECT|INTO|INVOKER|IS|ISNULL|ISOLATION|ITERATE|JOIN|'
+ r'K|KEY|KEY_MEMBER|KEY_TYPE|LANCOMPILER|LANGUAGE|LARGE|LAST|'
+ r'LATERAL|LEADING|LEFT|LENGTH|LESS|LEVEL|LIKE|LIMIT|LISTEN|LOAD|'
+ r'LOCAL|LOCALTIME|LOCALTIMESTAMP|LOCATION|LOCATOR|LOCK|LOWER|M|'
+ r'MAP|MATCH|MAX|MAXVALUE|MESSAGE_LENGTH|MESSAGE_OCTET_LENGTH|'
+ r'MESSAGE_TEXT|METHOD|MIN|MINUTE|MINVALUE|MOD|MODE|MODIFIES|'
+ r'MODIFY|MONTH|MORE|MOVE|MUMPS|NAMES|NATIONAL|NATURAL|NCHAR|'
+ r'NCLOB|NEW|NEXT|NO|NOCREATEDB|NOCREATEUSER|NONE|NOT|NOTHING|'
+ r'NOTIFY|NOTNULL|NULL|NULLABLE|NULLIF|OBJECT|OCTET_LENGTH|OF|OFF|'
+ r'OFFSET|OIDS|OLD|ON|ONLY|OPEN|OPERATION|OPERATOR|OPTION|OPTIONS|'
+ r'OR|ORDER|ORDINALITY|OUT|OUTER|OUTPUT|OVERLAPS|OVERLAY|OVERRIDING|'
+ r'OWNER|PAD|PARAMETER|PARAMETERS|PARAMETER_MODE|PARAMETER_NAME|'
+ r'PARAMETER_ORDINAL_POSITION|PARAMETER_SPECIFIC_CATALOG|'
+ r'PARAMETER_SPECIFIC_NAME|PARAMETER_SPECIFIC_SCHEMA|PARTIAL|'
+ r'PASCAL|PENDANT|PLACING|PLI|POSITION|POSTFIX|PRECISION|PREFIX|'
+ r'PREORDER|PREPARE|PRESERVE|PRIMARY|PRIOR|PRIVILEGES|PROCEDURAL|'
+ r'PROCEDURE|PUBLIC|READ|READS|RECHECK|RECURSIVE|REF|REFERENCES|'
+ r'REFERENCING|REINDEX|RELATIVE|RENAME|REPEATABLE|REPLACE|RESET|'
+ r'RESTART|RESTRICT|RESULT|RETURN|RETURNED_LENGTH|'
+ r'RETURNED_OCTET_LENGTH|RETURNED_SQLSTATE|RETURNS|REVOKE|RIGHT|'
+ r'ROLE|ROLLBACK|ROLLUP|ROUTINE|ROUTINE_CATALOG|ROUTINE_NAME|'
+ r'ROUTINE_SCHEMA|ROW|ROWS|ROW_COUNT|RULE|SAVE_POINT|SCALE|SCHEMA|'
+ r'SCHEMA_NAME|SCOPE|SCROLL|SEARCH|SECOND|SECURITY|SELECT|SELF|'
+ r'SENSITIVE|SERIALIZABLE|SERVER_NAME|SESSION|SESSION_USER|SET|'
+ r'SETOF|SETS|SHARE|SHOW|SIMILAR|SIMPLE|SIZE|SOME|SOURCE|SPACE|'
+ r'SPECIFIC|SPECIFICTYPE|SPECIFIC_NAME|SQL|SQLCODE|SQLERROR|'
+ r'SQLEXCEPTION|SQLSTATE|SQLWARNING|STABLE|START|STATE|STATEMENT|'
+ r'STATIC|STATISTICS|STDIN|STDOUT|STORAGE|STRICT|STRUCTURE|STYPE|'
+ r'SUBCLASS_ORIGIN|SUBLIST|SUBSTRING|SUM|SYMMETRIC|SYSID|SYSTEM|'
+ r'SYSTEM_USER|TABLE|TABLE_NAME|TEMP|TEMPLATE|TEMPORARY|TERMINATE|'
+ r'THAN|THEN|TIMESTAMP|TIMEZONE_HOUR|TIMEZONE_MINUTE|TO|TOAST|'
+ r'TRAILING|TRANSACTION|TRANSACTIONS_COMMITTED|'
+ r'TRANSACTIONS_ROLLED_BACK|TRANSACTION_ACTIVE|TRANSFORM|'
+ r'TRANSFORMS|TRANSLATE|TRANSLATION|TREAT|TRIGGER|TRIGGER_CATALOG|'
+ r'TRIGGER_NAME|TRIGGER_SCHEMA|TRIM|TRUE|TRUNCATE|TRUSTED|TYPE|'
+ r'UNCOMMITTED|UNDER|UNENCRYPTED|UNION|UNIQUE|UNKNOWN|UNLISTEN|'
+ r'UNNAMED|UNNEST|UNTIL|UPDATE|UPPER|USAGE|USER|'
+ r'USER_DEFINED_TYPE_CATALOG|USER_DEFINED_TYPE_NAME|'
+ r'USER_DEFINED_TYPE_SCHEMA|USING|VACUUM|VALID|VALIDATOR|VALUES|'
+ r'VARIABLE|VERBOSE|VERSION|VIEW|VOLATILE|WHEN|WHENEVER|WHERE|'
+ r'WITH|WITHOUT|WORK|WRITE|YEAR|ZONE)\b', Keyword),
+ (r'(ARRAY|BIGINT|BINARY|BIT|BLOB|BOOLEAN|CHAR|CHARACTER|DATE|'
+ r'DEC|DECIMAL|FLOAT|INT|INTEGER|INTERVAL|NUMBER|NUMERIC|REAL|'
+ r'SERIAL|SMALLINT|VARCHAR|VARYING|INT8|SERIAL8|TEXT)\b',
+ Name.Builtin),
+ (r'[+*/<>=~!@#%^&|`?^-]', Operator),
+ (r'[0-9]+', Number.Integer),
+ (r"'(''|[^'])*'", String),
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ (r'[;:()\[\],\.]', Text)
+ ],
+ 'multiline-comments': [
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[^/\*]+', Comment.Multiline),
+ (r'[/*]', Comment.Multiline)
+ ]
+ }
+
+
+class BrainfuckLexer(RegexLexer):
+ name = 'Brainfuck'
+ aliases = ['brainfuck']
+ filenames = ['*.bf', '*.b']
+
+ tokens = {
+ 'root': [
+ (r'[.,+\-<>\[\]]+', Keyword),
+ (r'[^.,+\-<>\[\]]+', Comment)
+ ]
+ }
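
A short sketch of the SqlLexer in use (illustrative only; nothing here is specific to this commit beyond the class itself):

    from pygments import highlight
    from pygments.lexers.other import SqlLexer
    from pygments.formatters import HtmlFormatter

    query = "SELECT name FROM users WHERE id = 1  -- single lookup"
    print highlight(query, SqlLexer(), HtmlFormatter())
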
diff --git a/pygments/lexers/special.py b/pygments/lexers/special.py
new file mode 100644
index 00000000..9076ca51
--- /dev/null
+++ b/pygments/lexers/special.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.special
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Special lexers.
+
+ :copyright: 2006 by Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+import re
+import cStringIO
+
+from pygments.lexer import Lexer, RegexLexer
+from pygments.token import Token, \
+ Text, Comment, Operator, Keyword, Name, String, Number
+
+
+__all__ = ['TextLexer', 'RawTokenLexer']
+
+
+class TextLexer(Lexer):
+ name = 'Text only'
+ aliases = ['text']
+ filenames = ['*.txt']
+
+ def get_tokens_unprocessed(self, text):
+ yield 0, Text, text
+
+
+_ttype_cache = {}
+
+line_re = re.compile('.*?\n')
+
+class RawTokenLexer(Lexer):
+ """
+ Recreate a token stream formatted with the RawTokenFormatter.
+
+ Additional options accepted:
+
+ ``compress``
+ If set to "gz" or "bz2", decompress the token stream with
+ the given compression algorithm (default: '').
+ """
+ name = 'Raw token data'
+ aliases = ['raw']
+ filenames = ['*.raw']
+
+ def __init__(self, **options):
+ self.compress = options.get('compress', '')
+ Lexer.__init__(self, **options)
+
+ def get_tokens(self, text):
+ if self.compress == 'gz':
+ import gzip
+ gzipfile = gzip.GzipFile('', 'rb', 9, cStringIO.StringIO(text))
+ text = gzipfile.read()
+ elif self.compress == 'bz2':
+ import bz2
+ text = bz2.decompress(text)
+ return Lexer.get_tokens(self, text)
+
+ def get_tokens_unprocessed(self, text):
+ for match in line_re.finditer(text):
+ ttypestr, val = match.group().split('\t', 1)
+ ttype = _ttype_cache.get(ttypestr)
+ if not ttype:
+ ttype = Token
+ ttypes = ttypestr.split('.')[1:]
+ for ttype_ in ttypes:
+ ttype = getattr(ttype, ttype_)
+ _ttype_cache[ttypestr] = ttype
+ val = val[1:-2].decode('string-escape')
+ yield 0, ttype, val
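
The RawTokenLexer is the counterpart of the RawTokenFormatter listed in the diffstat (pygments/formatters/other.py). A hedged round-trip sketch, assuming both classes are importable from their respective packages:

    from pygments import highlight
    from pygments.lexers.agile import PythonLexer
    from pygments.lexers.special import RawTokenLexer
    from pygments.formatters import RawTokenFormatter, TerminalFormatter

    # dump a token stream in the raw "Token.Type<TAB>repr(value)" form ...
    raw = highlight('print 1 + 1', PythonLexer(), RawTokenFormatter())
    # ... and re-lex it later without running the original lexer again
    print highlight(raw, RawTokenLexer(), TerminalFormatter())
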
diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py
new file mode 100644
index 00000000..3a735d15
--- /dev/null
+++ b/pygments/lexers/templates.py
@@ -0,0 +1,323 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.templates
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various template engines.
+
+ :copyright: 2006 by Armin Ronacher, Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+import re
+try:
+ set
+except NameError:
+ from sets import Set as set
+
+from pygments.lexers.web import \
+ PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
+from pygments.lexer import \
+ Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, include, using
+from pygments.token import \
+ Text, Comment, Operator, Keyword, Name, String, Number, Other, Error
+
+__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
+ 'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
+ 'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
+ 'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
+ 'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
+ 'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
+ 'JavascriptDjangoLexer']
+
+
+class ErbLexer(Lexer):
+ name = 'ERB'
+ aliases = ['erb']
+
+ _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
+
+ def __init__(self, **options):
+ from pygments.lexers.agile import RubyLexer
+ self.ruby_lexer = RubyLexer(**options)
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ """
+ Since ERB doesn't allow "<%" and other tags inside of Ruby
+ blocks, a simple split approach works here; it would fail on
+ such nested tags, but ERB rules them out anyway.
+ """
+ tokens = self._block_re.split(text)
+ tokens.reverse()
+ state = idx = 0
+ try:
+ while True:
+ # text
+ if state == 0:
+ val = tokens.pop()
+ yield idx, Other, val
+ idx += len(val)
+ state = 1
+ # block starts
+ elif state == 1:
+ tag = tokens.pop()
+ # literals
+ if tag in ('<%%', '%%>'):
+ yield idx, Other, tag
+ idx += 3
+ state = 0
+ # comment
+ elif tag == '<%#':
+ yield idx, Comment.Preproc, tag
+ val = tokens.pop()
+ yield idx + 3, Comment, val
+ idx += 3 + len(val)
+ state = 2
+ # blocks or output
+ elif tag in ('<%', '<%=', '<%-'):
+ yield idx, Comment.Preproc, tag
+ idx += len(tag)
+ data = tokens.pop()
+ r_idx = 0
+ for r_idx, r_token, r_value in \
+ self.ruby_lexer.get_tokens_unprocessed(data):
+ yield r_idx + idx, r_token, r_value
+ idx += len(data)
+ state = 2
+ elif tag in ('%>', '-%>'):
+ yield idx, Error, tag
+ idx += len(tag)
+ state = 0
+ # % raw ruby statements
+ else:
+ yield idx, Comment.Preproc, tag[0]
+ r_idx = 0
+ for r_idx, r_token, r_value in \
+ self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
+ yield idx + 1 + r_idx, r_token, r_value
+ idx += len(tag)
+ state = 0
+ # block ends
+ elif state == 2:
+ tag = tokens.pop()
+ if tag not in ('%>', '-%>'):
+ yield idx, Other, tag
+ else:
+ yield idx, Comment.Preproc, tag
+ idx += len(tag)
+ state = 0
+ except IndexError:
+ return
+
+
+class SmartyLexer(RegexLexer):
+ name = 'Smarty'
+ aliases = ['smarty']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ # XXX: make smarty delimiters customizable somehow
+ 'root': [
+ (r'[^{]+', Other),
+ (r'(\{)(\*.*?\*)(\})',
+ bygroups(Comment.Preproc, Comment, Comment.Preproc)),
+ (r'(\{php\})(.*?)(\{/php\})',
+ bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
+ Comment.Preproc)),
+ (r'(\{)(/?[a-zA-Z_][a-zA-Z0-9_]*)(\s*)',
+ bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
+ (r'\{', Comment.Preproc, 'smarty')
+ ],
+ 'smarty': [
+ (r'\s+', Text),
+ (r'\}', Comment.Preproc, '#pop'),
+ (r'#[a-zA-Z_][a-zA-Z0-9_]*#', Name.Variable),
+ (r'\$[a-zA-Z_][a-zA-Z0-9_]*(\.[a-zA-Z0-9_]+)*', Name.Variable),
+ (r'[~!%^&*()+=|\[\]:;,.<>/?{}@-]', Operator),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r"[0-9](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Attribute)
+ ]
+ }
+
+
+class DjangoLexer(RegexLexer):
+ name = 'django template'
+ aliases = ['django']
+
+ tokens = {
+ 'root': [
+ (r'[^\{]+', Other),
+ (r'\{\{', Comment.Preproc, 'var'),
+ (r'(\{\%)(\s*)(comment)(\s*)(\%\})(.*?)'
+ r'(\{\%)(\s*)(endcomment)(\s*)(\%\})',
+ bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
+ Comment, Comment.Preproc, Text, Keyword, Text,
+ Comment.Preproc)),
+ (r'(\{\%)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
+ bygroups(Comment.Preproc, Text, Keyword), 'block'),
+ (r'\{', Other)
+ ],
+ 'varnames': [
+ (r'[a-zA-Z][a-zA-Z0-9_]*(\.[a-zA-Z][a-zA-Z0-9_]*)*', Name.Variable),
+ (r"(\|)([a-zA-Z_][a-zA-Z0-9_]*)(:'(\\\\|\\'|[^'])*')",
+ bygroups(Operator, Name.Function, String.Single)),
+ (r'(\|)([a-zA-Z_][a-zA-Z0-9_]*)(:"(\\\\|\\"|[^"])*")',
+ bygroups(Operator, Name.Function, String.Double)),
+ (r'(\|)([a-zA-Z_][a-zA-Z0-9_]*)',
+ bygroups(Operator, Name.Function))
+ ],
+ 'var': [
+ (r'\s+', Text),
+ include('varnames'),
+ (r'\}\}', Comment.Preproc, '#pop')
+ ],
+ 'block': [
+ (r'\s+', Text),
+ (r'(in|as|reversed|not|count|and|or|with)\b', Keyword),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ include('varnames'),
+ (r'\%\}', Comment.Preproc, '#pop'),
+ (r'.', Text)
+ ]
+ }
+
+
+class RhtmlLexer(DelegatingLexer):
+ name = 'RHTML'
+ aliases = ['rhtml', 'html+erb', 'html+ruby']
+ filenames = ['*.rhtml']
+
+ def __init__(self, **options):
+ super(RhtmlLexer, self).__init__(HtmlLexer, ErbLexer, **options)
+
+
+class XmlErbLexer(DelegatingLexer):
+ name = 'XML+Ruby'
+ aliases = ['xml+erb', 'xml+ruby']
+
+ def __init__(self, **options):
+ super(XmlErbLexer, self).__init__(XmlLexer, ErbLexer, **options)
+
+
+class CssErbLexer(DelegatingLexer):
+ name = 'CSS+Ruby'
+ aliases = ['css+erb', 'css+ruby']
+
+ def __init__(self, **options):
+ super(CssErbLexer, self).__init__(CssLexer, ErbLexer, **options)
+
+
+class JavascriptErbLexer(DelegatingLexer):
+ name = 'JavaScript+Ruby'
+ aliases = ['js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby']
+
+ def __init__(self, **options):
+ super(JavascriptErbLexer, self).__init__(JavascriptLexer, ErbLexer,
+ **options)
+
+
+class HtmlPhpLexer(DelegatingLexer):
+ name = 'HTML+PHP'
+ aliases = ['html+php']
+ filenames = ['*.phtml']
+
+ def __init__(self, **options):
+ super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options)
+
+
+class XmlPhpLexer(DelegatingLexer):
+ name = 'XML+PHP'
+ aliases = ['xml+php']
+
+ def __init__(self, **options):
+ super(XmlPhpLexer, self).__init__(XmlLexer, PhpLexer, **options)
+
+
+class CssPhpLexer(DelegatingLexer):
+ name = 'CSS+PHP'
+ aliases = ['css+php']
+
+ def __init__(self, **options):
+ super(CssPhpLexer, self).__init__(CssLexer, PhpLexer, **options)
+
+
+class JavascriptPhpLexer(DelegatingLexer):
+ name = 'JavaScript+PHP'
+ aliases = ['js+php', 'javascript+php']
+
+ def __init__(self, **options):
+ super(JavascriptPhpLexer, self).__init__(JavascriptLexer, PhpLexer,
+ **options)
+
+
+class HtmlSmartyLexer(DelegatingLexer):
+ name = 'HTML+Smarty'
+ aliases = ['html+smarty']
+
+ def __init__(self, **options):
+ super(HtmlSmartyLexer, self).__init__(HtmlLexer, SmartyLexer, **options)
+
+
+class XmlSmartyLexer(DelegatingLexer):
+ name = 'XML+Smarty'
+ aliases = ['xml+smarty']
+
+ def __init__(self, **options):
+ super(XmlSmartyLexer, self).__init__(XmlLexer, SmartyLexer, **options)
+
+
+class CssSmartyLexer(DelegatingLexer):
+ name = 'CSS+Smarty'
+ aliases = ['css+smarty']
+
+ def __init__(self, **options):
+ super(CssSmartyLexer, self).__init__(CssLexer, SmartyLexer, **options)
+
+
+class JavascriptSmartyLexer(DelegatingLexer):
+ name = 'JavaScript+Smarty'
+ aliases = ['js+smarty', 'javascript+smarty']
+
+ def __init__(self, **options):
+ super(JavascriptSmartyLexer, self).__init__(JavascriptLexer, SmartyLexer,
+ **options)
+
+
+class HtmlDjangoLexer(DelegatingLexer):
+ name = 'HTML+Django'
+ aliases = ['html+django']
+
+ def __init__(self, **options):
+ super(HtmlDjangoLexer, self).__init__(HtmlLexer, DjangoLexer, **options)
+
+
+class XmlDjangoLexer(DelegatingLexer):
+ name = 'XML+Django'
+ aliases = ['xml+django']
+
+ def __init__(self, **options):
+ super(XmlDjangoLexer, self).__init__(XmlLexer, DjangoLexer, **options)
+
+
+class CssDjangoLexer(DelegatingLexer):
+ name = 'CSS+Django'
+ aliases = ['css+django']
+
+ def __init__(self, **options):
+ super(CssDjangoLexer, self).__init__(CssLexer, DjangoLexer, **options)
+
+
+class JavascriptDjangoLexer(DelegatingLexer):
+ name = 'JavaScript+Django'
+ aliases = ['js+django', 'javascript+django']
+
+ def __init__(self, **options):
+ super(JavascriptDjangoLexer, self).__init__(JavascriptLexer, DjangoLexer,
+ **options)
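
All of the combined classes above are thin DelegatingLexer pairings: the template lexer runs first, and whatever it emits as Other is then handed to the markup lexer. A sketch (the template string is made up for illustration):

    from pygments import highlight
    from pygments.lexers.templates import HtmlDjangoLexer
    from pygments.formatters import HtmlFormatter

    tmpl = '<ul>{% for item in items %}<li>{{ item }}</li>{% endfor %}</ul>'
    # DjangoLexer handles the {% ... %} and {{ ... }} parts; HtmlLexer
    # re-lexes everything the template lexer passed through as Other
    print highlight(tmpl, HtmlDjangoLexer(), HtmlFormatter())
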
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
new file mode 100644
index 00000000..3461000a
--- /dev/null
+++ b/pygments/lexers/text.py
@@ -0,0 +1,183 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.text
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for non-source code file types: diffs, Makefiles, INI configs, etc.
+
+ :copyright: 2006 by Armin Ronacher, Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, include
+from pygments.token import \
+ Text, Comment, Keyword, Name, String, Generic, Operator, Number
+
+
+__all__ = ['IniLexer', 'MakefileLexer', 'DiffLexer', 'IrcLogsLexer',
+ 'TexLexer']
+
+
+class IniLexer(RegexLexer):
+ name = 'INI'
+ aliases = ['ini', 'cfg']
+ filenames = ['*.ini', '*.cfg']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r';.*?$', Comment),
+ (r'\[.*?\]$', Keyword),
+ (r'(.*?)(\s*)(=)(\s*)(.*?)$',
+ bygroups(Name.Attribute, Text, Operator, Text, String))
+ ]
+ }
+
+
+class MakefileLexer(RegexLexer):
+ name = 'Makefile'
+ aliases = ['make', 'makefile', 'mf']
+ filenames = ['*.mak', 'Makefile', 'makefile']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*?\n', Comment),
+ (r'(cmdswitches|error|message|include|if|ifdef|ifndef|else|'
+ r'else\s*if|else\s*ifdef|else\s*ifndef|endif|undef)\b', Keyword),
+ (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(=)(\s*)',
+ bygroups(Name.Variable, Text, Operator, Text), 'var'),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'([^\n:]+)(:)([ \t]*)', bygroups(Name.Function, Operator, Text),
+ 'block-header')
+ ],
+ 'var': [
+ (r'\\\n', String),
+ (r'\n', Text, '#pop'),
+ (r'\\', String),
+ (r'[^\\\n]+', String),
+ ],
+ 'block-header': [
+ (r'[^,\n]', String),
+ (r',', Text),
+ (r'\n[\t ]+', Text, 'block'),
+ (r'\n', Text, '#pop')
+ ],
+ 'block': [
+ (r'#.*?(?=\n)', Comment),
+ (r'\n[\t ]+', Text),
+ (r'[^\n$]+', String),
+ (r'\$[A-Za-z0-9_]+', String.Interpol),
+ (r'\$\(.*?\)', String.Interpol),
+ (r'\$', String),
+ (r'\n', Text, '#pop:2'),
+ ]
+ }
+
+
+class DiffLexer(RegexLexer):
+ name = 'Diff'
+ aliases = ['diff']
+ filenames = ['*.diff', '*.patch']
+
+ tokens = {
+ 'root': [
+ (r' .*\n', Text),
+ (r'\+.*\n', Generic.Inserted),
+ (r'-.*\n', Generic.Deleted),
+ (r'!.*\n', Generic.Strong),
+ (r'@.*\n', Generic.Subheading),
+ (r'Index.*\n', Generic.Heading),
+ (r'=.*\n', Generic.Heading),
+ (r'.*\n', Text),
+ ]
+ }
+
+
+class IrcLogsLexer(RegexLexer):
+ name = 'IRC logs'
+ aliases = ['irc']
+
+ flags = re.VERBOSE | re.MULTILINE
+ timestamp = r"""
+ ( (?: \[|\()? # Opening bracket or paren for the timestamp
+ (?: # Timestamp
+ (?: (?:\d{1,4} [-/]?)+ # Date as - or /-separated groups of digits
+ [T ])? # Date/time separator: T or space
+ (?: \d?\d [:.]?)+ # Time as :/.-separated groups of 1 or 2 digits
+ )
+ (?: \]|\))?\s+ )? # Closing bracket or paren for the timestamp
+ """
+ tokens = {
+ 'root': [
+ # normal msgs
+ ("^" + timestamp + r"""
+ (\s*<.*?>\s+) # Nick """,
+ bygroups(Comment.Preproc, Name.Tag), 'msg'),
+ # /me msgs
+ ("^" + timestamp + r"""
+ (\s*[*]\s+) # Star
+ ([^\s]+\s+.*?\n) # Nick + rest of message """,
+ bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
+ # join/part msgs
+ ("^" + timestamp + r"""
+ (\s*(?:[*]{3}|-!-)\s*) # Star(s)
+ ([^\s]+\s+) # Nick + Space
+ (.*?\n) # Rest of message """,
+ bygroups(Comment.Preproc, Keyword, String, Comment)),
+ (r"^.*?\n", Text),
+ ],
+ 'msg': [
+ (r"[^\s]+:", Name.Attribute), # Prefix
+ (r".*?\n", Text, '#pop'),
+ ],
+ }
+
+
+class TexLexer(RegexLexer):
+ name = 'TeX'
+ aliases = ['tex', 'latex']
+ filenames = ['*.tex', '*.aux', '*.toc']
+
+ tokens = {
+ 'general': [
+ (r'%.*?\n', Comment),
+ (r'[{}]', Name.Builtin),
+ (r'[&_^]', Name.Builtin),
+ ],
+ 'root': [
+ (r'\\\[', String.Backtick, 'displaymath'),
+ (r'\\\(', String, 'inlinemath'),
+ (r'\$\$', String.Backtick, 'displaymath'),
+ (r'\$', String, 'inlinemath'),
+ (r'\\([a-zA-Z]+|.)', Keyword, 'command'),
+ include('general'),
+ (r'[^\\$%&_^{}]+', Text),
+ ],
+ 'math': [
+ (r'\\([a-zA-Z]+|.)', Name.Variable),
+ include('general'),
+ (r'[0-9]+', Number),
+ (r'[-=!+*/()\[\]]', Operator),
+ (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin),
+ ],
+ 'inlinemath': [
+ (r'\\\)', String, '#pop'),
+ (r'\$', String, '#pop'),
+ include('math'),
+ ],
+ 'displaymath': [
+ (r'\\\]', String, '#pop'),
+ (r'\$\$', String, '#pop'),
+ (r'\$', Name.Builtin),
+ include('math'),
+ ],
+ 'command': [
+ (r'\[.*?\]', Name.Attribute),
+ (r'\*', Keyword),
+ (r'', Text, '#pop'),
+ ],
+ }
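
A sketch of the DiffLexer applied to a tiny patch, rendered for the terminal (illustration only):

    from pygments import highlight
    from pygments.lexers.text import DiffLexer
    from pygments.formatters import TerminalFormatter

    patch = (
        "--- a/foo.txt\n"
        "+++ b/foo.txt\n"
        "@@ -1 +1 @@\n"
        "-old line\n"
        "+new line\n"
    )
    print highlight(patch, DiffLexer(), TerminalFormatter())
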
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
new file mode 100644
index 00000000..5bf0cfcb
--- /dev/null
+++ b/pygments/lexers/web.py
@@ -0,0 +1,332 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.web
+ ~~~~~~~~~~~~~~~~~~
+
+ Lexers for web-related languages: JavaScript, CSS, HTML, XML, PHP.
+
+ :copyright: 2006 by Georg Brandl, Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+import re
+try:
+ set
+except NameError:
+ from sets import Set as set
+
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, using
+from pygments.token import \
+ Text, Comment, Operator, Keyword, Name, String, Number, Other
+from pygments.util import get_bool_opt, get_list_opt
+
+
+__all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'CssLexer',
+ 'PhpLexer']
+
+
+class JavascriptLexer(RegexLexer):
+ name = 'JavaScript'
+ aliases = ['js', 'javascript']
+ filenames = ['*.js']
+
+ flags = re.DOTALL
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment),
+ (r'/\*.*?\*/', Comment),
+ (r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
+ (r'[~\^\*!%&\[\]\{\}\(\)<>\|+=:;,./?-]', Operator),
+ (r'(for|in|while|do|break|return|continue|if|else|throw|try|'
+ r'catch|var|with|const|label|function|new|typeof|'
+ r'instanceof|this)\b', Keyword),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'window)\b', Name.Builtin),
+ (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
+ (r'[0-9]+', Number),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class CssLexer(RegexLexer):
+ name = 'CSS'
+ aliases = ['css']
+ filenames = ['*.css']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'/\*(?:.|\n)*?\*/', Comment),
+ (r'{', Operator, 'content'),
+ (r'\:[a-zA-Z0-9_-]+', Name.Decorator),
+ (r'\.[a-zA-Z0-9_-]+', Name.Class),
+ (r'\#[a-zA-Z0-9_-]+', Name.Function),
+ (r'[a-zA-Z0-9_-]+', Name.Tag),
+ (r'[~\^\*!%&\[\]\(\)<>\|+=@:;,./?-]', Operator),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single)
+ ],
+ 'content': [
+ (r'\s+', Text),
+ (r'}', Operator, '#pop'),
+ (r'url\(.*?\)', String.Other),
+ (r'^@.*?$', Comment.Preproc),
+ (r'(azimuth|background-attachment|background-color|'
+ r'background-image|background-position|background-repeat|'
+ r'background|border-bottom-color|border-bottom-style|'
+ r'border-bottom-width|border-left-color|border-left-style|'
+ r'border-left-width|border-right|border-right-color|'
+ r'border-right-style|border-right-width|border-top-color|'
+ r'border-top-style|border-top-width|border-bottom|'
+ r'border-collapse|border-left|border-width|border-color|'
+ r'border-spacing|border-style|border-top|border|caption-side|'
+ r'clear|clip|color|content|counter-increment|counter-reset|'
+ r'cue-after|cue-before|cue|cursor|direction|display|'
+ r'elevation|empty-cells|float|font-family|font-size|'
+ r'font-size-adjust|font-stretch|font-style|font-variant|'
+ r'font-weight|font|height|letter-spacing|line-height|'
+ r'list-style-type|list-style-image|list-style-position|'
+ r'list-style|margin-bottom|margin-left|margin-right|'
+ r'margin-top|margin|marker-offset|marks|max-height|max-width|'
+ r'min-height|min-width|opacity|orphans|outline|outline-color|'
+ r'outline-style|outline-width|overflow|padding-bottom|'
+ r'padding-left|padding-right|padding-top|padding|page|'
+ r'page-break-after|page-break-before|page-break-inside|'
+ r'pause-after|pause-before|pause|pitch|pitch-range|'
+ r'play-during|position|quotes|richness|right|size|'
+ r'speak-header|speak-numeral|speak-punctuation|speak|'
+ r'speech-rate|stress|table-layout|text-align|text-decoration|'
+ r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
+ r'vertical-align|visibility|voice-family|volume|white-space|'
+ r'widows|width|word-spacing|z-index|bottom|left|'
+ r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
+ r'behind|below|bidi-override|blink|block|bold|bolder|both|'
+ r'capitalize|center-left|center-right|center|circle|'
+ r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
+ r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
+ r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
+ r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
+ r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
+ r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
+ r'inherit|inline-table|inline|inset|inside|invert|italic|'
+ r'justify|katakana-iroha|katakana|landscape|larger|large|'
+ r'left-side|leftwards|level|lighter|line-through|list-item|'
+ r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
+ r'lower|low|medium|message-box|middle|mix|monospace|'
+ r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
+ r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
+ r'open-quote|outset|outside|overline|pointer|portrait|px|'
+ r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
+ r'rightwards|s-resize|sans-serif|scroll|se-resize|'
+ r'semi-condensed|semi-expanded|separate|serif|show|silent|'
+ r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
+ r'spell-out|square|static|status-bar|super|sw-resize|'
+ r'table-caption|table-cell|table-column|table-column-group|'
+ r'table-footer-group|table-header-group|table-row|'
+ r'table-row-group|text|text-bottom|text-top|thick|thin|'
+ r'transparent|ultra-condensed|ultra-expanded|underline|'
+ r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
+ r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
+ r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Keyword),
+ (r'(indigo|gold|firebrick|indianred|yellow|darkolivegreen|'
+ r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
+ r'mediumslateblue|black|springgreen|crimson|lightsalmon|brown|'
+ r'turquoise|olivedrab|cyan|silver|skyblue|gray|darkturquoise|'
+ r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|teal|'
+ r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
+ r'violet|navy|orchid|blue|ghostwhite|honeydew|cornflowerblue|'
+ r'darkblue|darkkhaki|mediumpurple|cornsilk|red|bisque|slategray|'
+ r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
+ r'gainsboro|mediumturquoise|floralwhite|coral|purple|lightgrey|'
+ r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
+ r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
+ r'lightcoral|orangered|navajowhite|lime|palegreen|burlywood|'
+ r'seashell|mediumspringgreen|fuchsia|papayawhip|blanchedalmond|'
+ r'peru|aquamarine|white|darkslategray|ivory|dodgerblue|'
+ r'lemonchiffon|chocolate|orange|forestgreen|slateblue|olive|'
+ r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
+ r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
+ r'plum|aqua|darkgoldenrod|maroon|sandybrown|magenta|tan|'
+ r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
+ r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
+ r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
+ r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
+ r'lightyellow|lavenderblush|linen|mediumaquamarine|green|'
+ r'blueviolet|peachpuff)\b', Name.Builtin),
+ (r'\!important', Comment.Preproc),
+ (r'/\*(?:.|\n)*?\*/', Comment),
+ (r'\#[a-zA-Z0-9]{1,6}', Number),
+ (r'[\.-]?[0-9]*[\.]?[0-9]+(em|px|\%|pt|pc|in|mm|cm|ex)', Number),
+ (r'-?[0-9]+', Number),
+ (r'[~\^\*!%&\[\]\(\)<>\|+=@:;,./?-]', Operator),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[a-zA-Z][a-zA-Z0-9]+', Name)
+ ]
+ }
+
+
+class HtmlLexer(RegexLexer):
+ name = 'HTML'
+ aliases = ['html']
+ filenames = ['*.html', '*.htm', '*.xhtml']
+
+ flags = re.IGNORECASE | re.DOTALL
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ ('&.*?;', Name.Entity),
+ (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
+ ('<!--', Comment, 'comment'),
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'<\s*script\s*', Name.Tag, ('script-content', 'tag')),
+ (r'<\s*style\s*', Name.Tag, ('style-content', 'tag')),
+ (r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[a-zA-Z0-9:]+\s*>', Name.Tag),
+ ],
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'script-content': [
+ (r'<\s*/\s*script\s*>', Name.Tag, '#pop'),
+ (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
+ ],
+ 'style-content': [
+ (r'<\s*/\s*style\s*>', Name.Tag, '#pop'),
+ (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
+ ],
+ 'attr': [
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
+
+
+class PhpLexer(RegexLexer):
+ name = 'PHP'
+ aliases = ['php', 'php3', 'php4', 'php5']
+ filenames = ['*.php', '*.php[345]']
+
+ flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
+ tokens = {
+ 'root': [
+ (r'<\?(php)?', Comment.Preproc, 'php'),
+ (r'[^<]+', Other),
+ (r'<', Other)
+ ],
+ 'php': [
+ (r'\?>', Comment.Preproc, '#pop'),
+ (r'<<<([a-zA-Z_][a-zA-Z0-9_]*)\n.*?\n\1\;?\n', String),
+ (r'\s+', Text),
+ (r'#.*?\n', Comment),
+ (r'//.*?\n', Comment),
+ (r'/\*.*?\*/', Comment),
+ (r'(->|::)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
+ bygroups(Operator, Text, Name.Attribute)),
+ (r'[~!%^&*()+=|\[\]:;,.<>/?{}@-]', Text),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(function)(\s+)', bygroups(Keyword, Text), 'functionname'),
+ (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
+ r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
+ r'FALSE|print|for|require|continue|foreach|require_once|'
+ r'declare|return|default|static|do|switch|die|stdClass|'
+ r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
+ r'virtual|endfor|include_once|while|endforeach|global|__FILE__|'
+ r'endif|list|__LINE__|endswitch|new|__sleep|endwhile|not|'
+ r'array|__wakeup|E_ALL|NULL)\b', Keyword),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'\$[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
+ ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
+ (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single)
+ ],
+ 'classname': [
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ ],
+ 'functionname': [
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
+ ]
+ }
+
+ def __init__(self, **options):
+ self.funcnamehighlighting = get_bool_opt(
+ options, 'funcnamehighlighting', True)
+ self.disabledmodules = get_list_opt(
+ options, 'disabledmodules', ['unknown'])
+ self.startinline = get_bool_opt(options, 'startinline', False)
+
+ # collect activated functions in a set
+ self._functions = set()
+ if self.funcnamehighlighting:
+ from pygments.lexers._phpbuiltins import MODULES
+ for key, value in MODULES.iteritems():
+ if key not in self.disabledmodules:
+ self._functions.update(value)
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ if self.startinline:
+ stack.append('php')
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name.Other:
+ if value in self._functions:
+ yield index, Name.Function, value
+ continue
+ yield index, token, value
+
+
+class XmlLexer(RegexLexer):
+ flags = re.MULTILINE | re.DOTALL
+
+ name = 'XML'
+ aliases = ['xml']
+ filenames = ['*.xml']
+
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ ('&.*?;', Name.Entity),
+ (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
+ ('<!--', Comment, 'comment'),
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'<\s*[a-zA-Z0-9:-]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[a-zA-Z0-9:-]+\s*>', Name.Tag),
+ ],
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'attr': [
+ ('\s+', Text),
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
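
The PhpLexer options defined above can be exercised directly; startinline is the one that matters when highlighting a snippet without a leading <?php tag. A hedged sketch:

    from pygments import highlight
    from pygments.lexers.web import PhpLexer
    from pygments.formatters import HtmlFormatter

    snippet = '$greeting = "Hello"; echo strtolower($greeting);'
    # startinline=True begins lexing in the 'php' state right away;
    # funcnamehighlighting (on by default) tags known builtins such as
    # strtolower as Name.Function
    print highlight(snippet, PhpLexer(startinline=True), HtmlFormatter())
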
diff --git a/pygments/style.py b/pygments/style.py
new file mode 100644
index 00000000..d1c2b4eb
--- /dev/null
+++ b/pygments/style.py
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.style
+ ~~~~~~~~~~~~~
+
+ Basic style object.
+
+ :copyright: 2006 by Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.token import Token, STANDARD_TYPES
+
+
+class StyleMeta(type):
+
+ def __new__(mcs, name, bases, dct):
+ obj = type.__new__(mcs, name, bases, dct)
+ for token in STANDARD_TYPES:
+ if not token in obj.styles:
+ obj.styles[token] = ''
+
+ def colorformat(text):
+ if text[0:1] == '#':
+ col = text[1:]
+ if len(col) == 6:
+ return col
+ elif len(col) == 3:
+ return col[0]*2 + col[1]*2 + col[2]*2
+ elif text == '':
+ return ''
+ assert False, "wrong color format %r" % text
+
+ _styles = obj._styles = {}
+
+ for ttype in obj.styles:
+ for token in ttype.split():
+ if token in _styles:
+ continue
+ ndef = _styles.get(token.parent, None)
+ styledefs = obj.styles.get(token, '').split()
+ if not ndef or token is None:
+ ndef = ['', 0, 0, 0, '', '']
+ elif 'noinherit' in styledefs and token is not Token:
+ ndef = _styles[Token][:]
+ else:
+ ndef = ndef[:]
+ _styles[token] = ndef
+ for styledef in obj.styles.get(token, '').split():
+ if styledef == 'noinherit':
+ pass
+ elif styledef == 'bold':
+ ndef[1] = 1
+ elif styledef == 'nobold':
+ ndef[1] = 0
+ elif styledef == 'italic':
+ ndef[2] = 1
+ elif styledef == 'noitalic':
+ ndef[2] = 0
+ elif styledef == 'underline':
+ ndef[3] = 1
+ elif styledef == 'nounderline':
+ ndef[3] = 0
+ elif styledef[:3] == 'bg:':
+ ndef[4] = colorformat(styledef[3:])
+ elif styledef[:7] == 'border:':
+ ndef[5] = colorformat(styledef[7:])
+ else:
+ ndef[0] = colorformat(styledef)
+
+ return obj
+
+ def style_for_token(cls, token):
+ t = cls._styles[token]
+ return {
+ 'color': t[0] or None,
+ 'bold': bool(t[1]),
+ 'italic': bool(t[2]),
+ 'underline': bool(t[3]),
+ 'bgcolor': t[4] or None,
+ 'border': t[5] or None
+ }
+
+ def list_styles(cls):
+ return list(self)
+
+ def __iter__(cls):
+ for token in cls._styles:
+ yield token, cls.style_for_token(token)
+
+ def __len__(cls):
+ return len(cls._styles)
+
+
+class Style(object):
+ __metaclass__ = StyleMeta
+
+ #: overall background color (``None`` means transparent)
+ background_color = '#ffffff'
+
+ #: Style definitions for individual token types.
+ styles = {}
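
The StyleMeta metaclass expands the shorthand definitions at class-creation time, so a new style is just a dict mapping token types to definition strings. A minimal, made-up example (TinyStyle is hypothetical, for illustration only):

    from pygments.style import Style
    from pygments.token import Comment, Keyword

    class TinyStyle(Style):
        # hypothetical style, not part of the commit
        background_color = '#ffffff'
        styles = {
            Comment: 'italic #888888',
            Keyword: 'bold #000080',
        }

    # style_for_token returns the expanded form, e.g. color/bold/italic flags
    print TinyStyle.style_for_token(Keyword)
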
diff --git a/pygments/styles/__init__.py b/pygments/styles/__init__.py
new file mode 100644
index 00000000..bfa0730d
--- /dev/null
+++ b/pygments/styles/__init__.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles
+ ~~~~~~~~~~~~~~
+
+ Contains built-in styles.
+
+ :copyright: 2006 by Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+#: Maps style names to 'submodule::classname'.
+STYLE_MAP = {
+ 'default': 'default::DefaultStyle',
+ 'emacs': 'default::DefaultStyle',
+ 'friendly': 'friendly::FriendlyStyle',
+ 'colorful': 'colorful::ColorfulStyle',
+ 'autumn': 'autumn::AutumnStyle',
+ 'murphy': 'murphy::MurphyStyle',
+ 'manni': 'manni::ManniStyle',
+ 'perldoc': 'perldoc::PerldocStyle',
+ 'pastie': 'pastie::PastieStyle',
+ 'borland': 'borland::BorlandStyle',
+ 'trac': 'trac::TracStyle',
+ 'native': 'native::NativeStyle'
+}
+
+
+def get_style_by_name(name):
+ if name not in STYLE_MAP:
+ raise ValueError("Style %r not found" % name)
+
+ mod, cls = STYLE_MAP[name].split('::')
+
+ mod = __import__('pygments.styles.' + mod, None, None, [cls])
+ return getattr(mod, cls)
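
get_style_by_name resolves one of the STYLE_MAP entries to its class. A sketch of wiring a named style into a formatter; passing the class via the style keyword is an assumption based on the formatter base class in this commit:

    from pygments import highlight
    from pygments.lexers.agile import PythonLexer
    from pygments.formatters import HtmlFormatter
    from pygments.styles import get_style_by_name

    native = get_style_by_name('native')
    print highlight('x = 42  # answer', PythonLexer(), HtmlFormatter(style=native))
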
diff --git a/pygments/styles/autumn.py b/pygments/styles/autumn.py
new file mode 100644
index 00000000..3fce11fd
--- /dev/null
+++ b/pygments/styles/autumn.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.autumn
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ A colorful style, inspired by the terminal highlighting style.
+
+ :copyright: 2006 by Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic
+
+
+class AutumnStyle(Style):
+
+ default_style = ""
+
+ styles = {
+ Comment: "italic #aaaaaa",
+ Comment.Preproc: "noitalic #4c8317",
+
+ Keyword: "#0000aa",
+ Keyword.Type: "#00aaaa",
+
+ Operator.Word: "#0000aa",
+
+ Name.Builtin: "#00aaaa",
+ Name.Function: "#00aa00",
+ Name.Class: "underline #00aa00",
+ Name.Namespace: "underline #00aaaa",
+ Name.Variable: "#aa0000",
+ Name.Constant: "#aa0000",
+ Name.Entity: "bold #800",
+ Name.Attribute: "#1e90ff",
+ Name.Tag: "bold #1e90ff",
+ Name.Decorator: "#888888",
+
+ String: "#aa5500",
+ String.Symbol: "#0000aa",
+ String.Regex: "#009999",
+
+ Number: "#009999",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#aa0000",
+ Generic.Inserted: "#00aa00",
+ Generic.Error: "#aa0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "#555555",
+ Generic.Output: "#888888",
+ Generic.Traceback: "#aa0000",
+
+ Error: "#F00 bg:#FAA"
+ }
diff --git a/pygments/styles/borland.py b/pygments/styles/borland.py
new file mode 100644
index 00000000..cc5fb6af
--- /dev/null
+++ b/pygments/styles/borland.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.borland
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Style similar to the one used in the Borland IDEs.
+
+ :copyright: 2006 by Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic
+
+
+class BorlandStyle(Style):
+
+ default_style = ''
+
+ styles = {
+ Comment: 'italic #008800',
+ Comment.Preproc: 'noitalic',
+
+ String: '#0000FF',
+ Number: '#0000FF',
+ Keyword: 'bold',
+ Operator.Word: 'bold',
+ Name.Tag: 'bold',
+ Name.Attribute: 'italic',
+
+ Generic.Heading: '#999999',
+ Generic.Subheading: '#aaaaaa',
+ Generic.Deleted: 'bg:#ffdddd #000000',
+ Generic.Inserted: 'bg:#ddffdd #000000',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/pygments/styles/colorful.py b/pygments/styles/colorful.py
new file mode 100644
index 00000000..ecb983d5
--- /dev/null
+++ b/pygments/styles/colorful.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.colorful
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ A colorful style, inspired by CodeRay.
+
+ :copyright: 2006 by Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic
+
+
+class ColorfulStyle(Style):
+
+ default_style = ""
+
+ styles = {
+ Comment: "#888",
+ Comment.Preproc: "#579",
+
+ Keyword: "bold #080",
+ Keyword.Pseudo: "#038",
+ Keyword.Type: "#339",
+
+ Operator: "#333",
+ Operator.Word: "bold #000",
+
+ Name.Builtin: "#007020",
+ Name.Function: "bold #06B",
+ Name.Class: "bold #B06",
+ Name.Namespace: "bold #0e84b5",
+ Name.Exception: "bold #F00",
+ Name.Variable: "#963",
+ Name.Variable.Instance: "#33B",
+ Name.Variable.Class: "#369",
+ Name.Variable.Global: "bold #d70",
+ Name.Constant: "bold #036",
+ Name.Label: "bold #970",
+ Name.Entity: "bold #800",
+ Name.Attribute: "#00C",
+ Name.Tag: "#070",
+ Name.Decorator: "bold #555",
+
+ String: "bg:#fff0f0",
+ String.Char: "#04D bg:",
+ String.Doc: "#D42 bg:",
+ String.Interpol: "bg:#eee",
+ String.Escape: "bold #666",
+ String.Regex: "bg:#fff0ff #000",
+ String.Symbol: "#A60 bg:",
+ String.Other: "#D20",
+
+ Number: "bold #60E",
+ Number.Integer: "bold #00D",
+ Number.Float: "bold #60E",
+ Number.Hex: "bold #058",
+ Number.Oct: "bold #40E",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #c65d09",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "#F00 bg:#FAA"
+ }
diff --git a/pygments/styles/default.py b/pygments/styles/default.py
new file mode 100644
index 00000000..471cf55c
--- /dev/null
+++ b/pygments/styles/default.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.default
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ The default highlighting style for Pygments.
+
+ :copyright: 2006 by Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic
+
+
+class DefaultStyle(Style):
+ """
+ The default style (inspired by Emacs 22).
+ """
+
+ background_color = "#f2f2f2"
+ default_style = ""
+
+ styles = {
+ Comment: "italic #008800",
+ Comment.Preproc: "noitalic",
+
+ Keyword: "bold #AA22FF",
+ Keyword.Pseudo: "nobold",
+ Keyword.Type: "",
+
+ Operator: "#666666",
+ Operator.Word: "bold #AA22FF",
+
+ Name.Builtin: "#AA22FF",
+ Name.Function: "#00A000",
+ Name.Class: "#0000FF",
+ Name.Namespace: "bold #0000FF",
+ Name.Exception: "bold #D2413A",
+ Name.Variable: "#B8860B",
+ Name.Constant: "#880000",
+ Name.Label: "#A0A000",
+ Name.Entity: "bold #999999",
+ Name.Attribute: "#BB4444",
+ Name.Tag: "bold #008000",
+ Name.Decorator: "#AA22FF",
+
+ String: "#BB4444",
+ String.Doc: "italic",
+ String.Interpol: "bold #BB6688",
+ String.Escape: "bold #BB6622",
+ String.Regex: "#BB6688",
+ String.Symbol: "#B8860B",
+ String.Other: "#008000",
+ Number: "#666666",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #000080",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "border:#FF0000"
+ }
diff --git a/pygments/styles/friendly.py b/pygments/styles/friendly.py
new file mode 100644
index 00000000..6a11f66f
--- /dev/null
+++ b/pygments/styles/friendly.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.friendly
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ A modern style based on the VIM pyte theme.
+
+ :copyright: 2006 by Georg Brandl, Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic
+
+
+class FriendlyStyle(Style):
+
+ background_color = "#f0f0f0"
+ default_style = ""
+
+ styles = {
+ Comment: "italic #60a0b0",
+ Comment.Preproc: "noitalic #007020",
+
+ Keyword: "bold #007020",
+ Keyword.Pseudo: "nobold",
+ Keyword.Type: "",
+
+ Operator: "#666666",
+ Operator.Word: "bold #007020",
+
+ Name.Builtin: "#007020",
+ Name.Function: "#06287e",
+ Name.Class: "bold #0e84b5",
+ Name.Namespace: "bold #0e84b5",
+ Name.Exception: "#007020",
+ Name.Variable: "#bb60d5",
+ Name.Constant: "#60add5",
+ Name.Label: "bold #002070",
+ Name.Entity: "bold #d55537",
+ Name.Attribute: "#4070a0",
+ Name.Tag: "bold #062873",
+ Name.Decorator: "bold #555555",
+
+ String: "#4070a0",
+ String.Doc: "italic",
+ String.Interpol: "italic #70a0d0",
+ String.Escape: "bold #4070a0",
+ String.Regex: "#235388",
+ String.Symbol: "#517918",
+ String.Other: "#c65d09",
+ Number: "#40a070",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #c65d09",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "border:#FF0000"
+ }
diff --git a/pygments/styles/manni.py b/pygments/styles/manni.py
new file mode 100644
index 00000000..8e485355
--- /dev/null
+++ b/pygments/styles/manni.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.manni
+ ~~~~~~~~~~~~~~~~~~~~
+
+ A colorful style, inspired by the terminal highlighting style.
+
+ This is a port of the style used in the `php port`_ of Pygments
+ by Manni. The style is called 'default' there.
+
+ As of now, Manni's PHP highlighter isn't released under an open
+ source license, but since it uses code from the Pygments library
+ it will have to become LGPL-compatible sooner or later. ;-)
+
+ .. _php port: http://svn.fnord.name/manni/fnord.bb/lib/Highlighter/
+
+ :copyright: 2006 by Armin Ronacher, Manni <manni@fnord.name>.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic
+
+
+class ManniStyle(Style):
+
+ background_color = '#f0f3f3'
+
+ styles = {
+ Comment: 'italic #0099FF',
+ Comment.Preproc: 'noitalic #009999',
+
+ Keyword: 'bold #006699',
+ Keyword.Pseudo: 'nobold',
+ Keyword.Type: '#007788',
+
+ Operator: '#555555',
+ Operator.Word: 'bold #000000',
+
+ Name.Builtin: '#336666',
+ Name.Function: '#CC00FF',
+ Name.Class: 'bold #00AA88',
+ Name.Namespace: 'bold #00CCFF',
+ Name.Exception: 'bold #CC0000',
+ Name.Variable: '#003333',
+ Name.Constant: '#336600',
+ Name.Label: '#9999FF',
+ Name.Entity: 'bold #999999',
+ Name.Attribute: '#330099',
+ Name.Tag: 'bold #330099',
+ Name.Decorator: '#9999FF',
+
+ String: '#CC3300',
+ String.Doc: 'italic',
+ String.Interpol: '#AA0000',
+ String.Escape: 'bold #CC3300',
+ String.Regex: '#33AAAA',
+ String.Symbol: '#FFCC33',
+ String.Other: '#CC3300',
+
+ Number: '#FF6600',
+
+ Generic.Heading: 'bold #003300',
+ Generic.Subheading: 'bold #003300',
+ Generic.Deleted: 'border:#CC0000 bg:#FFCCCC',
+ Generic.Inserted: 'border:#00CC00 bg:#CCFFCC',
+ Generic.Error: '#FF0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: 'bold #000099',
+ Generic.Output: '#AAAAAA',
+ Generic.Traceback: '#99CC66',
+
+ Error: 'bg:#FFAAAA #AA0000'
+ }
diff --git a/pygments/styles/murphy.py b/pygments/styles/murphy.py
new file mode 100644
index 00000000..c9e8dc7f
--- /dev/null
+++ b/pygments/styles/murphy.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.murphy
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Murphy's style from CodeRay.
+
+ :copyright: 2006 by Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic
+
+
+class MurphyStyle(Style):
+
+ default_style = ""
+
+ styles = {
+ Comment: "#666 italic",
+ Comment.Preproc: "#579 noitalic",
+
+ Keyword: "bold #289",
+ Keyword.Pseudo: "#08f",
+ Keyword.Type: "#66f",
+
+ Operator: "#333",
+ Operator.Word: "bold #000",
+
+ Name.Builtin: "#072",
+ Name.Function: "bold #5ed",
+ Name.Class: "bold #e9e",
+ Name.Namespace: "bold #0e84b5",
+ Name.Exception: "bold #F00",
+ Name.Variable: "#036",
+ Name.Variable.Instance: "#aaf",
+ Name.Variable.Class: "#ccf",
+ Name.Variable.Global: "#f84",
+ Name.Constant: "bold #5ed",
+ Name.Label: "bold #970",
+ Name.Entity: "#800",
+ Name.Attribute: "#007",
+ Name.Tag: "#070",
+ Name.Decorator: "bold #555",
+
+ String: "bg:#e0e0ff",
+ String.Char: "#88F bg:",
+ String.Doc: "#D42 bg:",
+ String.Interpol: "bg:#eee",
+ String.Escape: "bold #666",
+ String.Regex: "bg:#e0e0ff #000",
+ String.Symbol: "#fc8 bg:",
+ String.Other: "#f88",
+
+ Number: "bold #60E",
+ Number.Integer: "bold #66f",
+ Number.Float: "bold #60E",
+ Number.Hex: "bold #058",
+ Number.Oct: "bold #40E",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #c65d09",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "#F00 bg:#FAA"
+ }
diff --git a/pygments/styles/native.py b/pygments/styles/native.py
new file mode 100644
index 00000000..e50a6242
--- /dev/null
+++ b/pygments/styles/native.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.native
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Pygments version of my "native" vim theme.
+
+ :copyright: 2006 by Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Text, Token
+
+
+class NativeStyle(Style):
+
+ background_color = '#202020'
+
+ styles = {
+ Token: '#d0d0d0',
+
+ Comment: 'italic #999999',
+ Comment.Preproc: 'noitalic bold #cd2828',
+
+ Keyword: 'bold #6ab825',
+ Keyword.Pseudo: 'nobold',
+
+ String: '#ed9d13',
+ String.Other: '#ffa500',
+
+ Number: '#3677a9',
+
+ Name.Builtin: '#24909d',
+ Name.Variable: '#40ffff',
+ Name.Constant: '#40ffff',
+ Name.Class: 'underline #447fcf',
+ Name.Function: '#447fcf',
+ Name.Namespace: 'underline #447fcf',
+ Name.Exception: '#bbbbbb',
+ Name.Tag: 'bold #6ab825',
+ Name.Attribute: '#bbbbbb',
+ Name.Decorator: '#ffa500',
+
+ Generic.Heading: 'bold #ffffff',
+ Generic.Subheading: 'underline #ffffff',
+ Generic.Deleted: '#d22323',
+ Generic.Inserted: '#589819',
+ Generic.Error: '#d22323',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#aaaaaa',
+ Generic.Output: '#cccccc',
+ Generic.Traceback: '#d22323',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/pygments/styles/pastie.py b/pygments/styles/pastie.py
new file mode 100644
index 00000000..fdee664d
--- /dev/null
+++ b/pygments/styles/pastie.py
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.pastie
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Style similar to the `pastie`_ default style.
+
+ .. _pastie: http://pastie.caboo.se/
+
+ :copyright: 2006 by Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic
+
+
+class PastieStyle(Style):
+
+ default_style = ''
+
+ styles = {
+ Comment: '#888888',
+ Comment.Preproc: 'bold #cc0000',
+
+ String: 'bg:#fff0f0 #dd2200',
+ String.Regex: 'bg:#fff0ff #008800',
+ String.Other: 'bg:#f0fff0 #22bb22',
+ String.Symbol: '#aa6600',
+ String.Interpol: '#3333bb',
+ String.Escape: '#0044dd',
+
+ Operator.Word: '#008800',
+
+ Keyword: 'bold #008800',
+ Keyword.Pseudo: 'nobold',
+ Keyword.Type: '#888888',
+
+ Name.Class: 'bold #bb0066',
+ Name.Exception: 'bold #bb0066',
+ Name.Function: 'bold #0066bb',
+ Name.Module: 'bold #bb0066',
+ Name.Builtin: '#003388',
+ Name.Variable: '#336699',
+ Name.Variable.Class: '#336699',
+ Name.Variable.Instance: '#3333bb',
+ Name.Variable.Global: '#dd7700',
+ Name.Constant: 'bold #003366',
+ Name.Tag: 'bold #bb0066',
+ Name.Attribute: '#336699',
+ Name.Decorator: '#555555',
+
+ Number: 'bold #0000DD',
+
+ Generic.Heading: '#999999',
+ Generic.Subheading: '#aaaaaa',
+ Generic.Deleted: 'bg:#ffdddd #000000',
+ Generic.Inserted: 'bg:#ddffdd #000000',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/pygments/styles/perldoc.py b/pygments/styles/perldoc.py
new file mode 100644
index 00000000..27c9d834
--- /dev/null
+++ b/pygments/styles/perldoc.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.perldoc
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Style similar to the style used in the `perldoc`_ code blocks.
+
+ .. _perldoc: http://perldoc.perl.org/
+
+ :copyright: 2006 by Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic
+
+
+class PerldocStyle(Style):
+
+ background_color = '#eeeedd'
+ default_style = ''
+
+ styles = {
+ Comment: '#228B22',
+ Comment.Preproc: '#1e889b',
+
+ String: '#CD5555',
+ String.Heredoc: '#1c7e71 italic',
+ String.Regex: '#1c7e71',
+ String.Other: '#cb6c20',
+
+ Number: '#B452CD',
+
+ Operator.Word: '#8B008B',
+
+ Keyword: '#8B008B bold',
+ Keyword.Type: '#a7a7a7',
+
+ Name.Class: '#008b45 bold',
+ Name.Exception: '#008b45 bold',
+ Name.Function: '#008b45',
+ Name.Namespace: '#008b45 underline',
+ Name.Variable: '#00688B',
+ Name.Constant: '#00688B',
+ Name.Decorator: '#707a7c',
+ Name.Tag: '#8B008B bold',
+ Name.Attribute: '#658b00',
+ Name.Builtin: '#658b00',
+
+ Generic.Heading: 'bold #000080',
+ Generic.Subheading: 'bold #800080',
+ Generic.Deleted: '#aa0000',
+ Generic.Inserted: '#00aa00',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/pygments/styles/trac.py b/pygments/styles/trac.py
new file mode 100644
index 00000000..65662925
--- /dev/null
+++ b/pygments/styles/trac.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.trac
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Port of the default trac highlighter design.
+
+ :copyright: 2006 by Armin Ronacher.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic
+
+
+class TracStyle(Style):
+
+ default_style = ''
+
+ styles = {
+ Comment: 'italic #999988',
+ Comment.Preproc: 'bold noitalic #999999',
+
+ String: '#bb8844',
+ String.Regex: '#808000',
+
+ Number: '#009999',
+
+ Keyword: 'bold',
+ Keyword.Type: '#445588',
+
+ Name.Builtin: '#999999',
+ Name.Function: 'bold #990000',
+ Name.Class: 'bold #445588',
+ Name.Exception: 'bold #990000',
+ Name.Namespace: '#555555',
+ Name.Variable: '#ff99ff',
+ Name.Constant: '#ff99ff',
+ Name.Tag: '#000080',
+ Name.Attribute: '#008080',
+ Name.Entity: '#800080',
+
+ Generic.Heading: '#999999',
+ Generic.Subheading: '#aaaaaa',
+ Generic.Deleted: 'bg:#ffdddd #000000',
+ Generic.Inserted: 'bg:#ddffdd #000000',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/pygments/token.py b/pygments/token.py
new file mode 100644
index 00000000..ca542bc0
--- /dev/null
+++ b/pygments/token.py
@@ -0,0 +1,156 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.token
+ ~~~~~~~~~~~~~~
+
+ Basic token types and the standard tokens.
+
+ :copyright: 2006 by Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+class _TokenType(tuple):
+ parent = None
+
+ def split(self):
+ buffer = []
+ node = self
+ while node is not None:
+ buffer.append(node)
+ node = node.parent
+ buffer.reverse()
+ return buffer
+
+ def __getattr__(self, val):
+ if not val or not val[0].isupper():
+ return tuple.__getattribute__(self, val)
+ new = _TokenType(self + (val,))
+ setattr(self, val, new)
+ new.parent = self
+ return new
+
+ def __hash__(self):
+ return hash(tuple(self))
+
+ def __repr__(self):
+ return 'Token' + (self and '.' or '') + '.'.join(self)
+
+
+Token = _TokenType()
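+# Subtypes are created on demand: accessing an attribute that starts with an
+# uppercase letter (e.g. Token.Name.Function) builds and caches a new
+# _TokenType whose .parent chain leads back to Token, so that
+# Token.Name.Function.split() == [Token, Token.Name, Token.Name.Function].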
+
+# Special token types
+Text = Token.Text
+Error = Token.Error
+# Text that doesn't belong to this lexer (e.g. HTML in PHP)
+Other = Token.Other
+
+# Common token types for source code
+Keyword = Token.Keyword
+Name = Token.Name
+Literal = Token.Literal
+String = Literal.String
+Number = Literal.Number
+Operator = Token.Operator
+Comment = Token.Comment
+
+# Generic types for non-source code
+Generic = Token.Generic
+
+
+# Map standard token types to short names, used in CSS class naming.
+# If you add a new item, please be sure to run this file to perform
+# a consistency check for duplicate values.
+STANDARD_TYPES = {
+ Token: '',
+
+ Text: '',
+ Error: 'err',
+ Other: 'x',
+
+ Keyword: 'k',
+ Keyword.Constant: 'kc',
+ Keyword.Declaration: 'kd',
+ Keyword.Pseudo: 'kp',
+ Keyword.Reserved: 'kr',
+ Keyword.Type: 'kt',
+
+ Name: 'n',
+ Name.Attribute: 'na',
+ Name.Builtin: 'nb',
+ Name.Builtin.Pseudo: 'bp',
+ Name.Class: 'nc',
+ Name.Constant: 'no',
+ Name.Decorator: 'nd',
+ Name.Entity: 'ni',
+ Name.Exception: 'ne',
+ Name.Function: 'nf',
+ Name.Label: 'nl',
+ Name.Namespace: 'nn',
+ Name.Other: 'nx',
+ Name.Tag: 'nt',
+ Name.Variable: 'nv',
+ Name.Variable.Class: 'vc',
+ Name.Variable.Global: 'vg',
+ Name.Variable.Instance: 'vi',
+
+ Literal: 'l',
+ Literal.Date: 'ld',
+
+ String: 's',
+ String.Backtick: 'sb',
+ String.Char: 'sc',
+ String.Doc: 'sd',
+ String.Double: 's2',
+ String.Escape: 'se',
+ String.Heredoc: 'sh',
+ String.Interpol: 'si',
+ String.Other: 'sx',
+ String.Regex: 'sr',
+ String.Single: 's1',
+ String.Symbol: 'ss',
+
+ Number: 'm',
+ Number.Float: 'mf',
+ Number.Hex: 'mh',
+ Number.Integer: 'mi',
+ Number.Integer.Long: 'il',
+ Number.Oct: 'mo',
+
+ Operator: 'o',
+ Operator.Word: 'ow',
+
+ Comment: 'c',
+ Comment.Multiline: 'cm',
+ Comment.Preproc: 'cp',
+ Comment.Single: 'c1',
+
+ Generic: 'g',
+ Generic.Deleted: 'gd',
+ Generic.Emph: 'ge',
+ Generic.Error: 'gr',
+ Generic.Heading: 'gh',
+ Generic.Inserted: 'gi',
+ Generic.Output: 'go',
+ Generic.Prompt: 'gp',
+ Generic.Strong: 'gs',
+ Generic.Subheading: 'gu',
+ Generic.Traceback: 'gt',
+}
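+
+# For example, STANDARD_TYPES[Name.Function] == 'nf' and
+# STANDARD_TYPES[Comment.Single] == 'c1'; these short names are what ends up
+# in CSS class names, as noted above.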
+
+
+
+if __name__ == '__main__':
+ import sys
+ # sanity check for token name dict: no duplicate entries!
+ s = STANDARD_TYPES.copy()
+ s[Token] = '---' # Token and Text do conflict; that is okay
+ t = {}
+ for k, v in s.iteritems():
+ t.setdefault(v, []).append(k)
+ if len(t) == len(s):
+ print 'Okay!'
+ sys.exit()
+
+ for k, v in t.iteritems():
+ if len(v) > 1:
+ print "%r has more than one key: %r" % (k, v)
diff --git a/pygments/util.py b/pygments/util.py
new file mode 100644
index 00000000..65261964
--- /dev/null
+++ b/pygments/util.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.util
+ ~~~~~~~~~~~~~
+
+ Utility functions.
+
+ :copyright: 2006 by Georg Brandl.
+ :license: GNU LGPL, see LICENSE for more details.
+"""
+
+
+class OptionError(Exception):
+ pass
+
+
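+# Each helper below reads ``optname`` from an ``options`` dict (falling back
+# to ``default``) and coerces the value; for example, with illustrative
+# option names, get_bool_opt({'linenos': 'yes'}, 'linenos') returns True and
+# get_int_opt({}, 'tabsize', '8') returns 8.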
+def get_bool_opt(options, optname, default=None):
+ string = options.get(optname, default)
+ if isinstance(string, bool):
+ return string
+ elif string.lower() in ('1', 'yes', 'true', 'on'):
+ return True
+ elif string.lower() in ('0', 'no', 'false', 'off'):
+ return False
+ else:
+ raise OptionError('Invalid value %r for option %s; use '
+ '1/0, yes/no, true/false, on/off' %
+ (string, optname))
+
+
+def get_int_opt(options, optname, default=None):
+ string = options.get(optname, default)
+ try:
+ return int(string)
+ except ValueError:
+ raise OptionError('Invalid value %r for option %s; you '
+ 'must give an integer value' %
+ (string, optname))
+
+
+def get_list_opt(options, optname, default=None):
+ val = options.get(optname, default)
+ if isinstance(val, basestring):
+ return val.split()
+ elif isinstance(val, (list, tuple)):
+ return list(val)
+ else:
+ raise OptionError('Invalid value %r for option %s; you '
+ 'must give a list value' %
+ (val, optname))