author    gbrandl <devnull@localhost>  2007-05-04 08:42:07 +0200
committer gbrandl <devnull@localhost>  2007-05-04 08:42:07 +0200
commit    9270113fff36124ff0b5c7b593a3a1c8cb595095 (patch)
tree      822913f753ec66601e7063bafa2b5d1d3562f414
parent    61baa23235b73b0272fad58e1c4a75a7320ff908 (diff)
download  pygments-9270113fff36124ff0b5c7b593a3a1c8cb595095.tar.gz
[svn] Fix a few inconsistencies and nits, mainly in the docs.
Use the get_choice_opt function consistently.
-rw-r--r--  docs/generate.py                 |  4
-rw-r--r--  docs/src/api.txt                 | 27
-rw-r--r--  docs/src/lexers.txt              |  5
-rw-r--r--  docs/src/plugins.txt             | 34
-rw-r--r--  docs/src/quickstart.txt          |  6
-rw-r--r--  pygments/filters/__init__.py     |  6
-rw-r--r--  pygments/formatters/other.py     |  4
-rw-r--r--  pygments/formatters/terminal.py  |  3
-rw-r--r--  pygments/lexers/agile.py         |  2
-rw-r--r--  pygments/lexers/dotnet.py        |  4
-rw-r--r--  pygments/lexers/special.py       |  6
-rw-r--r--  pygments/lexers/web.py           |  3
-rw-r--r--  pygments/util.py                 |  2
13 files changed, 68 insertions(+), 38 deletions(-)
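
The recurring change across these files: hand-rolled validation of string options is replaced by the `get_choice_opt` helper in `pygments.util` (renamed from `get_flag_opt` in this commit). A minimal sketch of the before/after pattern, modelled on the `KeywordCaseFilter` hunk further down:

.. sourcecode:: python

    from pygments.util import get_choice_opt, OptionError

    options = {'case': 'upper'}

    # Before: every consumer rolled its own check.
    case = options.get('case', 'lower')
    if case not in ('lower', 'upper', 'capitalize'):
        raise OptionError('unknown conversion method %r' % case)

    # After: one consistent helper call (signature as in pygments/util.py below).
    case = get_choice_opt(options, 'case', ['lower', 'upper', 'capitalize'], 'lower')
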
diff --git a/docs/generate.py b/docs/generate.py
index 1bf5f8f7..1ecfeb33 100644
--- a/docs/generate.py
+++ b/docs/generate.py
@@ -30,7 +30,7 @@ from pygments.formatters import HtmlFormatter
LEXERDOC = '''
`%s`
%s
- :Aliases: %s
+ :Short names: %s
:Filename patterns: %s
:Mimetypes: %s
@@ -74,7 +74,7 @@ def generate_formatter_docs():
out.append('`' + heading + '`\n' + '-'*(2+len(heading)) + '\n')
out.append(cls.__doc__)
out.append('''
- :Aliases: %s
+ :Short names: %s
:Filename patterns: %s
diff --git a/docs/src/api.txt b/docs/src/api.txt
index 8cd7f1f2..ba581306 100644
--- a/docs/src/api.txt
+++ b/docs/src/api.txt
@@ -158,15 +158,15 @@ def `analyze_text(text):`
For a list of known tokens have a look at the `Tokens`_ page.
-The lexer also recognizes the following attributes that are used by the
-builtin lookup mechanism.
+A lexer also can have the following attributes (in fact, they are mandatory
+except `alias_filenames`) that are used by the builtin lookup mechanism.
`name`
Full name for the lexer, in human-readable form.
`aliases`
A list of short, unique identifiers that can be used to lookup
- the lexer from a list.
+ the lexer from a list, e.g. using `get_lexer_by_name()`.
`filenames`
A list of `fnmatch` patterns that match filenames which contain
@@ -222,6 +222,21 @@ def `format(self, tokensource, outfile):`
.. _command-line option: cmdline.txt
+A formatter must have the following attributes that are used by the
+builtin lookup mechanism. (*New in Pygments 0.7.*)
+
+`name`
+ Full name for the formatter, in human-readable form.
+
+`aliases`
+ A list of short, unique identifiers that can be used to lookup
+ the formatter from a list, e.g. using `get_formatter_by_name()`.
+
+`filenames`
+ A list of `fnmatch` patterns that match filenames for which this formatter
+ can produce output. The patterns in this list should be unique among
+ all formatters.
+
Option processing
=================
@@ -231,7 +246,7 @@ processing:
class `OptionError`
This exception will be raised by all option processing functions if
- the type of the argument is not correct.
+ the type or value of the argument is not correct.
def `get_bool_opt(options, optname, default=None):`
Interpret the key `optname` from the dictionary `options`
@@ -249,3 +264,7 @@ def `get_list_opt(options, optname, default=None):`
If the key `optname` from the dictionary `options` is a string,
split it at whitespace and return it. If it is already a list
or a tuple, it is returned as a list.
+
+def `get_choice_opt(options, optname, allowed, default=None):`
+ If the key `optname` from the dictionary is not in the sequence
+ `allowed`, raise an error, otherwise return it. *New in Pygments 0.8.*
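
Taken together, the option helpers documented above behave roughly as follows; the option names and values here are illustrative only:

.. sourcecode:: python

    from pygments.util import get_bool_opt, get_list_opt, get_choice_opt

    options = {'stripnl': 'yes', 'disabled_modules': 'string table', 'compress': 'gz'}

    get_bool_opt(options, 'stripnl', True)         # string 'yes' -> True
    get_list_opt(options, 'disabled_modules', [])  # split at whitespace -> ['string', 'table']
    get_choice_opt(options, 'compress', ['', 'none', 'gz', 'bz2'], '')  # 'gz' is allowed
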
diff --git a/docs/src/lexers.txt b/docs/src/lexers.txt
index 22fe7c7a..5d40b4b8 100644
--- a/docs/src/lexers.txt
+++ b/docs/src/lexers.txt
@@ -31,12 +31,15 @@ Currently, **all lexers** support these options:
guess the encoding of the input.
+The "Short Names" field lists the identifiers that can be used with the
+`get_lexer_by_name()` function.
+
These lexers are builtin and can be imported from `pygments.lexers`:
[builtin_lexer_docs]
Iterating over all lexers
-=========================
+-------------------------
*New in Pygments 0.6.*
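
The "Short Names" note added above ties the lexer tables to the lookup API; for illustration:

.. sourcecode:: python

    from pygments.lexers import get_lexer_by_name

    # Any entry from a lexer's `aliases` list works as the short name;
    # extra keyword arguments become lexer options.
    lexer = get_lexer_by_name('python', stripnl=False)
    print(lexer.name)   # Python
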
diff --git a/docs/src/plugins.txt b/docs/src/plugins.txt
index da8b33b9..2c98c523 100644
--- a/docs/src/plugins.txt
+++ b/docs/src/plugins.txt
@@ -2,32 +2,35 @@
Register Plugins
================
-If you want to extend pygments without hacking in the sources but want to
-use the lexer/formatter/style resolve functions you can use setuptools
-entrypoints to add new lexers, formatters or styles to the pygments core.
+If you want to extend Pygments without hacking the sources, but want to
+use the lexer/formatter/style/filter lookup functions (`lexers.get_lexer_by_name`
+et al.), you can use `setuptools`_ entrypoints to add new lexers, formatters
+or styles as if they were in the Pygments core.
-That means you can use your highlighter also with the ``pygmentize`` script.
+.. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools
+
+That means you can use your highlighter modules with the `pygmentize` script,
+which relies on the mentioned functions.
Entrypoints
===========
-Here a list of setuptools entrypoints pygments understands:
+Here is a list of setuptools entrypoints that Pygments understands:
`pygments.lexers`
- This entrypoint is used for adding new lexers to the pygments core.
- The name of entrypoint values doesn't really matter, pygements extract
- required informations from the class definition:
+ This entrypoint is used for adding new lexers to the Pygments core.
+ The name of the entrypoint values doesn't really matter, Pygments extracts
+ required metadata from the class definition:
.. sourcecode:: ini
[pygments.lexers]
yourlexer = yourmodule:YourLexer
- Note that you have to
- defined a ``name``, ``aliases`` and ``filename`` patterns so that you
- can use the highlighter from the command line:
+ Note that you have to define ``name``, ``aliases`` and ``filename``
+ attributes so that you can use the highlighter from the command line:
.. sourcecode:: python
@@ -39,9 +42,9 @@ Here a list of setuptools entrypoints pygments understands:
`pygments.formatters`
- You can use this entrypoint to add new formatters to pygments. The
+ You can use this entrypoint to add new formatters to Pygments. The
name of an entrypoint item is the name of the formatter. If you
- prefix the name with a slash it's used for the filename pattern:
+ prefix the name with a slash it's used as a filename pattern:
.. sourcecode:: ini
@@ -86,5 +89,6 @@ you need to write a plugin. Also `this blog entry`_ might be interesting.
Extending The Core
==================
-If you have written a pygments plugin which is open source, please inform us
-about that. There is a high chance that we'll add it to the pygments core :-)
+If you have written a Pygments plugin that is open source, please inform us
+about that. There is a high chance that we'll add it to the Pygments
+distribution.
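
For context, the ini-style entrypoint above corresponds to a ``setup()`` call in the plugin's ``setup.py``; a minimal sketch, where the package, module and class names are the placeholders from the docs, not a real project:

.. sourcecode:: python

    from setuptools import setup

    setup(
        name='yourlexer-plugin',
        version='0.1',
        py_modules=['yourmodule'],
        entry_points={
            # same mapping as the [pygments.lexers] ini example above;
            # YourLexer itself must define name, aliases and filenames
            'pygments.lexers': ['yourlexer = yourmodule:YourLexer'],
        },
    )
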
diff --git a/docs/src/quickstart.txt b/docs/src/quickstart.txt
index 91e0953f..489c6112 100644
--- a/docs/src/quickstart.txt
+++ b/docs/src/quickstart.txt
@@ -6,9 +6,9 @@ Quickstart
Pygments comes with a wide range of lexers for modern languages which are all
-accessible through the pygments.lexers package. A lexer enables Pygments to
-parse the source code into tokens which then are passed to a formatter. Currently
-formatters exist for HTML, LaTeX, RTF and ANSI sequences.
+accessible through the `pygments.lexers` package. A lexer enables Pygments to
+split the source code into tokens which then are passed to a formatter.
+The most useful formatters include HTML, LaTeX, RTF and ANSI sequence output.
Example
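
The hunk stops at the quickstart's "Example" heading; the lexer-to-formatter pipeline it describes boils down to something like this (the sample snippet is arbitrary):

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    # The lexer splits the source into tokens, the formatter renders them;
    # here the output is HTML with CSS classes.
    code = 'print "Hello World"'
    print(highlight(code, PythonLexer(), HtmlFormatter()))
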
diff --git a/pygments/filters/__init__.py b/pygments/filters/__init__.py
index 3268f660..416e85dd 100644
--- a/pygments/filters/__init__.py
+++ b/pygments/filters/__init__.py
@@ -18,7 +18,7 @@ import re
from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
string_to_tokentype
from pygments.filter import Filter
-from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
+from pygments.util import get_list_opt, get_int_opt, get_bool_opt, get_choice_opt, \
ClassNotFound, OptionError
from pygments.plugin import find_plugin_filters
@@ -119,9 +119,7 @@ class KeywordCaseFilter(Filter):
def __init__(self, **options):
Filter.__init__(self, **options)
- case = options.get('case', 'lower')
- if case not in ('lower', 'upper', 'capitalize'):
- raise OptionError('unknown conversion method %r' % case)
+ case = get_choice_opt(options, 'case', ['lower', 'upper', 'capitalize'], 'lower')
self.convert = getattr(unicode, case)
def filter(self, lexer, stream):
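
From the user's side, the stricter `case` handling in `KeywordCaseFilter` surfaces through the normal filter API; a small sketch (the sample code is arbitrary, and an invalid ``case`` value now raises `OptionError`):

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import NullFormatter

    lexer = PythonLexer()
    # 'keywordcase' is the registered name of KeywordCaseFilter;
    # case must be one of 'lower', 'upper', 'capitalize'.
    lexer.add_filter('keywordcase', case='upper')
    print(highlight('def foo(): pass', lexer, NullFormatter()))
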
diff --git a/pygments/formatters/other.py b/pygments/formatters/other.py
index 9422dd23..bd375f32 100644
--- a/pygments/formatters/other.py
+++ b/pygments/formatters/other.py
@@ -10,6 +10,7 @@
"""
from pygments.formatter import Formatter
+from pygments.util import get_choice_opt
__all__ = ['NullFormatter', 'RawTokenFormatter']
@@ -54,7 +55,8 @@ class RawTokenFormatter(Formatter):
def __init__(self, **options):
Formatter.__init__(self, **options)
- self.compress = options.get('compress', '')
+ self.compress = get_choice_opt(options, 'compress',
+ ['', 'none', 'gz', 'bz2'], '')
def format(self, tokensource, outfile):
if self.compress == 'gz':
diff --git a/pygments/formatters/terminal.py b/pygments/formatters/terminal.py
index adac90df..c781cf16 100644
--- a/pygments/formatters/terminal.py
+++ b/pygments/formatters/terminal.py
@@ -13,6 +13,7 @@ from pygments.formatter import Formatter
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Token, Whitespace
from pygments.console import ansiformat
+from pygments.util import get_choice_opt
__all__ = ['TerminalFormatter']
@@ -76,7 +77,7 @@ class TerminalFormatter(Formatter):
def __init__(self, **options):
Formatter.__init__(self, **options)
- self.darkbg = options.get('bg', 'light') == 'dark'
+ self.darkbg = get_choice_opt(options, 'bg', ['light', 'dark'], 'light') == 'dark'
self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
def format(self, tokensource, outfile):
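
After this change an unsupported ``bg`` value fails early instead of being silently treated as a light background:

.. sourcecode:: python

    from pygments.formatters import TerminalFormatter
    from pygments.util import OptionError

    TerminalFormatter(bg='dark')     # accepted
    try:
        TerminalFormatter(bg='blue')
    except OptionError:
        pass                         # 'blue' is not in ['light', 'dark']
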
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
index 4e3d44fa..e59a5362 100644
--- a/pygments/lexers/agile.py
+++ b/pygments/lexers/agile.py
@@ -854,7 +854,7 @@ class LuaLexer(RegexLexer):
def __init__(self, **options):
self.func_name_highlighting = get_bool_opt(
options, 'func_name_highlighting', True)
- self.disabled_modules = get_list_opt(options, 'disabled_module', [])
+ self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
self._functions = set()
if self.func_name_highlighting:
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index 62569241..5c97c430 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -13,7 +13,7 @@ import re
from pygments.lexer import RegexLexer, bygroups, using, this
from pygments.token import Punctuation, \
Text, Comment, Operator, Keyword, Name, String, Number, Literal
-from pygments.util import get_flag_opt
+from pygments.util import get_choice_opt
from pygments import unistring as uni
__all__ = ['CSharpLexer', 'BooLexer', 'VbNetLexer']
@@ -120,7 +120,7 @@ class CSharpLexer(RegexLexer):
}
def __init__(self, **options):
- level = get_flag_opt(options, 'unicodelevel', self.tokens.keys(), 'basic')
+ level = get_choice_opt(options, 'unicodelevel', self.tokens.keys(), 'basic')
if level not in self._all_tokens:
# compile the regexes now
self._tokens = self.__class__.process_tokendef(level)
diff --git a/pygments/lexers/special.py b/pygments/lexers/special.py
index 714bdf74..8d66fc67 100644
--- a/pygments/lexers/special.py
+++ b/pygments/lexers/special.py
@@ -14,6 +14,7 @@ import cStringIO
from pygments.lexer import Lexer
from pygments.token import Token, Error, Text
+from pygments.util import get_choice_opt
__all__ = ['TextLexer', 'RawTokenLexer']
@@ -44,7 +45,7 @@ class RawTokenLexer(Lexer):
`compress`
If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
- the given compression algorithm before lexing (default: ``''``).
+ the given compression algorithm before lexing (default: ``""``).
"""
name = 'Raw token data'
aliases = ['raw']
@@ -52,7 +53,8 @@ class RawTokenLexer(Lexer):
mimetypes = ['application/x-pygments-tokens']
def __init__(self, **options):
- self.compress = options.get('compress', '')
+ self.compress = get_choice_opt(options, 'compress',
+ ['', 'none', 'gz', 'bz2'], '')
Lexer.__init__(self, **options)
def get_tokens(self, text):
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
index 969f7451..5385c725 100644
--- a/pygments/lexers/web.py
+++ b/pygments/lexers/web.py
@@ -264,7 +264,8 @@ class PhpLexer(RegexLexer):
`startinline`
If given and ``True`` the lexer starts highlighting with
- php code. (i.e.: no starting ``<?php`` required)
+ php code (i.e.: no starting ``<?php`` required). The default
+ is ``False``.
`funcnamehighlighting`
If given and ``True``, highlight builtin function names
(default: ``True``).
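
The clarified `startinline` docstring above corresponds to usage like this (the PHP snippet is arbitrary):

.. sourcecode:: python

    from pygments.lexers import PhpLexer

    # startinline=True: treat the input as PHP immediately,
    # without requiring a leading <?php tag (default: False).
    lexer = PhpLexer(startinline=True)
    tokens = list(lexer.get_tokens('echo "hello";'))
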
diff --git a/pygments/util.py b/pygments/util.py
index 01af01e6..07140187 100644
--- a/pygments/util.py
+++ b/pygments/util.py
@@ -33,7 +33,7 @@ class OptionError(Exception):
pass
-def get_flag_opt(options, optname, allowed, default=None):
+def get_choice_opt(options, optname, allowed, default=None):
string = options.get(optname, default)
if string not in allowed:
raise OptionError('Value for option %s must be one of %s' %