-rw-r--r--    TODO                          |  1
-rw-r--r--    docs/src/tokens.txt           | 15
-rw-r--r--    pygments/filters/__init__.py  |  4
-rw-r--r--    pygments/token.py             |  7
4 files changed, 13 insertions, 14 deletions
diff --git a/TODO b/TODO
@@ -6,7 +6,6 @@ for 0.7
 - new lexers:
   * Haskell
-  * (Q)BASIC
   * Lisp
   * IPython sessions
   * HTML with special formatting?
diff --git a/docs/src/tokens.txt b/docs/src/tokens.txt
index 22ccccea..f98b4d6e 100644
--- a/docs/src/tokens.txt
+++ b/docs/src/tokens.txt
@@ -81,20 +81,21 @@ of those token aliases, a number of subtypes exists (excluding the special token
 The `is_token_subtype()` function in the `pygments.token` module can be used to
 test if a token type is a subtype of another (such as `Name.Tag` and `Name`).
-(This is the same as ``Name.Tag in Name``. The in operator was newly introduced
-in pygments 0.7, the function still exists for backwards compatiblity)
+(This is the same as ``Name.Tag in Name``. The overloaded `in` operator was newly
+introduced in Pygments 0.7, the function still exists for backwards
+compatiblity.)
 
-With pygments 0.7 it's also possible to convert token from strings (for example
+With Pygments 0.7, it's also possible to convert strings to token types (for example
 if you want to supply a token from the command line):
 
 .. sourcecode:: pycon
 
-    >>> from pygments.token import String, string_to_token
-    >>> string_to_token("String")
+    >>> from pygments.token import String, string_to_tokentype
+    >>> string_to_tokentype("String")
     Token.Literal.String
-    >>> string_to_token("Token.Literal.String")
+    >>> string_to_tokentype("Token.Literal.String")
     Token.Literal.String
-    >>> string_to_token(String)
+    >>> string_to_tokentype(String)
     Token.Literal.String
diff --git a/pygments/filters/__init__.py b/pygments/filters/__init__.py
index 206cf9aa..be52a01b 100644
--- a/pygments/filters/__init__.py
+++ b/pygments/filters/__init__.py
@@ -15,7 +15,7 @@ except NameError:
     from sets import Set as set
 import re
 
-from pygments.token import String, Comment, Keyword, Name, string_to_token
+from pygments.token import String, Comment, Keyword, Name, string_to_tokentype
 from pygments.filter import Filter
 from pygments.util import get_list_opt
 from pygments.plugin import find_plugin_filters
@@ -119,7 +119,7 @@ class NameHighlightFilter(Filter):
         self.words = set(get_list_opt(options, 'highlight', []))
         highlight_token = options.get('highlight_token')
         if highlight_token:
-            self.highlight_token = string_to_token(highlight_token)
+            self.highlight_token = string_to_tokentype(highlight_token)
         else:
             self.highlight_token = Name.Function
diff --git a/pygments/token.py b/pygments/token.py
index b99ee9b9..813bc9a2 100644
--- a/pygments/token.py
+++ b/pygments/token.py
@@ -66,7 +66,7 @@ Name = Token.Name
 Literal = Token.Literal
 String = Literal.String
 Number = Literal.Number
-Punctuation = Literal.Punctuation
+Punctuation = Token.Punctuation
 Operator = Token.Operator
 Comment = Token.Comment
@@ -78,7 +78,6 @@ Generic = Token.Generic
 Token.Token = Token
 Token.String = String
 Token.Number = Number
-Token.Punctuation = Punctuation
@@ -90,9 +89,9 @@ def is_token_subtype(ttype, other):
     return ttype in other
 
 
-def string_to_token(s):
+def string_to_tokentype(s):
     """
-    Convert a string into a token::
+    Convert a string into a token type::
 
         >>> string_to_token('String.Double')
         Token.Literal.String.Double
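
For context, a minimal usage sketch of the renamed helper and the overloaded `in` operator after this commit; it assumes Pygments 0.7 or later and is not part of the change itself:

    from pygments.token import Name, Punctuation, Token, string_to_tokentype

    # The renamed helper accepts dotted names with or without the "Token."
    # prefix and passes existing token types through unchanged.
    print(string_to_tokentype("Name.Tag"))        # Token.Name.Tag
    print(string_to_tokentype("Token.Name.Tag"))  # Token.Name.Tag
    print(string_to_tokentype(Name.Tag))          # Token.Name.Tag

    # The overloaded `in` operator is the new spelling of is_token_subtype():
    print(Name.Tag in Name)   # True  (Name.Tag is a subtype of Name)
    print(Name in Name.Tag)   # False

    # After this commit, Punctuation aliases Token.Punctuation rather than
    # Literal.Punctuation.
    print(Punctuation)        # Token.Punctuation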