summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--CHANGES9
-rw-r--r--TODO1
-rw-r--r--docs/src/api.txt6
-rw-r--r--docs/src/quickstart.txt2
-rw-r--r--pygments/__init__.py2
-rw-r--r--pygments/filters/__init__.py5
-rw-r--r--pygments/formatters/img.py4
-rw-r--r--pygments/formatters/latex.py2
-rw-r--r--pygments/lexers/_mapping.py1
-rw-r--r--pygments/lexers/agile.py6
-rw-r--r--pygments/lexers/compiled.py12
-rw-r--r--pygments/lexers/dotnet.py6
-rw-r--r--pygments/lexers/functional.py35
-rw-r--r--pygments/lexers/math.py11
-rw-r--r--pygments/lexers/other.py195
-rw-r--r--pygments/lexers/text.py18
-rw-r--r--pygments/lexers/web.py2
-rw-r--r--tests/examplefiles/source.lgt343
-rw-r--r--tests/test_basic_api.py11
19 files changed, 620 insertions, 51 deletions
diff --git a/CHANGES b/CHANGES
index 71fa8153..baa781f7 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,9 +1,9 @@
Pygments changelog
==================
-Version 1.0
------------
-(codename not selected, released XXX YY, 2008)
+Version 0.10
+------------
+(codename Malzeug, released May 06, 2008)
- Lexers added:
@@ -19,6 +19,7 @@ Version 1.0
* NumPy
* Python 3
* S, S-plus, R statistics languages
+ * Logtalk
- In the LatexFormatter, the *commandprefix* option is now by default
'PY' instead of 'C', since the latter resulted in several collisions
@@ -38,6 +39,8 @@ Version 1.0
- Fixes in the IRC and MuPad lexers.
+- Fix function and interface name highlighting in the Java lexer.
+
- Fix at-rule handling in the CSS lexer.
- Handle KeyboardInterrupts gracefully in pygmentize.
diff --git a/TODO b/TODO
index de29ec63..5873d873 100644
--- a/TODO
+++ b/TODO
@@ -7,7 +7,6 @@ suggested new lexers
* IPython sessions
* Nemerle
* PostgreSQL/SQLite
-* Tcl
for 1.0
-------
diff --git a/docs/src/api.txt b/docs/src/api.txt
index 7998a7e2..b8159379 100644
--- a/docs/src/api.txt
+++ b/docs/src/api.txt
@@ -54,7 +54,7 @@ def `get_lexer_for_mimetype(mime, **options):`
def `guess_lexer(text, **options):`
Return a `Lexer` subclass instance that's guessed from the text
- in `text`. For that, the `analyze_text()` method of every known
+ in `text`. For that, the `analyse_text()` method of every known
lexer class is called with the text as argument, and the lexer
which returned the highest value will be instantiated and returned.
@@ -150,8 +150,8 @@ def `get_tokens_unprocessed(self, text):`
This method must be overridden by subclasses.
-def `analyze_text(text):`
- A static method which is called for lexer guessing. It should analyze
+def `analyse_text(text):`
+ A static method which is called for lexer guessing. It should analyse
the text and return a float in the range from ``0.0`` to ``1.0``.
If it returns ``0.0``, the lexer will not be selected as the most
probable one, if it returns ``1.0``, it will be selected immediately.
diff --git a/docs/src/quickstart.txt b/docs/src/quickstart.txt
index b3f73ae5..ea9166bb 100644
--- a/docs/src/quickstart.txt
+++ b/docs/src/quickstart.txt
@@ -153,7 +153,7 @@ or some template tags), use these functions:
>>> guess_lexer_for_filename('test.py', 'print "Hello World!"')
<pygments.lexers.PythonLexer>
-`guess_lexer()` passes the given content to the lexer classes' `analyze_text()`
+`guess_lexer()` passes the given content to the lexer classes' `analyse_text()`
method and returns the one for which it returns the highest number.
All lexers have two different filename pattern lists: the primary and the
diff --git a/pygments/__init__.py b/pygments/__init__.py
index 5482b0b6..a99c6408 100644
--- a/pygments/__init__.py
+++ b/pygments/__init__.py
@@ -24,7 +24,7 @@
:license: BSD, see LICENSE for more details.
"""
-__version__ = '1.0'
+__version__ = '0.10'
__author__ = 'Georg Brandl <g.brandl@gmx.net>'
__url__ = 'http://pygments.org/'
__license__ = 'BSD License'
diff --git a/pygments/filters/__init__.py b/pygments/filters/__init__.py
index 416e85dd..efe8a4bd 100644
--- a/pygments/filters/__init__.py
+++ b/pygments/filters/__init__.py
@@ -222,8 +222,9 @@ class VisibleWhitespaceFilter(Filter):
is ``False``. Note: this will not work if the `tabsize` option for the
lexer is nonzero, as tabs will already have been expanded then.
`tabsize` : int
- If tabs are to be replaced by this filter, this is the total number of
- characters that a tab should be expanded to. The default is ``8``.
+ If tabs are to be replaced by this filter (see the `tabs` option), this
+ is the total number of characters that a tab should be expanded to.
+ The default is ``8``.
`newlines` : string or bool
The same as for `spaces`, but the default replacement character is ``¶``
(unicode PILCROW SIGN). The default value is ``False``.
diff --git a/pygments/formatters/img.py b/pygments/formatters/img.py
index eeb6449f..0b5831b2 100644
--- a/pygments/formatters/img.py
+++ b/pygments/formatters/img.py
@@ -43,7 +43,7 @@ DEFAULT_FONT_NAME_NIX = 'Bitstream Vera Sans Mono'
DEFAULT_FONT_NAME_WIN = 'Courier New'
-class PilNotAvailable(Exception):
+class PilNotAvailable(ImportError):
"""When Python imaging library is not available"""
@@ -166,7 +166,7 @@ class ImageFormatter(Formatter):
Create an image from source code. This uses the Python Imaging Library to
generate a pixmap from the source code.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
Additional options accepted:
diff --git a/pygments/formatters/latex.py b/pygments/formatters/latex.py
index f91b1e57..ae3ce26d 100644
--- a/pygments/formatters/latex.py
+++ b/pygments/formatters/latex.py
@@ -109,7 +109,7 @@ class LatexFormatter(Formatter):
The LaTeX commands used to produce colored output are constructed
using this prefix and some letters (default: ``'PY'``).
*New in Pygments 0.7.*
- *New in Pygments 1.0:* the default is now ``'PY'`` instead of ``'C'``.
+ *New in Pygments 0.10:* the default is now ``'PY'`` instead of ``'C'``.
"""
name = 'LaTeX'
aliases = ['latex', 'tex']
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index 68781e53..995c0c30 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -70,6 +70,7 @@ LEXERS = {
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell'), ('*.lhs',), ('text/x-literate-haskell',)),
'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
+ 'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)),
'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua',), ('text/x-lua', 'application/x-lua')),
'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode',), ('*.moo',), ('text/x-moocode',)),
'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', 'Makefile', 'makefile', 'Makefile.*'), ('text/x-makefile',)),
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
index 7199d405..1a6b2d1a 100644
--- a/pygments/lexers/agile.py
+++ b/pygments/lexers/agile.py
@@ -182,7 +182,7 @@ class Python3Lexer(RegexLexer):
"""
For `Python <http://www.python.org>`_ source code (version 3.0).
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Python 3'
@@ -1081,7 +1081,7 @@ class IoLexer(RegexLexer):
For `Io <http://iolanguage.com/>`_ (a small, prototype-based
programming language) source.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Io'
filenames = ['*.io']
@@ -1124,7 +1124,7 @@ class TclLexer(RegexLexer):
"""
For Tcl source code.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
keyword_cmds_re = (
diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
index 2e0f62f0..91730982 100644
--- a/pygments/lexers/compiled.py
+++ b/pygments/lexers/compiled.py
@@ -856,9 +856,9 @@ class JavaLexer(RegexLexer):
tokens = {
'root': [
# method names
- (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.]*\s+)+?)' # return arguments
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*)(\()', # signature start
+ (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment),
@@ -867,13 +867,13 @@ class JavaLexer(RegexLexer):
(r'(abstract|assert|break|case|catch|'
r'const|continue|default|do|else|enum|extends|final|'
r'finally|for|if|goto|implements|instanceof|'
- r'interface|native|new|package|private|protected|public|'
+ r'native|new|package|private|protected|public|'
r'return|static|strictfp|super|switch|synchronized|this|'
r'throw|throws|transient|try|volatile|while)\b', Keyword),
(r'(boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
(r'(true|false|null)\b', Keyword.Constant),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'class'),
+ (r'(class|interface)(\s+)', bygroups(Keyword, Text), 'class'),
(r'(import)(\s+)', bygroups(Keyword, Text), 'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
@@ -1064,7 +1064,7 @@ class FortranLexer(RegexLexer):
'''
Lexer for FORTRAN 90 code.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
'''
name = 'Fortran'
aliases = ['fortran']
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index 60663c4a..832f21f5 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -76,9 +76,9 @@ class CSharpLexer(RegexLexer):
tokens[levelname] = {
'root': [
# method names
- (r'^([ \t]*(?:' + cs_ident + r'\s+)+?)' # return arguments
- r'(' + cs_ident + ')' # method name
- r'(\s*)(\()', # signature start
+ (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
+ r'(' + cs_ident + ')' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Punctuation)),
(r'^\s*\[.*?\]', Name.Attribute),
(r'[^\S\n]+', Text),
diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py
index c2305f68..a7a50f7d 100644
--- a/pygments/lexers/functional.py
+++ b/pygments/lexers/functional.py
@@ -627,6 +627,36 @@ class ErlangLexer(RegexLexer):
'let', 'of', 'query', 'receive', 'try', 'when',
]
+ builtins = [ # See erlang(3) man page
+ 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
+ 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
+ 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
+ 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
+ 'float', 'float_to_list', 'fun_info', 'fun_to_list',
+ 'function_exported', 'garbage_collect', 'get', 'get_keys',
+ 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
+ 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
+ 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
+ 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
+ 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
+ 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
+ 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
+ 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
+ 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
+ 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
+ 'pid_to_list', 'port_close', 'port_command', 'port_connect',
+ 'port_control', 'port_call', 'port_info', 'port_to_list',
+ 'process_display', 'process_flag', 'process_info', 'purge_module',
+ 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
+ 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
+ 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
+ 'spawn_opt', 'split_binary', 'start_timer', 'statistics',
+ 'suspend_process', 'system_flag', 'system_info', 'system_monitor',
+ 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
+ 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
+ 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
+ ]
+
operators = r'(\+|-|\*|/|<|>|=|==|/=|=:=|=/=|=<|>=|\+\+|--|<-|!)'
word_operators = [
'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
@@ -647,8 +677,9 @@ class ErlangLexer(RegexLexer):
'root': [
(r'\s+', Text),
(r'%.*\n', Comment),
- ('|'.join(keywords), Name.Keyword),
- (r'\b(?:'+'|'.join(word_operators)+r')\b', Operator.Word),
+ ('(' + '|'.join(keywords) + r')\b', Keyword),
+ ('(' + '|'.join(builtins) + r')\b', Name.Builtin),
+ ('(' + '|'.join(word_operators) + r')\b', Operator.Word),
(r'^-', Punctuation, 'directive'),
(operators, Operator),
(r'"', String, 'string'),
diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py
index 00ba9e58..cf628c04 100644
--- a/pygments/lexers/math.py
+++ b/pygments/lexers/math.py
@@ -102,7 +102,7 @@ class MatlabLexer(RegexLexer):
For Matlab (or GNU Octave) source code.
Contributed by Ken Schutte <kschutte@csail.mit.edu>.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Matlab'
aliases = ['matlab', 'octave']
@@ -198,7 +198,7 @@ class MatlabSessionLexer(Lexer):
For Matlab (or GNU Octave) sessions. Modeled after PythonConsoleLexer.
Contributed by Ken Schutte <kschutte@csail.mit.edu>.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Matlab session'
aliases = ['matlabsession']
@@ -246,7 +246,7 @@ class NumPyLexer(PythonLexer):
'''
A Python lexer recognizing Numerical Python builtins.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
'''
name = 'NumPy'
@@ -332,7 +332,7 @@ class SLexer(RegexLexer):
"""
For S, S-plus, and R source code.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'S'
@@ -352,7 +352,8 @@ class SLexer(RegexLexer):
(r'\[|\]|\[\[|\]\]|\$|\(|\)|@|:::?|;|,', Punctuation),
],
'keywords': [
- (r'for(?=\s*\()|while(?=\s*\()|if(?=\s*\()|(?<=\s)else|(?<=\s)break(?=;|$)',
+ (r'for(?=\s*\()|while(?=\s*\()|if(?=\s*\()|(?<=\s)else|'
+ r'(?<=\s)break(?=;|$)|return(?=\s*\()|function(?=\s*\()',
Keyword.Reserved)
],
'operators': [
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
index 65e147d5..0701caa4 100644
--- a/pygments/lexers/other.py
+++ b/pygments/lexers/other.py
@@ -6,7 +6,7 @@
Lexers for other languages.
:copyright: 2006-2008 by Georg Brandl, Tim Hatch <tim@timhatch.com>,
- Stou Sandalski.
+ Stou Sandalski, Paulo Moura, Clara Dimene.
:license: BSD, see LICENSE for more details.
"""
@@ -20,7 +20,7 @@ from pygments.util import shebang_matches
__all__ = ['SqlLexer', 'MySqlLexer', 'BrainfuckLexer', 'BashLexer',
'BatchLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer',
- 'SmalltalkLexer', 'TcshLexer']
+ 'SmalltalkLexer', 'TcshLexer', 'LogtalkLexer']
class SqlLexer(RegexLexer):
@@ -487,7 +487,7 @@ class SmalltalkLexer(RegexLexer):
For `Smalltalk <http://www.smalltalk.org/>`_ syntax.
Contributed by Stefan Matthias Aust.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Smalltalk'
filenames = ['*.st']
@@ -558,7 +558,7 @@ class TcshLexer(RegexLexer):
"""
Lexer for tcsh scripts.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Tcsh'
@@ -620,3 +620,190 @@ class TcshLexer(RegexLexer):
],
}
+
+class LogtalkLexer(RegexLexer):
+ """
+ For `Logtalk <http://logtalk.org/>`_ source code.
+
+ *New in Pygments 0.10.*
+ """
+
+ name = 'Logtalk'
+ aliases = ['logtalk']
+ filenames = ['*.lgt']
+ mimetypes = ['text/x-logtalk']
+
+ tokens = {
+ 'root': [
+ # Directives
+ (r'^\s*:-\s',Punctuation,'directive'),
+ # Comments
+ (r'%.*?\n', Comment),
+ (r'/\*(.|\n)*?\*/',Comment),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Numbers
+ (r"0'.", Number),
+ (r'0b[01]+', Number),
+ (r'0o[0-7]+', Number),
+ (r'0x[0-9a-fA-F]+', Number),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
+ (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+ # Event handlers
+ (r'(after|before)(?=[(])', Keyword),
+ # Execution-context methods
+ (r'(parameter|this|se(lf|nder))(?=[(])', Keyword),
+ # Reflection
+ (r'(current_predicate|predicate_property)(?=[(])', Keyword),
+ # DCGs and term expansion
+ (r'(expand_term|(goal|term)_expansion|phrase)(?=[(])', Keyword),
+ # Entity
+ (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword),
+ (r'(object|protocol|category)_property(?=[(])', Keyword),
+ # Entity relations
+ (r'complements_object(?=[(])', Keyword),
+ (r'extends_(object|protocol|category)(?=[(])', Keyword),
+ (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
+ (r'(instantiat|specializ)es_class(?=[(])', Keyword),
+ # Events
+ (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
+ # Flags
+ (r'(current|set)_logtalk_flag(?=[(])', Keyword),
+ # Compiling, loading, and library paths
+ (r'logtalk_(compile|l(ibrary_path|oad))(?=[(])', Keyword),
+ # Database
+ (r'(clause|retract(all)?)(?=[(])', Keyword),
+ (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
+ # Control
+ (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
+ (r'(fail|true)\b', Keyword),
+ # All solutions
+ (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
+ # Multi-threading meta-predicates
+ (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
+ # Term unification
+ (r'unify_with_occurs_check(?=[(])', Keyword),
+ # Term creation and decomposition
+ (r'(functor|arg|copy_term)(?=[(])', Keyword),
+ # Evaluable functors
+ (r'(rem|mod|abs|sign)(?=[(])', Keyword),
+ (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
+ (r'(floor|truncate|round|ceiling)(?=[(])', Keyword),
+ # Other arithmetic functors
+ (r'(cos|atan|exp|log|s(in|qrt))(?=[(])', Keyword),
+ # Term testing
+ (r'(var|atom(ic)?|integer|float|compound|n(onvar|umber))(?=[(])', Keyword),
+ # Stream selection and control
+ (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
+ (r'(open|close)(?=[(])', Keyword),
+ (r'flush_output(?=[(])', Keyword),
+ (r'(at_end_of_stream|flush_output)\b', Keyword),
+ (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword),
+ # Character and byte input/output
+ (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
+ (r'\bnl\b', Keyword),
+ # Term input/output
+ (r'read(_term)?(?=[(])', Keyword),
+ (r'write(q|_(canonical|term))?(?=[(])', Keyword),
+ (r'(current_)?op(?=[(])', Keyword),
+ (r'(current_)?char_conversion(?=[(])', Keyword),
+ # Atomic term processing
+ (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
+ (r'(char_code|sub_atom)(?=[(])', Keyword),
+ (r'number_c(har|ode)s(?=[(])', Keyword),
+ # Implementation defined hooks functions
+ (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
+ (r'\bhalt\b', Keyword),
+ (r'halt(?=[(])', Keyword),
+ # Message sending operators
+ (r'(::|:|\^\^)', Operator),
+ # External call
+ (r'[{}]', Keyword),
+ # Logic and control
+ (r'\bonce(?=[(])', Keyword),
+ (r'\brepeat\b', Keyword),
+ # Bitwise functors
+ (r'(>>|<<|/\\|\\\\|\\)', Operator),
+ # Arithmetic evaluation
+ (r'\bis\b', Keyword),
+ # Arithmetic comparison
+ (r'(=:=|=\\=|<|=<|>=|>)', Operator),
+ # Term creation and decomposition
+ (r'=\.\.', Operator),
+ # Term unification
+ (r'(=|\\=)', Operator),
+ # Term comparison
+ (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
+ # Evaluable functors
+ (r'(//|[-+*/])', Operator),
+ (r'\b(mod|rem)\b', Operator),
+ # Other arithmetic functors
+ (r'\b\*\*\b', Operator),
+ # DCG rules
+ (r'-->', Operator),
+ # Control constructs
+ (r'([!;]|->)', Operator),
+ # Logic and control
+ (r'\\+', Operator),
+ # Mode operators
+ (r'[?@]', Operator),
+ # Strings
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # Punctuation
+ (r'[()\[\],.|]', Text),
+ # Atoms
+ (r"[a-z][a-zA-Z0-9_]*", Text),
+ (r"[']", String, 'quoted_atom'),
+ ],
+
+ 'quoted_atom': [
+ (r"['][']", String),
+ (r"[']", String, '#pop'),
+ (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
+ (r"[^\\'\n]+", String),
+ (r'\\', String),
+ ],
+
+ 'directive': [
+ # Entity directives
+ (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
+ (r'(end_(category|object|protocol))[.]',Keyword, 'root'),
+ # Predicate scope directives
+ (r'(public|protected|private)(?=[(])', Keyword, 'root'),
+ # Other directives
+ (r'\be(ncoding|xport)(?=[(])', Keyword, 'root'),
+ (r'\bin(fo|itialization)(?=[(])', Keyword, 'root'),
+ (r'\b(dynamic|synchronized|threaded)[.]', Keyword, 'root'),
+ (r'\b(alias|d(ynamic|iscontiguous)|m(eta_predicate|ode|ultifile)|synchronized)(?=[(])', Keyword, 'root'),
+ (r'\bop(?=[(])', Keyword, 'root'),
+ (r'\b(calls|use(s|_module))(?=[(])', Keyword, 'root'),
+ ],
+
+ 'entityrelations': [
+ (r'(extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
+ # Numbers
+ (r"0'.", Number),
+ (r'0b[01]+', Number),
+ (r'0o[0-7]+', Number),
+ (r'0x[0-9a-fA-F]+', Number),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
+ (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+ # Atoms
+ (r"[a-z][a-zA-Z0-9_]*", Text),
+ (r"[']", String, 'quoted_atom'),
+ # Strings
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # End of entity-opening directive
+ (r'([)]\.\n)', Text, 'root'),
+ # Scope operator
+ (r'(::)', Operator),
+ # Punctuation
+ (r'[()\[\],.|]', Text),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ ]
+ }
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index 5d66b68a..bb86d8ad 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -120,7 +120,7 @@ class MakefileLexer(Lexer):
Lexer for BSD and GNU make extensions (lenient enough to handle both in
the same file even).
- *Rewritten in Pygments 1.0.*
+ *Rewritten in Pygments 0.10.*
"""
name = 'Makefile'
@@ -157,7 +157,7 @@ class BaseMakefileLexer(RegexLexer):
"""
Lexer for simple Makefiles (no preprocessing).
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Makefile'
@@ -167,14 +167,16 @@ class BaseMakefileLexer(RegexLexer):
tokens = {
'root': [
+ (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
+ (r'\$\((?:.*\\\n|.*\n)+', using(BashLexer)),
(r'\s+', Text),
(r'#.*?\n', Comment),
(r'(export)(\s+)(?=[a-zA-Z0-9_${}\t -]+\n)',
bygroups(Keyword, Text), 'export'),
(r'export\s+', Keyword),
# assignment
- (r'([a-zA-Z0-9_${}-]+)(\s*)([!?:+]?=)([ \t]*)',
- bygroups(Name.Variable, Text, Operator, Text), 'var'),
+ (r'([a-zA-Z0-9_${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n|.*\n)+)',
+ bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
# strings
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
@@ -188,12 +190,6 @@ class BaseMakefileLexer(RegexLexer):
(r'\n', Text, '#pop'),
(r'\s+', Text),
],
- 'var': [
- (r'\\\n', String),
- (r'\\', String),
- (r'\n', Text, '#pop'),
- (r'[^\\\n]+', String),
- ],
'block-header': [
(r'[^,\\\n#]+', Number),
(r',', Punctuation),
@@ -243,7 +239,7 @@ class DarcsPatchLexer(RegexLexer):
format. Examples of this format are derived by commands such as
``darcs annotate --patch`` and ``darcs send``.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Darcs Patch'
aliases = ['dpatch']
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
index b9777bf9..87ea32fe 100644
--- a/pygments/lexers/web.py
+++ b/pygments/lexers/web.py
@@ -539,7 +539,7 @@ class XsltLexer(XmlLexer):
'''
A lexer for XSLT.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
'''
name = 'XSLT'
diff --git a/tests/examplefiles/source.lgt b/tests/examplefiles/source.lgt
new file mode 100644
index 00000000..ce5abced
--- /dev/null
+++ b/tests/examplefiles/source.lgt
@@ -0,0 +1,343 @@
+
+% this is a single-line comment
+
+/*
+this is
+a block
+comment
+*/
+
+
+:- encoding(some_encoding).
+:- op(Precedence, Associativity, Operator).
+
+
+:- object(prototype,
+ implements(protocol),
+ imports(category),
+ extends(parent)).
+
+ :- info([
+ version is 1.0,
+ author is 'Paulo Moura',
+ date is 2008/5/1,
+ comment is 'Sample prototype for testing syntax coloring.']).
+ :- threaded.
+ :- synchronized.
+ :- dynamic.
+ :- initialization(some_goal(X, Y)).
+ :- calls(some_other_protocol).
+ :- uses(another_object).
+
+ :- alias(set, member/2, set_member/2).
+ :- alias(words, singular//0, peculiar//0).
+
+ :- uses(list, [append/3, member/2]).
+ :- uses(queues, [new/1::new_queue/1]).
+
+ :- public(aaa/2).
+ :- meta_predicate(aaa(::, *)).
+ :- discontiguous(aaa/2).
+ :- mode(aaa(+callable, ?integer), zero_or_one).
+ :- info(position/2, [
+ comment is 'Predicate brief description.',
+ arguments is ['Arg1'-'Arg1 description', 'Arg2'-'Arg2 description']]).
+
+ :- protected(bbb/2).
+ :- synchronized(bbb/2).
+ :- mode(bbb(+integer, -float), one).
+ :- info(bbb/2, [
+ comment is 'Predicate brief description.',
+ argnames is ['Arg1', 'Arg2']]).
+
+ :- private(ccc/2).
+ :- dynamic(ccc/2).
+ :- mode(ccc(@atom, ?atom), one_or_more).
+ :- info(ccc/2, [
+ comment is 'Predicate brief description.',
+ argnames is ['Arg1', 'Arg2']]).
+
+ enumerating_entities(Object, Protocol, Category) :-
+ current_category(Category),
+ current_object(Object),
+ current_protocol(Protocol).
+
+ enumerating_properties :-
+ category_property(Category, Property),
+ object_property(Object, Property),
+ protocol_property(Protocol, Property).
+
+ creating_entities(Object, Protocol, Category) :-
+ create_category(Category, Relations, Directives, Clauses),
+ create_object(Object, Relations, Directives, Clauses),
+ create_protocol(Protocol, Relations, Directives).
+
+ abolishing_entities(Object, Protocol, Category) :-
+ abolish_category(Category),
+ abolish_object(Object),
+ abolish_protocol(Protocol).
+
+ entity_relations :-
+ extends_object(Prototype, Parent, Scope),
+ extends_protocol(Protocol1, Protocol2, Scope),
+ extends_category(Category1, Category2, Scope),
+ implements_protocol(Object, Protocol, Scope),
+ imports_category(Object, Category, Scope),
+ instantiates_class(Instance, Class, Scope),
+ specializes_class(Class, Superclass, Scope),
+ complements_object(Category, Object).
+
+ event_handling :-
+ abolish_events(Event, Object, Message, Sender, Monitor),
+ current_event(Event, Object, Message, Sender, Monitor),
+ define_events(Event, Object, Message, Sender, Monitor).
+
+ multi_threading :-
+ threaded(Goals),
+ threaded_call(Goal),
+ threaded_once(Goal),
+ threaded_ignore(Goal),
+ threaded_exit(Goal),
+ threaded_peek(Goal),
+ threaded_wait(Goal),
+ threaded_notify(Notification).
+
+ compiling_and_loading :-
+ logtalk_compile(File, Options),
+ logtalk_load(File, Options),
+ logtalk_library_path(Library, Path).
+
+ flags :-
+ current_logtalk_flag(Flag, Value),
+ set_logtalk_flag(Flag, Value).
+
+ execution_context_methods :-
+ parameter(N, Parameter),
+ self(Self),
+ sender(Sender),
+ this(This).
+
+ reflection_methods :-
+ current_predicate(Predicate),
+ predicate_property(Predicate, Property).
+
+ database_methods :-
+ abolish(Functor/Arity),
+ asserta(Clause),
+ assertz(Clause),
+ clause(Head, Body),
+ retract(Clause),
+ retractall(Head).
+
+ meta_call_methods :-
+ call(Goal).
+
+ all_solutions_methods :-
+ bagof(Term, Goal, List),
+ findall(Term, Goal, List),
+ forall(Generate, Test),
+ setof(Term, Goal, List).
+
+ event_handler_methods :-
+ before(Object, Message, Sender),
+ after(Object, Message, Sender).
+
+ dcg_rules_parsing_methods :-
+ phrase(NonTerminal, Input, Rest).
+
+ term_expansion_methods :-
+ expand_term(Term, Expanded),
+ term_expansion(Term, Expanded),
+ goal_expansion(Goal, Expanded).
+
+ message_sending :-
+ Object::Message,
+ ::Message,
+ ^^Message.
+
+ calling_external_code :-
+ {goal1, goal2, goal3}.
+
+ context_switching_calls :-
+ Object<<Goal.
+
+ direct_calls_of_category_predicates :-
+ :Goal.
+
+ if_then_else :-
+ ( If ->
+ Then
+ ; Else
+ ).
+
+ numbers :-
+ X is 13,
+ Y is 13.13,
+ Z is 13.13e-23,
+ C1 is 0'A, C2 is 0'', C3 is 0'",
+ B is 0b1011101,
+ O is 0o1234560,
+ H is 0x1234567890abcDEF.
+
+ functions :-
+ A is atan(3.14) + sin(0.77) - cos(123.23),
+ B is sign(-12) * abs(35/78),
+ C is truncate(3.14) + round(-7.8) - ceiling(111.88),
+ D is exp(3.8) - log(123.98) / sqrt(33) * 23 ** 4,
+ E is rem(3, 2) + mod(5, 3) * 2 rem 2 // 5 mod 3,
+ F is float_fractional_part(3.14) + float_integer_part(3.14),
+ G is float(33) + floor(99.99).
+
+ bitwise :-
+ A is 16 >> 2,
+ B is 16 << 2,
+ C is 10 /\ 12,
+ D is 10 \/ 12,
+ E is \ 10.
+
+ term_unification :-
+ Term1 = Term2,
+ Term1 \= Term2,
+ unify_with_occurs_check(Term1, Term2).
+
+ term_testing :-
+ atom(Atom),
+ atomic(Atomic),
+ integer(Integer),
+ float(Float),
+ compound(Term),
+ nonvar(Term),
+ var(Term),
+ number(Number).
+
+ term_comparison :-
+ Term1 == Term2,
+ Term1 \== Term2,
+ Term1 @< Term2,
+ Term1 @=< Term2,
+ Term1 @>= Term2,
+ Term1 @> Term2.
+
+ term_creation_and_decomposition :-
+ functor(Term, Functor, Arity),
+ arg(N, Term, Arg),
+ Term =.. [Functor| Args],
+ copy_term(Term, Copy).
+
+ arithemtic_evaluation :-
+ X is Expression.
+
+ arithemtic_comparison :-
+ Exp1 =:= Exp2,
+ Exp1 =\= Exp2,
+ Exp1 < Exp2,
+ Exp1 =< Exp2,
+ Exp1 > Exp2,
+ Exp1 >= Exp2.
+
+ stream_selection_and_control :-
+ current_input(Stream),
+ current_output(Stream),
+ set_input(Stream),
+ set_output(Stream),
+ open(Source, Mode, Stream, Options),
+ close(Stream),
+ flush_output(Stream),
+ stream_property(Stream, Property),
+ at_end_of_stream(Stream),
+ set_stream_position(Stream, Position),
+ flush_output,
+ at_end_of_stream.
+
+ character_input_output :-
+ get_char(Char),
+ get_code(Code),
+ peek_char(Char),
+ peek_code(Code),
+ put_char(Char),
+ put_code(Code),
+ nl(Stream),
+ nl.
+
+ byte_input_output :-
+ get_byte(Byte),
+ peek_byte(Byte),
+ put_byte(Byte).
+
+ term_input_output :-
+ read(Term),
+ read_term(Term),
+ write(Term),
+ write(Term),
+ write_canonical(Term),
+ write_term(Stream, Term, Options),
+ current_op(Precedence, Associativity, Operator),
+ op(Precedence, Associativity, Operator),
+ current_char_conversion(InChar, OutChar),
+ char_conversion(InChar, OutChar).
+
+ logic_and_control :-
+ \+ Goal,
+ once(Goal),
+ repeat,
+ !.
+
+ atomic_term_processing :-
+ atom_length(Atom, Length),
+ atom_chars(Atom, Chars),
+ atom_codes(Atom, Codes),
+ atom_concat(Atom1, Atom2, Atom),
+ sub_atom(Atom, Before, Length, After, SubAtom),
+ char_code(Char, Code),
+ number_chars(Number, Chars),
+ number_codes(Number, Codes).
+
+ implementation_defined_hooks :-
+ current_prolog_flag(Flag, Value),
+ set_prolog_flag(Flag, Value),
+ halt(ExitCode),
+ halt.
+
+ number(C) --> "+", number(C).
+ number(C) --> "-", number(X), {C is -X}.
+ number(X) --> [C], {0'0 =< C, C =< 0'9, X is C - 0'0}.
+
+:- end_object.
+
+
+
+:- object(class,
+ implements(protocol),
+ imports(category),
+ instantiates(metaclass),
+ specializes(superclass)).
+
+
+:- end_object.
+
+
+
+:- object(parametric(_Par1, _Par2),
+ implements(protocol),
+ imports(category),
+ extends(parent(_Par))).
+
+
+:- end_object.
+
+
+
+:- category(category,
+ implements(protocol),
+ extends(other_category)).
+
+
+:- end_category.
+
+
+
+:- protocol(extended,
+ extends(minimal)).
+
+
+:- end_protocol.
diff --git a/tests/test_basic_api.py b/tests/test_basic_api.py
index 0bcbb938..ab348cbc 100644
--- a/tests/test_basic_api.py
+++ b/tests/test_basic_api.py
@@ -131,7 +131,10 @@ class FormattersTest(unittest.TestCase):
a(info[1], "missing formatter aliases") # aliases
a(info[3], "missing formatter docstring") # doc
- inst = formatter(opt1="val1")
+ try:
+ inst = formatter(opt1="val1")
+ except ImportError:
+ continue
inst.get_style_defs()
inst.format(ts, out)
@@ -163,7 +166,11 @@ class FormattersTest(unittest.TestCase):
# test that the formatter supports encoding and Unicode
tokens = list(lexers.PythonLexer(encoding='utf-8').get_tokens("def f(): 'ä'"))
for formatter, info in formatters.FORMATTERS.iteritems():
- inst = formatter(encoding=None)
+ try:
+ inst = formatter(encoding=None)
+ except ImportError:
+ # some dependency not installed
+ continue
out = format(tokens, inst)
if formatter.unicodeoutput:
self.assert_(type(out) is unicode)