summaryrefslogtreecommitdiff
path: root/pygments
diff options
context:
space:
mode:
Diffstat (limited to 'pygments')
-rw-r--r--pygments/__init__.py2
-rw-r--r--pygments/filters/__init__.py5
-rw-r--r--pygments/formatters/img.py4
-rw-r--r--pygments/formatters/latex.py2
-rw-r--r--pygments/lexers/_mapping.py1
-rw-r--r--pygments/lexers/agile.py6
-rw-r--r--pygments/lexers/compiled.py12
-rw-r--r--pygments/lexers/dotnet.py6
-rw-r--r--pygments/lexers/functional.py35
-rw-r--r--pygments/lexers/math.py11
-rw-r--r--pygments/lexers/other.py195
-rw-r--r--pygments/lexers/text.py18
-rw-r--r--pygments/lexers/web.py2
13 files changed, 258 insertions, 41 deletions
diff --git a/pygments/__init__.py b/pygments/__init__.py
index 5482b0b6..a99c6408 100644
--- a/pygments/__init__.py
+++ b/pygments/__init__.py
@@ -24,7 +24,7 @@
:license: BSD, see LICENSE for more details.
"""
-__version__ = '1.0'
+__version__ = '0.10'
__author__ = 'Georg Brandl <g.brandl@gmx.net>'
__url__ = 'http://pygments.org/'
__license__ = 'BSD License'
diff --git a/pygments/filters/__init__.py b/pygments/filters/__init__.py
index 416e85dd..efe8a4bd 100644
--- a/pygments/filters/__init__.py
+++ b/pygments/filters/__init__.py
@@ -222,8 +222,9 @@ class VisibleWhitespaceFilter(Filter):
is ``False``. Note: this will not work if the `tabsize` option for the
lexer is nonzero, as tabs will already have been expanded then.
`tabsize` : int
- If tabs are to be replaced by this filter, this is the total number of
- characters that a tab should be expanded to. The default is ``8``.
+ If tabs are to be replaced by this filter (see the `tabs` option), this
+ is the total number of characters that a tab should be expanded to.
+ The default is ``8``.
`newlines` : string or bool
The same as for `spaces`, but the default replacement character is ``¶``
(unicode PILCROW SIGN). The default value is ``False``.
diff --git a/pygments/formatters/img.py b/pygments/formatters/img.py
index eeb6449f..0b5831b2 100644
--- a/pygments/formatters/img.py
+++ b/pygments/formatters/img.py
@@ -43,7 +43,7 @@ DEFAULT_FONT_NAME_NIX = 'Bitstream Vera Sans Mono'
DEFAULT_FONT_NAME_WIN = 'Courier New'
-class PilNotAvailable(Exception):
+class PilNotAvailable(ImportError):
"""When Python imaging library is not available"""
@@ -166,7 +166,7 @@ class ImageFormatter(Formatter):
Create an image from source code. This uses the Python Imaging Library to
generate a pixmap from the source code.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
Additional options accepted:
diff --git a/pygments/formatters/latex.py b/pygments/formatters/latex.py
index f91b1e57..ae3ce26d 100644
--- a/pygments/formatters/latex.py
+++ b/pygments/formatters/latex.py
@@ -109,7 +109,7 @@ class LatexFormatter(Formatter):
The LaTeX commands used to produce colored output are constructed
using this prefix and some letters (default: ``'PY'``).
*New in Pygments 0.7.*
- *New in Pygments 1.0:* the default is now ``'PY'`` instead of ``'C'``.
+ *New in Pygments 0.10:* the default is now ``'PY'`` instead of ``'C'``.
"""
name = 'LaTeX'
aliases = ['latex', 'tex']
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index 68781e53..995c0c30 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -70,6 +70,7 @@ LEXERS = {
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell'), ('*.lhs',), ('text/x-literate-haskell',)),
'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
+ 'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)),
'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua',), ('text/x-lua', 'application/x-lua')),
'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode',), ('*.moo',), ('text/x-moocode',)),
'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', 'Makefile', 'makefile', 'Makefile.*'), ('text/x-makefile',)),
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
index 7199d405..1a6b2d1a 100644
--- a/pygments/lexers/agile.py
+++ b/pygments/lexers/agile.py
@@ -182,7 +182,7 @@ class Python3Lexer(RegexLexer):
"""
For `Python <http://www.python.org>`_ source code (version 3.0).
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Python 3'
@@ -1081,7 +1081,7 @@ class IoLexer(RegexLexer):
For `Io <http://iolanguage.com/>`_ (a small, prototype-based
programming language) source.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Io'
filenames = ['*.io']
@@ -1124,7 +1124,7 @@ class TclLexer(RegexLexer):
"""
For Tcl source code.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
keyword_cmds_re = (
diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
index 2e0f62f0..91730982 100644
--- a/pygments/lexers/compiled.py
+++ b/pygments/lexers/compiled.py
@@ -856,9 +856,9 @@ class JavaLexer(RegexLexer):
tokens = {
'root': [
# method names
- (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.]*\s+)+?)' # return arguments
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*)(\()', # signature start
+ (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment),
@@ -867,13 +867,13 @@ class JavaLexer(RegexLexer):
(r'(abstract|assert|break|case|catch|'
r'const|continue|default|do|else|enum|extends|final|'
r'finally|for|if|goto|implements|instanceof|'
- r'interface|native|new|package|private|protected|public|'
+ r'native|new|package|private|protected|public|'
r'return|static|strictfp|super|switch|synchronized|this|'
r'throw|throws|transient|try|volatile|while)\b', Keyword),
(r'(boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
(r'(true|false|null)\b', Keyword.Constant),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'class'),
+ (r'(class|interface)(\s+)', bygroups(Keyword, Text), 'class'),
(r'(import)(\s+)', bygroups(Keyword, Text), 'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
@@ -1064,7 +1064,7 @@ class FortranLexer(RegexLexer):
'''
Lexer for FORTRAN 90 code.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
'''
name = 'Fortran'
aliases = ['fortran']
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index 60663c4a..832f21f5 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -76,9 +76,9 @@ class CSharpLexer(RegexLexer):
tokens[levelname] = {
'root': [
# method names
- (r'^([ \t]*(?:' + cs_ident + r'\s+)+?)' # return arguments
- r'(' + cs_ident + ')' # method name
- r'(\s*)(\()', # signature start
+ (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
+ r'(' + cs_ident + ')' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Punctuation)),
(r'^\s*\[.*?\]', Name.Attribute),
(r'[^\S\n]+', Text),
diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py
index c2305f68..a7a50f7d 100644
--- a/pygments/lexers/functional.py
+++ b/pygments/lexers/functional.py
@@ -627,6 +627,36 @@ class ErlangLexer(RegexLexer):
'let', 'of', 'query', 'receive', 'try', 'when',
]
+ builtins = [ # See erlang(3) man page
+ 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
+ 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
+ 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
+ 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
+ 'float', 'float_to_list', 'fun_info', 'fun_to_list',
+ 'function_exported', 'garbage_collect', 'get', 'get_keys',
+ 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
+ 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
+ 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
+ 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
+ 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
+ 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
+ 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
+ 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
+ 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
+ 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
+ 'pid_to_list', 'port_close', 'port_command', 'port_connect',
+ 'port_control', 'port_call', 'port_info', 'port_to_list',
+ 'process_display', 'process_flag', 'process_info', 'purge_module',
+ 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
+ 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
+ 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
+ 'spawn_opt', 'split_binary', 'start_timer', 'statistics',
+ 'suspend_process', 'system_flag', 'system_info', 'system_monitor',
+ 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
+ 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
+ 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
+ ]
+
operators = r'(\+|-|\*|/|<|>|=|==|/=|=:=|=/=|=<|>=|\+\+|--|<-|!)'
word_operators = [
'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
@@ -647,8 +677,9 @@ class ErlangLexer(RegexLexer):
'root': [
(r'\s+', Text),
(r'%.*\n', Comment),
- ('|'.join(keywords), Name.Keyword),
- (r'\b(?:'+'|'.join(word_operators)+r')\b', Operator.Word),
+ ('(' + '|'.join(keywords) + r')\b', Keyword),
+ ('(' + '|'.join(builtins) + r')\b', Name.Builtin),
+ ('(' + '|'.join(word_operators) + r')\b', Operator.Word),
(r'^-', Punctuation, 'directive'),
(operators, Operator),
(r'"', String, 'string'),
diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py
index 00ba9e58..cf628c04 100644
--- a/pygments/lexers/math.py
+++ b/pygments/lexers/math.py
@@ -102,7 +102,7 @@ class MatlabLexer(RegexLexer):
For Matlab (or GNU Octave) source code.
Contributed by Ken Schutte <kschutte@csail.mit.edu>.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Matlab'
aliases = ['matlab', 'octave']
@@ -198,7 +198,7 @@ class MatlabSessionLexer(Lexer):
For Matlab (or GNU Octave) sessions. Modeled after PythonConsoleLexer.
Contributed by Ken Schutte <kschutte@csail.mit.edu>.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Matlab session'
aliases = ['matlabsession']
@@ -246,7 +246,7 @@ class NumPyLexer(PythonLexer):
'''
A Python lexer recognizing Numerical Python builtins.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
'''
name = 'NumPy'
@@ -332,7 +332,7 @@ class SLexer(RegexLexer):
"""
For S, S-plus, and R source code.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'S'
@@ -352,7 +352,8 @@ class SLexer(RegexLexer):
(r'\[|\]|\[\[|\]\]|\$|\(|\)|@|:::?|;|,', Punctuation),
],
'keywords': [
- (r'for(?=\s*\()|while(?=\s*\()|if(?=\s*\()|(?<=\s)else|(?<=\s)break(?=;|$)',
+ (r'for(?=\s*\()|while(?=\s*\()|if(?=\s*\()|(?<=\s)else|'
+ r'(?<=\s)break(?=;|$)|return(?=\s*\()|function(?=\s*\()',
Keyword.Reserved)
],
'operators': [
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
index 65e147d5..0701caa4 100644
--- a/pygments/lexers/other.py
+++ b/pygments/lexers/other.py
@@ -6,7 +6,7 @@
Lexers for other languages.
:copyright: 2006-2008 by Georg Brandl, Tim Hatch <tim@timhatch.com>,
- Stou Sandalski.
+ Stou Sandalski, Paulo Moura, Clara Dimene.
:license: BSD, see LICENSE for more details.
"""
@@ -20,7 +20,7 @@ from pygments.util import shebang_matches
__all__ = ['SqlLexer', 'MySqlLexer', 'BrainfuckLexer', 'BashLexer',
'BatchLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer',
- 'SmalltalkLexer', 'TcshLexer']
+ 'SmalltalkLexer', 'TcshLexer', 'LogtalkLexer']
class SqlLexer(RegexLexer):
@@ -487,7 +487,7 @@ class SmalltalkLexer(RegexLexer):
For `Smalltalk <http://www.smalltalk.org/>`_ syntax.
Contributed by Stefan Matthias Aust.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Smalltalk'
filenames = ['*.st']
@@ -558,7 +558,7 @@ class TcshLexer(RegexLexer):
"""
Lexer for tcsh scripts.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Tcsh'
@@ -620,3 +620,190 @@ class TcshLexer(RegexLexer):
],
}
+
+class LogtalkLexer(RegexLexer):
+ """
+ For `Logtalk <http://logtalk.org/>`_ source code.
+
+ *New in Pygments 0.10.*
+ """
+
+ name = 'Logtalk'
+ aliases = ['logtalk']
+ filenames = ['*.lgt']
+ mimetypes = ['text/x-logtalk']
+
+ tokens = {
+ 'root': [
+ # Directives
+ (r'^\s*:-\s',Punctuation,'directive'),
+ # Comments
+ (r'%.*?\n', Comment),
+ (r'/\*(.|\n)*?\*/',Comment),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Numbers
+ (r"0'.", Number),
+ (r'0b[01]+', Number),
+ (r'0o[0-7]+', Number),
+ (r'0x[0-9a-fA-F]+', Number),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
+ (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+ # Event handlers
+ (r'(after|before)(?=[(])', Keyword),
+ # Execution-context methods
+ (r'(parameter|this|se(lf|nder))(?=[(])', Keyword),
+ # Reflection
+ (r'(current_predicate|predicate_property)(?=[(])', Keyword),
+ # DCGs and term expansion
+ (r'(expand_term|(goal|term)_expansion|phrase)(?=[(])', Keyword),
+ # Entity
+ (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword),
+ (r'(object|protocol|category)_property(?=[(])', Keyword),
+ # Entity relations
+ (r'complements_object(?=[(])', Keyword),
+ (r'extends_(object|protocol|category)(?=[(])', Keyword),
+ (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
+ (r'(instantiat|specializ)es_class(?=[(])', Keyword),
+ # Events
+ (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
+ # Flags
+ (r'(current|set)_logtalk_flag(?=[(])', Keyword),
+ # Compiling, loading, and library paths
+ (r'logtalk_(compile|l(ibrary_path|oad))(?=[(])', Keyword),
+ # Database
+ (r'(clause|retract(all)?)(?=[(])', Keyword),
+ (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
+ # Control
+ (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
+ (r'(fail|true)\b', Keyword),
+ # All solutions
+ (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
+ # Multi-threading meta-predicates
+ (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
+ # Term unification
+ (r'unify_with_occurs_check(?=[(])', Keyword),
+ # Term creation and decomposition
+ (r'(functor|arg|copy_term)(?=[(])', Keyword),
+ # Evaluable functors
+ (r'(rem|mod|abs|sign)(?=[(])', Keyword),
+ (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
+ (r'(floor|truncate|round|ceiling)(?=[(])', Keyword),
+ # Other arithmetic functors
+ (r'(cos|atan|exp|log|s(in|qrt))(?=[(])', Keyword),
+ # Term testing
+ (r'(var|atom(ic)?|integer|float|compound|n(onvar|umber))(?=[(])', Keyword),
+ # Stream selection and control
+ (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
+ (r'(open|close)(?=[(])', Keyword),
+ (r'flush_output(?=[(])', Keyword),
+ (r'(at_end_of_stream|flush_output)\b', Keyword),
+ (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword),
+ # Character and byte input/output
+ (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
+ (r'\bnl\b', Keyword),
+ # Term input/output
+ (r'read(_term)?(?=[(])', Keyword),
+ (r'write(q|_(canonical|term))?(?=[(])', Keyword),
+ (r'(current_)?op(?=[(])', Keyword),
+ (r'(current_)?char_conversion(?=[(])', Keyword),
+ # Atomic term processing
+ (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
+ (r'(char_code|sub_atom)(?=[(])', Keyword),
+ (r'number_c(har|ode)s(?=[(])', Keyword),
+ # Implementation defined hooks functions
+ (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
+ (r'\bhalt\b', Keyword),
+ (r'halt(?=[(])', Keyword),
+ # Message sending operators
+ (r'(::|:|\^\^)', Operator),
+ # External call
+ (r'[{}]', Keyword),
+ # Logic and control
+ (r'\bonce(?=[(])', Keyword),
+ (r'\brepeat\b', Keyword),
+ # Bitwise functors
+ (r'(>>|<<|/\\|\\\\|\\)', Operator),
+ # Arithmetic evaluation
+ (r'\bis\b', Keyword),
+ # Arithmetic comparison
+ (r'(=:=|=\\=|<|=<|>=|>)', Operator),
+ # Term creation and decomposition
+ (r'=\.\.', Operator),
+ # Term unification
+ (r'(=|\\=)', Operator),
+ # Term comparison
+ (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
+ # Evaluable functors
+ (r'(//|[-+*/])', Operator),
+ (r'\b(mod|rem)\b', Operator),
+ # Other arithmetic functors
+ (r'\b\*\*\b', Operator),
+ # DCG rules
+ (r'-->', Operator),
+ # Control constructs
+ (r'([!;]|->)', Operator),
+ # Logic and control
+ (r'\\+', Operator),
+ # Mode operators
+ (r'[?@]', Operator),
+ # Strings
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # Punctuation
+ (r'[()\[\],.|]', Text),
+ # Atoms
+ (r"[a-z][a-zA-Z0-9_]*", Text),
+ (r"[']", String, 'quoted_atom'),
+ ],
+
+ 'quoted_atom': [
+ (r"['][']", String),
+ (r"[']", String, '#pop'),
+ (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
+ (r"[^\\'\n]+", String),
+ (r'\\', String),
+ ],
+
+ 'directive': [
+ # Entity directives
+ (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
+ (r'(end_(category|object|protocol))[.]',Keyword, 'root'),
+ # Predicate scope directives
+ (r'(public|protected|private)(?=[(])', Keyword, 'root'),
+ # Other directives
+ (r'\be(ncoding|xport)(?=[(])', Keyword, 'root'),
+ (r'\bin(fo|itialization)(?=[(])', Keyword, 'root'),
+ (r'\b(dynamic|synchronized|threaded)[.]', Keyword, 'root'),
+ (r'\b(alias|d(ynamic|iscontiguous)|m(eta_predicate|ode|ultifile)|synchronized)(?=[(])', Keyword, 'root'),
+ (r'\bop(?=[(])', Keyword, 'root'),
+ (r'\b(calls|use(s|_module))(?=[(])', Keyword, 'root'),
+ ],
+
+ 'entityrelations': [
+ (r'(extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
+ # Numbers
+ (r"0'.", Number),
+ (r'0b[01]+', Number),
+ (r'0o[0-7]+', Number),
+ (r'0x[0-9a-fA-F]+', Number),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
+ (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+ # Atoms
+ (r"[a-z][a-zA-Z0-9_]*", Text),
+ (r"[']", String, 'quoted_atom'),
+ # Strings
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # End of entity-opening directive
+ (r'([)]\.\n)', Text, 'root'),
+ # Scope operator
+ (r'(::)', Operator),
+ # Punctuation
+ (r'[()\[\],.|]', Text),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ ]
+ }
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index 5d66b68a..bb86d8ad 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -120,7 +120,7 @@ class MakefileLexer(Lexer):
Lexer for BSD and GNU make extensions (lenient enough to handle both in
the same file even).
- *Rewritten in Pygments 1.0.*
+ *Rewritten in Pygments 0.10.*
"""
name = 'Makefile'
@@ -157,7 +157,7 @@ class BaseMakefileLexer(RegexLexer):
"""
Lexer for simple Makefiles (no preprocessing).
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Makefile'
@@ -167,14 +167,16 @@ class BaseMakefileLexer(RegexLexer):
tokens = {
'root': [
+ (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
+ (r'\$\((?:.*\\\n|.*\n)+', using(BashLexer)),
(r'\s+', Text),
(r'#.*?\n', Comment),
(r'(export)(\s+)(?=[a-zA-Z0-9_${}\t -]+\n)',
bygroups(Keyword, Text), 'export'),
(r'export\s+', Keyword),
# assignment
- (r'([a-zA-Z0-9_${}-]+)(\s*)([!?:+]?=)([ \t]*)',
- bygroups(Name.Variable, Text, Operator, Text), 'var'),
+ (r'([a-zA-Z0-9_${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n|.*\n)+)',
+ bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
# strings
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
@@ -188,12 +190,6 @@ class BaseMakefileLexer(RegexLexer):
(r'\n', Text, '#pop'),
(r'\s+', Text),
],
- 'var': [
- (r'\\\n', String),
- (r'\\', String),
- (r'\n', Text, '#pop'),
- (r'[^\\\n]+', String),
- ],
'block-header': [
(r'[^,\\\n#]+', Number),
(r',', Punctuation),
@@ -243,7 +239,7 @@ class DarcsPatchLexer(RegexLexer):
format. Examples of this format are derived by commands such as
``darcs annotate --patch`` and ``darcs send``.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
"""
name = 'Darcs Patch'
aliases = ['dpatch']
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
index b9777bf9..87ea32fe 100644
--- a/pygments/lexers/web.py
+++ b/pygments/lexers/web.py
@@ -539,7 +539,7 @@ class XsltLexer(XmlLexer):
'''
A lexer for XSLT.
- *New in Pygments 1.0.*
+ *New in Pygments 0.10.*
'''
name = 'XSLT'