-rw-r--r--  pygments/formatters/irc.py           |  2
-rw-r--r--  pygments/lexers/_csound_builtins.py  |  7
-rw-r--r--  pygments/lexers/asm.py               |  2
-rw-r--r--  pygments/lexers/chapel.py            |  3
-rw-r--r--  pygments/lexers/elm.py               |  2
-rw-r--r--  pygments/lexers/ezhil.py             |  2
-rw-r--r--  pygments/lexers/j.py                 |  2
-rw-r--r--  pygments/lexers/jvm.py               | 15
-rw-r--r--  pygments/lexers/python.py            |  4
-rw-r--r--  pygments/lexers/trafficscript.py     |  3
-rw-r--r--  pygments/lexers/webmisc.py           | 51
-rw-r--r--  pygments/styles/lovelace.py          |  3
-rw-r--r--  tests/test_java.py                   | 38
13 files changed, 110 insertions, 24 deletions
diff --git a/pygments/formatters/irc.py b/pygments/formatters/irc.py
index 44fe6c4a..d1eed0ac 100644
--- a/pygments/formatters/irc.py
+++ b/pygments/formatters/irc.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
pygments.formatters.irc
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ ~~~~~~~~~~~~~~~~~~~~~~~
Formatter for IRC output
diff --git a/pygments/lexers/_csound_builtins.py b/pygments/lexers/_csound_builtins.py
index 5f7a798a..a88e0a83 100644
--- a/pygments/lexers/_csound_builtins.py
+++ b/pygments/lexers/_csound_builtins.py
@@ -1,4 +1,11 @@
# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._csound_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
# Opcodes in Csound 6.05 from
# csound --list-opcodes
diff --git a/pygments/lexers/asm.py b/pygments/lexers/asm.py
index 918ed83b..bbe04f69 100644
--- a/pygments/lexers/asm.py
+++ b/pygments/lexers/asm.py
@@ -87,7 +87,7 @@ class GasLexer(RegexLexer):
(r'#.*?\n', Comment)
],
'punctuation': [
- (r'[-*,.():]+', Punctuation)
+ (r'[-*,.()\[\]!:]+', Punctuation)
]
}
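The widened GAS punctuation class now also accepts '[', ']' and '!', which show up in ARM-style addressing modes. A minimal sketch for eyeballing the result (the sample instruction is illustrative, not part of this change):

    # Lex an ARM-style load with writeback; with the new character class the
    # brackets and the trailing '!' should come out as Punctuation tokens.
    from pygments.lexers import GasLexer

    for token_type, value in GasLexer().get_tokens('ldr r0, [r1, r2]!\n'):
        print(token_type, repr(value))
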
diff --git a/pygments/lexers/chapel.py b/pygments/lexers/chapel.py
index 5b7be4dd..d69c55f5 100644
--- a/pygments/lexers/chapel.py
+++ b/pygments/lexers/chapel.py
@@ -77,7 +77,8 @@ class ChapelLexer(RegexLexer):
(r'[0-9]+', Number.Integer),
# strings
- (r'["\'](\\\\|\\"|[^"\'])*["\']', String),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'(\\\\|\\'|[^'])*'", String),
# tokens
(r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|'
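Splitting the Chapel string rule into separate double- and single-quoted patterns fixes strings that contain the other quote character; with the old combined pattern an apostrophe inside a double-quoted string cut the token short. A quick check, using an illustrative snippet:

    # The double-quoted string with an embedded apostrophe should now be
    # emitted as a single String token by the updated ChapelLexer.
    from pygments.lexers import ChapelLexer

    for token_type, value in ChapelLexer().get_tokens('writeln("that\'s fine");\n'):
        print(token_type, repr(value))
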
diff --git a/pygments/lexers/elm.py b/pygments/lexers/elm.py
index b8206c6d..7df6346a 100644
--- a/pygments/lexers/elm.py
+++ b/pygments/lexers/elm.py
@@ -5,6 +5,8 @@
Lexer for the Elm programming language.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words, include
diff --git a/pygments/lexers/ezhil.py b/pygments/lexers/ezhil.py
index 713541ee..a5468a0f 100644
--- a/pygments/lexers/ezhil.py
+++ b/pygments/lexers/ezhil.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
pygments.lexers.ezhil
- ~~~~~~~~~~~~~~~~~~~~~~
+ ~~~~~~~~~~~~~~~~~~~~~
Pygments lexers for Ezhil language.
diff --git a/pygments/lexers/j.py b/pygments/lexers/j.py
index 20176d0d..278374e5 100644
--- a/pygments/lexers/j.py
+++ b/pygments/lexers/j.py
@@ -5,6 +5,8 @@
Lexer for the J programming language.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words, include
diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py
index 14647616..41fc0fdb 100644
--- a/pygments/lexers/jvm.py
+++ b/pygments/lexers/jvm.py
@@ -66,10 +66,19 @@ class JavaLexer(RegexLexer):
(r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Operator, Name.Attribute)),
(r'^\s*([^\W\d]|\$)[\w$]*:', Name.Label),
(r'([^\W\d]|\$)[\w$]*', Name),
+ (r'([0-9](_*[0-9]+)*\.([0-9](_*[0-9]+)*)?|'
+ r'([0-9](_*[0-9]+)*)?\.[0-9](_*[0-9]+)*)'
+ r'([eE][+\-]?[0-9](_*[0-9]+)*)?[fFdD]?|'
+ r'[0-9][eE][+\-]?[0-9](_*[0-9]+)*[fFdD]?|'
+ r'[0-9]([eE][+\-]?[0-9](_*[0-9]+)*)?[fFdD]|'
+ r'0[xX]([0-9a-fA-F](_*[0-9a-fA-F]+)*\.?|'
+ r'([0-9a-fA-F](_*[0-9a-fA-F]+)*)?\.[0-9a-fA-F](_*[0-9a-fA-F]+)*)'
+ r'[pP][+\-]?[0-9](_*[0-9]+)*[fFdD]?', Number.Float),
+ (r'0[xX][0-9a-fA-F](_*[0-9a-fA-F]+)*[lL]?', Number.Hex),
+ (r'0[bB][01](_*[01]+)*[lL]?', Number.Bin),
+ (r'0(_*[0-7]+)+[lL]?', Number.Oct),
+ (r'0|[1-9](_*[0-9]+)*[lL]?', Number.Integer),
(r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+(_+[0-9]+)*L?', Number.Integer),
(r'\n', Text)
],
'class': [
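The rewritten Java number rules cover underscores in digit groups, binary and octal literals, and hexadecimal floats. The expected classifications below follow from the regexes added above and match the new test at the end of this diff; this is just a quick interactive check:

    # Lex a few Java numeric literals and print the token type chosen for each,
    # skipping whitespace. Expected: Hex, Bin, Oct, Integer, Float, Float.
    from pygments.lexers import JavaLexer
    from pygments.token import Text

    sample = '0xbEEf 0b0___101_0 0_35 9__542_72l 0x.1Fp3 3e-1_3d\n'
    for token_type, value in JavaLexer().get_tokens(sample):
        if token_type is not Text:
            print(token_type, repr(value))
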
diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py
index dee8e6c7..c05c8ae0 100644
--- a/pygments/lexers/python.py
+++ b/pygments/lexers/python.py
@@ -515,6 +515,8 @@ class CythonLexer(RegexLexer):
include('keywords'),
(r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
(r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
+ # (should actually start a block with only cdefs)
+ (r'(cdef)(:)', bygroups(Keyword, Punctuation)),
(r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
(r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
(r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
@@ -534,7 +536,7 @@ class CythonLexer(RegexLexer):
'keywords': [
(words((
'assert', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
- 'else', 'except', 'except?', 'exec', 'finally', 'for', 'gil',
+ 'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil',
'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
Keyword),
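On the Cython side, a bare 'cdef:' block opener is now recognised (Keyword plus Punctuation instead of falling through to the generic rules) and 'fused' joins the keyword list. A minimal sketch, using an illustrative snippet:

    # 'cdef' followed directly by ':' and the 'fused' keyword should both be
    # highlighted as keywords with the updated CythonLexer.
    from pygments.lexers import CythonLexer

    code = (
        'cdef:\n'
        '    int x\n'
        'ctypedef fused floating:\n'
        '    float\n'
        '    double\n'
    )
    for token_type, value in CythonLexer().get_tokens(code):
        print(token_type, repr(value))
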
diff --git a/pygments/lexers/trafficscript.py b/pygments/lexers/trafficscript.py
index 34ca7d5b..03ab6a06 100644
--- a/pygments/lexers/trafficscript.py
+++ b/pygments/lexers/trafficscript.py
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
"""
-
pygments.lexers.trafficscript
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -9,6 +8,7 @@
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+
import re
from pygments.lexer import RegexLexer
@@ -16,6 +16,7 @@ from pygments.token import String, Number, Name, Keyword, Operator, Text, Commen
__all__ = ['RtsLexer']
+
class RtsLexer(RegexLexer):
"""
For `Riverbed Stingray Traffic Manager <http://www.riverbed.com/stingray>`_
diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py
index def11dba..0eafa954 100644
--- a/pygments/lexers/webmisc.py
+++ b/pygments/lexers/webmisc.py
@@ -191,6 +191,14 @@ class XQueryLexer(ExtendedRegexLexer):
lexer.xquery_parse_state.append('operator')
ctx.pos = match.end()
+ def pushstate_operator_map_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
def pushstate_operator_root_validate(lexer, match, ctx):
yield match.start(), Keyword, match.group(1)
yield match.start(), Text, match.group(2)
@@ -338,11 +346,11 @@ class XQueryLexer(ExtendedRegexLexer):
(r'and|or', Operator.Word, 'root'),
(r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
Operator.Word, 'root'),
- (r'return|satisfies|to|union|where|preserve\s+strip',
+ (r'return|satisfies|to|union|where|count|preserve\s+strip',
Keyword, 'root'),
(r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\|\||\||:=|=|!)',
operator_root_callback),
- (r'(::|;|\[|//|/|,)',
+ (r'(::|:|;|\[|//|/|,)',
punctuation_root_callback),
(r'(castable|cast)(\s+)(as)\b',
bygroups(Keyword, Text, Keyword), 'singletype'),
@@ -356,13 +364,18 @@ class XQueryLexer(ExtendedRegexLexer):
(r'(\))(\s*)(as)',
bygroups(Punctuation, Text, Keyword), 'itemtype'),
(r'\$', Name.Variable, 'varname'),
- (r'(for|let)(\s+)(\$)',
+ (r'(for|let|previous|next)(\s+)(\$)',
bygroups(Keyword, Text, Name.Variable), 'varname'),
+ (r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
# (r'\)|\?|\]', Punctuation, '#push'),
(r'\)|\?|\]', Punctuation),
(r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
(r'ascending|descending|default', Keyword, '#push'),
+ (r'(allowing)(\s+)(empty)', bygroups(Keyword, Text, Keyword)),
(r'external', Keyword),
+ (r'(start|when|end)', Keyword, 'root'),
+ (r'(only)(\s+)(end)', bygroups(Keyword, Text, Keyword), 'root'),
(r'collation', Keyword, 'uritooperator'),
# eXist specific XQUF
@@ -421,6 +434,7 @@ class XQueryLexer(ExtendedRegexLexer):
(r'(' + qname + ')(\()?', bygroups(Name, Punctuation), 'operator'),
],
'singletype': [
+ include('whitespace'),
(r'\(:', Comment, 'comment'),
(ncname + r'(:\*)', Name.Variable, 'operator'),
(qname, Name.Variable, 'operator'),
@@ -448,7 +462,7 @@ class XQueryLexer(ExtendedRegexLexer):
bygroups(Keyword, Text, String.Double), 'namespacedecl'),
(r'(at)(\s+)(' + stringsingle + ')',
bygroups(Keyword, Text, String.Single), 'namespacedecl'),
- (r'except|intersect|in|is|return|satisfies|to|union|where',
+ (r'except|intersect|in|is|return|satisfies|to|union|where|count',
Keyword, 'root'),
(r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
(r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|\||\|', Operator, 'root'),
@@ -464,7 +478,7 @@ class XQueryLexer(ExtendedRegexLexer):
(r'case|as', Keyword, 'itemtype'),
(r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
(ncname + r':\*', Keyword.Type, 'operator'),
- (r'(function)(\()', bygroups(Keyword.Type, Punctuation)),
+ (r'(function|map|array)(\()', bygroups(Keyword.Type, Punctuation)),
(qname, Keyword.Type, 'occurrenceindicator'),
],
'kindtest': [
@@ -555,6 +569,7 @@ class XQueryLexer(ExtendedRegexLexer):
(qname, Name.Tag),
],
'xmlspace_decl': [
+ include('whitespace'),
(r'\(:', Comment, 'comment'),
(r'preserve|strip', Keyword, '#pop'),
],
@@ -617,13 +632,15 @@ class XQueryLexer(ExtendedRegexLexer):
bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
(r'(declare)(\s+)(default)(\s+)(order)',
bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
+ (r'(declare)(\s+)(context)(\s+)(item)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
(ncname + ':\*', Name, 'operator'),
('\*:'+ncname, Name.Tag, 'operator'),
('\*', Name.Tag, 'operator'),
(stringdouble, String.Double, 'operator'),
(stringsingle, String.Single, 'operator'),
- (r'(\})', popstate_callback),
+ (r'(\}|\])', popstate_callback),
# NAMESPACE DECL
(r'(declare)(\s+)(default)(\s+)(collation)',
@@ -644,6 +661,8 @@ class XQueryLexer(ExtendedRegexLexer):
# VARNAMEs
(r'(for|let|some|every)(\s+)(\$)',
bygroups(Keyword, Text, Name.Variable), 'varname'),
+ (r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
(r'\$', Name.Variable, 'varname'),
(r'(declare)(\s+)(variable)(\s+)(\$)',
bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Name.Variable), 'varname'),
@@ -677,8 +696,8 @@ class XQueryLexer(ExtendedRegexLexer):
pushstate_operator_root_validate_withmode),
(r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
(r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
- (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
- (r'(element|attribute)(\s*)(\{)',
+ (r'(switch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+ (r'(element|attribute|namespace)(\s*)(\{)',
pushstate_operator_root_construct_callback),
(r'(document|text|processing-instruction|comment)(\s*)(\{)',
@@ -690,18 +709,21 @@ class XQueryLexer(ExtendedRegexLexer):
(r'(element)(\s+)(?=' + qname + r')',
bygroups(Keyword, Text), 'element_qname'),
# PROCESSING_INSTRUCTION
- (r'(processing-instruction)(\s+)(' + ncname + r')(\s*)(\{)',
+ (r'(processing-instruction|namespace)(\s+)(' + ncname + r')(\s*)(\{)',
bygroups(Keyword, Text, Name.Variable, Text, Punctuation),
'operator'),
(r'(declare|define)(\s+)(function)',
bygroups(Keyword.Declaration, Text, Keyword.Declaration)),
- (r'(\{)', pushstate_operator_root_callback),
+ (r'(\{|\[)', pushstate_operator_root_callback),
(r'(unordered|ordered)(\s*)(\{)',
pushstate_operator_order_callback),
+ (r'(map|array)(\s*)(\{)',
+ pushstate_operator_map_callback),
+
(r'(declare)(\s+)(ordering)',
bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'declareordering'),
@@ -739,10 +761,11 @@ class XQueryLexer(ExtendedRegexLexer):
bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),
- (r'(@'+qname+')', Name.Attribute),
- (r'(@'+ncname+')', Name.Attribute),
- (r'@\*:'+ncname, Name.Attribute),
- (r'(@)', Name.Attribute),
+ (r'(@'+qname+')', Name.Attribute, 'operator'),
+ (r'(@'+ncname+')', Name.Attribute, 'operator'),
+ (r'@\*:'+ncname, Name.Attribute, 'operator'),
+ (r'@\*', Name.Attribute, 'operator'),
+ (r'(@)', Name.Attribute, 'operator'),
(r'//|/|\+|-|;|,|\(|\)', Punctuation),
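The bulk of the webmisc.py changes teach the XQuery lexer XQuery 3.0/3.1 syntax: map and array constructors, switch expressions, window clauses (tumbling/sliding, allowing empty, only end), the count clause, namespace constructors, and attribute wildcards. The query below is only an illustrative fragment for eyeballing the new rules, not something taken from this change:

    # Run the updated XQueryLexer over an XQuery 3.1 fragment that exercises
    # the new constructs (tumbling window, count clause, map constructor).
    from pygments.lexers import XQueryLexer

    query = '''
    for tumbling window $w in (1, 2, 3, 4)
        start $s when true()
        only end $e when $e - $s eq 1
    count $i
    return map { "first": $s, "count": $i }
    '''
    for token_type, value in XQueryLexer().get_tokens(query):
        print(token_type, repr(value))
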
diff --git a/pygments/styles/lovelace.py b/pygments/styles/lovelace.py
index 31bd5505..4009274c 100644
--- a/pygments/styles/lovelace.py
+++ b/pygments/styles/lovelace.py
@@ -8,6 +8,9 @@
Pygments style by Miikka Salminen (https://github.com/miikkas)
A desaturated, somewhat subdued style created for the Lovelace interactive
learning environment.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
"""
from pygments.style import Style
diff --git a/tests/test_java.py b/tests/test_java.py
index 33a64e99..f4096647 100644
--- a/tests/test_java.py
+++ b/tests/test_java.py
@@ -9,7 +9,7 @@
import unittest
-from pygments.token import Text, Name, Operator, Keyword
+from pygments.token import Text, Name, Operator, Keyword, Number
from pygments.lexers import JavaLexer
@@ -40,3 +40,39 @@ class JavaTest(unittest.TestCase):
]
self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+ def testNumericLiterals(self):
+ fragment = '0 5L 9__542_72l 0xbEEf 0X9_A 0_35 01 0b0___101_0'
+ fragment += ' 0. .7_17F 3e-1_3d 1f 6_01.9e+3 0x.1Fp3 0XEP8D\n'
+ tokens = [
+ (Number.Integer, '0'),
+ (Text, ' '),
+ (Number.Integer, '5L'),
+ (Text, ' '),
+ (Number.Integer, '9__542_72l'),
+ (Text, ' '),
+ (Number.Hex, '0xbEEf'),
+ (Text, ' '),
+ (Number.Hex, '0X9_A'),
+ (Text, ' '),
+ (Number.Oct, '0_35'),
+ (Text, ' '),
+ (Number.Oct, '01'),
+ (Text, ' '),
+ (Number.Bin, '0b0___101_0'),
+ (Text, ' '),
+ (Number.Float, '0.'),
+ (Text, ' '),
+ (Number.Float, '.7_17F'),
+ (Text, ' '),
+ (Number.Float, '3e-1_3d'),
+ (Text, ' '),
+ (Number.Float, '1f'),
+ (Text, ' '),
+ (Number.Float, '6_01.9e+3'),
+ (Text, ' '),
+ (Number.Float, '0x.1Fp3'),
+ (Text, ' '),
+ (Number.Float, '0XEP8D'),
+ (Text, '\n')
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
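
To run just the new test case, something like the following works with the stdlib runner, assuming the repository root is the working directory and tests/ is importable; the project's own test runner can of course be used instead:

    # Build a one-test suite for the new numeric-literal check and run it.
    import unittest
    from tests.test_java import JavaTest  # assumes tests/ is importable as a package

    suite = unittest.TestSuite([JavaTest('testNumericLiterals')])
    unittest.TextTestRunner(verbosity=2).run(suite)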