-rw-r--r--  pygments/lexer.py             |  2
-rw-r--r--  pygments/lexers/functional.py | 58
-rw-r--r--  pygments/lexers/math.py       |  2
-rw-r--r--  pygments/lexers/templates.py  |  1
-rw-r--r--  pygments/lexers/web.py        | 17
-rw-r--r--  pygments/style.py             |  2
-rw-r--r--  tests/test_regexlexer.py      |  8
7 files changed, 71 insertions, 19 deletions
diff --git a/pygments/lexer.py b/pygments/lexer.py
index 82f09318..ad2c72d1 100644
--- a/pygments/lexer.py
+++ b/pygments/lexer.py
@@ -544,10 +544,10 @@ class RegexLexer(Lexer):
                 try:
                     if text[pos] == '\n':
                         # at EOL, reset state to "root"
-                        pos += 1
                         statestack = ['root']
                         statetokens = tokendefs['root']
                         yield pos, Text, u'\n'
+                        pos += 1
                         continue
                     yield pos, Error, text[pos]
                     pos += 1
diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py
index d4f9a32c..c34599d0 100644
--- a/pygments/lexers/functional.py
+++ b/pygments/lexers/functional.py
@@ -1688,13 +1688,46 @@ class ElixirLexer(RegexLexer):
     filenames = ['*.ex', '*.exs']
     mimetypes = ['text/x-elixir']
 
+    def gen_elixir_sigil_rules():
+        states = {}
+
+        states['strings'] = [
+            (r'(%[A-Ba-z])?"""(?:.|\n)*?"""', String.Doc),
+            (r"'''(?:.|\n)*?'''", String.Doc),
+            (r'"', String.Double, 'dqs'),
+            (r"'.*'", String.Single),
+            (r'(?<!\w)\?(\\(x\d{1,2}|\h{1,2}(?!\h)\b|0[0-7]{0,2}(?![0-7])\b|'
+             r'[^x0MC])|(\\[MC]-)+\w|[^\s\\])', String.Other)
+        ]
+
+        for lbrace, rbrace, name, in ('\\{', '\\}', 'cb'), \
+                                     ('\\[', '\\]', 'sb'), \
+                                     ('\\(', '\\)', 'pa'), \
+                                     ('\\<', '\\>', 'lt'):
+
+            states['strings'] += [
+                (r'%[a-z]' + lbrace, String.Double, name + 'intp'),
+                (r'%[A-Z]' + lbrace, String.Double, name + 'no-intp')
+            ]
+
+            states[name +'intp'] = [
+                (r'' + rbrace + '[a-z]*', String.Double, "#pop"),
+                include('enddoublestr')
+            ]
+
+            states[name +'no-intp'] = [
+                (r'.*' + rbrace + '[a-z]*', String.Double , "#pop")
+            ]
+
+        return states
+
     tokens = {
         'root': [
             (r'\s+', Text),
             (r'#.*$', Comment.Single),
-            (r'\b(case|end|bc|lc|if|unless|try|loop|receive|fn|defmodule|'
-             r'defp|def|defprotocol|defimpl|defrecord|defmacro|defdelegate|'
-             r'defexception|exit|raise|throw)\b(?![?!])|'
+            (r'\b(case|cond|end|bc|lc|if|unless|try|loop|receive|fn|defmodule|'
+             r'defp?|defprotocol|defimpl|defrecord|defmacrop?|defdelegate|'
+             r'defexception|exit|raise|throw|unless|after|rescue|catch|else)\b(?![?!])|'
             r'(?<!\.)\b(do|\-\>)\b\s*', Keyword),
            (r'\b(import|require|use|recur|quote|unquote|super|refer)\b(?![?!])',
             Keyword.Namespace),
@@ -1708,26 +1741,18 @@ class ElixirLexer(RegexLexer):
             r'\*\*?|=?~|<\-)|([a-zA-Z_]\w*([?!])?)(:)(?!:)', String.Symbol),
            (r':"', String.Symbol, 'interpoling_symbol'),
            (r'\b(nil|true|false)\b(?![?!])|\b[A-Z]\w*\b', Name.Constant),
-           (r'\b(__(FILE|LINE|MODULE|LOCAL|MAIN|FUNCTION)__)\b(?![?!])', Name.Builtin.Pseudo),
+           (r'\b(__(FILE|LINE|MODULE|MAIN|FUNCTION)__)\b(?![?!])', Name.Builtin.Pseudo),
            (r'[a-zA-Z_!][\w_]*[!\?]?', Name),
            (r'[(){};,/\|:\\\[\]]', Punctuation),
            (r'@[a-zA-Z_]\w*|&\d', Name.Variable),
            (r'\b(0[xX][0-9A-Fa-f]+|\d(_?\d)*(\.(?![^\d\s])'
             r'(_?\d)*)?([eE][-+]?\d(_?\d)*)?|0[bB][01]+)\b', Number),
+           (r'%r\/.*\/', String.Regex),
            include('strings'),
        ],
-        'strings': [
-            (r'"""(?:.|\n)*?"""', String.Doc),
-            (r"'''(?:.|\n)*?'''", String.Doc),
-            (r'"', String.Double, 'dqs'),
-            (r"'.*'", String.Single),
-            (r'(?<!\w)\?(\\(x\d{1,2}|\h{1,2}(?!\h)\b|0[0-7]{0,2}(?![0-7])\b|'
-             r'[^x0MC])|(\\[MC]-)+\w|[^\s\\])', String.Other)
-        ],
        'dqs': [
            (r'"', String.Double, "#pop"),
-            include('interpoling'),
-            (r'[^#"]+', String.Double),
+            include('enddoublestr')
        ],
        'interpoling': [
            (r'#{', String.Interpol, 'interpoling_string'),
@@ -1741,7 +1766,12 @@ class ElixirLexer(RegexLexer):
            include('interpoling'),
            (r'[^#"]+', String.Symbol),
        ],
+        'enddoublestr' : [
+            include('interpoling'),
+            (r'[^#"]+', String.Double),
+        ]
    }
+    tokens.update(gen_elixir_sigil_rules())
 
 
 class ElixirConsoleLexer(Lexer):
diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py
index 7ae7d6b2..db51cd62 100644
--- a/pygments/lexers/math.py
+++ b/pygments/lexers/math.py
@@ -50,7 +50,7 @@ class JuliaLexer(RegexLexer):
             # keywords
             (r'(begin|while|for|in|return|break|continue|'
              r'macro|quote|let|if|elseif|else|try|catch|end|'
-             r'bitstype|ccall)\b', Keyword),
+             r'bitstype|ccall|do)\b', Keyword),
             (r'(local|global|const)\b', Keyword.Declaration),
             (r'(module|import|export)\b', Keyword.Reserved),
             (r'(Bool|Int|Int8|Int16|Int32|Int64|Uint|Uint8|Uint16|Uint32|Uint64'
diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py
index 84850728..c154eb0f 100644
--- a/pygments/lexers/templates.py
+++ b/pygments/lexers/templates.py
@@ -1504,6 +1504,7 @@ class ColdfusionLexer(RegexLexer):
         ],
     }
 
+
 class ColdfusionMarkupLexer(RegexLexer):
     """
     Coldfusion markup only
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
index 93a6f115..5ac56c19 100644
--- a/pygments/lexers/web.py
+++ b/pygments/lexers/web.py
@@ -101,6 +101,16 @@ class JSONLexer(RegexLexer):
     filenames = ['*.json']
     mimetypes = [ 'application/json', ]
 
+    # integer part of a number
+    int_part = r'-?(0|[1-9]\d*)'
+
+    # fractional part of a number
+    frac_part = r'\.\d+'
+
+    # exponential part of a number
+    exp_part = r'[eE](\+|-)?\d+'
+
+
     flags = re.DOTALL
     tokens = {
         'whitespace': [
@@ -108,9 +118,12 @@
         ],
 
         # represents a simple terminal value
-        'simplevalue':[
+        'simplevalue': [
             (r'(true|false|null)\b', Keyword.Constant),
-            (r'-?[0-9]+', Number.Integer),
+            (('%(int_part)s(%(frac_part)s%(exp_part)s|'
+              '%(exp_part)s|%(frac_part)s)') % vars(),
+             Number.Float),
+            (int_part, Number.Integer),
             (r'"(\\\\|\\"|[^"])*"', String.Double),
         ],
 
diff --git a/pygments/style.py b/pygments/style.py
index 470fe99f..0d9e6eb6 100644
--- a/pygments/style.py
+++ b/pygments/style.py
@@ -26,7 +26,7 @@ class StyleMeta(type):
                 if len(col) == 6:
                     return col
                 elif len(col) == 3:
-                    return col[0]+'0'+col[1]+'0'+col[2]+'0'
+                    return col[0]*2 + col[1]*2 + col[2]*2
             elif text == '':
                 return ''
             assert False, "wrong color format %r" % text
diff --git a/tests/test_regexlexer.py b/tests/test_regexlexer.py
index 74b64d9b..b1392d3a 100644
--- a/tests/test_regexlexer.py
+++ b/tests/test_regexlexer.py
@@ -11,6 +11,7 @@ import unittest
 
 from pygments.token import Text
 from pygments.lexer import RegexLexer
+from pygments.lexer import bygroups
 
 
 class TestLexer(RegexLexer):
@@ -37,3 +38,10 @@ class TupleTransTest(unittest.TestCase):
         self.assertEqual(toks,
            [(0, Text.Root, 'a'), (1, Text.Rag, 'b'), (2, Text.Rag, 'c'),
             (3, Text.Beer, 'd'), (4, Text.Root, 'e')])
+
+    def test_multiline(self):
+        lx = TestLexer()
+        toks = list(lx.get_tokens_unprocessed('a\ne'))
+        self.assertEqual(toks,
+           [(0, Text.Root, 'a'), (1, Text, u'\n'),
+            (2, Text.Root, 'e')])
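
For reference, a minimal standalone sketch (not part of the commit; the helper name expand_shorthand is made up for illustration) of what the corrected three-digit colour branch in pygments/style.py now computes: shorthand hex digits are doubled, CSS-style, instead of being padded with zeros.

    # sketch of the StyleMeta colorformat fix above
    def expand_shorthand(col):
        if len(col) == 6:
            return col
        elif len(col) == 3:
            # new behaviour: 'f30' -> 'ff3300' (the old code returned 'f03000')
            return col[0]*2 + col[1]*2 + col[2]*2
        return col

    assert expand_shorthand('f30') == 'ff3300'
    assert expand_shorthand('ff3300') == 'ff3300'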
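
Likewise, a quick usage check of the JSONLexer change above (assuming this revision, where JSONLexer still lives in pygments/lexers/web.py): fractional and exponent literals should now be emitted as Number.Float, while plain integers stay Number.Integer.

    # small usage sketch, not part of the commit
    from pygments.lexers.web import JSONLexer
    from pygments.token import Number

    lexer = JSONLexer()
    tokens = list(lexer.get_tokens('{"pi": 3.14159, "n": 6.02e23, "count": 42}'))

    floats = [value for ttype, value in tokens if ttype == Number.Float]
    ints = [value for ttype, value in tokens if ttype == Number.Integer]
    print(floats)  # expected: ['3.14159', '6.02e23']
    print(ints)    # expected: ['42']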