author     Tim Hatch <tim@timhatch.com>    2013-05-05 23:29:02 -0700
committer  Tim Hatch <tim@timhatch.com>    2013-05-05 23:29:02 -0700
commit     fb36d2de4d57833084466de07fdbc52d6baa2741 (patch)
tree       3e6cbed0b54573e7f1d4fae0f55a47169a797396
parent     d0166615eef115dbea294d9f9442e0f78334167a (diff)
parent     c9855f8191cdc73912c151db662f863e0a8b9df3 (diff)
download   pygments-fb36d2de4d57833084466de07fdbc52d6baa2741.tar.gz
Merged in gentoo90/pygments-main (pull request #185)
PowerShellLexer: minor fixes
-rw-r--r--  pygments/lexers/_robotframeworklexer.py |   2
-rw-r--r--  pygments/lexers/_stan_builtins.py       | 226
-rw-r--r--  pygments/lexers/functional.py           |   2
-rw-r--r--  pygments/lexers/math.py                 |  27
-rw-r--r--  pygments/lexers/other.py                |   8
-rw-r--r--  pygments/lexers/text.py                 |   6
-rw-r--r--  pygments/lexers/web.py                  |   2
-rw-r--r--  tests/examplefiles/example.stan         | 161
-rw-r--r--  tests/examplefiles/type.lisp            |  16
9 files changed, 331 insertions, 119 deletions
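Among the files listed above, pygments/lexers/text.py changes the HttpLexer request and status line rules to accept the PATCH method and to match plain HTTP only (the HTTPS alternative is dropped). A minimal sketch of exercising that rule from Python; the request text is invented for illustration:

```python
from pygments.lexers.text import HttpLexer

# Invented request used only to exercise the new PATCH branch of the rule.
request = "PATCH /api/items/1 HTTP/1.1\r\nHost: example.com\r\n\r\n"

# get_tokens() yields (token_type, value) pairs; with this change the
# method name should be emitted as a Name.Function token.
for token_type, value in HttpLexer().get_tokens(request):
    if value.strip():
        print(token_type, repr(value))
```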
diff --git a/pygments/lexers/_robotframeworklexer.py b/pygments/lexers/_robotframeworklexer.py index 0192d289..bc64e12b 100644 --- a/pygments/lexers/_robotframeworklexer.py +++ b/pygments/lexers/_robotframeworklexer.py @@ -163,7 +163,7 @@ class RowSplitter(object): def split(self, row): splitter = (row.startswith('| ') and self._split_from_pipes or self._split_from_spaces) - for value in splitter(row.rstrip()): + for value in splitter(row): yield value yield '\n' diff --git a/pygments/lexers/_stan_builtins.py b/pygments/lexers/_stan_builtins.py index 69d8ce75..637072e4 100644 --- a/pygments/lexers/_stan_builtins.py +++ b/pygments/lexers/_stan_builtins.py @@ -1,27 +1,31 @@ # -*- coding: utf-8 -*- """ - pygments.lexers._stan_builtins - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +pygments.lexers._stan_builtins +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - This file contains the names of functions for Stan used by - ``pygments.lexers.math.StanLexer. +This file contains the names of functions for Stan used by +``pygments.lexers.math.StanLexer. - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. +:copyright: Copyright 2013 by the Pygments team, see AUTHORS. +:license: BSD, see LICENSE for details. """ -CONSTANTS=[ 'e', - 'epsilon', - 'log10', - 'log2', - 'negative_epsilon', - 'negative_infinity', - 'not_a_number', - 'pi', - 'positive_infinity', - 'sqrt2'] +KEYWORDS = ['else', 'for', 'if', 'in', 'lower', 'lp__', 'print', 'upper', 'while'] + +TYPES = [ 'corr_matrix', + 'cov_matrix', + 'int', + 'matrix', + 'ordered', + 'positive_ordered', + 'real', + 'row_vector', + 'simplex', + 'unit_vector', + 'vector'] -FUNCTIONS=[ 'Phi', +FUNCTIONS = [ 'Phi', + 'Phi_approx', 'abs', 'acos', 'acosh', @@ -30,37 +34,66 @@ FUNCTIONS=[ 'Phi', 'atan', 'atan2', 'atanh', + 'bernoulli_cdf', 'bernoulli_log', + 'bernoulli_logit_log', + 'bernoulli_rng', + 'beta_binomial_cdf', 'beta_binomial_log', + 'beta_binomial_rng', + 'beta_cdf', 'beta_log', + 'beta_rng', 'binary_log_loss', + 'binomial_cdf', 'binomial_coefficient_log', + 'binomial_log', + 'binomial_logit_log', + 'binomial_rng', + 'block', 'categorical_log', + 'categorical_rng', + 'cauchy_cdf', 'cauchy_log', + 'cauchy_rng', 'cbrt', 'ceil', 'chi_square_log', + 'chi_square_rng', 'cholesky_decompose', 'col', 'cols', 'cos', 'cosh', + 'crossprod', + 'cumulative_sum', 'determinant', 'diag_matrix', + 'diag_post_multiply', + 'diag_pre_multiply', 'diagonal', + 'dims', 'dirichlet_log', + 'dirichlet_rng', 'dot_product', 'dot_self', 'double_exponential_log', - 'eigenvalues', + 'double_exponential_rng', + 'e', 'eigenvalues_sym', + 'eigenvectors_sym', + 'epsilon', 'erf', 'erfc', 'exp', 'exp2', + 'exp_mod_normal_cdf', + 'exp_mod_normal_log', + 'exp_mod_normal_rng', 'expm1', 'exponential_cdf', 'exponential_log', + 'exponential_rng', 'fabs', 'fdim', 'floor', @@ -69,85 +102,148 @@ FUNCTIONS=[ 'Phi', 'fmin', 'fmod', 'gamma_log', + 'gamma_rng', + 'gumbel_cdf', + 'gumbel_log', + 'gumbel_rng', 'hypergeometric_log', + 'hypergeometric_rng', 'hypot', 'if_else', 'int_step', + 'inv_chi_square_cdf', 'inv_chi_square_log', + 'inv_chi_square_rng', 'inv_cloglog', + 'inv_gamma_cdf', 'inv_gamma_log', + 'inv_gamma_rng', 'inv_logit', 'inv_wishart_log', + 'inv_wishart_rng', 'inverse', 'lbeta', 'lgamma', 'lkj_corr_cholesky_log', + 'lkj_corr_cholesky_rng', 'lkj_corr_log', + 'lkj_corr_rng', 'lkj_cov_log', 'lmgamma', 'log', 'log10', 'log1m', + 'log1m_inv_logit', 'log1p', 'log1p_exp', 'log2', + 'log_determinant', + 'log_inv_logit', 'log_sum_exp', + 'logistic_cdf', 
'logistic_log', + 'logistic_rng', 'logit', 'lognormal_cdf', 'lognormal_log', + 'lognormal_rng', 'max', + 'mdivide_left_tri_low', + 'mdivide_right_tri_low', 'mean', 'min', 'multi_normal_cholesky_log', 'multi_normal_log', + 'multi_normal_prec_log', + 'multi_normal_rng', 'multi_student_t_log', + 'multi_student_t_rng', + 'multinomial_cdf', 'multinomial_log', + 'multinomial_rng', 'multiply_log', 'multiply_lower_tri_self_transpose', + 'neg_binomial_cdf', 'neg_binomial_log', + 'neg_binomial_rng', + 'negative_epsilon', + 'negative_infinity', 'normal_cdf', 'normal_log', + 'normal_rng', + 'not_a_number', 'ordered_logistic_log', + 'ordered_logistic_rng', + 'owens_t', + 'pareto_cdf', 'pareto_log', + 'pareto_rng', + 'pi', + 'poisson_cdf', 'poisson_log', + 'poisson_log_log', + 'poisson_rng', + 'positive_infinity', 'pow', 'prod', + 'rep_array', + 'rep_matrix', + 'rep_row_vector', + 'rep_vector', 'round', 'row', 'rows', + 'scaled_inv_chi_square_cdf', 'scaled_inv_chi_square_log', + 'scaled_inv_chi_square_rng', 'sd', 'sin', 'singular_values', 'sinh', + 'size', + 'skew_normal_cdf', + 'skew_normal_log', + 'skew_normal_rng', 'softmax', 'sqrt', + 'sqrt2', 'square', 'step', + 'student_t_cdf', 'student_t_log', + 'student_t_rng', 'sum', 'tan', 'tanh', + 'tcrossprod', 'tgamma', 'trace', 'trunc', 'uniform_log', + 'uniform_rng', 'variance', 'weibull_cdf', 'weibull_log', - 'wishart_log'] + 'weibull_rng', + 'wishart_log', + 'wishart_rng'] -DISTRIBUTIONS=[ 'bernoulli', +DISTRIBUTIONS = [ 'bernoulli', + 'bernoulli_logit', 'beta', 'beta_binomial', + 'binomial', + 'binomial_coefficient', + 'binomial_logit', 'categorical', 'cauchy', 'chi_square', 'dirichlet', 'double_exponential', + 'exp_mod_normal', 'exponential', 'gamma', + 'gumbel', 'hypergeometric', 'inv_chi_square', 'inv_gamma', @@ -159,16 +255,106 @@ DISTRIBUTIONS=[ 'bernoulli', 'lognormal', 'multi_normal', 'multi_normal_cholesky', + 'multi_normal_prec', 'multi_student_t', 'multinomial', + 'multiply', 'neg_binomial', 'normal', 'ordered_logistic', 'pareto', 'poisson', + 'poisson_log', 'scaled_inv_chi_square', + 'skew_normal', 'student_t', 'uniform', 'weibull', 'wishart'] +RESERVED = [ 'alignas', + 'alignof', + 'and', + 'and_eq', + 'asm', + 'auto', + 'bitand', + 'bitor', + 'bool', + 'break', + 'case', + 'catch', + 'char', + 'char16_t', + 'char32_t', + 'class', + 'compl', + 'const', + 'const_cast', + 'constexpr', + 'continue', + 'decltype', + 'default', + 'delete', + 'do', + 'double', + 'dynamic_cast', + 'enum', + 'explicit', + 'export', + 'extern', + 'false', + 'false', + 'float', + 'friend', + 'goto', + 'inline', + 'int', + 'long', + 'mutable', + 'namespace', + 'new', + 'noexcept', + 'not', + 'not_eq', + 'nullptr', + 'operator', + 'or', + 'or_eq', + 'private', + 'protected', + 'public', + 'register', + 'reinterpret_cast', + 'repeat', + 'return', + 'short', + 'signed', + 'sizeof', + 'static', + 'static_assert', + 'static_cast', + 'struct', + 'switch', + 'template', + 'then', + 'this', + 'thread_local', + 'throw', + 'true', + 'true', + 'try', + 'typedef', + 'typeid', + 'typename', + 'union', + 'unsigned', + 'until', + 'using', + 'virtual', + 'void', + 'volatile', + 'wchar_t', + 'xor', + 'xor_eq'] + diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index 889e7ec6..613be987 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -808,6 +808,8 @@ class CommonLispLexer(RegexLexer): (r'"(\\.|\\\n|[^"\\])*"', String), # quoting (r":" + symbol, String.Symbol), + (r"::" + symbol, String.Symbol), + (r":#" + symbol, 
String.Symbol), (r"'" + symbol, String.Symbol), (r"'", Operator), (r"`", Operator), diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py index 4dda7372..0b757e44 100644 --- a/pygments/lexers/math.py +++ b/pygments/lexers/math.py @@ -1300,8 +1300,11 @@ class JagsLexer(RegexLexer): return 0 class StanLexer(RegexLexer): - """ - Pygments Lexer for Stan models. + """Pygments Lexer for Stan models. + + The Stan modeling language is specified in the *Stan 1.3.0 + Modeling Language Manual* `pdf + <http://code.google.com/p/stan/downloads/detail?name=stan-reference-1.3.0.pdf>`_. *New in Pygments 1.6.* """ @@ -1310,13 +1313,6 @@ class StanLexer(RegexLexer): aliases = ['stan'] filenames = ['*.stan'] - _RESERVED = ('for', 'in', 'while', 'repeat', 'until', 'if', - 'then', 'else', 'true', 'false', 'T', - 'lower', 'upper', 'print') - - _TYPES = ('int', 'real', 'vector', 'simplex', 'ordered', 'row_vector', - 'matrix', 'corr_matrix', 'cov_matrix', 'positive_ordered') - tokens = { 'whitespace' : [ (r"\s+", Text), @@ -1340,20 +1336,21 @@ class StanLexer(RegexLexer): 'model', r'generated\s+quantities')), bygroups(Keyword.Namespace, Text, Punctuation)), # Reserved Words - (r'(%s)\b' % r'|'.join(_RESERVED), Keyword.Reserved), + (r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword), + # Truncation + (r'T(?=\s*\[)', Keyword), # Data types - (r'(%s)\b' % r'|'.join(_TYPES), Keyword.Type), + (r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type), # Punctuation - (r"[;:,\[\]()<>]", Punctuation), + (r"[;:,\[\]()]", Punctuation), # Builtin (r'(%s)(?=\s*\()' % r'|'.join(_stan_builtins.FUNCTIONS + _stan_builtins.DISTRIBUTIONS), Name.Builtin), - (r'(%s)(?=\s*\()' - % r'|'.join(_stan_builtins.CONSTANTS), Keyword.Constant), # Special names ending in __, like lp__ (r'[A-Za-z][A-Za-z0-9_]*__\b', Name.Builtin.Pseudo), + (r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved), # Regular variable names (r'[A-Za-z][A-Za-z0-9_]*\b', Name), # Real Literals @@ -1365,7 +1362,7 @@ class StanLexer(RegexLexer): # SLexer makes these tokens Operators. (r'<-|~', Operator), # Infix and prefix operators (and = ) - (r"\+|-|\.?\*|\.?/|\\|'|=", Operator), + (r"\+|-|\.?\*|\.?/|\\|'|==?|!=?|<=?|>=?|\|\||&&", Operator), # Block delimiters (r'[{}]', Punctuation), ] diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 35350de4..803212b0 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -1397,8 +1397,6 @@ class RebolLexer(RegexLexer): (r';.*\n', Comment), (r'%"', Name.Decorator, 'stringFile'), (r'%[^(\^{^")\s\[\]]+', Name.Decorator), - (r'<[a-zA-Z0-9:._-]*>', Name.Tag), - (r'<[^(<>\s")]+', Name.Tag, 'tag'), (r'[+-]?([a-zA-Z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time (r'\d+\-[0-9a-zA-Z]+\-\d+(\/\d+\:\d+(\:\d+)?' 
@@ -1415,6 +1413,8 @@ class RebolLexer(RegexLexer): (r'comment\s', Comment, 'comment'), (r'/[^(\^{^")\s/[\]]*', Name.Attribute), (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback), + (r'<[a-zA-Z0-9:._-]*>', Name.Tag), + (r'<[^(<>\s")]+', Name.Tag, 'tag'), (r'([^(\^{^")\s]+)', Text), ], 'string': [ @@ -2839,8 +2839,8 @@ class BroLexer(RegexLexer): (r'\\\n', Text), # Keywords (r'(add|alarm|break|case|const|continue|delete|do|else|enum|event' - r'|export|for|function|if|global|local|module|next' - r'|of|print|redef|return|schedule|type|when|while)\b', Keyword), + r'|export|for|function|if|global|hook|local|module|next' + r'|of|print|redef|return|schedule|switch|type|when|while)\b', Keyword), (r'(addr|any|bool|count|counter|double|file|int|interval|net' r'|pattern|port|record|set|string|subnet|table|time|timer' r'|vector)\b', Keyword.Type), diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py index 5e340893..b47c49d2 100644 --- a/pygments/lexers/text.py +++ b/pygments/lexers/text.py @@ -1709,12 +1709,12 @@ class HttpLexer(RegexLexer): tokens = { 'root': [ - (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE)( +)([^ ]+)( +)' - r'(HTTPS?)(/)(1\.[01])(\r?\n|$)', + (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)' + r'(HTTP)(/)(1\.[01])(\r?\n|$)', bygroups(Name.Function, Text, Name.Namespace, Text, Keyword.Reserved, Operator, Number, Text), 'headers'), - (r'(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)', + (r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)', bygroups(Keyword.Reserved, Operator, Number, Text, Number, Text, Name.Exception, Text), 'headers'), diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py index dc8c7c5f..383bf6ad 100644 --- a/pygments/lexers/web.py +++ b/pygments/lexers/web.py @@ -67,7 +67,7 @@ class JavascriptLexer(RegexLexer): (r'[{(\[;,]', Punctuation, 'slashstartsregex'), (r'[})\].]', Punctuation), (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|' - r'throw|try|catch|finally|new|delete|typeof|instanceof|void|' + r'throw|try|catch|finally|new|delete|typeof|instanceof|void|yield|' r'this)\b', Keyword, 'slashstartsregex'), (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'), (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|' diff --git a/tests/examplefiles/example.stan b/tests/examplefiles/example.stan index 5723403c..e936f54a 100644 --- a/tests/examplefiles/example.stan +++ b/tests/examplefiles/example.stan @@ -6,92 +6,103 @@ It is not a real model and will not compile # also a comment // also a comment data { - // valid name - int abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_abc; - // all types should be highlighed - int a3; - real foo[2]; - vector[3] bar; - row_vector[3] baz; - matrix[3,3] qux; - simplex[3] quux; - ordered[3] corge; - positive_ordered[3] wibble; - corr_matrix[3] grault; - cov_matrix[3] garply; - - real<lower=-1,upper=1> foo1; - real<lower=0> foo2; - real<upper=0> foo3; - - // bad names - // includes . 
- // real foo.; - // beings with number - //real 0foo; - // begins with _ - //real _foo; + // valid name + int abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_abc; + // all types should be highlighed + int a3; + real foo[2]; + vector[3] bar; + row_vector[3] baz; + matrix[3,3] qux; + simplex[3] quux; + ordered[3] corge; + positive_ordered[3] wibble; + corr_matrix[3] grault; + cov_matrix[3] garply; + + real<lower=-1,upper=1> foo1; + real<lower=0> foo2; + real<upper=0> foo3; } transformed data { - real xyzzy; - int thud; - row_vector grault2; - matrix qux2; - - // all floating point literals should be recognized - // all operators should be recognized - // paren should be recognized; - xyzzy <- 1234.5687 + .123 - (2.7e3 / 2E-5 * 135e-5); - // integer literal - thud <- -12309865; - // ./ and .* should be recognized as operators - grault2 <- grault .* garply ./ garply; - // ' and \ should be regognized as operators - qux2 <- qux' \ bar; - + real xyzzy; + int thud; + row_vector grault2; + matrix qux2; + + // all floating point literals should be recognized + // all operators should be recognized + // paren should be recognized; + xyzzy <- 1234.5687 + .123 - (2.7e3 / 2E-5 * 135e-5); + // integer literal + thud <- -12309865; + // ./ and .* should be recognized as operators + grault2 <- grault .* garply ./ garply; + // ' and \ should be regognized as operators + qux2 <- qux' \ bar; + } parameters { - real fred; - real plugh; - + real fred; + real plugh; } transformed parameters { } model { - // ~, <- are operators, - // T may be be recognized - // normal is a function - fred ~ normal(0, 1) T(-0.5, 0.5); - // interior block - { - real tmp; - // for, in should be highlighted - for (i in 1:10) { - tmp <- tmp + 0.1; - } - } - // lp__ should be highlighted - // normal_log as a function - lp__ <- lp__ + normal_log(plugh, 0, 1); + // ~, <- are operators, + // T may be be recognized + // normal is a function + fred ~ normal(0, 1) T(-0.5, 0.5); + real tmp; + // C++ reserved + real public; + + // control structures + for (i in 1:10) { + tmp <- tmp + 0.1; + } + tmp <- 0.0; + while (tmp < 5.0) { + tmp <- tmp + 1; + } + if (tmp > 0.0) { + print(tmp); + } else { + print(tmp); + } - // print statement and string literal - print("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_~@#$%^&*`'-+={}[].,;: "); - print("Hello, world!"); - print(""); + // operators + tmp || tmp; + tmp && tmp; + tmp == tmp; + tmp != tmp; + tmp < tmp; + tmp <= tmp; + tmp > tmp; + tmp >= tmp; + tmp + tmp; + tmp - tmp; + tmp * tmp; + tmp / tmp; + tmp .* tmp; + tmp ./ tmp; + ! tmp; + - tmp; + + tmp; + tmp '; + // lp__ should be highlighted + // normal_log as a function + lp__ <- lp__ + normal_log(plugh, 0, 1); + + // print statement and string literal + print("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_~@#$%^&*`'-+={}[].,;: "); + print("Hello, world!"); + print(""); + } generated quantities { - real bar1; - bar1 <- foo + 1; + real bar1; + bar1 <- foo + 1; } -## Baddness -//foo <- 2.0; -//foo ~ normal(0, 1); -//not_a_block { -//} - -/* -what happens with this? 
-*/ -// */ diff --git a/tests/examplefiles/type.lisp b/tests/examplefiles/type.lisp index 9c769379..c02c29df 100644 --- a/tests/examplefiles/type.lisp +++ b/tests/examplefiles/type.lisp @@ -1200,3 +1200,19 @@ Henry Baker: (unless (clos::funcallable-instance-p #'clos::class-name) (fmakunbound 'clos::class-name)) + + +(keywordp :junk) + T + +(keywordp ::junk) + T + +(symbol-name ::junk) + "JUNK" + +(symbol-name :#junk) + "#JUNK" + +(symbol-name :#.junk) + "#.JUNK" |
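The StanLexer changes in pygments/lexers/math.py move the keyword, type, function, and distribution tables into pygments/lexers/_stan_builtins.py, treat the truncation marker T (when followed by `[`) as a keyword, and extend the operator rule with comparison and logical operators. A minimal sketch of running the updated lexer over a toy model; the model text is invented here and is not taken from tests/examplefiles/example.stan:

```python
from pygments import highlight
from pygments.lexers.math import StanLexer
from pygments.formatters import TerminalFormatter

# Invented toy model touching constructs changed in this diff:
# truncation T[...], the && operator, print(), and an *_rng builtin.
model = """
model {
  real y;
  y ~ normal(0, 1) T[-2, 2];
  if (y > 0.0 && y < 1.0)
    print("in unit interval");
}
generated quantities {
  real z;
  z <- normal_rng(0.0, 1.0);
}
"""

print(highlight(model, StanLexer(), TerminalFormatter()))
```

TerminalFormatter is used only for demonstration; any other Pygments formatter can be substituted.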