summaryrefslogtreecommitdiff
path: root/pygments/lexers/math.py
diff options
context:
space:
mode:
authorGeorg Brandl <georg@python.org>2013-01-09 14:04:54 +0100
committerGeorg Brandl <georg@python.org>2013-01-09 14:04:54 +0100
commit9cc134d20b8505b6d472cca6872406dbecf5fda3 (patch)
tree773193024f85183f92d1ee4e32fe7a2b5ea70349 /pygments/lexers/math.py
parent2867cf9479652aebd216e0c081e14bdc9aa4897b (diff)
parent0b03ce73a68bc794d4891b3cbbc581c7ccf3e03a (diff)
downloadpygments-9cc134d20b8505b6d472cca6872406dbecf5fda3.tar.gz
Merged in blackjack/pygments-main (pull request #88: CBM BASIC V2 and ca65 assembler)
Diffstat (limited to 'pygments/lexers/math.py')
-rw-r--r--pygments/lexers/math.py137
1 files changed, 82 insertions, 55 deletions
diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py
index fb39abaf..a4e241d6 100644
--- a/pygments/lexers/math.py
+++ b/pygments/lexers/math.py
@@ -5,12 +5,13 @@
Lexers for math languages.
- :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
+from pygments.util import shebang_matches
from pygments.lexer import Lexer, RegexLexer, bygroups, include, \
combined, do_insertions
from pygments.token import Comment, String, Punctuation, Keyword, Name, \
@@ -18,13 +19,20 @@ from pygments.token import Comment, String, Punctuation, Keyword, Name, \
from pygments.lexers.agile import PythonLexer
from pygments.lexers import _scilab_builtins
+from pygments.lexers import _stan_builtins
__all__ = ['JuliaLexer', 'JuliaConsoleLexer', 'MuPADLexer', 'MatlabLexer',
'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer', 'NumPyLexer',
- 'RConsoleLexer', 'SLexer', 'JagsLexer', 'BugsLexer', 'StanLexer']
+ 'RConsoleLexer', 'SLexer', 'JagsLexer', 'BugsLexer', 'StanLexer',
+ 'RdLexer']
class JuliaLexer(RegexLexer):
+ """
+ For `Julia <http://julialang.org/>`_ source code.
+
+ *New in Pygments 1.6.*
+ """
name = 'Julia'
aliases = ['julia','jl']
filenames = ['*.jl']
@@ -76,7 +84,8 @@ class JuliaLexer(RegexLexer):
(r'`(?s).*?`', String.Backtick),
# chars
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
+ r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
# try to match trailing transpose
(r'(?<=[.\w\)\]])\'+', Operator),
@@ -90,9 +99,10 @@ class JuliaLexer(RegexLexer):
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
# numbers
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
(r'0b[01]+', Number.Binary),
+ (r'0o[0-7]+', Number.Oct),
(r'0x[a-fA-F0-9]+', Number.Hex),
(r'\d+', Number.Integer)
],
@@ -134,6 +144,8 @@ line_re = re.compile('.*?\n')
class JuliaConsoleLexer(Lexer):
"""
For Julia console sessions. Modeled after MatlabSessionLexer.
+
+ *New in Pygments 1.6.*
"""
name = 'Julia console'
aliases = ['jlcon']
@@ -332,6 +344,10 @@ class MatlabLexer(RegexLexer):
# (not great, but handles common cases...)
(r'(?<=[\w\)\]])\'', Operator),
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer),
+
(r'(?<![\w\)\]])\'', String, 'string'),
('[a-zA-Z_][a-zA-Z0-9_]*', Name),
(r'.', Text),
@@ -778,6 +794,10 @@ class OctaveLexer(RegexLexer):
(r'"[^"]*"', String),
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer),
+
# quote can be transpose, instead of string:
# (not great, but handles common cases...)
(r'(?<=[\w\)\]])\'', Operator),
@@ -849,6 +869,10 @@ class ScilabLexer(RegexLexer):
(r'(?<=[\w\)\]])\'', Operator),
(r'(?<![\w\)\]])\'', String, 'string'),
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer),
+
('[a-zA-Z_][a-zA-Z0-9_]*', Name),
(r'.', Text),
],
@@ -1089,7 +1113,8 @@ class SLexer(RegexLexer):
class BugsLexer(RegexLexer):
"""
- Pygments Lexer for OpenBugs and WinBugs models.
+ Pygments Lexer for `OpenBugs <http://www.openbugs.info/w/>`_ and WinBugs
+ models.
*New in Pygments 1.6.*
"""
@@ -1152,7 +1177,7 @@ class BugsLexer(RegexLexer):
include('comments'),
include('whitespace'),
# Block start
- (r'(?s)(model)(\s+)({)',
+ (r'(model)(\s+)({)',
bygroups(Keyword.Namespace, Text, Punctuation)),
# Reserved Words
(r'(for|in)(?![0-9a-zA-Z\._])', Keyword.Reserved),
@@ -1234,7 +1259,7 @@ class JagsLexer(RegexLexer):
include('comments'),
include('whitespace'),
# Block start
- (r'(?s)(model|data)(\s+)({)',
+ (r'(model|data)(\s+)({)',
bygroups(Keyword.Namespace, Text, Punctuation)),
(r'var(?![0-9a-zA-Z\._])', Keyword.Declaration),
# Reserved Words
@@ -1281,47 +1306,11 @@ class StanLexer(RegexLexer):
filenames = ['*.stan']
_RESERVED = ('for', 'in', 'while', 'repeat', 'until', 'if',
- 'then', 'else', 'true', 'false', 'T')
+ 'then', 'else', 'true', 'false', 'T',
+ 'lower', 'upper', 'print')
_TYPES = ('int', 'real', 'vector', 'simplex', 'ordered', 'row_vector',
- 'matrix', 'corr_matrix', 'cov_matrix')
-
- # STAN 1.0 Manual, Chapter 20
- _CONSTANTS = ['pi', 'e', 'sqrt2', 'log2', 'log10', 'nan', 'infinity',
- 'epsilon', 'negative_epsilon']
- _FUNCTIONS = ['abs', 'int_step', 'min', 'max',
- 'if_else', 'step',
- 'fabs', 'fdim',
- 'fmin', 'fmax',
- 'fmod',
- 'floor', 'ceil', 'round', 'trunc',
- 'sqrt', 'cbrt', 'square', 'exp', 'exp2', 'expm1',
- 'log', 'log2', 'log10', 'pow', 'logit', 'inv_logit',
- 'inv_cloglog', 'hypot', 'cos', 'sin', 'tan', 'acos',
- 'asin', 'atan', 'atan2', 'cosh', 'sinh', 'tanh',
- 'acosh', 'asinh', 'atanh', 'erf', 'erfc', 'Phi',
- 'log_loss', 'tgamma', 'lgamma', 'lmgamma', 'lbeta',
- 'binomial_coefficient_log',
- 'fma', 'multiply_log', 'log1p', 'log1m', 'log1p_exp',
- 'log_sum_exp',
- 'rows', 'cols',
- 'dot_product', 'prod', 'mean', 'variance', 'sd',
- 'diagonal', 'diag_matrix', 'col', 'row',
- 'softmax', 'trace', 'determinant', 'inverse', 'eigenvalue',
- 'eigenvalues_sym', 'cholesky', 'singular_values',
- '(log)?normal_p', 'exponential_p', 'gamma_p', 'weibull_p']
- _DISTRIBUTIONS = ['bernoulli', 'bernoulli_logit', 'binomial',
- 'beta_binomial', 'hypergeometric', 'categorical',
- 'ordered_logistic', 'neg_binomial', 'poisson',
- 'multinomial', 'normal', 'student_t',
- 'cauchy', 'double_exponential', 'logistic',
- 'lognormal', 'chi_square', 'inv_chi_square',
- 'scaled_inv_chi_square', 'exponential',
- 'gamma', 'inv_gamma', 'weibull', 'pareto',
- 'beta', 'uniform', 'dirichlet', 'multi_normal',
- 'multi_normal_cholesky', 'multi_student_t',
- 'wishart', 'inv_wishart', 'lkj_cov',
- 'lkj_corr_cholesky']
+ 'matrix', 'corr_matrix', 'cov_matrix', 'positive_ordered')
tokens = {
'whitespace' : [
@@ -1333,12 +1322,14 @@ class StanLexer(RegexLexer):
(r'(//|#).*$', Comment.Single),
],
'root': [
+ # Stan is more restrictive on strings than this regex
+ (r'"[^"]*"', String),
# Comments
include('comments'),
# block start
include('whitespace'),
# Block start
- (r'(?s)(%s)(\s*)({)' %
+ (r'(%s)(\s*)({)' %
r'|'.join(('data', r'transformed\s+?data',
'parameters', r'transformed\s+parameters',
'model', r'generated\s+quantities')),
@@ -1351,13 +1342,11 @@ class StanLexer(RegexLexer):
(r"[;:,\[\]()<>]", Punctuation),
# Builtin
(r'(%s)(?=\s*\()'
- % r'|'.join(_FUNCTIONS
- + _DISTRIBUTIONS
- + ['%s_log' % x for x in _DISTRIBUTIONS]),
+ % r'|'.join(_stan_builtins.FUNCTIONS
+ + _stan_builtins.DISTRIBUTIONS),
Name.Builtin),
(r'(%s)(?=\s*\()'
- % r'|'.join(_CONSTANTS),
- Keyword.Constant),
+ % r'|'.join(_stan_builtins.CONSTANTS), Keyword.Constant),
# Special names ending in __, like lp__
(r'[A-Za-z][A-Za-z0-9_]*__\b', Name.Builtin.Pseudo),
# Regular variable names
@@ -1368,7 +1357,7 @@ class StanLexer(RegexLexer):
# Integer Literals
(r'-?[0-9]+', Number.Integer),
# Assignment operators
- # SLexer makes these tokens Operators.
+ # SLexer makes these tokens Operators.
(r'<-|~', Operator),
# Infix and prefix operators (and = )
(r"\+|-|\.?\*|\.?/|\\|'|=", Operator),
@@ -1382,4 +1371,42 @@ class StanLexer(RegexLexer):
return 1.0
else:
return 0.0
-
+
+
+class RdLexer(RegexLexer):
+ """
+ Pygments Lexer for R documentation (Rd) files
+
+ This is a very minimal implementation, highlighting little more
+ than the macros. A description of Rd syntax is found in `Writing R
+ Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
+ and `Parsing Rd files <http://developer.r-project.org/parseRd.pdf>`_.
+
+ *New in Pygments 1.6.*
+ """
+ name = 'Rd'
+ aliases = ['rd']
+ filenames = ['*.Rd']
+ mimetypes = ['text/x-r-doc']
+
+ # To account for verbatim / LaTeX-like / and R-like areas
+ # would require parsing.
+ tokens = {
+ 'root' : [
+ # catch escaped brackets and percent sign
+ (r'\\[\\{}%]', String.Escape),
+ # comments
+ (r'%.*$', Comment),
+ # special macros with no arguments
+ (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
+ # macros
+ (r'\\[a-zA-Z]+\b', Keyword),
+ # special preprocessor macros
+ (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
+ # non-escaped brackets
+ (r'[{}]', Name.Builtin),
+ # everything else
+ (r'[^\\%\n{}]+', Text),
+ (r'.', Text),
+ ]
+ }