author     Georg Brandl <georg@python.org>    2015-08-08 07:12:19 +0200
committer  Georg Brandl <georg@python.org>    2015-08-08 07:12:19 +0200
commit     c93f993dd58ceb68b8f268ec712d6444fb2a032c (patch)
tree       17de7c3818160d41f6062b9166d0b7d27ce1612f
parent     fd34378d0a6fd5f3bcb5b392ab0a14ff50618b07 (diff)
parent     68f0e6abf7407c789114100aca05955a11defb97 (diff)
Merged in thomas_beale/pygments-main (pull request #483)
Add lexer and test files for the Archetype Definition Language (ADL) and sub-syntaxes.
-rw-r--r--  AUTHORS                                       1
-rw-r--r--  CHANGES                                       3
-rw-r--r--  doc/languages.rst                             1
-rw-r--r--  pygments/lexers/_mapping.py                   9
-rw-r--r--  pygments/lexers/_stan_builtins.py            14
-rw-r--r--  pygments/lexers/c_cpp.py                      4
-rw-r--r--  pygments/lexers/chapel.py                     7
-rw-r--r--  pygments/lexers/css.py                        1
-rw-r--r--  pygments/lexers/fortran.py                   50
-rw-r--r--  pygments/lexers/javascript.py                 2
-rw-r--r--  pygments/lexers/julia.py                      4
-rw-r--r--  pygments/lexers/jvm.py                        2
-rw-r--r--  pygments/lexers/modeling.py                   4
-rw-r--r--  pygments/lexers/python.py                     9
-rw-r--r--  pygments/lexers/robotframework.py            30
-rw-r--r--  pygments/lexers/rust.py                     113
-rw-r--r--  pygments/lexers/shell.py                      2
-rw-r--r--  pygments/lexers/templates.py                  2
-rw-r--r--  pygments/lexers/testing.py                    4
-rw-r--r--  pygments/modeline.py                          5
-rw-r--r--  tests/examplefiles/99_bottles_of_beer.chpl   13
-rw-r--r--  tests/examplefiles/ahcon.f                  340
-rw-r--r--  tests/examplefiles/eval.rs                  606
-rw-r--r--  tests/examplefiles/robotframework_test.txt    1
-rw-r--r--  tests/examplefiles/rust_example.rs          235
-rw-r--r--  tests/examplefiles/test.gradle               20
26 files changed, 1157 insertions, 325 deletions
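
Before the per-file hunks, a minimal sketch (not part of the commit) of how two of the lexer registrations changed below could be exercised through Pygments' public API, assuming a checkout that includes this merge; the sample source string and file name are made up for illustration:

    # Smoke-test the 'fortranfixed' alias and the '*.gradle' -> Groovy mapping,
    # both introduced by the _mapping.py / fortran.py / jvm.py hunks below.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    fixed_src = "C     fixed-form comment\n      PRINT *, 'HELLO'\n      END\n"
    lexer = get_lexer_by_name('fortranfixed')            # new FortranFixedLexer alias
    print(highlight(fixed_src, lexer, TerminalFormatter()))

    print(get_lexer_for_filename('build.gradle').name)   # 'Groovy' via the new *.gradle pattern
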
diff --git a/AUTHORS b/AUTHORS
index 4fa7e0da..96a346ba 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -127,6 +127,7 @@ Other contributors, listed alphabetically, are:
* Dominik Picheta -- Nimrod lexer
* Andrew Pinkham -- RTF Formatter Refactoring
* Clément Prévost -- UrbiScript lexer
+* Elias Rabel -- Fortran fixed form lexer
* raichoo -- Idris lexer
* Kashif Rasul -- CUDA lexer
* Justin Reidy -- MXML lexer
diff --git a/CHANGES b/CHANGES
index ad5bba49..ed031f86 100644
--- a/CHANGES
+++ b/CHANGES
@@ -16,6 +16,7 @@ Version 2.1
* Emacs Lisp (PR#431)
* Arduino (PR#442)
* Modula-2 with multi-dialect support (#1090)
+ * Fortran fixed format (PR#213)
- Added styles:
@@ -26,6 +27,8 @@ Version 2.1
- Fixed style inheritance for non-standard token types in HTML output.
+- Added support for async/await to Python 3 lexer.
+
Version 2.0.3
-------------
diff --git a/doc/languages.rst b/doc/languages.rst
index 1d5c3155..13555ccf 100644
--- a/doc/languages.rst
+++ b/doc/languages.rst
@@ -86,7 +86,6 @@ Programming languages
* Visual FoxPro
* XQuery
* Zephir
- </ul>
Template languages
------------------
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index 89e2da5f..2e855570 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -128,7 +128,8 @@ LEXERS = {
'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
- 'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)),
+ 'FortranFixedLexer': ('pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()),
+ 'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)),
'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
'GAPLexer': ('pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
'GLShaderLexer': ('pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
@@ -144,7 +145,7 @@ LEXERS = {
'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
- 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy',), ('text/x-groovy',)),
+ 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)),
'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
@@ -177,7 +178,7 @@ LEXERS = {
'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
- 'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
+ 'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js', '*.jsm'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
@@ -315,7 +316,7 @@ LEXERS = {
'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')),
- 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust',), ('*.rs',), ('text/x-rustsrc',)),
+ 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust',), ('*.rs',), ('text/rust',)),
'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
diff --git a/pygments/lexers/_stan_builtins.py b/pygments/lexers/_stan_builtins.py
index 0a225eba..6bf44574 100644
--- a/pygments/lexers/_stan_builtins.py
+++ b/pygments/lexers/_stan_builtins.py
@@ -4,7 +4,7 @@
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file contains the names of functions for Stan used by
- ``pygments.lexers.math.StanLexer. This is for Stan language version 2.5.0.
+ ``pygments.lexers.math.StanLexer. This is for Stan language version 2.7.0
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
@@ -35,6 +35,7 @@ TYPES = (
'positive_ordered',
'real',
'row_vector',
+ 'row_vectormatrix',
'simplex',
'unit_vector',
'vector',
@@ -200,7 +201,6 @@ FUNCTIONS = (
'lkj_corr_cholesky_rng',
'lkj_corr_log',
'lkj_corr_rng',
- 'lkj_cov_log',
'lmgamma',
'log',
'log10',
@@ -214,6 +214,7 @@ FUNCTIONS = (
'log_diff_exp',
'log_falling_factorial',
'log_inv_logit',
+ 'log_mix',
'log_rising_factorial',
'log_softmax',
'log_sum_exp',
@@ -236,6 +237,7 @@ FUNCTIONS = (
'min',
'modified_bessel_first_kind',
'modified_bessel_second_kind',
+ 'multi_gp_cholesky_log',
'multi_gp_log',
'multi_normal_cholesky_log',
'multi_normal_cholesky_rng',
@@ -248,6 +250,9 @@ FUNCTIONS = (
'multinomial_rng',
'multiply_log',
'multiply_lower_tri_self_transpose',
+ 'neg_binomial_2_ccdf_log',
+ 'neg_binomial_2_cdf',
+ 'neg_binomial_2_cdf_log',
'neg_binomial_2_log',
'neg_binomial_2_log_log',
'neg_binomial_2_log_rng',
@@ -284,6 +289,7 @@ FUNCTIONS = (
'poisson_cdf_log',
'poisson_log',
'poisson_log_log',
+ 'poisson_log_rng',
'poisson_rng',
'positive_infinity',
'pow',
@@ -371,6 +377,7 @@ FUNCTIONS = (
'weibull_cdf_log',
'weibull_log',
'weibull_rng',
+ 'wiener_log',
'wishart_log',
'wishart_rng'
)
@@ -400,10 +407,10 @@ DISTRIBUTIONS = (
'inv_wishart',
'lkj_corr',
'lkj_corr_cholesky',
- 'lkj_cov',
'logistic',
'lognormal',
'multi_gp',
+ 'multi_gp_cholesky',
'multi_normal',
'multi_normal_cholesky',
'multi_normal_prec',
@@ -425,6 +432,7 @@ DISTRIBUTIONS = (
'uniform',
'von_mises',
'weibull',
+ 'wiener',
'wishart'
)
diff --git a/pygments/lexers/c_cpp.py b/pygments/lexers/c_cpp.py
index bbac406e..35ea517f 100644
--- a/pygments/lexers/c_cpp.py
+++ b/pygments/lexers/c_cpp.py
@@ -28,8 +28,10 @@ class CFamilyLexer(RegexLexer):
#: optional Comment or Whitespace
_ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ # The trailing ?, rather than *, avoids a geometric performance drop here.
#: only one /* */ style comment
- _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
+ _ws1 = r'\s*(?:/[*].*?[*]/\s*)?'
tokens = {
'whitespace': [
diff --git a/pygments/lexers/chapel.py b/pygments/lexers/chapel.py
index 520be37b..6fb6920c 100644
--- a/pygments/lexers/chapel.py
+++ b/pygments/lexers/chapel.py
@@ -46,9 +46,10 @@ class ChapelLexer(RegexLexer):
'continue', 'delete', 'dmapped', 'do', 'domain', 'else', 'enum',
'export', 'extern', 'for', 'forall', 'if', 'index', 'inline',
'iter', 'label', 'lambda', 'let', 'local', 'new', 'noinit', 'on',
- 'otherwise', 'pragma', 'reduce', 'return', 'scan', 'select',
- 'serial', 'single', 'sparse', 'subdomain', 'sync', 'then', 'use',
- 'when', 'where', 'while', 'with', 'yield', 'zip'), suffix=r'\b'),
+ 'otherwise', 'pragma', 'private', 'public', 'reduce', 'return',
+ 'scan', 'select', 'serial', 'single', 'sparse', 'subdomain',
+ 'sync', 'then', 'use', 'when', 'where', 'while', 'with', 'yield',
+ 'zip'), suffix=r'\b'),
Keyword),
(r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'),
(r'(class|module|record|union)(\s+)', bygroups(Keyword, Text),
diff --git a/pygments/lexers/css.py b/pygments/lexers/css.py
index c11e7ec0..6f27d63c 100644
--- a/pygments/lexers/css.py
+++ b/pygments/lexers/css.py
@@ -484,6 +484,7 @@ class ScssLexer(RegexLexer):
(r'[^\s:="\[]+', Name.Attribute),
(r'#\{', String.Interpol, 'interpolation'),
(r'[ \t]*:', Operator, 'value'),
+ default('#pop'),
],
'inline-comment': [
diff --git a/pygments/lexers/fortran.py b/pygments/lexers/fortran.py
index 8ba54aff..df3fed4f 100644
--- a/pygments/lexers/fortran.py
+++ b/pygments/lexers/fortran.py
@@ -11,11 +11,11 @@
import re
-from pygments.lexer import RegexLexer, include, words
+from pygments.lexer import RegexLexer, bygroups, include, words, using
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
+ Number, Punctuation, Generic
-__all__ = ['FortranLexer']
+__all__ = ['FortranLexer', 'FortranFixedLexer']
class FortranLexer(RegexLexer):
@@ -26,7 +26,7 @@ class FortranLexer(RegexLexer):
"""
name = 'Fortran'
aliases = ['fortran']
- filenames = ['*.f', '*.f90', '*.F', '*.F90']
+ filenames = ['*.f03', '*.f90', '*.F03', '*.F90']
mimetypes = ['text/x-fortran']
flags = re.IGNORECASE | re.MULTILINE
@@ -159,3 +159,45 @@ class FortranLexer(RegexLexer):
(r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
],
}
+
+
+class FortranFixedLexer(RegexLexer):
+ """
+ Lexer for fixed format Fortran.
+ """
+ name = 'FortranFixed'
+ aliases = ['fortranfixed']
+ filenames = ['*.f', '*.F']
+
+ flags = re.IGNORECASE
+
+ def _lex_fortran(self, match, ctx=None):
+ """Lex a line just as free form fortran without line break."""
+ lexer = FortranLexer()
+ text = match.group(0) + "\n"
+ for index, token, value in lexer.get_tokens_unprocessed(text):
+ value = value.replace('\n', '')
+ if value != '':
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ (r'[C*].*\n', Comment),
+ (r'#.*\n', Comment.Preproc),
+ (r' {0,4}!.*\n', Comment),
+ (r'(.{5})', Name.Label, 'cont-char'),
+ (r'.*\n', using(FortranLexer)),
+ ],
+
+ 'cont-char': [
+ (' ', Text, 'code'),
+ ('0', Comment, 'code'),
+ ('.', Generic.Strong, 'code')
+ ],
+
+ 'code': [
+ (r'(.{66})(.*)(\n)',
+ bygroups(_lex_fortran, Comment, Text), 'root'),
+ (r'(.*)(\n)', bygroups(_lex_fortran, Text), 'root'),
+ (r'', Text, 'root')]
+ }
diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py
index aed8438e..7dcfbb4b 100644
--- a/pygments/lexers/javascript.py
+++ b/pygments/lexers/javascript.py
@@ -36,7 +36,7 @@ class JavascriptLexer(RegexLexer):
name = 'JavaScript'
aliases = ['js', 'javascript']
- filenames = ['*.js', ]
+ filenames = ['*.js', '*.jsm', ]
mimetypes = ['application/javascript', 'application/x-javascript',
'text/x-javascript', 'text/javascript', ]
diff --git a/pygments/lexers/julia.py b/pygments/lexers/julia.py
index 1b7d543a..1304b395 100644
--- a/pygments/lexers/julia.py
+++ b/pygments/lexers/julia.py
@@ -30,6 +30,8 @@ class JuliaLexer(RegexLexer):
filenames = ['*.jl']
mimetypes = ['text/x-julia', 'application/x-julia']
+ flags = re.MULTILINE | re.UNICODE
+
builtins = [
'exit', 'whos', 'edit', 'load', 'is', 'isa', 'isequal', 'typeof', 'tuple',
'ntuple', 'uid', 'hash', 'finalizer', 'convert', 'promote', 'subtype',
@@ -89,7 +91,7 @@ class JuliaLexer(RegexLexer):
# names
(r'@[\w.]+', Name.Decorator),
- (r'[a-zA-Z_]\w*', Name),
+ (u'[a-zA-Z_\u00A1-\U0010FFFF][a-zA-Z_0-9\u00A1-\U0010FFFF]*!*', Name),
# numbers
(r'(\d+(_\d+)+\.\d*|\d*\.\d+(_\d+)+)([eEf][+-]?[0-9]+)?', Number.Float),
diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py
index 6b302c7e..4d3c9159 100644
--- a/pygments/lexers/jvm.py
+++ b/pygments/lexers/jvm.py
@@ -457,7 +457,7 @@ class GroovyLexer(RegexLexer):
name = 'Groovy'
aliases = ['groovy']
- filenames = ['*.groovy']
+ filenames = ['*.groovy','*.gradle']
mimetypes = ['text/x-groovy']
flags = re.MULTILINE | re.DOTALL
diff --git a/pygments/lexers/modeling.py b/pygments/lexers/modeling.py
index 43194436..ec99543f 100644
--- a/pygments/lexers/modeling.py
+++ b/pygments/lexers/modeling.py
@@ -284,8 +284,8 @@ class StanLexer(RegexLexer):
"""Pygments Lexer for Stan models.
The Stan modeling language is specified in the *Stan Modeling Language
- User's Guide and Reference Manual, v2.5.0*,
- `pdf <https://github.com/stan-dev/stan/releases/download/v2.5.0/stan-reference-2.5.0.pdf>`__.
+ User's Guide and Reference Manual, v2.7.0*,
+ `pdf <https://github.com/stan-dev/stan/releases/download/v2.7.0/stan-reference-2.7.0.pdf>`__.
.. versionadded:: 1.6
"""
diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py
index 49c15b6d..ea97b855 100644
--- a/pygments/lexers/python.py
+++ b/pygments/lexers/python.py
@@ -215,10 +215,10 @@ class Python3Lexer(RegexLexer):
tokens = PythonLexer.tokens.copy()
tokens['keywords'] = [
(words((
- 'assert', 'break', 'continue', 'del', 'elif', 'else', 'except',
- 'finally', 'for', 'global', 'if', 'lambda', 'pass', 'raise',
- 'nonlocal', 'return', 'try', 'while', 'yield', 'yield from', 'as',
- 'with'), suffix=r'\b'),
+ 'assert', 'async', 'await', 'break', 'continue', 'del', 'elif',
+ 'else', 'except', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
+ 'raise', 'nonlocal', 'return', 'try', 'while', 'yield', 'yield from',
+ 'as', 'with'), suffix=r'\b'),
Keyword),
(words((
'True', 'False', 'None'), suffix=r'\b'),
@@ -272,6 +272,7 @@ class Python3Lexer(RegexLexer):
tokens['backtick'] = []
tokens['name'] = [
(r'@\w+', Name.Decorator),
+ (r'@', Operator), # new matrix multiplication operator
(uni_name, Name),
]
tokens['funcname'] = [
diff --git a/pygments/lexers/robotframework.py b/pygments/lexers/robotframework.py
index 56996fa8..eab06efe 100644
--- a/pygments/lexers/robotframework.py
+++ b/pygments/lexers/robotframework.py
@@ -87,7 +87,7 @@ class RobotFrameworkLexer(Lexer):
class VariableTokenizer(object):
def tokenize(self, string, token):
- var = VariableSplitter(string, identifiers='$@%')
+ var = VariableSplitter(string, identifiers='$@%&')
if var.start < 0 or token in (COMMENT, ERROR):
yield string, token
return
@@ -205,7 +205,7 @@ class Tokenizer(object):
def _is_assign(self, value):
if value.endswith('='):
value = value[:-1].strip()
- var = VariableSplitter(value, identifiers='$@')
+ var = VariableSplitter(value, identifiers='$@&')
return var.start == 0 and var.end == len(value)
@@ -262,7 +262,7 @@ class TestCaseSetting(Setting):
class KeywordSetting(TestCaseSetting):
_keyword_settings = ('teardown',)
- _other_settings = ('documentation', 'arguments', 'return', 'timeout')
+ _other_settings = ('documentation', 'arguments', 'return', 'timeout', 'tags')
class Variable(Tokenizer):
@@ -465,13 +465,13 @@ class VariableSplitter:
self.identifier = self._variable_chars[0]
self.base = ''.join(self._variable_chars[2:-1])
self.end = self.start + len(self._variable_chars)
- if self._has_list_variable_index():
- self.index = ''.join(self._list_variable_index_chars[1:-1])
- self.end += len(self._list_variable_index_chars)
+ if self._has_list_or_dict_variable_index():
+ self.index = ''.join(self._list_and_dict_variable_index_chars[1:-1])
+ self.end += len(self._list_and_dict_variable_index_chars)
- def _has_list_variable_index(self):
- return self._list_variable_index_chars\
- and self._list_variable_index_chars[-1] == ']'
+ def _has_list_or_dict_variable_index(self):
+ return self._list_and_dict_variable_index_chars\
+ and self._list_and_dict_variable_index_chars[-1] == ']'
def _split(self, string):
start_index, max_index = self._find_variable(string)
@@ -479,7 +479,7 @@ class VariableSplitter:
self._open_curly = 1
self._state = self._variable_state
self._variable_chars = [string[start_index], '{']
- self._list_variable_index_chars = []
+ self._list_and_dict_variable_index_chars = []
self._string = string
start_index += 2
for index, char in enumerate(string[start_index:]):
@@ -530,14 +530,14 @@ class VariableSplitter:
if char == '}' and not self._is_escaped(self._string, index):
self._open_curly -= 1
if self._open_curly == 0:
- if not self._is_list_variable():
+ if not self._is_list_or_dict_variable():
raise StopIteration
self._state = self._waiting_list_variable_index_state
elif char in self._identifiers:
self._state = self._internal_variable_start_state
- def _is_list_variable(self):
- return self._variable_chars[0] == '@'
+ def _is_list_or_dict_variable(self):
+ return self._variable_chars[0] in ('@','&')
def _internal_variable_start_state(self, char, index):
self._state = self._variable_state
@@ -551,10 +551,10 @@ class VariableSplitter:
def _waiting_list_variable_index_state(self, char, index):
if char != '[':
raise StopIteration
- self._list_variable_index_chars.append(char)
+ self._list_and_dict_variable_index_chars.append(char)
self._state = self._list_variable_index_state
def _list_variable_index_state(self, char, index):
- self._list_variable_index_chars.append(char)
+ self._list_and_dict_variable_index_chars.append(char)
if char == ']':
raise StopIteration
diff --git a/pygments/lexers/rust.py b/pygments/lexers/rust.py
index 2ca860d6..d8939678 100644
--- a/pygments/lexers/rust.py
+++ b/pygments/lexers/rust.py
@@ -10,7 +10,7 @@
"""
from pygments.lexer import RegexLexer, include, bygroups, words, default
-from pygments.token import Comment, Operator, Keyword, Name, String, \
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
__all__ = ['RustLexer']
@@ -18,33 +18,40 @@ __all__ = ['RustLexer']
class RustLexer(RegexLexer):
"""
- Lexer for the Rust programming language (version 0.9).
+ Lexer for the Rust programming language (version 1.0).
.. versionadded:: 1.6
"""
name = 'Rust'
filenames = ['*.rs']
aliases = ['rust']
- mimetypes = ['text/x-rustsrc']
+ mimetypes = ['text/rust']
tokens = {
'root': [
+ # rust allows a file to start with a shebang, but if the first line
+ # starts with #![ then it’s not a shebang but a crate attribute.
+ (r'#![^[\r\n].*$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
# Whitespace and Comments
(r'\n', Whitespace),
(r'\s+', Whitespace),
- (r'//[/!](.*?)\n', Comment.Doc),
+ (r'//!.*?\n', String.Doc),
+ (r'///(\n|[^/].*?\n)', String.Doc),
(r'//(.*?)\n', Comment.Single),
+ (r'/\*\*(\n|[^/*])', String.Doc, 'doccomment'),
+ (r'/\*!', String.Doc, 'doccomment'),
(r'/\*', Comment.Multiline, 'comment'),
- # Lifetime
- (r"""'[a-zA-Z_]\w*""", Name.Label),
# Macro parameters
(r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
# Keywords
(words((
- 'as', 'box', 'break', 'continue', 'do', 'else', 'enum', 'extern',
+ 'as', 'box', 'crate', 'do', 'else', 'enum', 'extern', # break and continue are in labels
'fn', 'for', 'if', 'impl', 'in', 'loop', 'match', 'mut', 'priv',
- 'proc', 'pub', 'ref', 'return', 'static', '\'static', 'struct',
+ 'proc', 'pub', 'ref', 'return', 'static', 'struct',
'trait', 'true', 'type', 'unsafe', 'while'), suffix=r'\b'),
Keyword),
(words(('alignof', 'be', 'const', 'offsetof', 'pure', 'sizeof',
@@ -53,44 +60,45 @@ class RustLexer(RegexLexer):
(r'(mod|use)\b', Keyword.Namespace),
(r'(true|false)\b', Keyword.Constant),
(r'let\b', Keyword.Declaration),
- (words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64', 'uint',
- 'int', 'f32', 'f64', 'str', 'bool'), suffix=r'\b'),
+ (words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64', 'usize',
+ 'isize', 'f32', 'f64', 'str', 'bool'), suffix=r'\b'),
Keyword.Type),
(r'self\b', Name.Builtin.Pseudo),
- # Prelude
+ # Prelude (taken from Rust’s src/libstd/prelude.rs)
(words((
- 'Freeze', 'Pod', 'Send', 'Sized', 'Add', 'Sub', 'Mul', 'Div', 'Rem', 'Neg', 'Not', 'BitAnd',
- 'BitOr', 'BitXor', 'Drop', 'Shl', 'Shr', 'Index', 'Option', 'Some', 'None', 'Result',
- 'Ok', 'Err', 'from_str', 'range', 'print', 'println', 'Any', 'AnyOwnExt', 'AnyRefExt',
- 'AnyMutRefExt', 'Ascii', 'AsciiCast', 'OnwedAsciiCast', 'AsciiStr',
- 'IntoBytes', 'Bool', 'ToCStr', 'Char', 'Clone', 'DeepClone', 'Eq', 'ApproxEq',
- 'Ord', 'TotalEq', 'Ordering', 'Less', 'Equal', 'Greater', 'Equiv', 'Container',
- 'Mutable', 'Map', 'MutableMap', 'Set', 'MutableSet', 'Default', 'FromStr',
- 'Hash', 'FromIterator', 'Extendable', 'Iterator', 'DoubleEndedIterator',
- 'RandomAccessIterator', 'CloneableIterator', 'OrdIterator',
- 'MutableDoubleEndedIterator', 'ExactSize', 'Times', 'Algebraic',
- 'Trigonometric', 'Exponential', 'Hyperbolic', 'Bitwise', 'BitCount',
- 'Bounded', 'Integer', 'Fractional', 'Real', 'RealExt', 'Num', 'NumCast',
- 'CheckedAdd', 'CheckedSub', 'CheckedMul', 'Orderable', 'Signed',
- 'Unsigned', 'Round', 'Primitive', 'Int', 'Float', 'ToStrRadix',
- 'ToPrimitive', 'FromPrimitive', 'GenericPath', 'Path', 'PosixPath',
- 'WindowsPath', 'RawPtr', 'Buffer', 'Writer', 'Reader', 'Seek',
- 'SendStr', 'SendStrOwned', 'SendStrStatic', 'IntoSendStr', 'Str',
- 'StrVector', 'StrSlice', 'OwnedStr', 'IterBytes', 'ToStr', 'IntoStr',
- 'CopyableTuple', 'ImmutableTuple', 'ImmutableEqVector', 'ImmutableTotalOrdVector',
- 'ImmutableCopyableVector', 'OwnedVector', 'OwnedCopyableVector',
- 'OwnedEqVector', 'MutableVector', 'MutableTotalOrdVector',
- 'Vector', 'VectorVector', 'CopyableVector', 'ImmutableVector',
- 'Port', 'Chan', 'SharedChan', 'spawn', 'drop'), suffix=r'\b'),
+ # Reexported core operators
+ 'Copy', 'Send', 'Sized', 'Sync',
+ 'Drop', 'Fn', 'FnMut', 'FnOnce',
+
+ # Reexported functions
+ 'drop',
+
+ # Reexported types and traits
+ 'Box',
+ 'ToOwned',
+ 'Clone',
+ 'PartialEq', 'PartialOrd', 'Eq', 'Ord',
+ 'AsRef', 'AsMut', 'Into', 'From',
+ 'Default',
+ 'Iterator', 'Extend', 'IntoIterator',
+ 'DoubleEndedIterator', 'ExactSizeIterator',
+ 'Option',
+ 'Some', 'None',
+ 'Result',
+ 'Ok', 'Err',
+ 'SliceConcatExt',
+ 'String', 'ToString',
+ 'Vec',
+ ), suffix=r'\b'),
Name.Builtin),
- (r'(ImmutableTuple\d+|Tuple\d+)\b', Name.Builtin),
- # Borrowed pointer
- (r'(&)(\'[A-Za-z_]\w*)?', bygroups(Operator, Name)),
# Labels
- (r'\'[A-Za-z_]\w*:', Name.Label),
+ (r'(break|continue)(\s*)(\'[A-Za-z_]\w*)?', bygroups(Keyword, Text.Whitespace, Name.Label)),
# Character Literal
- (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
+ (r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
+ r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
+ String.Char),
+ (r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0"""
+ r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
String.Char),
# Binary Literal
(r'0b[01_]+', Number.Bin, 'number_lit'),
@@ -100,11 +108,16 @@ class RustLexer(RegexLexer):
(r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
# Decimal Literal
(r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float, 'number_lit'),
+ r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float, 'number_lit'),
(r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
# String Literal
+ (r'b"', String, 'bytestring'),
(r'"', String, 'string'),
- (r'r(#*)".*?"\1', String.Raw),
+ (r'b?r(#*)".*?"\1', String),
+
+ # Lifetime
+ (r"""'static""", Name.Builtin),
+ (r"""'[a-zA-Z_]\w*""", Name.Attribute),
# Operators and Punctuation
(r'[{}()\[\],.;]', Punctuation),
@@ -129,18 +142,28 @@ class RustLexer(RegexLexer):
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
],
+ 'doccomment': [
+ (r'[^*/]+', String.Doc),
+ (r'/\*', String.Doc, '#push'),
+ (r'\*/', String.Doc, '#pop'),
+ (r'[*/]', String.Doc),
+ ],
'number_lit': [
- (r'[ui](8|16|32|64)', Keyword, '#pop'),
+ (r'[ui](8|16|32|64|size)', Keyword, '#pop'),
(r'f(32|64)', Keyword, '#pop'),
default('#pop'),
],
'string': [
(r'"', String, '#pop'),
- (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
+ (r"""\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
+ r"""|\\u\{[0-9a-fA-F]{1,6}\}""", String.Escape),
(r'[^\\"]+', String),
(r'\\', String),
],
+ 'bytestring': [
+ (r"""\\x[89a-fA-F][0-9a-fA-F]""", String.Escape),
+ include('string'),
+ ],
'macro{': [
(r'\{', Operator, '#push'),
(r'\}', Operator, '#pop'),
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
index a9c1e6b9..1e3640bf 100644
--- a/pygments/lexers/shell.py
+++ b/pygments/lexers/shell.py
@@ -47,7 +47,7 @@ class BashLexer(RegexLexer):
(r'\$\(\(', Keyword, 'math'),
(r'\$\(', Keyword, 'paren'),
(r'\$\{#?', String.Interpol, 'curly'),
- (r'\$#?(\w+|.)', Name.Variable),
+ (r'\$(\w+|.)', Name.Variable),
],
'basic': [
(r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py
index 3cb73059..bfca0d38 100644
--- a/pygments/lexers/templates.py
+++ b/pygments/lexers/templates.py
@@ -369,7 +369,7 @@ class DjangoLexer(RegexLexer):
r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
Keyword),
(r'(loop|block|super|forloop)\b', Name.Builtin),
- (r'[a-zA-Z][\w-]*', Name.Variable),
+ (r'[a-zA-Z_][\w-]*', Name.Variable),
(r'\.\w+', Name.Variable),
(r':?"(\\\\|\\"|[^"])*"', String.Double),
(r":?'(\\\\|\\'|[^'])*'", String.Single),
diff --git a/pygments/lexers/testing.py b/pygments/lexers/testing.py
index 55f4c054..4a91c5b1 100644
--- a/pygments/lexers/testing.py
+++ b/pygments/lexers/testing.py
@@ -27,9 +27,9 @@ class GherkinLexer(RegexLexer):
mimetypes = ['text/x-gherkin']
feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
- feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
+ feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
examples_keywords = u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
- step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
+ step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
tokens = {
'comments': [
diff --git a/pygments/modeline.py b/pygments/modeline.py
index 54df90c4..2200f1cf 100644
--- a/pygments/modeline.py
+++ b/pygments/modeline.py
@@ -13,16 +13,19 @@ import re
__all__ = ['get_filetype_from_buffer']
+
modeline_re = re.compile(r'''
(?: vi | vim | ex ) (?: [<=>]? \d* )? :
.* (?: ft | filetype | syn | syntax ) = ( [^:\s]+ )
''', re.VERBOSE)
+
def get_filetype_from_line(l):
m = modeline_re.search(l)
if m:
return m.group(1)
+
def get_filetype_from_buffer(buf, max_lines=5):
"""
Scan the buffer for modelines and return filetype if one is found.
@@ -32,7 +35,7 @@ def get_filetype_from_buffer(buf, max_lines=5):
ret = get_filetype_from_line(l)
if ret:
return ret
- for l in lines[max_lines:0:-1]:
+ for l in lines[max_lines:-1:-1]:
ret = get_filetype_from_line(l)
if ret:
return ret
diff --git a/tests/examplefiles/99_bottles_of_beer.chpl b/tests/examplefiles/99_bottles_of_beer.chpl
index 47fcaaf6..3629028d 100644
--- a/tests/examplefiles/99_bottles_of_beer.chpl
+++ b/tests/examplefiles/99_bottles_of_beer.chpl
@@ -159,3 +159,16 @@ var wideOpen = 0o777,
clique_y = 0O660,
zeroOct = 0o0,
minPosOct = 0O1;
+
+private module M3 {
+ private proc foo() {
+
+ }
+
+ private iter bar() {
+
+ }
+
+ private var x: int;
+
+} \ No newline at end of file
diff --git a/tests/examplefiles/ahcon.f b/tests/examplefiles/ahcon.f
new file mode 100644
index 00000000..48ae920b
--- /dev/null
+++ b/tests/examplefiles/ahcon.f
@@ -0,0 +1,340 @@
+ SUBROUTINE AHCON (SIZE,N,M,A,B,OLEVR,OLEVI,CLEVR,CLEVI, TRUNCATED
+ & SCR1,SCR2,IPVT,JPVT,CON,WORK,ISEED,IERR) !Test inline comment
+C
+C FUNCTION:
+CF
+CF Determines whether the pair (A,B) is controllable and flags
+CF the eigenvalues corresponding to uncontrollable modes.
+CF this ad-hoc controllability calculation uses a random matrix F
+CF and computes whether eigenvalues move from A to the controlled
+CF system A+B*F.
+CF
+C USAGE:
+CU
+CU CALL AHCON (SIZE,N,M,A,B,OLEVR,OLEVI,CLEVR,CLEVI,SCR1,SCR2,IPVT,
+CU JPVT,CON,WORK,ISEED,IERR)
+CU
+CU since AHCON generates different random F matrices for each
+CU call, as long as iseed is not re-initialized by the main
+CU program, and since this code has the potential to be fooled
+CU by extremely ill-conditioned problems, the cautious user
+CU may wish to call it multiple times and rely, perhaps, on
+CU a 2-of-3 vote. We believe, but have not proved, that any
+CU errors this routine may produce are conservative--i.e., that
+CU it may flag a controllable mode as uncontrollable, but
+CU not vice-versa.
+CU
+C INPUTS:
+CI
+CI SIZE integer - first dimension of all 2-d arrays.
+CI
+CI N integer - number of states.
+CI
+CI M integer - number of inputs.
+CI
+CI A double precision - SIZE by N array containing the
+CI N by N system dynamics matrix A.
+CI
+CI B double precision - SIZE by M array containing the
+CI N by M system input matrix B.
+CI
+CI ISEED initial seed for random number generator; if ISEED=0,
+CI then AHCON will set ISEED to a legal value.
+CI
+C OUTPUTS:
+CO
+CO OLEVR double precision - N dimensional vector containing the
+CO real parts of the eigenvalues of A.
+CO
+CO OLEVI double precision - N dimensional vector containing the
+CO imaginary parts of the eigenvalues of A.
+CO
+CO CLEVR double precision - N dimensional vector work space
+CO containing the real parts of the eigenvalues of A+B*F,
+CO where F is the random matrix.
+CO
+CO CLEVI double precision - N dimensional vector work space
+CO containing the imaginary parts of the eigenvalues of
+CO A+B*F, where F is the random matrix.
+CO
+CO SCR1 double precision - N dimensional vector containing the
+CO magnitudes of the corresponding eigenvalues of A.
+CO
+CO SCR2 double precision - N dimensional vector containing the
+CO damping factors of the corresponding eigenvalues of A.
+CO
+CO IPVT integer - N dimensional vector; contains the row pivots
+CO used in finding the nearest neighbor eigenvalues between
+CO those of A and of A+B*F. The IPVT(1)th eigenvalue of
+CO A and the JPVT(1)th eigenvalue of A+B*F are the closest
+CO pair.
+CO
+CO JPVT integer - N dimensional vector; contains the column
+CO pivots used in finding the nearest neighbor eigenvalues;
+CO see IPVT.
+CO
+CO CON logical - N dimensional vector; flagging the uncontrollable
+CO modes of the system. CON(I)=.TRUE. implies the
+CO eigenvalue of A given by DCMPLX(OLEVR(IPVT(I)),OLEVI(IPVT(i)))
+CO corresponds to a controllable mode; CON(I)=.FALSE.
+CO implies an uncontrollable mode for that eigenvalue.
+CO
+CO WORK double precision - SIZE by N dimensional array containing
+CO an N by N matrix. WORK(I,J) is the distance between
+CO the open loop eigenvalue given by DCMPLX(OLEVR(I),OLEVI(I))
+CO and the closed loop eigenvalue of A+B*F given by
+CO DCMPLX(CLEVR(J),CLEVI(J)).
+CO
+CO IERR integer - IERR=0 indicates normal return; a non-zero
+CO value indicates trouble in the eigenvalue calculation.
+CO see the EISPACK and EIGEN documentation for details.
+CO
+C ALGORITHM:
+CA
+CA Calculate eigenvalues of A and of A+B*F for a randomly
+CA generated F, and see which ones change. Use a full pivot
+CA search through a matrix of euclidean distance measures
+CA between each pair of eigenvalues from (A,A+BF) to
+CA determine the closest pairs.
+CA
+C MACHINE DEPENDENCIES:
+CM
+CM NONE
+CM
+C HISTORY:
+CH
+CH written by: Birdwell & Laub
+CH date: May 18, 1985
+CH current version: 1.0
+CH modifications: made machine independent and modified for
+CH f77:bb:8-86.
+CH changed cmplx -> dcmplx: 7/27/88 jdb
+CH
+C ROUTINES CALLED:
+CC
+CC EIGEN,RAND
+CC
+C COMMON MEMORY USED:
+CM
+CM none
+CM
+C----------------------------------------------------------------------
+C written for: The CASCADE Project
+C Oak Ridge National Laboratory
+C U.S. Department of Energy
+C contract number DE-AC05-840R21400
+C subcontract number 37B-7685 S13
+C organization: The University of Tennessee
+C----------------------------------------------------------------------
+C THIS SOFTWARE IS IN THE PUBLIC DOMAIN
+C NO RESTRICTIONS ON ITS USE ARE IMPLIED
+C----------------------------------------------------------------------
+C
+C--global variables:
+C
+ INTEGER SIZE
+ INTEGER N
+ INTEGER M
+ INTEGER IPVT(1)
+ INTEGER JPVT(1)
+ INTEGER IERR
+C
+ DOUBLE PRECISION A(SIZE,N)
+ DOUBLE PRECISION B(SIZE,M)
+ DOUBLE PRECISION WORK(SIZE,N)
+ DOUBLE PRECISION CLEVR(N)
+ DOUBLE PRECISION CLEVI(N)
+ DOUBLE PRECISION OLEVR(N)
+ DOUBLE PRECISION OLEVI(N)
+ DOUBLE PRECISION SCR1(N)
+ DOUBLE PRECISION SCR2(N)
+C
+ LOGICAL CON(N)
+C
+C--local variables:
+C
+ INTEGER ISEED
+ INTEGER ITEMP
+ INTEGER K1
+ INTEGER K2
+ INTEGER I
+ INTEGER J
+ INTEGER K
+ INTEGER IMAX
+ INTEGER JMAX
+C
+ DOUBLE PRECISION VALUE
+ DOUBLE PRECISION EPS
+ DOUBLE PRECISION EPS1
+ DOUBLE PRECISION TEMP
+ DOUBLE PRECISION CURR
+ DOUBLE PRECISION ANORM
+ DOUBLE PRECISION BNORM
+ DOUBLE PRECISION COLNRM
+ DOUBLE PRECISION RNDMNO
+C
+ DOUBLE COMPLEX DCMPLX
+C
+C--compute machine epsilon
+C
+ EPS = 1.D0
+100 CONTINUE
+ EPS = EPS / 2.D0
+ EPS1 = 1.D0 + EPS
+ IF (EPS1 .NE. 1.D0) GO TO 100
+ EPS = EPS * 2.D0
+C
+C--compute the l-1 norm of a
+C
+ ANORM = 0.0D0
+ DO 120 J = 1, N
+ COLNRM = 0.D0
+ DO 110 I = 1, N
+ COLNRM = COLNRM + ABS(A(I,J))
+110 CONTINUE
+ IF (COLNRM .GT. ANORM) ANORM = COLNRM
+120 CONTINUE
+C
+C--compute the l-1 norm of b
+C
+ BNORM = 0.0D0
+ DO 140 J = 1, M
+ COLNRM = 0.D0
+ DO 130 I = 1, N
+ COLNRM = COLNRM + ABS(B(I,J))
+130 CONTINUE
+ IF (COLNRM .GT. BNORM) BNORM = COLNRM
+140 CONTINUE
+C
+C--compute a + b * f
+C
+ DO 160 J = 1, N
+ DO 150 I = 1, N
+ WORK(I,J) = A(I,J)
+150 CONTINUE
+160 CONTINUE
+C
+C--the elements of f are random with uniform distribution
+C--from -anorm/bnorm to +anorm/bnorm
+C--note that f is not explicitly stored as a matrix
+C--pathalogical floating point notes: the if (bnorm .gt. 0.d0)
+C--test should actually be if (bnorm .gt. dsmall), where dsmall
+C--is the smallest representable number whose reciprocal does
+C--not generate an overflow or loss of precision.
+C
+ IF (ISEED .EQ. 0) ISEED = 86345823
+ IF (ANORM .EQ. 0.D0) ANORM = 1.D0
+ IF (BNORM .GT. 0.D0) THEN
+ TEMP = 2.D0 * ANORM / BNORM
+ ELSE
+ TEMP = 2.D0
+ END IF
+ DO 190 K = 1, M
+ DO 180 J = 1, N
+ CALL RAND(ISEED,ISEED,RNDMNO)
+ VALUE = (RNDMNO - 0.5D0) * TEMP
+ DO 170 I = 1, N
+ WORK(I,J) = WORK(I,J) + B(I,K)*VALUE
+170 CONTINUE
+180 CONTINUE
+190 CONTINUE
+C
+C--compute the eigenvalues of a + b*f, and several other things
+C
+ CALL EIGEN (0,SIZE,N,WORK,CLEVR,CLEVI,WORK,SCR1,SCR2,IERR)
+ IF (IERR .NE. 0) RETURN
+C
+C--copy a so it is not destroyed
+C
+ DO 210 J = 1, N
+ DO 200 I = 1, N
+ WORK(I,J) = A(I,J)
+200 CONTINUE
+210 CONTINUE
+C
+C--compute the eigenvalues of a, and several other things
+C
+ CALL EIGEN (0,SIZE,N,WORK,OLEVR,OLEVI,WORK,SCR1,SCR2,IERR)
+ IF (IERR .NE. 0) RETURN
+C
+C--form the matrix of distances between eigenvalues of a and
+C--EIGENVALUES OF A+B*F
+C
+ DO 230 J = 1, N
+ DO 220 I = 1, N
+ WORK(I,J) =
+ & ABS(DCMPLX(OLEVR(I),OLEVI(I))-DCMPLX(CLEVR(J),CLEVI(J)))
+220 CONTINUE
+230 CONTINUE
+C
+C--initialize row and column pivots
+C
+ DO 240 I = 1, N
+ IPVT(I) = I
+ JPVT(I) = I
+240 CONTINUE
+C
+C--a little bit messy to avoid swapping columns and
+C--rows of work
+C
+ DO 270 I = 1, N-1
+C
+C--find the minimum element of each lower right square
+C--submatrix of work, for submatrices of size n x n
+C--through 2 x 2
+C
+ CURR = WORK(IPVT(I),JPVT(I))
+ IMAX = I
+ JMAX = I
+ TEMP = CURR
+C
+C--find the minimum element
+C
+ DO 260 K1 = I, N
+ DO 250 K2 = I, N
+ IF (WORK(IPVT(K1),JPVT(K2)) .LT. TEMP) THEN
+ TEMP = WORK(IPVT(K1),JPVT(K2))
+ IMAX = K1
+ JMAX = K2
+ END IF
+250 CONTINUE
+260 CONTINUE
+C
+C--update row and column pivots for indirect addressing of work
+C
+ ITEMP = IPVT(I)
+ IPVT(I) = IPVT(IMAX)
+ IPVT(IMAX) = ITEMP
+C
+ ITEMP = JPVT(I)
+ JPVT(I) = JPVT(JMAX)
+ JPVT(JMAX) = ITEMP
+C
+C--do next submatrix
+C
+270 CONTINUE
+C
+C--this threshold for determining when an eigenvalue has
+C--not moved, and is therefore uncontrollable, is critical,
+C--and may require future changes with more experience.
+C
+ EPS1 = SQRT(EPS)
+C
+C--for each eigenvalue pair, decide if it is controllable
+C
+ DO 280 I = 1, N
+C
+C--note that we are working with the "pivoted" work matrix
+C--and are looking at its diagonal elements
+C
+ IF (WORK(IPVT(I),JPVT(I))/ANORM .LE. EPS1) THEN
+ CON(I) = .FALSE.
+ ELSE
+ CON(I) = .TRUE.
+ END IF
+280 CONTINUE
+C
+C--finally!
+C
+ RETURN
+ END
diff --git a/tests/examplefiles/eval.rs b/tests/examplefiles/eval.rs
new file mode 100644
index 00000000..17e585a0
--- /dev/null
+++ b/tests/examplefiles/eval.rs
@@ -0,0 +1,606 @@
+// -------------------------------------------------------------------------------------------------
+// Rick, a Rust intercal compiler. Save your souls!
+//
+// Copyright (c) 2015 Georg Brandl
+//
+// This program is free software; you can redistribute it and/or modify it under the terms of the
+// GNU General Public License as published by the Free Software Foundation; either version 2 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
+// even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License along with this program;
+// if not, write to the Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+// -------------------------------------------------------------------------------------------------
+
+/// Interprets INTERCAL source.
+///
+/// The evaluator is used when rick is called with `-i`, or when the compiler generates
+/// the output while compiling (in the constant-output case).
+
+use std::fmt::{ Debug, Display };
+use std::io::Write;
+use std::u16;
+
+use err::{ Res, IE123, IE129, IE252, IE275, IE555, IE633, IE774, IE994 };
+use ast::{ self, Program, Stmt, StmtBody, ComeFrom, Expr, Var, VType };
+use stdops::{ Bind, Array, write_number, read_number, check_chance, check_ovf, pop_jumps,
+ get_random_seed, mingle, select, and_16, and_32, or_16, or_32, xor_16, xor_32 };
+
+
+/// Represents a value (either 16-bit or 32-bit) at runtime.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub enum Val {
+ I16(u16),
+ I32(u32),
+}
+
+impl Val {
+ /// Cast as a 16-bit value; returns an error if 32-bit and too big.
+ pub fn as_u16(&self) -> Res<u16> {
+ match *self {
+ Val::I16(v) => Ok(v),
+ Val::I32(v) => {
+ if v > (u16::MAX as u32) {
+ return IE275.err();
+ }
+ Ok(v as u16)
+ }
+ }
+ }
+
+ /// Cast as a 32-bit value; always succeeds.
+ pub fn as_u32(&self) -> u32 {
+ match *self {
+ Val::I16(v) => v as u32,
+ Val::I32(v) => v
+ }
+ }
+
+ /// Cast as an usize value; always succeeds.
+ pub fn as_usize(&self) -> usize {
+ self.as_u32() as usize
+ }
+
+ /// Create from a 32-bit value; will select the smallest possible type.
+ pub fn from_u32(v: u32) -> Val {
+ if v & 0xFFFF == v {
+ Val::I16(v as u16)
+ } else {
+ Val::I32(v)
+ }
+ }
+}
+
+/// The state of the interpreter's evaluator.
+pub struct Eval<'a> {
+ /// Program to execute.
+ program: &'a Program,
+ /// Stream to use for printing output.
+ stdout: &'a mut Write,
+ /// Whether to print debugging output during execution.
+ debug: bool,
+ /// Variable bindings for the four types of variables.
+ spot: Vec<Bind<u16>>,
+ twospot: Vec<Bind<u32>>,
+ tail: Vec<Bind<Array<u16>>>,
+ hybrid: Vec<Bind<Array<u32>>>,
+ /// The infamous NEXT stack, capable of holding 80 elements.
+ jumps: Vec<ast::LogLine>,
+ /// Abstain counter for each statement.
+ abstain: Vec<u32>,
+ /// Binary I/O "tape" state.
+ last_in: u8,
+ last_out: u8,
+ /// Random number generator state.
+ rand_st: u32,
+ /// Counts the number of executed statements.
+ stmt_ctr: usize,
+}
+
+/// Represents the control flow effect of an executed statement.
+enum StmtRes {
+ /// normal execution, next statement
+ Next,
+ /// jump around, from DO ... NEXT
+ Jump(usize),
+ /// jump back, from RESUME
+ Back(usize),
+ /// start from the first statement, from TRY AGAIN
+ FromTop,
+ /// end the program, from GIVE UP
+ End,
+}
+
+impl<'a> Eval<'a> {
+ /// Construct a new evaluator.
+ pub fn new(program: &'a Program, stdout: &'a mut Write, debug: bool,
+ random: bool) -> Eval<'a> {
+ let abs = program.stmts.iter().map(|stmt| stmt.props.disabled as u32).collect();
+ let nvars = (program.var_info.0.len(),
+ program.var_info.1.len(),
+ program.var_info.2.len(),
+ program.var_info.3.len());
+ Eval {
+ program: program,
+ stdout: stdout,
+ debug: debug,
+ spot: vec![Bind::new(0); nvars.0],
+ twospot: vec![Bind::new(0); nvars.1],
+ tail: vec![Bind::new(Array::empty()); nvars.2],
+ hybrid: vec![Bind::new(Array::empty()); nvars.3],
+ jumps: Vec::with_capacity(80),
+ rand_st: if random { get_random_seed() } else { 0 },
+ abstain: abs,
+ last_in: 0,
+ last_out: 0,
+ stmt_ctr: 0,
+ }
+ }
+
+ /// Interpret the program. Returns either the number of executed statements,
+ /// or an error (RtError).
+ pub fn eval(&mut self) -> Res<usize> {
+ let mut pctr = 0; // index of current statement
+ let program = self.program.clone();
+ let nstmts = program.stmts.len();
+ loop {
+ // check for falling off the end
+ if pctr >= nstmts {
+ // if the last statement was a TRY AGAIN, falling off the end is fine
+ if let StmtBody::TryAgain = program.stmts[program.stmts.len() - 1].body {
+ break;
+ }
+ return IE633.err();
+ }
+ self.stmt_ctr += 1;
+ let stmt = &program.stmts[pctr];
+ // execute statement if not abstained
+ if self.abstain[pctr] == 0 {
+ // check execution chance
+ let (passed, rand_st) = check_chance(stmt.props.chance, self.rand_st);
+ self.rand_st = rand_st;
+ if passed {
+ // try to eval this statement
+ let res = match self.eval_stmt(stmt) {
+ // on error, set the correct line number and bubble up
+ Err(mut err) => {
+ err.set_line(stmt.props.onthewayto);
+ // special treatment for NEXT
+ if let StmtBody::DoNext(n) = stmt.body {
+ if let Some(i) = program.labels.get(&n) {
+ err.set_line(program.stmts[*i as usize].props.srcline);
+ }
+ }
+ return Err(err);
+ }
+ Ok(res) => res
+ };
+ // handle control flow effects
+ match res {
+ StmtRes::Next => { }
+ StmtRes::Jump(n) => {
+ self.jumps.push(pctr as u16); // push the line with the NEXT
+ pctr = n;
+ continue; // do not increment or check for COME FROMs
+ }
+ StmtRes::Back(n) => {
+ pctr = n; // will be incremented below after COME FROM check
+ }
+ StmtRes::FromTop => {
+ pctr = 0; // start from the beginning, do not push any stack
+ continue;
+ }
+ StmtRes::End => break,
+ }
+ }
+ }
+ // if we are on the line with the compiler bug, error out
+ if pctr == self.program.bugline as usize {
+ return IE774.err_with(None, stmt.props.onthewayto);
+ }
+ // try to determine if we have to go to a COME FROM statement
+ // (note: in general, program.stmts[pctr] != stmt)
+ //
+ // the static COME FROM is always a possibility
+ let mut maybe_next = program.stmts[pctr].comefrom;
+ // the complicated case: evaluate all computed-come-from expressions
+ let my_label = program.stmts[pctr].props.label;
+ if program.uses_complex_comefrom && my_label > 0 {
+ for (i, stmt) in program.stmts.iter().enumerate() {
+ if let StmtBody::ComeFrom(ComeFrom::Expr(ref e)) = stmt.body {
+ let v = try!(try!(self.eval_expr(e)).as_u16());
+ if v == my_label {
+ // as soon as we have multiple candidates, we can bail out
+ if maybe_next.is_some() {
+ return IE555.err();
+ }
+ maybe_next = Some(i as u16);
+ }
+ }
+ }
+ }
+ // check for COME FROMs from this line
+ if let Some(next) = maybe_next {
+ let next = next as usize;
+ // check for abstained COME FROM
+ if self.abstain[next] == 0 {
+ // the COME FROM can also have a % chance
+ let (passed, rand_st) = check_chance(program.stmts[next].props.chance,
+ self.rand_st);
+ self.rand_st = rand_st;
+ if passed {
+ pctr = next;
+ continue;
+ }
+ }
+ }
+ // no COME FROM, normal execution
+ pctr += 1;
+ }
+ Ok(self.stmt_ctr)
+ }
+
+ /// Interpret a single statement.
+ fn eval_stmt(&mut self, stmt: &Stmt) -> Res<StmtRes> {
+ if self.debug {
+ println!("\nExecuting Stmt #{} (state before following)", self.stmt_ctr);
+ self.dump_state();
+ println!("{}", stmt);
+ }
+ match stmt.body {
+ StmtBody::Calc(ref var, ref expr) => {
+ let val = try!(self.eval_expr(expr));
+ try!(self.assign(var, val));
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Dim(ref var, ref exprs) => {
+ try!(self.array_dim(var, exprs));
+ Ok(StmtRes::Next)
+ }
+ StmtBody::DoNext(n) => {
+ match self.program.labels.get(&n) {
+ // too many jumps on stack already?
+ Some(_) if self.jumps.len() >= 80 => IE123.err(),
+ Some(i) => Ok(StmtRes::Jump(*i as usize)),
+ None => IE129.err(),
+ }
+ }
+ StmtBody::ComeFrom(_) => {
+ // nothing to do here at runtime
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Resume(ref expr) => {
+ let n = try!(self.eval_expr(expr)).as_u32();
+ // this expect() is safe: if the third arg is true, there will
+ // be no Ok(None) returns
+ let next = try!(pop_jumps(&mut self.jumps, n, true, 0))
+ .expect("https://xkcd.com/378/ ?!");
+ Ok(StmtRes::Back(next as usize))
+ }
+ StmtBody::Forget(ref expr) => {
+ let n = try!(self.eval_expr(expr)).as_u32();
+ try!(pop_jumps(&mut self.jumps, n, false, 0));
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Ignore(ref vars) => {
+ for var in vars {
+ self.set_rw(var, false);
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Remember(ref vars) => {
+ for var in vars {
+ self.set_rw(var, true);
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Stash(ref vars) => {
+ for var in vars {
+ self.stash(var);
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Retrieve(ref vars) => {
+ for var in vars {
+ try!(self.retrieve(var));
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Abstain(ref expr, ref whats) => {
+ let f: Box<Fn(u32) -> u32> = if let Some(ref e) = *expr {
+ let n = try!(self.eval_expr(e)).as_u32();
+ box move |v: u32| v.saturating_add(n)
+ } else {
+ box |_| 1
+ };
+ for what in whats {
+ self.abstain(what, &*f);
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Reinstate(ref whats) => {
+ for what in whats {
+ self.abstain(what, &|v: u32| v.saturating_sub(1));
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::ReadOut(ref vars) => {
+ for var in vars {
+ match *var {
+ // read out whole array
+ Expr::Var(ref var) if var.is_dim() => {
+ try!(self.array_readout(var));
+ }
+ // read out single var or array element
+ Expr::Var(ref var) => {
+ let varval = try!(self.lookup(var));
+ try!(write_number(self.stdout, varval.as_u32(), 0));
+ }
+ // read out constant
+ Expr::Num(_, v) => try!(write_number(self.stdout, v, 0)),
+ // others will not be generated
+ _ => return IE994.err(),
+ };
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::WriteIn(ref vars) => {
+ for var in vars {
+ if var.is_dim() {
+ // write in whole array
+ try!(self.array_writein(var));
+ } else {
+ // write in single var or array element
+ let n = try!(read_number(0));
+ try!(self.assign(var, Val::from_u32(n)));
+ }
+ }
+ Ok(StmtRes::Next)
+ }
+ // this one is only generated by the constant-program optimizer
+ StmtBody::Print(ref s) => {
+ if let Err(_) = self.stdout.write(&s) {
+ return IE252.err();
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::TryAgain => Ok(StmtRes::FromTop),
+ StmtBody::GiveUp => Ok(StmtRes::End),
+ StmtBody::Error(ref e) => Err((*e).clone()),
+ }
+ }
+
+ /// Evaluate an expression to a value.
+ fn eval_expr(&self, expr: &Expr) -> Res<Val> {
+ match *expr {
+ Expr::Num(vtype, v) => match vtype {
+ VType::I16 => Ok(Val::I16(v as u16)),
+ VType::I32 => Ok(Val::I32(v)),
+ },
+ Expr::Var(ref var) => self.lookup(var),
+ Expr::Mingle(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx)).as_u32();
+ let w = try!(self.eval_expr(wx)).as_u32();
+ let v = try!(check_ovf(v, 0));
+ let w = try!(check_ovf(w, 0));
+ Ok(Val::I32(mingle(v, w)))
+ }
+ Expr::Select(vtype, ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ if vtype == VType::I16 {
+ Ok(Val::I16(select(v.as_u32(), try!(w.as_u16()) as u32) as u16))
+ } else {
+ Ok(Val::I32(select(v.as_u32(), w.as_u32())))
+ }
+ }
+ Expr::And(vtype, ref vx) => {
+ let v = try!(self.eval_expr(vx));
+ match vtype {
+ VType::I16 => Ok(Val::I16(and_16(try!(v.as_u16()) as u32) as u16)),
+ VType::I32 => Ok(Val::I32(and_32(v.as_u32()))),
+ }
+ }
+ Expr::Or(vtype, ref vx) => {
+ let v = try!(self.eval_expr(vx));
+ match vtype {
+ VType::I16 => Ok(Val::I16(or_16(try!(v.as_u16()) as u32) as u16)),
+ VType::I32 => Ok(Val::I32(or_32(v.as_u32()))),
+ }
+ }
+ Expr::Xor(vtype, ref vx) => {
+ let v = try!(self.eval_expr(vx));
+ match vtype {
+ VType::I16 => Ok(Val::I16(xor_16(try!(v.as_u16()) as u32) as u16)),
+ VType::I32 => Ok(Val::I32(xor_32(v.as_u32()))),
+ }
+ }
+ Expr::RsNot(ref vx) => {
+ let v = try!(self.eval_expr(vx));
+ Ok(Val::I32(!v.as_u32()))
+ }
+ Expr::RsAnd(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() & w.as_u32()))
+ }
+ Expr::RsOr(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() | w.as_u32()))
+ }
+ Expr::RsXor(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() ^ w.as_u32()))
+ }
+ Expr::RsRshift(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() >> w.as_u32()))
+ }
+ Expr::RsLshift(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() << w.as_u32()))
+ }
+ // Expr::RsEqual(ref vx, ref wx) => {
+ // let v = try!(self.eval_expr(vx));
+ // let w = try!(self.eval_expr(wx));
+ // Ok(Val::I32((v.as_u32() == w.as_u32()) as u32))
+ // }
+ Expr::RsNotEqual(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32((v.as_u32() != w.as_u32()) as u32))
+ }
+ Expr::RsPlus(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() + w.as_u32()))
+ }
+ Expr::RsMinus(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() - w.as_u32()))
+ }
+ }
+ }
+
+ #[inline]
+ fn eval_subs(&self, subs: &Vec<Expr>) -> Res<Vec<usize>> {
+ subs.iter().map(|v| self.eval_expr(v).map(|w| w.as_usize())).collect()
+ }
+
+ /// Dimension an array.
+ fn array_dim(&mut self, var: &Var, dims: &Vec<Expr>) -> Res<()> {
+ let dims = try!(self.eval_subs(dims));
+ match *var {
+ Var::A16(n, _) => self.tail[n].dimension(dims, 0),
+ Var::A32(n, _) => self.hybrid[n].dimension(dims, 0),
+ _ => return IE994.err(),
+ }
+ }
+
+ /// Assign to a variable.
+ fn assign(&mut self, var: &Var, val: Val) -> Res<()> {
+ match *var {
+ Var::I16(n) => Ok(self.spot[n].assign(try!(val.as_u16()))),
+ Var::I32(n) => Ok(self.twospot[n].assign(val.as_u32())),
+ Var::A16(n, ref subs) => {
+ let subs = try!(self.eval_subs(subs));
+ self.tail[n].set_md(subs, try!(val.as_u16()), 0)
+ }
+ Var::A32(n, ref subs) => {
+ let subs = try!(self.eval_subs(subs));
+ self.hybrid[n].set_md(subs, val.as_u32(), 0)
+ }
+ }
+ }
+
+ /// Look up the value of a variable.
+ fn lookup(&self, var: &Var) -> Res<Val> {
+ match *var {
+ Var::I16(n) => Ok(Val::I16(self.spot[n].val)),
+ Var::I32(n) => Ok(Val::I32(self.twospot[n].val)),
+ Var::A16(n, ref subs) => {
+ let subs = try!(self.eval_subs(subs));
+ self.tail[n].get_md(subs, 0).map(Val::I16)
+ }
+ Var::A32(n, ref subs) => {
+ let subs = try!(self.eval_subs(subs));
+ self.hybrid[n].get_md(subs, 0).map(Val::I32)
+ }
+ }
+ }
+
+ /// Process a STASH statement.
+ fn stash(&mut self, var: &Var) {
+ match *var {
+ Var::I16(n) => self.spot[n].stash(),
+ Var::I32(n) => self.twospot[n].stash(),
+ Var::A16(n, _) => self.tail[n].stash(),
+ Var::A32(n, _) => self.hybrid[n].stash(),
+ }
+ }
+
+ /// Process a RETRIEVE statement.
+ fn retrieve(&mut self, var: &Var) -> Res<()> {
+ match *var {
+ Var::I16(n) => self.spot[n].retrieve(0),
+ Var::I32(n) => self.twospot[n].retrieve(0),
+ Var::A16(n, _) => self.tail[n].retrieve(0),
+ Var::A32(n, _) => self.hybrid[n].retrieve(0),
+ }
+ }
+
+ /// Process an IGNORE or REMEMBER statement. Cannot fail.
+ fn set_rw(&mut self, var: &Var, rw: bool) {
+ match *var {
+ Var::I16(n) => self.spot[n].rw = rw,
+ Var::I32(n) => self.twospot[n].rw = rw,
+ Var::A16(n, _) => self.tail[n].rw = rw,
+ Var::A32(n, _) => self.hybrid[n].rw = rw,
+ }
+ }
+
+    /// Process an ABSTAIN or REINSTATE statement. Cannot fail.
+ fn abstain(&mut self, what: &ast::Abstain, f: &Fn(u32) -> u32) {
+ if let &ast::Abstain::Label(lbl) = what {
+ let idx = self.program.labels[&lbl] as usize;
+ if self.program.stmts[idx].body != StmtBody::GiveUp {
+ self.abstain[idx] = f(self.abstain[idx]);
+ }
+ } else {
+ for (i, stype) in self.program.stmt_types.iter().enumerate() {
+ if stype == what {
+ self.abstain[i] = f(self.abstain[i]);
+ }
+ }
+ }
+ }
+
+ /// Array readout helper.
+ fn array_readout(&mut self, var: &Var) -> Res<()> {
+ let state = &mut self.last_out;
+ match *var {
+ Var::A16(n, _) => self.tail[n].readout(self.stdout, state, 0),
+ Var::A32(n, _) => self.hybrid[n].readout(self.stdout, state, 0),
+ _ => return IE994.err(),
+ }
+ }
+
+ /// Array writein helper.
+ fn array_writein(&mut self, var: &Var) -> Res<()> {
+ let state = &mut self.last_in;
+ match *var {
+ Var::A16(n, _) => self.tail[n].writein(state, 0),
+ Var::A32(n, _) => self.hybrid[n].writein(state, 0),
+ _ => return IE994.err(),
+ }
+ }
+
+ /// Debug helpers.
+ fn dump_state(&self) {
+ self.dump_state_one(&self.spot, ".");
+ self.dump_state_one(&self.twospot, ":");
+ self.dump_state_one(&self.tail, ",");
+ self.dump_state_one(&self.hybrid, ";");
+ if self.jumps.len() > 0 {
+ println!("Next stack: {:?}", self.jumps);
+ }
+ //println!("Abstained: {:?}", self.abstain);
+ }
+
+ fn dump_state_one<T: Debug + Display>(&self, vec: &Vec<Bind<T>>, sigil: &str) {
+ if vec.len() > 0 {
+ for (i, v) in vec.iter().enumerate() {
+ print!("{}{} = {}, ", sigil, i, v);
+ }
+ println!("");
+ }
+ }
+}
diff --git a/tests/examplefiles/robotframework_test.txt b/tests/examplefiles/robotframework_test.txt
index 63ba63e6..0d8179c0 100644
--- a/tests/examplefiles/robotframework_test.txt
+++ b/tests/examplefiles/robotframework_test.txt
@@ -6,6 +6,7 @@ Test Setup Keyword argument argument with ${VARIABLE}
*** Variables ***
${VARIABLE} Variable value
@{LIST} List variable here
+&{DICT} Key1=Value1 Key2=Value2
*** Test Cases ***
Keyword-driven example
diff --git a/tests/examplefiles/rust_example.rs b/tests/examplefiles/rust_example.rs
deleted file mode 100644
index 8c44af1d..00000000
--- a/tests/examplefiles/rust_example.rs
+++ /dev/null
@@ -1,235 +0,0 @@
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// based on:
-// http://shootout.alioth.debian.org/u32/benchmark.php?test=nbody&lang=java
-
-/* nest some /* comments */ */
-
-extern mod std;
-
-use core::os;
-
-// Using sqrt from the standard library is way slower than using libc
-// directly even though std just calls libc, I guess it must be
-// because of the indirection through another dynamic linker
-// stub. Kind of shocking. Might be able to make it faster still with
-// an llvm intrinsic.
-#[nolink]
-extern mod libc {
- #![legacy_exports];
- fn sqrt(n: float) -> float;
-}
-
-fn main() {
- let args = os::args();
- let args = if os::getenv(~"RUST_BENCH").is_some() {
- ~[~"", ~"4000000"]
- } else if args.len() <= 1u {
- ~[~"", ~"100000"]
- } else {
- args
- };
- let n = int::from_str(args[1]).get();
- let mut bodies: ~[Body::props] = NBodySystem::make();
- io::println(fmt!("%f", NBodySystem::energy(bodies)));
- let mut i = 0;
- while i < n {
- NBodySystem::advance(bodies, 0.01);
- i += 1;
- }
- io::println(fmt!("%f", NBodySystem::energy(bodies)));
-}
-
-mod NBodySystem {
- use Body;
-
- pub fn make() -> ~[Body::props] {
- let mut bodies: ~[Body::props] =
- ~[Body::sun(),
- Body::jupiter(),
- Body::saturn(),
- Body::uranus(),
- Body::neptune()];
-
- let mut px = 0.0;
- let mut py = 0.0;
- let mut pz = 0.0;
-
- let mut i = 0;
- while i < 5 {
- px += bodies[i].vx * bodies[i].mass;
- py += bodies[i].vy * bodies[i].mass;
- pz += bodies[i].vz * bodies[i].mass;
-
- i += 1;
- }
-
- // side-effecting
- Body::offset_momentum(&mut bodies[0], px, py, pz);
-
- return bodies;
- }
-
- pub fn advance(bodies: &mut [Body::props], dt: float) {
- let mut i = 0;
- while i < 5 {
- let mut j = i + 1;
- while j < 5 {
- advance_one(&mut bodies[i],
- &mut bodies[j], dt);
- j += 1;
- }
-
- i += 1;
- }
-
- i = 0;
- while i < 5 {
- move_(&mut bodies[i], dt);
- i += 1;
- }
- }
-
- pub fn advance_one(bi: &mut Body::props,
- bj: &mut Body::props,
- dt: float) unsafe {
- let dx = bi.x - bj.x;
- let dy = bi.y - bj.y;
- let dz = bi.z - bj.z;
-
- let dSquared = dx * dx + dy * dy + dz * dz;
-
- let distance = ::libc::sqrt(dSquared);
- let mag = dt / (dSquared * distance);
-
- bi.vx -= dx * bj.mass * mag;
- bi.vy -= dy * bj.mass * mag;
- bi.vz -= dz * bj.mass * mag;
-
- bj.vx += dx * bi.mass * mag;
- bj.vy += dy * bi.mass * mag;
- bj.vz += dz * bi.mass * mag;
- }
-
- pub fn move_(b: &mut Body::props, dt: float) {
- b.x += dt * b.vx;
- b.y += dt * b.vy;
- b.z += dt * b.vz;
- }
-
- pub fn energy(bodies: &[Body::props]) -> float unsafe {
- let mut dx;
- let mut dy;
- let mut dz;
- let mut distance;
- let mut e = 0.0;
-
- let mut i = 0;
- while i < 5 {
- e +=
- 0.5 * bodies[i].mass *
- (bodies[i].vx * bodies[i].vx + bodies[i].vy * bodies[i].vy
- + bodies[i].vz * bodies[i].vz);
-
- let mut j = i + 1;
- while j < 5 {
- dx = bodies[i].x - bodies[j].x;
- dy = bodies[i].y - bodies[j].y;
- dz = bodies[i].z - bodies[j].z;
-
- distance = ::libc::sqrt(dx * dx + dy * dy + dz * dz);
- e -= bodies[i].mass * bodies[j].mass / distance;
-
- j += 1;
- }
-
- i += 1;
- }
- return e;
-
- }
-}
-
-mod Body {
- use Body;
-
- pub const PI: float = 3.141592653589793;
- pub const SOLAR_MASS: float = 39.478417604357432;
- // was 4 * PI * PI originally
- pub const DAYS_PER_YEAR: float = 365.24;
-
- pub type props =
- {mut x: float,
- mut y: float,
- mut z: float,
- mut vx: float,
- mut vy: float,
- mut vz: float,
- mass: float};
-
- pub fn jupiter() -> Body::props {
- return {mut x: 4.84143144246472090e+00,
- mut y: -1.16032004402742839e+00,
- mut z: -1.03622044471123109e-01,
- mut vx: 1.66007664274403694e-03 * DAYS_PER_YEAR,
- mut vy: 7.69901118419740425e-03 * DAYS_PER_YEAR,
- mut vz: -6.90460016972063023e-05 * DAYS_PER_YEAR,
- mass: 9.54791938424326609e-04 * SOLAR_MASS};
- }
-
- pub fn saturn() -> Body::props {
- return {mut x: 8.34336671824457987e+00,
- mut y: 4.12479856412430479e+00,
- mut z: -4.03523417114321381e-01,
- mut vx: -2.76742510726862411e-03 * DAYS_PER_YEAR,
- mut vy: 4.99852801234917238e-03 * DAYS_PER_YEAR,
- mut vz: 2.30417297573763929e-05 * DAYS_PER_YEAR,
- mass: 2.85885980666130812e-04 * SOLAR_MASS};
- }
-
- pub fn uranus() -> Body::props {
- return {mut x: 1.28943695621391310e+01,
- mut y: -1.51111514016986312e+01,
- mut z: -2.23307578892655734e-01,
- mut vx: 2.96460137564761618e-03 * DAYS_PER_YEAR,
- mut vy: 2.37847173959480950e-03 * DAYS_PER_YEAR,
- mut vz: -2.96589568540237556e-05 * DAYS_PER_YEAR,
- mass: 4.36624404335156298e-05 * SOLAR_MASS};
- }
-
- pub fn neptune() -> Body::props {
- return {mut x: 1.53796971148509165e+01,
- mut y: -2.59193146099879641e+01,
- mut z: 1.79258772950371181e-01,
- mut vx: 2.68067772490389322e-03 * DAYS_PER_YEAR,
- mut vy: 1.62824170038242295e-03 * DAYS_PER_YEAR,
- mut vz: -9.51592254519715870e-05 * DAYS_PER_YEAR,
- mass: 5.15138902046611451e-05 * SOLAR_MASS};
- }
-
- pub fn sun() -> Body::props {
- return {mut x: 0.0,
- mut y: 0.0,
- mut z: 0.0,
- mut vx: 0.0,
- mut vy: 0.0,
- mut vz: 0.0,
- mass: SOLAR_MASS};
- }
-
- pub fn offset_momentum(props: &mut Body::props,
- px: float, py: float, pz: float) {
- props.vx = -px / SOLAR_MASS;
- props.vy = -py / SOLAR_MASS;
- props.vz = -pz / SOLAR_MASS;
- }
-
-}
diff --git a/tests/examplefiles/test.gradle b/tests/examplefiles/test.gradle
new file mode 100644
index 00000000..0bc834c1
--- /dev/null
+++ b/tests/examplefiles/test.gradle
@@ -0,0 +1,20 @@
+apply plugin: 'java'
+
+repositories {
+ mavenCentral()
+}
+
+dependencies {
+ testCompile 'junit:junit:4.12'
+}
+
+task sayHello << {
+ def x = SomeClass.worldString
+ println "Hello ${x}"
+}
+
+private class SomeClass {
+ public static String getWorldString() {
+ return "world"
+ }
+}