author     Georg Brandl <georg@python.org>    2017-01-22 20:27:58 +0100
committer  Georg Brandl <georg@python.org>    2017-01-22 20:27:58 +0100
commit     f9d97178624f11b597803206ba1d918da9c89532 (patch)
tree       73ec1e1e0d5a451561453e73b8dcc1947cc52890
parent     1f75c51afdbe281e0044ca5c5369e14fc5e0e8e1 (diff)
download   pygments-f9d97178624f11b597803206ba1d918da9c89532.tar.gz
all: run regexlint, add a few more changelog entries
-rw-r--r--  CHANGES                       |  3
-rw-r--r--  pygments/lexers/business.py   |  2
-rw-r--r--  pygments/lexers/capnproto.py  | 18
-rw-r--r--  pygments/lexers/clean.py      | 18
-rw-r--r--  pygments/lexers/crystal.py    | 12
-rw-r--r--  pygments/lexers/esoteric.py   | 14
-rw-r--r--  pygments/lexers/haskell.py    |  4
-rw-r--r--  pygments/lexers/julia.py      |  4
-rw-r--r--  pygments/lexers/monte.py      |  2
-rw-r--r--  pygments/lexers/ncl.py        |  2
-rw-r--r--  pygments/lexers/sas.py        | 12
-rw-r--r--  pygments/lexers/shell.py      |  2
-rw-r--r--  pygments/lexers/smv.py        | 50
-rw-r--r--  pygments/lexers/stata.py      | 14
-rw-r--r--  pygments/lexers/templates.py  |  4
15 files changed, 88 insertions, 73 deletions
diff --git a/CHANGES b/CHANGES
index 2558e977..9a85ac27 100644
--- a/CHANGES
+++ b/CHANGES
@@ -28,6 +28,9 @@ Version 2.2
* Monte (PR#592)
* Crystal (PR#576)
* Snowball (PR#589)
+ * CapDL (PR#579)
+ * NuSMV (PR#564)
+ * SAS, Stata (PR#593)
- Added the ability to load lexer and formatter classes directly from files
with the `-x` command line option and the `lexers.load_lexer_from_file()`
diff --git a/pygments/lexers/business.py b/pygments/lexers/business.py
index 12ed6925..552f3d9c 100644
--- a/pygments/lexers/business.py
+++ b/pygments/lexers/business.py
@@ -439,7 +439,7 @@ class ABAPLexer(RegexLexer):
(r'[?*<>=\-+&]', Operator),
(r"'(''|[^'])*'", String.Single),
(r"`([^`])*`", String.Single),
- (r"([\|\}])([^\{\}\|]*?)([\|\{])",
+ (r"([|}])([^{}|]*?)([|{])",
bygroups(Punctuation, String.Single, Punctuation)),
(r'[/;:()\[\],.]', Punctuation),
(r'(!)(\w+)', bygroups(Operator, Name)),
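For context on the ABAP change above: regexlint flags backslashes that are redundant inside character classes, where |, { and } are ordinary characters, so the escaped and unescaped classes accept exactly the same input. A quick check (the sample string is made up):

    import re

    escaped = re.compile(r"([\|\}])([^\{\}\|]*?)([\|\{])")
    plain = re.compile(r"([|}])([^{}|]*?)([|{])")

    sample = "|embedded literal{"
    # Same groups either way; only the escaping differs.
    assert escaped.match(sample).groups() == plain.match(sample).groups()
    print(plain.match(sample).groups())   # ('|', 'embedded literal', '{')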
diff --git a/pygments/lexers/capnproto.py b/pygments/lexers/capnproto.py
index 49fd3d3a..203523a1 100644
--- a/pygments/lexers/capnproto.py
+++ b/pygments/lexers/capnproto.py
@@ -11,7 +11,7 @@
import re
-from pygments.lexer import RegexLexer
+from pygments.lexer import RegexLexer, default
from pygments.token import Text, Comment, Keyword, Name, Literal
__all__ = ['CapnProtoLexer']
@@ -39,40 +39,40 @@ class CapnProtoLexer(RegexLexer):
(r'(struct|enum|interface|union|import|using|const|annotation|'
r'extends|in|of|on|as|with|from|fixed)\b',
Keyword),
- (r'[a-zA-Z0-9_.]+', Name),
- (r'[^#@=:$a-zA-Z0-9_]+', Text),
+ (r'[\w.]+', Name),
+ (r'[^#@=:$\w]+', Text),
],
'type': [
(r'[^][=;,(){}$]+', Name.Class),
(r'[[(]', Name.Class, 'parentype'),
- (r'', Name.Class, '#pop')
+ default('#pop'),
],
'parentype': [
(r'[^][;()]+', Name.Class),
(r'[[(]', Name.Class, '#push'),
(r'[])]', Name.Class, '#pop'),
- (r'', Name.Class, '#pop')
+ default('#pop'),
],
'expression': [
(r'[^][;,(){}$]+', Literal),
(r'[[(]', Literal, 'parenexp'),
- (r'', Literal, '#pop')
+ default('#pop'),
],
'parenexp': [
(r'[^][;()]+', Literal),
(r'[[(]', Literal, '#push'),
(r'[])]', Literal, '#pop'),
- (r'', Literal, '#pop')
+ default('#pop'),
],
'annotation': [
(r'[^][;,(){}=:]+', Name.Attribute),
(r'[[(]', Name.Attribute, 'annexp'),
- (r'', Name.Attribute, '#pop')
+ default('#pop'),
],
'annexp': [
(r'[^][;()]+', Name.Attribute),
(r'[[(]', Name.Attribute, '#push'),
(r'[])]', Name.Attribute, '#pop'),
- (r'', Name.Attribute, '#pop')
+ default('#pop'),
],
}
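The repeated capnproto edit above swaps empty-pattern rules such as (r'', Name.Class, '#pop') for Pygments' default() helper, which changes state without attempting a match; regexlint rejects rules whose pattern can match the empty string. A minimal sketch of the idiom, with invented state, token and input names:

    from pygments.lexer import RegexLexer, default
    from pygments.token import Name, Punctuation, Text

    class TinyLexer(RegexLexer):
        """Toy lexer: after ':' expect a type name, then fall back to root."""
        name = 'Tiny'
        tokens = {
            'root': [
                (r'\w+', Name),
                (r':', Punctuation, 'type'),
                (r'\s+', Text),
            ],
            'type': [
                (r'\w+', Name.Class),
                default('#pop'),   # leave 'type' without consuming any input
            ],
        }

    for tok, val in TinyLexer().get_tokens('x:Int y'):
        print(tok, repr(val))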
diff --git a/pygments/lexers/clean.py b/pygments/lexers/clean.py
index 5c8be8d4..ba2569f6 100644
--- a/pygments/lexers/clean.py
+++ b/pygments/lexers/clean.py
@@ -116,7 +116,7 @@ class CleanLexer(ExtendedRegexLexer):
(r'(?s)/\*.*?\*/', Comment.Multi),
# Modules, imports, etc.
- (r'\b((?:implementation|definition|system)\s+)?(module)(\s+)([\w`\.]+)',
+ (r'\b((?:implementation|definition|system)\s+)?(module)(\s+)([\w`.]+)',
bygroups(Keyword.Namespace, Keyword.Namespace, Text, Name.Class)),
(r'(?<=\n)import(?=\s)', Keyword.Namespace, 'import'),
(r'(?<=\n)from(?=\s)', Keyword.Namespace, 'fromimport'),
@@ -150,11 +150,11 @@ class CleanLexer(ExtendedRegexLexer):
Literal),
# Qualified names
- (r'(\')([\w\.]+)(\'\.)',
+ (r'(\')([\w.]+)(\'\.)',
bygroups(Punctuation, Name.Namespace, Punctuation)),
# Everything else is some name
- (r'([\w`$%\/\?@]+\.?)*[\w`$%\/\?@]+', Name),
+ (r'([\w`$%/?@]+\.?)*[\w`$%/?@]+', Name),
# Punctuation
(r'[{}()\[\],:;.#]', Punctuation),
@@ -171,14 +171,14 @@ class CleanLexer(ExtendedRegexLexer):
],
'fromimport': [
include('common'),
- (r'([\w`\.]+)', check_class_not_import),
+ (r'([\w`.]+)', check_class_not_import),
(r'\n', Whitespace, '#pop'),
(r'\s', Whitespace),
],
'fromimportfunc': [
include('common'),
- (r'(::)\s+([^,\s]+)', bygroups(Punctuation, Keyword.Type)),
- (r'([\w`$()=\-<>~*\^|+&%\/]+)', check_instance_class),
+ (r'(::)(\s+)([^,\s]+)', bygroups(Punctuation, Text, Keyword.Type)),
+ (r'([\w`$()=\-<>~*\^|+&%/]+)', check_instance_class),
(r',', Punctuation),
(r'\n', Whitespace, '#pop'),
(r'\s', Whitespace),
@@ -204,7 +204,7 @@ class CleanLexer(ExtendedRegexLexer):
include('common'),
(words(('from', 'import', 'as', 'qualified'),
prefix='(?<=\s)', suffix='(?=\s)'), Keyword.Namespace),
- (r'[\w`\.]+', Name.Class),
+ (r'[\w`.]+', Name.Class),
(r'\n', Whitespace, '#pop'),
(r',', Punctuation),
(r'[^\S\n]+', Whitespace),
@@ -269,7 +269,7 @@ class CleanLexer(ExtendedRegexLexer):
(r'^(?=\S)', Whitespace, '#pop:3'),
(r'[,&]', Punctuation),
(r'\[', Punctuation, 'functiondefuniquneq'),
- (r'[\w`$()=\-<>~*\^|+&%\/{}\[\]@]', Name.Function, 'functionname'),
+ (r'[\w`$()=\-<>~*\^|+&%/{}\[\]@]', Name.Function, 'functionname'),
(r'\s+', Whitespace),
],
'functiondefuniquneq': [
@@ -281,7 +281,7 @@ class CleanLexer(ExtendedRegexLexer):
],
'functionname': [
include('common'),
- (r'[\w`$()=\-<>~*\^|+&%\/]+', Name.Function),
+ (r'[\w`$()=\-<>~*\^|+&%/]+', Name.Function),
(r'(?=\{\|)', Punctuation, 'genericfunction'),
default('#pop'),
]
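One clean.py fix above is behavioral rather than cosmetic: bygroups() assigns one token per capture group, so the whitespace after '::' now gets its own group and its own Text token instead of being folded into the neighbouring groups. A rough sketch of the corrected rule in isolation (lexer name and input are invented):

    from pygments.lexer import RegexLexer, bygroups
    from pygments.token import Punctuation, Text, Keyword

    class SigLexer(RegexLexer):
        """Toy lexer for ':: TypeName' fragments only."""
        name = 'Sig'
        tokens = {
            'root': [
                # exactly one capture group per token handed to bygroups()
                (r'(::)(\s+)([^,\s]+)', bygroups(Punctuation, Text, Keyword.Type)),
                (r'[\s,]+', Text),
            ],
        }

    print(list(SigLexer().get_tokens(':: Maybe, :: Int')))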
diff --git a/pygments/lexers/crystal.py b/pygments/lexers/crystal.py
index 7aecaf3e..bea4833f 100644
--- a/pygments/lexers/crystal.py
+++ b/pygments/lexers/crystal.py
@@ -277,12 +277,16 @@ class CrystalLexer(ExtendedRegexLexer):
bygroups(Number.Hex, Text, Operator)),
(r'(0b[01]+(?:_[01]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
bygroups(Number.Bin, Text, Operator)),
- # 3 separate expressions for floats because any of the 3 optional parts makes it a float
- (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?(?:_?[f][0-9]+)?)(\s*)([/?])?',
+ # 3 separate expressions for floats because any of the 3 optional
+ # parts makes it a float
+ (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?'
+ r'(?:_?f[0-9]+)?)(\s*)([/?])?',
bygroups(Number.Float, Text, Operator)),
- (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)(?:_?[f][0-9]+)?)(\s*)([/?])?',
+ (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)'
+ r'(?:_?f[0-9]+)?)(\s*)([/?])?',
bygroups(Number.Float, Text, Operator)),
- (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?(?:_?[f][0-9]+))(\s*)([/?])?',
+ (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?'
+ r'(?:_?f[0-9]+))(\s*)([/?])?',
bygroups(Number.Float, Text, Operator)),
(r'(0\b|[1-9][\d]*(?:_\d+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
bygroups(Number.Integer, Text, Operator)),
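The crystal.py float rules are only re-wrapped: adjacent string literals are concatenated by the Python compiler, so splitting a long raw-string regex over two lines yields the identical pattern (the commit also drops the pointless one-character class [f] in favour of a plain f). A quick check with the first rewritten rule and an invented literal:

    import re

    pattern = (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?'
               r'(?:_?f[0-9]+)?)(\s*)([/?])?')
    # The two adjacent literals above form a single regex string.
    print(re.match(pattern, '3.14_f64').group(1))   # 3.14_f64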
diff --git a/pygments/lexers/esoteric.py b/pygments/lexers/esoteric.py
index 54577bf9..793c28be 100644
--- a/pygments/lexers/esoteric.py
+++ b/pygments/lexers/esoteric.py
@@ -14,7 +14,7 @@ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error
__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'CAmkESLexer',
- 'CapDLLexer', 'AheuiLexer']
+ 'CapDLLexer', 'AheuiLexer']
class BrainfuckLexer(RegexLexer):
@@ -155,14 +155,15 @@ class CapDLLexer(RegexLexer):
shadow type names, but these instances are currently incorrectly
highlighted as types. Supporting this would need a stateful lexer that is
considered unnecessarily complex for now.
+
+ .. versionadded:: 2.2
"""
name = 'CapDL'
aliases = ['capdl']
filenames = ['*.cdl']
tokens = {
- 'root':[
-
+ 'root': [
# C pre-processor directive
(r'^\s*#.*\n', Comment.Preproc),
@@ -171,7 +172,7 @@ class CapDLLexer(RegexLexer):
(r'/\*(.|\n)*?\*/', Comment),
(r'(//|--).*\n', Comment),
- (r'[<>\[\(\)\{\},:;=\]]', Punctuation),
+ (r'[<>\[(){},:;=\]]', Punctuation),
(r'\.\.', Punctuation),
(words(('arch', 'arm11', 'caps', 'child_of', 'ia32', 'irq', 'maps',
@@ -187,7 +188,7 @@ class CapDLLexer(RegexLexer):
'prio', 'sp', 'R', 'RG', 'RX', 'RW', 'RWG', 'RWX', 'W',
'WG', 'WX', 'level', 'masked', 'master_reply', 'paddr',
'ports', 'reply', 'uncached'), suffix=r'\b'),
- Keyword.Reserved),
+ Keyword.Reserved),
# Literals
(r'0[xX][\da-fA-F]+', Number.Hex),
@@ -197,10 +198,11 @@ class CapDLLexer(RegexLexer):
'ipc_buffer_slot'), suffix=r'\b'), Number),
# Identifiers
- (r'[a-zA-Z_][-_@\.\w]*', Name),
+ (r'[a-zA-Z_][-@\.\w]*', Name),
],
}
+
class RedcodeLexer(RegexLexer):
"""
A simple Redcode lexer based on ICWS'94.
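In the CapDL identifier rule above, the underscore is dropped from the trailing class because \w already matches it, so the accepted identifiers are unchanged. A small check with invented CapDL-style names:

    import re

    old = re.compile(r'[a-zA-Z_][-_@\.\w]*')
    new = re.compile(r'[a-zA-Z_][-@\.\w]*')
    for ident in ('frame_cap', 'my-obj@0x1000', 'tcb.vspace'):
        assert old.fullmatch(ident) and new.fullmatch(ident)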
diff --git a/pygments/lexers/haskell.py b/pygments/lexers/haskell.py
index 9020ceb6..1a2f2217 100644
--- a/pygments/lexers/haskell.py
+++ b/pygments/lexers/haskell.py
@@ -64,8 +64,8 @@ class HaskellLexer(RegexLexer):
(r"'?[_" + uni.Ll + r"][\w']*", Name),
(r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type),
(r"(')[" + uni.Lu + r"][\w\']*", Keyword.Type),
- (r"(')\[[^\]]*\]", Keyword.Type), # tuples and lists get special treatment in GHC
- (r"(')\([^\)]*\)", Keyword.Type), # ..
+ (r"(')\[[^\]]*\]", Keyword.Type), # tuples and lists get special treatment in GHC
+ (r"(')\([^)]*\)", Keyword.Type), # ..
# Operators
(r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
(r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
diff --git a/pygments/lexers/julia.py b/pygments/lexers/julia.py
index d946554b..67453aba 100644
--- a/pygments/lexers/julia.py
+++ b/pygments/lexers/julia.py
@@ -231,7 +231,7 @@ class JuliaLexer(RegexLexer):
'string': [
(r'"', String, '#pop'),
# FIXME: This escape pattern is not perfect.
- (r'\\([\\"\'\$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
+ (r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
# Interpolation is defined as "$" followed by the shortest full
# expression, which is something we can't parse.
# Include the most common cases here: $word, and $(paren'd expr).
@@ -246,7 +246,7 @@ class JuliaLexer(RegexLexer):
'tqstring': [
(r'"""', String, '#pop'),
- (r'\\([\\"\'\$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
+ (r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
(r'\$' + allowed_variable, String.Interpol),
(r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
(r'.|\s', String),
diff --git a/pygments/lexers/monte.py b/pygments/lexers/monte.py
index e18560b8..ed6e20f8 100644
--- a/pygments/lexers/monte.py
+++ b/pygments/lexers/monte.py
@@ -44,7 +44,7 @@ _escape_pattern = (
r'(?:\\x[0-9a-fA-F]{2}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
r'\\["\'\\bftnr])')
# _char = _escape_chars + [('.', String.Char)]
-_identifier = '[_a-zA-Z][_0-9a-zA-Z]*'
+_identifier = r'[_a-zA-Z]\w*'
_constants = [
# Void constants
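The monte.py identifier pattern is shortened from [_a-zA-Z][_0-9a-zA-Z]* to [_a-zA-Z]\w*. One nuance worth noting: on Python 3 str patterns \w is Unicode-aware, so the shorter form also accepts non-ASCII word characters after the first one, which is usually harmless for highlighting. Illustration (inputs invented):

    import re

    ident_old = re.compile(r'[_a-zA-Z][_0-9a-zA-Z]*')
    ident_new = re.compile(r'[_a-zA-Z]\w*')

    print(bool(ident_new.fullmatch('_fooBar42')))                               # True, as before
    print(bool(ident_old.fullmatch('año')), bool(ident_new.fullmatch('año')))   # False True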
diff --git a/pygments/lexers/ncl.py b/pygments/lexers/ncl.py
index 1ba7f4a7..3ca5135c 100644
--- a/pygments/lexers/ncl.py
+++ b/pygments/lexers/ncl.py
@@ -62,7 +62,7 @@ class NCLLexer(RegexLexer):
(r'[\%^*+\-/<>]', Operator),
# punctuation:
- (r'[\[\]():@$!&\|.,\\{}]', Punctuation),
+ (r'[\[\]():@$!&|.,\\{}]', Punctuation),
(r'[=:]', Punctuation),
# Intrinsics
diff --git a/pygments/lexers/sas.py b/pygments/lexers/sas.py
index 264ba51f..3747ed9a 100644
--- a/pygments/lexers/sas.py
+++ b/pygments/lexers/sas.py
@@ -16,6 +16,7 @@ from pygments.token import Comment, Keyword, Name, Number, String, Text, \
__all__ = ['SASLexer']
+
class SASLexer(RegexLexer):
"""
For `SAS <http://www.sas.com/>`_ files.
@@ -136,7 +137,7 @@ class SASLexer(RegexLexer):
],
# Special highlight for proc, data, quit, run
'proc-data': [
- (r'(^|;)\s*(proc [a-zA-Z0-9_]+|data|run|quit)[\s;\n]',
+ (r'(^|;)\s*(proc \w+|data|run|quit)[\s;]',
Keyword.Reserved),
],
# Special highlight cards and datalines
@@ -154,7 +155,6 @@ class SASLexer(RegexLexer):
(r'NOTE(:|-).*', Generic, '#pop'),
(r'WARNING(:|-).*', Generic.Emph, '#pop'),
(r'ERROR(:|-).*', Generic.Error, '#pop'),
- (r'(?!(WARNING|NOTE|ERROR))+', Text, '#pop'),
include('general'),
],
'general': [
@@ -188,8 +188,8 @@ class SASLexer(RegexLexer):
],
# Strings and user-defined variables and macros (order matters)
'vars-strings': [
- (r'&[a-zA-Z_][a-zA-Z0-9_]{0,31}\.?', Name.Variable),
- (r'%[a-zA-Z_][a-zA-Z0-9_]{0,31}', Name.Function),
+ (r'&[a-z_]\w{0,31}\.?', Name.Variable),
+ (r'%[a-z_]\w{0,31}', Name.Function),
(r'\'', String, 'string_squote'),
(r'"', String, 'string_dquote'),
],
@@ -209,11 +209,11 @@ class SASLexer(RegexLexer):
(r'[$"\\]', String),
],
'validvar': [
- (r'[a-zA-Z_][a-zA-Z0-9_]{0,31}\.?', Name.Variable, '#pop'),
+ (r'[a-z_]\w{0,31}\.?', Name.Variable, '#pop'),
],
# SAS numbers and special variables
'numbers': [
- (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[i]?\b',
+ (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)(E[+-]?[0-9]+)?i?\b',
Number),
],
'special': [
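Two of the SAS rewrites above lean on existing facts rather than changing behaviour: \n is already part of \s, so [\s;\n] and [\s;] accept the same characters, and the narrowed classes such as [a-z_] and the bare E rely on the lexer matching case-insensitively (as the [eE] -> E change itself implies). The removed (?!(WARNING|NOTE|ERROR))+ rule repeated a zero-width lookahead, which regexlint flags because it can match empty input. A quick sketch of the first point (input invented, re.I standing in for the lexer's flags):

    import re

    proc_old = re.compile(r'(^|;)\s*(proc [a-zA-Z0-9_]+|data|run|quit)[\s;\n]', re.I)
    proc_new = re.compile(r'(^|;)\s*(proc \w+|data|run|quit)[\s;]', re.I)

    line = 'proc print;\n'
    assert proc_old.match(line).group(2) == proc_new.match(line).group(2) == 'proc print'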
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
index 7c2c3743..ceb6f14d 100644
--- a/pygments/lexers/shell.py
+++ b/pygments/lexers/shell.py
@@ -51,7 +51,7 @@ class BashLexer(RegexLexer):
(r'\$\(\(', Keyword, 'math'),
(r'\$\(', Keyword, 'paren'),
(r'\$\{#?', String.Interpol, 'curly'),
- (r'\$[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable), # user variable
+ (r'\$[a-zA-Z_]\w*', Name.Variable), # user variable
(r'\$(?:\d+|[#$?!_*@-])', Name.Variable), # builtin
(r'\$', Text),
],
diff --git a/pygments/lexers/smv.py b/pygments/lexers/smv.py
index 529a3814..380a3b70 100644
--- a/pygments/lexers/smv.py
+++ b/pygments/lexers/smv.py
@@ -19,6 +19,8 @@ __all__ = ['NuSMVLexer']
class NuSMVLexer(RegexLexer):
"""
Lexer for the NuSMV language.
+
+ .. versionadded:: 2.2
"""
name = 'NuSMV'
@@ -33,43 +35,45 @@ class NuSMVLexer(RegexLexer):
(r'--.*\n', Comment),
# Reserved
- (words(('MODULE','DEFINE','MDEFINE','CONSTANTS','VAR','IVAR',
- 'FROZENVAR','INIT','TRANS','INVAR','SPEC','CTLSPEC','LTLSPEC',
- 'PSLSPEC','COMPUTE','NAME','INVARSPEC','FAIRNESS','JUSTICE',
- 'COMPASSION','ISA','ASSIGN','CONSTRAINT','SIMPWFF','CTLWFF',
- 'LTLWFF','PSLWFF','COMPWFF','IN','MIN','MAX','MIRROR','PRED',
- 'PREDICATES'), suffix=r'(?![\w$#-])'), Keyword.Declaration),
+ (words(('MODULE', 'DEFINE', 'MDEFINE', 'CONSTANTS', 'VAR', 'IVAR',
+ 'FROZENVAR', 'INIT', 'TRANS', 'INVAR', 'SPEC', 'CTLSPEC',
+ 'LTLSPEC', 'PSLSPEC', 'COMPUTE', 'NAME', 'INVARSPEC',
+ 'FAIRNESS', 'JUSTICE', 'COMPASSION', 'ISA', 'ASSIGN',
+ 'CONSTRAINT', 'SIMPWFF', 'CTLWFF', 'LTLWFF', 'PSLWFF',
+ 'COMPWFF', 'IN', 'MIN', 'MAX', 'MIRROR', 'PRED',
+ 'PREDICATES'), suffix=r'(?![\w$#-])'),
+ Keyword.Declaration),
(r'process(?![\w$#-])', Keyword),
- (words(('array','of','boolean','integer','real','word'),
- suffix=r'(?![\w$#-])'), Keyword.Type),
- (words(('case','esac'), suffix=r'(?![\w$#-])'), Keyword),
- (words(('word1','bool','signed','unsigned','extend','resize',
- 'sizeof','uwconst','swconst','init','self','count','abs','max',
- 'min'), suffix=r'(?![\w$#-])'), Name.Builtin),
- (words(('EX','AX','EF','AF','EG','AG','E','F','O','G','H','X','Y',
- 'Z','A','U','S','V','T','BU','EBF','ABF','EBG','ABG','next',
- 'mod','union','in','xor','xnor'), suffix=r'(?![\w$#-])'),
+ (words(('array', 'of', 'boolean', 'integer', 'real', 'word'),
+ suffix=r'(?![\w$#-])'), Keyword.Type),
+ (words(('case', 'esac'), suffix=r'(?![\w$#-])'), Keyword),
+ (words(('word1', 'bool', 'signed', 'unsigned', 'extend', 'resize',
+ 'sizeof', 'uwconst', 'swconst', 'init', 'self', 'count',
+ 'abs', 'max', 'min'), suffix=r'(?![\w$#-])'),
+ Name.Builtin),
+ (words(('EX', 'AX', 'EF', 'AF', 'EG', 'AG', 'E', 'F', 'O', 'G',
+ 'H', 'X', 'Y', 'Z', 'A', 'U', 'S', 'V', 'T', 'BU', 'EBF',
+ 'ABF', 'EBG', 'ABG', 'next', 'mod', 'union', 'in', 'xor',
+ 'xnor'), suffix=r'(?![\w$#-])'),
Operator.Word),
- (words(('TRUE','FALSE'), suffix=r'(?![\w$#-])'), Keyword.Constant),
+ (words(('TRUE', 'FALSE'), suffix=r'(?![\w$#-])'), Keyword.Constant),
# Names
(r'[a-zA-Z_][\w$#-]*', Name.Variable),
-
+
# Operators
(r':=', Operator),
- (r'[&\|\+\-\*/<>!=]', Operator),
+ (r'[-&|+*/<>!=]', Operator),
# Literals
(r'\-?\d+\b', Number.Integer),
(r'0[su][bB]\d*_[01_]+', Number.Bin),
- (r'0[su][oO]\d*_[01234567_]+', Number.Oct),
+ (r'0[su][oO]\d*_[0-7_]+', Number.Oct),
(r'0[su][dD]\d*_[\d_]+', Number.Dec),
(r'0[su][hH]\d*_[\da-fA-F_]+', Number.Hex),
# Whitespace, punctuation and the rest
(r'\s+', Text.Whitespace),
- (r'[\(\)\[\]\{\};\?:\.,]', Punctuation),
- (r'.', Generic.Error),
- ]
+ (r'[()\[\]{};?:.,]', Punctuation),
+ ],
}
-
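Dropping the trailing (r'.', Generic.Error) rule from the NuSMV lexer is safe because RegexLexer already falls back to an Error token for any character that no rule matches, so the explicit catch-all was redundant. A throwaway lexer showing the built-in fallback (names and input invented):

    from pygments.lexer import RegexLexer
    from pygments.token import Name, Text

    class NoCatchAllLexer(RegexLexer):
        name = 'NoCatchAll'
        tokens = {
            'root': [
                (r'[a-zA-Z_][\w$#-]*', Name.Variable),
                (r'\s+', Text.Whitespace),
            ],
        }

    # '%' matches no rule, so it comes out as Token.Error automatically.
    print([(tok, val) for tok, val in NoCatchAllLexer().get_tokens('state % next')])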
diff --git a/pygments/lexers/stata.py b/pygments/lexers/stata.py
index d3d87ed7..a015a23e 100644
--- a/pygments/lexers/stata.py
+++ b/pygments/lexers/stata.py
@@ -17,6 +17,7 @@ from pygments.lexers._stata_builtins import builtins_base, builtins_functions
__all__ = ['StataLexer']
+
class StataLexer(RegexLexer):
"""
For `Stata <http://www.stata.com/>`_ do files.
@@ -43,8 +44,8 @@ class StataLexer(RegexLexer):
],
# Global and local macros; regular and special strings
'vars-strings': [
- (r'\$[a-zA-Z_0-9\{]', Name.Variable.Global, 'var_validglobal'),
- (r'`[a-zA-Z_0-9]{0,31}\'', Name.Variable),
+ (r'\$[\w{]', Name.Variable.Global, 'var_validglobal'),
+ (r'`\w{0,31}\'', Name.Variable),
(r'"', String, 'string_dquote'),
(r'`"', String, 'string_mquote'),
],
@@ -54,7 +55,7 @@ class StataLexer(RegexLexer):
(r'\\\\|\\"|\\\n', String.Escape),
(r'\$', Name.Variable.Global, 'var_validglobal'),
(r'`', Name.Variable, 'var_validlocal'),
- (r'[^$\$`"\\]+', String),
+ (r'[^$`"\\]+', String),
(r'[$"\\]', String),
],
'string_mquote': [
@@ -62,14 +63,15 @@ class StataLexer(RegexLexer):
(r'\\\\|\\"|\\\n', String.Escape),
(r'\$', Name.Variable.Global, 'var_validglobal'),
(r'`', Name.Variable, 'var_validlocal'),
- (r'[^$\$`"\\]+', String),
+ (r'[^$`"\\]+', String),
(r'[$"\\]', String),
],
'var_validglobal': [
- (r'\{?[a-zA-Z0-9_]{0,32}\}?', Name.Variable.Global, '#pop'),
+ (r'\{\w{0,32}\}', Name.Variable.Global, '#pop'),
+ (r'\w{1,32}', Name.Variable.Global, '#pop'),
],
'var_validlocal': [
- (r'[a-zA-Z0-9_]{0,31}\'', Name.Variable, '#pop'),
+ (r'\w{0,31}\'', Name.Variable, '#pop'),
],
# * only OK at line start, // OK anywhere
'comments': [
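The var_validglobal rewrite above fixes a regexlint complaint: with the braces and the name all optional, the old pattern could succeed on zero characters, which is a hazard inside a RegexLexer state. The two replacement rules each require at least one character. A quick illustration (inputs invented):

    import re

    old = re.compile(r'\{?[a-zA-Z0-9_]{0,32}\}?')
    braced = re.compile(r'\{\w{0,32}\}')
    bare = re.compile(r'\w{1,32}')

    print(old.match('') is not None)            # True -> zero-width match possible
    print(braced.match('{myglobal}').group())   # {myglobal}
    print(bare.match('myglobal').group())       # myglobal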
diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py
index 2c3feaac..83c57db8 100644
--- a/pygments/lexers/templates.py
+++ b/pygments/lexers/templates.py
@@ -1838,8 +1838,8 @@ class HandlebarsLexer(RegexLexer):
(r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
Name.Variable, Text)),
- (r'(lookup)(\s+)([^\s]+)', bygroups(Keyword, Text,
- using(this, state='variable'))),
+ (r'(lookup)(\s+)(\S+)', bygroups(Keyword, Text,
+ using(this, state='variable'))),
(r'[\w-]+', Name.Function),
include('generic'),