From 1cecb2f9d417afc82445716a4847411daa132fd5 Mon Sep 17 00:00:00 2001 From: michaelficarra Date: Fri, 1 Feb 2013 17:08:56 -0600 Subject: add CPSA support --- pygments/lexers/functional.py | 82 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 82 insertions(+) diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index 4947bf7d..a3739394 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -712,6 +712,88 @@ class SchemeLexer(RegexLexer): } +class CPSALexer(SchemeLexer): + """ + A CPSA lexer based on the CPSA language as of version 2.2.12 + """ + name = 'CPSA' + aliases = ['cpsa'] + filenames = ['*.cpsa'] + mimetypes = [] + + # list of known keywords and builtins taken form vim 6.4 scheme.vim + # syntax file. + keywords = [ + 'herald', 'vars', 'defmacro', 'include', 'defprotocol', 'defrole', + 'defskeleton', 'defstrand', 'deflistener', 'non-orig', 'uniq-orig', + 'pen-non-orig', 'precedes', 'trace', 'send', 'recv', 'name', 'text', + 'skey', 'akey', 'data', 'mesg' + ] + builtins = [ + 'cat', 'enc', 'hash', 'privk', 'pubk', 'invk', 'ltk', 'gen', 'exp' + ] + + # valid names for identifiers + # well, names can only not consist fully of numbers + # but this should be good enough for now + valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~|-]+' + + tokens = { + 'root' : [ + # the comments - always starting with semicolon + # and going to the end of the line + (r';.*$', Comment.Single), + + # whitespaces - usually not relevant + (r'\s+', Text), + + # numbers + (r'-?\d+\.\d+', Number.Float), + (r'-?\d+', Number.Integer), + # support for uncommon kinds of numbers - + # have to figure out what the characters mean + #(r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number), + + # strings, symbols and characters + (r'"(\\\\|\\"|[^"])*"', String), + (r"'" + valid_name, String.Symbol), + (r"#\\([()/'\"._!§$%& ?=+-]{1}|[a-zA-Z0-9]+)", String.Char), + + # constants + (r'(#t|#f)', Name.Constant), + + # special operators + (r"('|#|`|,@|,|\.)", 
Operator), + + # highlight the keywords + ('(%s)' % '|'.join([ + re.escape(entry) + ' ' for entry in keywords]), + Keyword + ), + + # first variable in a quoted string like + # '(this is syntactic sugar) + (r"(?<='\()" + valid_name, Name.Variable), + (r"(?<=#\()" + valid_name, Name.Variable), + + # highlight the builtins + ("(?<=\()(%s)" % '|'.join([ + re.escape(entry) + ' ' for entry in builtins]), + Name.Builtin + ), + + # the remaining functions + (r'(?<=\()' + valid_name, Name.Function), + # find the remaining variables + (valid_name, Name.Variable), + + # the famous parentheses! + (r'(\(|\))', Punctuation), + (r'(\[|\])', Punctuation), + ], + } + + class CommonLispLexer(RegexLexer): """ A Common Lisp lexer. -- cgit v1.2.1 From 99270371a5a39e153e9cc294b7706e283aa956fd Mon Sep 17 00:00:00 2001 From: michaelficarra Date: Fri, 1 Feb 2013 17:13:41 -0600 Subject: added CPSA example to tests --- tests/yahalom.cpsa | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 tests/yahalom.cpsa diff --git a/tests/yahalom.cpsa b/tests/yahalom.cpsa new file mode 100644 index 00000000..3bc918d4 --- /dev/null +++ b/tests/yahalom.cpsa @@ -0,0 +1,34 @@ +(herald "Yahalom Protocol with Forwarding Removed") + +(defprotocol yahalom basic + (defrole init + (vars (a b c name) (n-a n-b text) (k skey)) + (trace (send (cat a n-a)) + (recv (enc b k n-a n-b (ltk a c))) + (send (enc n-b k)))) + (defrole resp + (vars (b a c name) (n-a n-b text) (k skey)) + (trace (recv (cat a n-a)) + (send (cat b (enc a n-a n-b (ltk b c)))) + (recv (enc a k (ltk b c))) + (recv (enc n-b k)))) + (defrole serv + (vars (c a b name) (n-a n-b text) (k skey)) + (trace (recv (cat b (enc a n-a n-b (ltk b c)))) + (send (enc b k n-a n-b (ltk a c))) + (send (enc a k (ltk b c)))) + (uniq-orig k))) + +(defskeleton yahalom + (vars (a b c name) (n-b text)) + (defstrand resp 4 (a a) (b b) (c c) (n-b n-b)) + (non-orig (ltk b c) (ltk a c)) + (uniq-orig n-b)) + +;;; Ensure encryption key 
remains secret. +(defskeleton yahalom + (vars (a b c name) (n-b text) (k skey)) + (defstrand resp 4 (a a) (b b) (c c) (n-b n-b) (k k)) + (deflistener k) + (non-orig (ltk b c) (ltk a c)) + (uniq-orig n-b)) -- cgit v1.2.1 From 514fb3d28b6b77343bdbcbfcc18e26e71b384fb4 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Mon, 18 Feb 2013 02:49:05 +0100 Subject: Added support for EasyTrieve, JCL and (Web)FOCUS. --- pygments/lexers/_mapping.py | 3 + pygments/lexers/other.py | 272 +++++++++++++++++++++++++++++++++++++++++++- tests/examplefiles/test.ezt | 18 +++ tests/examplefiles/test.fex | 225 ++++++++++++++++++++++++++++++++++++ tests/examplefiles/test.jcl | 30 +++++ 5 files changed, 546 insertions(+), 2 deletions(-) create mode 100644 tests/examplefiles/test.ezt create mode 100644 tests/examplefiles/test.fex create mode 100644 tests/examplefiles/test.jcl diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 53e09176..5c4275d0 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -93,6 +93,7 @@ LEXERS = { 'DylanLidLexer': ('pygments.lexers.compiled', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)), 'ECLLexer': ('pygments.lexers.other', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)), 'ECLexer': ('pygments.lexers.compiled', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')), + 'EasyTrieveLexer': ('pygments.lexers.other', 'EasyTrieve', ('easytrieve',), ('*.ezt',), ('text/x-easytrieve',)), 'ElixirConsoleLexer': ('pygments.lexers.functional', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), 'ElixirLexer': ('pygments.lexers.functional', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)), 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), @@ -146,6 +147,7 @@ LEXERS = { 'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/javascript', 
'application/x-javascript', 'text/x-javascript', 'text/javascript')), 'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')), 'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')), + 'JclLexer': ('pygments.lexers.other', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)), 'JsonLexer': ('pygments.lexers.web', 'JSON', ('json',), ('*.json',), ('application/json',)), 'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)), 'JuliaConsoleLexer': ('pygments.lexers.math', 'Julia console', ('jlcon',), (), ()), @@ -288,6 +290,7 @@ LEXERS = { 'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)), 'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)), 'VimLexer': ('pygments.lexers.text', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)), + 'WebFocusLexer': ('pygments.lexers.other', 'WebFOCUS', ('webfocus', 'FOCUS', 'focus'), ('*.fex',), ('text/x-webfocus', 'text/x-focus')), 'XQueryLexer': ('pygments.lexers.web', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')), 'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')), 'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)), diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index c8557922..18d3059d 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -14,7 +14,8 @@ import 
re from pygments.lexer import RegexLexer, include, bygroups, using, \ this, combined, ExtendedRegexLexer from pygments.token import Error, Punctuation, Literal, Token, \ - Text, Comment, Operator, Keyword, Name, String, Number, Generic + Text, Comment, Operator, Keyword, Name, String, Number, Generic, Other, \ + Whitespace from pygments.util import get_bool_opt from pygments.lexers.web import HtmlLexer @@ -35,7 +36,8 @@ __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer', 'ECLLexer', 'UrbiscriptLexer', 'OpenEdgeLexer', 'BroLexer', 'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer', 'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer', - 'CbmBasicV2Lexer', 'AutoItLexer'] + 'CbmBasicV2Lexer', 'AutoItLexer', 'EasyTrieveLexer', 'JclLexer', + 'WebFocusLexer'] class ECLLexer(RegexLexer): @@ -3665,3 +3667,269 @@ class AutoItLexer(RegexLexer): (r'[^\S\n]', Text), ], } + + +class EasyTrieveLexer(RegexLexer): + """ + EasyTrieve (Classic and Plus) are programming languages tailored to + generating reports and are mainly used in mainframe related environments. + + This lexer is designed for EasyTrieve Plus 6.4. + """ + name = 'EasyTrieve' + aliases = ['easytrieve'] + filenames = ['*.ezt'] + mimetypes = ['text/x-easytrieve'] + flags = re.IGNORECASE + + # TODO: Treat only the first 72 characters as source code and the rest as comment. + # TODO: After some reserved words such as 'define', even keywords are names. + # TODO: Consider continuation characters '+' and '-' + # TODO: Treat the 'not' character as operator. + + tokens = { + 'root': [ + # Note: We cannot use r'\b' at the start and end of keywords + # because EasyTrieve Plus delimiter characters are: + # + # * space ( ) + # * apostrophe (') + # * period (.) 
+ # * comma (,) + # * paranthesis ( and ) + # * colon (:) + (r'(after-break|after-line|after-screen|aim|and|attr|before|' + r'before-break|before-line|before-screen|bushu|by|call|case|' + r'checkpoint|chkp|chkp-status|clear|close|col|color|commit|' + r'control|copy|cursor|d|declare|default|define|delete|denwa|' + r'display|dli|do|duplicate|e|else|else-if|end|end-case|end-do|' + r'end-if|end-proc|endpage|endtable|enter|eof|eq|error|exit|' + r'external|ezlib|f1|f10|f11|f12|f13|f14|f15|f16|f17|f18|f19|f2|' + r'f20|f21|f22|f23|f24|f25|f26|f27|f28|f29|f3|f30|f31|f32|f33|' + r'f34|f35|f36|f4|f5|f6|f7|f8|f9|fetch|file|file-status|fill|' + r'final|first|first-dup|for|ge|get|go|goto|gq|gr|gt|heading|' + r'hex|high-values|idd|idms|if|in|insert|job|justify|kanji-date|' + r'kanji-date-long|kanji-time|key|key-pressed|kokugo|kun|' + r'last-dup|le|level|like|line|line-count|line-number|link|list|' + r'low-values|lq|ls|lt|mask|matched|mend|message|move|mstart|ne|' + r'newpage|nomask|noprint|not|note|noverify|nq|null|of|or|' + r'otherwise|pa1|pa2|pa3|page-count|page-number|parm-register|' + r'path-id|pattern|perform|point|pos|primary|print|proc|' + r'procedure|program|put|read|record|record-count|record-length|' + r'refresh|release|renum|repeat|report|report-input|reshow|' + r'restart|retrieve|return-code|rollback|row|s|screen|search|' + r'secondary|select|sequence|size|skip|sokaku|sort|sql|stop|sum|' + r'sysdate|sysdate-long|sysin|sysipt|syslst|sysprint|syssnap|' + r'systime|tally|term-columns|term-name|term-rows|termination|' + r'title|to|transfer|trc|unique|until|update|uppercase|user|' + r'userid|value|verify|w|when|while|work|write|x|xdm|xrst)[ \'.,():]', + Keyword.Reserved), + # These are not actually keywords but section separators so + # treating them differently from names seems in order. 
+ # TODO: Fix: (r'(param|report)[ \'.,():]', Keyword), + (r'[\[\](){}<>;,]', Punctuation), + (r'[-+/=&%]', Operator), + (r'[0-9]+\.[0-9]*', Number.Float), + (r'[0-9]+', Number.Integer), + (r"'(''|[^'])*'", String), + (r'\*.*\n', Comment.Single), + (r'\.', Operator), + (r'\s+', Whitespace), + (r'[^ \'.,():]+', Name) # Everything else just belongs to a name + ] + } + + +class JclLexer(RegexLexer): + """ + Job Control Language (JCL) is a scripting language used on IBM mainframe + operating systems to instruct the system on how to run a batch job or + start a subsystem. + + For more information, refer to the + `MVS Job Control Language Reference `_ + """ + name = 'JCL' + aliases = ['jcl'] + filenames = ['*.jcl'] + mimetypes = ['text/x-jcl'] + flags = re.IGNORECASE + + tokens = { + 'root': [ + (r'//\*.*\n', Comment.Single), + (r'//', Keyword.Pseudo, 'statement'), + (r'/\*', Keyword.Pseudo, 'jes2_statement'), + # TODO: JES3 statement + (r'.*\n', Other) # Input text or inline code in any language. 
+ ], + 'statement': [ + (r'\s*\n', Whitespace, 'root'), + (r'([a-z][a-z_0-9]*)(\s+)(exec|job)(\s*)', + bygroups(Name.Label, Whitespace, Keyword.Reserved, Whitespace), + 'option'), + (r'[a-z][a-z_0-9]*', Name.Variable, 'statement_command'), + (r'\s+', Whitespace, 'statement_command'), + ], + 'statement_command': [ + (r'\s+(command|cntl|dd|endctl|endif|else|include|jcllib|' + r'output|pend|proc|set|then|xmit)', Keyword.Reserved, 'option'), + include('option') + ], + 'jes2_statement': [ + (r'\s*\n', Whitespace, 'root'), + (r'\$', Keyword, 'option'), + (r'\b(jobparam|message|netacct|notify|output|priority|route|' + r'setup|signoff|xeq|xmit)\b', Keyword, 'option'), + ], + 'option': [ + (r'\n', Text, 'root'), + (r'\*', Name.Builtin), + (r'[\[\](){}<>;,]', Punctuation), + (r'[-+*/=&%]', Operator), + (r'[a-zA-Z_][a-zA-Z_0-9]*', Name), + (r'[0-9]+\.[0-9]*', Number.Float), + (r'\.[0-9]+', Number.Float), + (r'[0-9]+', Number.Integer), + (r"'", String, 'option_string'), + (r'\s+', Whitespace), + ], + 'option_string': [ + (r"(\n)(//)", bygroups(Text, Keyword.Pseudo)), + (r"''", String), + (r"[^']", String), + (r"'", String, 'option'), + ] + } + + +class WebFocusLexer(RegexLexer): + """ + WebFOCUS und FOCUS are business intelligence tools mainly used in + mainframe related environments. + + For more information, refer to the + `Information Builders product page `_. + """ + name = 'WebFOCUS' + aliases = ['webfocus', 'FOCUS', 'focus'] + filenames = ['*.fex'] + mimetypes = ['text/x-webfocus', 'text/x-focus'] + flags = re.IGNORECASE + + # TODO: Consolidate rules common to 'focus' and 'dialog_manager' with 'include' or something. + # TODO: Find out if FIDEL supports "" to escape " and if so implement it. + # TODO: Add support for backslash escapes in single quote strings (and maybe double quote too?). + # TODO: Support dialog manager FIDEL input modifiers such as '.nodisplay'. + # TODO: Highlight function name after DEFINE FUNCTION. 
+ # TODO: Highlight field name for all field types, not only numeric ones. + tokens = { + 'root': [ + (r'-\*.*\n', Comment.Single), + (r'-', Punctuation, 'dialog_manager'), + include('focus') + ], + 'focus': [ + (r'\n', Text, 'root'), + (r'\s*(across|add|alloc|as|by|clear|column-total|compute|count|' + r'crtform|decode|define|dynam|else|end|ex|exceeds|exec|file|' + r'filter|footing|for|format|free|heading|highest|hold|if|' + r'in-groups-of|in-ranges-of|join|list|lowest|match|modify|' + r'multilines|newpage|nomatch|noprint|nototal|on|over|' + r'page-break|print|printonly|ranked|recap|recompute|redefines|' + r'reject|row-total|rows|savb|save|set|sub-total|subfoot|' + r'subhead|subtotal|sum|summarize|table|the|then|tiles|total|' + r'update|when|where|with|within)\b', Keyword.Reserved), + (r'"', String, 'focus_fidel'), + (r'\b(missing)\b', Name.Constant), + (r'\b(asq|ave|cnt|cnt|ct|dst|fst|lst|max|min|pct|rcpt|st|sum|' + r'tot)\.', Operator), + # FOCUS field declaration including display options. + (r'([a-z][a-z_0-9]*)([/])([adfip]*[0-9]+(\.[0-9]+)[-%bcdelmnrsty]*)', + bygroups(Name.Variable, Operator, Keyword.Type)), + # Rules common to 'focus' and 'dialog_manager'. + (r'\b(and|contains|div|eq|exceeds|excludes|from|ge|gt|in|' + r'includes|is|is-from|is-from|is-less-than|is-more-than|' + r'is-not-missing|le|like|lt|mod|ne|not|not-from|omits|or|to)\b', + Operator), + (r'[-+*/=|!]', Operator), + (r'[(){}<>;,]', Punctuation), + (r'[a-z_][a-z_0-9]*', Literal), + (r'[&]+[a-z_][a-z_0-9]*', Literal), + (r'[0-9]+\.[0-9]*', Number.Float), + (r'\.[0-9]+', Number.Float), + (r'[0-9]+', Number.Integer), + (r"'(''|[^'])*'", String), + (r'\s+', Whitespace) + ], + 'dialog_manager': [ + # Detect possible labels in first word of dialog manager line. 
+ (r'\s*type\b', Keyword.Reserved, 'dialog_manager_type'), + (r'[:][a-z_][a-z_0-9]*\s*\n', Name.Label, 'root'), + (r'"', String, 'dialog_manager_fidel'), + # TODO: Get rid of redundant dialog manager keyword rule which + # already could be handled by the included + # 'dialog_manager_others'. However, we currently need it to not + # recognize classic labels without ':' too soon. + (r'\b([?]|close|cms|crtclear|crtform|default|defaults|else|exit|' + r'goto|htmlform|if|include|mvs|pass|prompt|quit|read|repeat|' + r'run|set|then|tso|type|window|write)\b', Keyword.Reserved, + 'dialog_manager_others'), + (r'[a-z_][a-z_0-9]*\s*\n', Name.Label, 'root'), + include('dialog_manager_others'), + ], + 'dialog_manager_others': [ + (r'\n', Text, 'root'), + (r'\s*type\b', Keyword.Reserved, 'dialog_manager_type'), + (r'[:][a-z_][a-z_0-9]*\s*\n', Name.Label, 'root'), + (r'\b([?]|close|cms|crtclear|crtform|default|defaults|else|exit|' + r'goto|htmlform|if|include|mvs|pass|prompt|quit|read|repeat|' + r'run|set|then|tso|type|window|write)\b', Keyword.Reserved), + # Rules common to 'focus' and 'dialog_manager'. + (r'\b(and|contains|div|eq|exceeds|excludes|from|ge|gt|in|' + r'includes|is|is-from|is-from|is-less-than|is-more-than|' + r'is-not-missing|le|like|lt|mod|ne|not|not-from|omits|or|to)\b', + Operator), + (r'[-+*/=|!]', Operator), + (r'[(){}<>;,]', Punctuation), + (r'[a-z_][a-z_0-9]*', Literal), + (r'[&]+[a-z_][a-z_0-9]*', Name.Variable), + (r'[0-9]+\.[0-9]*', Number.Float), + (r'\.[0-9]+', Number.Float), + (r'[0-9]+', Number.Integer), + (r"'(''|[^'])*'", String), + (r'\s+', Whitespace) + ], + 'dialog_manager_type': [ + # For -TYPE, render everything as ``String`` except variables. 
+ (r'\n', Text, 'root'), + (r'[&]+[a-z_][a-z_0-9]*\.*', Name.Variable), + (r'[^&\n]*', String) + ], + 'dialog_manager_fidel': [ + (r'"', String, 'dialog_manager_fidel_end'), + (r'([<])([&][a-z][a-z_0-9]*)([/])([0-9]+)', + bygroups(Keyword.Reserved, Name.Variable, Operator, Number.Integer)), + (r'.', String) + ], + 'dialog_manager_fidel_end': [ + (r'\n', Text, 'root'), + (r'\s*', Whitespace) + ], + 'focus_fidel': [ + (r'"', String, 'focus_fidel_end'), + (r'[&]+[a-z][a-z_0-9]*', Name.Variable), + (r'\>', Keyword.Reserved), + # Line continuation. + (r'\<0x\s*\n', Keyword.Reserved), + (r'([<])([a-z][a-z_0-9]*)', + bygroups(Keyword.Reserved, Name.Variable)), + (r'([<])([+-/]?)([0-9]+)', + bygroups(Keyword.Reserved, Operator, Number.Integer)), + (r'.', String) + ], + 'focus_fidel_end': [ + (r'\n', Text, 'root'), + (r'\s*', Whitespace) + ] + } diff --git a/tests/examplefiles/test.ezt b/tests/examplefiles/test.ezt new file mode 100644 index 00000000..8ef53089 --- /dev/null +++ b/tests/examplefiles/test.ezt @@ -0,0 +1,18 @@ +* EasyTrieve Plus Test Programm. + +* Environtment section. +PARM DEBUG(FLOW FLDCHK) + +* Library Section +FILE PERSNL FB(150 1800) + NAME 17 8 A + EMP# 9 5 N * <-- '#' is a valid character for names. + DEPT 98 3 N. GROSS 94 4 P 2 + * ^ 2 field definitions in 1 line. + +* Activity Section +JOB INPUT PERSNL NAME FIRST-PROGRAM + PRINT PAY-RPT +REPORT PAY-RPT LINESIZE 80 + TITLE 01 'PERSONNEL REPORT EXAMPLE-1' + LINE 01 DEPT NAME EMP# GROSS diff --git a/tests/examplefiles/test.fex b/tests/examplefiles/test.fex new file mode 100644 index 00000000..92d0b0f6 --- /dev/null +++ b/tests/examplefiles/test.fex @@ -0,0 +1,225 @@ +-* WebFOCUS test source file for pygments. + +-* Some basic commands +-TYPE hello world! +-SET &NAME='John'; +-TYPE hello &NAME ! +-SET &DATETMP = DATECVT((DATEADD((DATECVT(&DATEIN, 'I8YYMD', 'YYMD')),'M', -6)),'YYMD','I8YYMD'); + +-GOTO LABEL1; +-TYPE Skip me + +-* Label using classic syntax. +-LABEL1 + +-* Label using modern syntax. 
+-:LABEL2 + +-* Use FIDEL with MODIFY. +MODIFY FILE EMPLOYEE +CRTFORM + "EMPLOYEE UPDATE" + "EMPLOYEE ID #: : BANK, EMPLOYEES AND SALARIES " + "<10 . +-HTMLFORM CSTERM +-SET &ECHO = ALL; +-* +-* Replaces IDCAMS SYSIN +-* +DYNAM ALLOC FILE CATCTL NEW RECFM FB - + LRECL 80 BLKSIZE 3120 SPACE 1,1 TRK REUSE +-* +-* Replaces IDCAMS SYSPRINT +-* +DYNAM ALLOC FILE CATLST NEW RECFM VBA - + LRECL 125 BLKSIZE 27998 SPACE 2,1 TRK REUSE +-* +-* PRINT80 is a MFD used to read an 80 byte field named CONTENTS +-* +DYNAM ALLOC FILE PRNT80 NEW RECFM FB - + LRECL 80 BLKSIZE 3120 SPACE 1,1 TRK REUSE +-* +-RUN +-* +-* Create the tape dataset name mask (first five nodes) +-* +-SET &CAT1 = ' LISTC LVL(' || '''' || +- 'PN0100.A143200.P87.P87800.STERM' || '''' || ') VOL'; +-* +-WRITE CATCTL &CAT1 +-* +-RUN +-* +-* Call the user written subroutine (i.e., GETCAT) to retrieve a list of tape datasets +-* +-SET &X = ' '; +-SET &X = GETCAT(&X,'A1'); +-* +-* TABLE the catalog listing of tape datasets to a HOLD file +-* +DEFINE FILE PRNT80 + DSNAME/A44 = SUBSTR (80,CONTENTS,1,44,44,'A44'); +END +-* +TABLE FILE PRNT80 + PRINT DSNAME + ON TABLE HOLD +END +-* +DYNAM FREE FILE CATCTL +DYNAM FREE FILE CATLST +DYNAM FREE FILE PRNT80 +-* +-RUN +-* +-*----------------------------------------------------------------------- +-* +-* Create the HTML drop down list +-* +DEFINE FILE HOLD + NODE_POS/I3 = POSIT (DSNAME,44,'.SEM',4,'I3'); + WHATSEM/A4 = SUBSTR (44,DSNAME,NODE_POS+4,44,4,'A4'); + SEMWK/A2 = EDIT (WHATSEM,'$$99'); + SEMYR/A2 = EDIT (WHATSEM,'99$$'); + SEMESTERY2K/A6 = IF SEMYR LT '50' THEN ('20' | WHATSEM) + ELSE ('19' | WHATSEM); + D_SEM/A8 = DECODE SEMWK (03 'Spring' + 20 'Summer 1' + 26 'Summer 2' + 36 'Fall' + ELSE 'ERROR'); + SEMTXT/A16 = D_SEM | ' - ' | WHATSEM; +-* Create the dynamic html for the drop down list + SEMESTER/A50 = ''; +END +-* +TABLE FILE HOLD + SUM SEMESTER +-* Want the most recent semester first + BY HIGHEST SEMESTERY2K NOPRINT + ON TABLE SET HOLDLIST PRINTONLY + ON TABLE HOLD 
AS HOLDSEMS +END +-* +-RUN +-* +-*----------------------------------------------------------------------- +-* +-SET &SPACE = '&' || 'nbsp;'; +-* +-* Display the HTML form to allow users to select semester/cycle +-* +-HTMLFORM CSTERM diff --git a/tests/examplefiles/test.jcl b/tests/examplefiles/test.jcl new file mode 100644 index 00000000..51c5de1b --- /dev/null +++ b/tests/examplefiles/test.jcl @@ -0,0 +1,30 @@ +//IS198CPY JOB (PYGM-TEST-001),'PYGMENTS TEST JOB', +// CLASS=L, MSGCLASS=X, TIME=(00,10) +//* Copy 'OLDFILE' to 'NEWFILE'. +//COPY01 EXEC PGM=IEBGENER +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD DSN=OLDFILE,DISP=SHR +//SYSUT2 DD DSN=NEWFILE, +// DISP=(NEW,CATLG,DELETE), +// SPACE=(CYL,(40,5),RLSE), +// DCB=(LRECL=115,BLKSIZE=1150) +//SYSIN DD DUMMY +/* +//* Test line continuation in strings. +//CONT01 EXEC PGM=IEFBR14,PARM='THIS IS A LONG PARAMETER WITHIN APOST +// ROPHES, CONTINUED IN COLUMN 15 OF THE NEXT RECORD' +//* Sort a couple of lines and show the result in the job log. +//SORT01 EXEC PGM=IEFBR14 +//SORTIN DD * +spam +eggs +ham +/* +//SORTOUT DD SYSOUT=* +/* +//* Test line continuation with comment at the end of a line +//STP4 EXEC PROC=BILLING,COND.PAID=((20,LT),EVEN), +// COND.LATE=(60,GT,FIND), +// COND.BILL=((20,GE),(30,LT,CHGE)) THIS STATEMENT CALLS X +// THE BILLING PROCEDURE AND SPECIFIES RETURN CODE TESTS X +// FOR THREE PROCEDURE STEPS. -- cgit v1.2.1 From bb1611634bdef386c29c9558074efdd5bcfafbab Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Wed, 20 Feb 2013 02:27:09 +0100 Subject: Renamed mainframe example files from test.* to example.*. 
--- tests/examplefiles/example.ezt | 18 ++++ tests/examplefiles/example.fex | 225 +++++++++++++++++++++++++++++++++++++++++ tests/examplefiles/example.jcl | 30 ++++++ tests/examplefiles/test.ezt | 18 ---- tests/examplefiles/test.fex | 225 ----------------------------------------- tests/examplefiles/test.jcl | 30 ------ 6 files changed, 273 insertions(+), 273 deletions(-) create mode 100644 tests/examplefiles/example.ezt create mode 100644 tests/examplefiles/example.fex create mode 100644 tests/examplefiles/example.jcl delete mode 100644 tests/examplefiles/test.ezt delete mode 100644 tests/examplefiles/test.fex delete mode 100644 tests/examplefiles/test.jcl diff --git a/tests/examplefiles/example.ezt b/tests/examplefiles/example.ezt new file mode 100644 index 00000000..8ef53089 --- /dev/null +++ b/tests/examplefiles/example.ezt @@ -0,0 +1,18 @@ +* EasyTrieve Plus Test Programm. + +* Environtment section. +PARM DEBUG(FLOW FLDCHK) + +* Library Section +FILE PERSNL FB(150 1800) + NAME 17 8 A + EMP# 9 5 N * <-- '#' is a valid character for names. + DEPT 98 3 N. GROSS 94 4 P 2 + * ^ 2 field definitions in 1 line. + +* Activity Section +JOB INPUT PERSNL NAME FIRST-PROGRAM + PRINT PAY-RPT +REPORT PAY-RPT LINESIZE 80 + TITLE 01 'PERSONNEL REPORT EXAMPLE-1' + LINE 01 DEPT NAME EMP# GROSS diff --git a/tests/examplefiles/example.fex b/tests/examplefiles/example.fex new file mode 100644 index 00000000..92d0b0f6 --- /dev/null +++ b/tests/examplefiles/example.fex @@ -0,0 +1,225 @@ +-* WebFOCUS test source file for pygments. + +-* Some basic commands +-TYPE hello world! +-SET &NAME='John'; +-TYPE hello &NAME ! +-SET &DATETMP = DATECVT((DATEADD((DATECVT(&DATEIN, 'I8YYMD', 'YYMD')),'M', -6)),'YYMD','I8YYMD'); + +-GOTO LABEL1; +-TYPE Skip me + +-* Label using classic syntax. +-LABEL1 + +-* Label using modern syntax. +-:LABEL2 + +-* Use FIDEL with MODIFY. +MODIFY FILE EMPLOYEE +CRTFORM + "EMPLOYEE UPDATE" + "EMPLOYEE ID #: : BANK, EMPLOYEES AND SALARIES " + "<10 . 
+-HTMLFORM CSTERM +-SET &ECHO = ALL; +-* +-* Replaces IDCAMS SYSIN +-* +DYNAM ALLOC FILE CATCTL NEW RECFM FB - + LRECL 80 BLKSIZE 3120 SPACE 1,1 TRK REUSE +-* +-* Replaces IDCAMS SYSPRINT +-* +DYNAM ALLOC FILE CATLST NEW RECFM VBA - + LRECL 125 BLKSIZE 27998 SPACE 2,1 TRK REUSE +-* +-* PRINT80 is a MFD used to read an 80 byte field named CONTENTS +-* +DYNAM ALLOC FILE PRNT80 NEW RECFM FB - + LRECL 80 BLKSIZE 3120 SPACE 1,1 TRK REUSE +-* +-RUN +-* +-* Create the tape dataset name mask (first five nodes) +-* +-SET &CAT1 = ' LISTC LVL(' || '''' || +- 'PN0100.A143200.P87.P87800.STERM' || '''' || ') VOL'; +-* +-WRITE CATCTL &CAT1 +-* +-RUN +-* +-* Call the user written subroutine (i.e., GETCAT) to retrieve a list of tape datasets +-* +-SET &X = ' '; +-SET &X = GETCAT(&X,'A1'); +-* +-* TABLE the catalog listing of tape datasets to a HOLD file +-* +DEFINE FILE PRNT80 + DSNAME/A44 = SUBSTR (80,CONTENTS,1,44,44,'A44'); +END +-* +TABLE FILE PRNT80 + PRINT DSNAME + ON TABLE HOLD +END +-* +DYNAM FREE FILE CATCTL +DYNAM FREE FILE CATLST +DYNAM FREE FILE PRNT80 +-* +-RUN +-* +-*----------------------------------------------------------------------- +-* +-* Create the HTML drop down list +-* +DEFINE FILE HOLD + NODE_POS/I3 = POSIT (DSNAME,44,'.SEM',4,'I3'); + WHATSEM/A4 = SUBSTR (44,DSNAME,NODE_POS+4,44,4,'A4'); + SEMWK/A2 = EDIT (WHATSEM,'$$99'); + SEMYR/A2 = EDIT (WHATSEM,'99$$'); + SEMESTERY2K/A6 = IF SEMYR LT '50' THEN ('20' | WHATSEM) + ELSE ('19' | WHATSEM); + D_SEM/A8 = DECODE SEMWK (03 'Spring' + 20 'Summer 1' + 26 'Summer 2' + 36 'Fall' + ELSE 'ERROR'); + SEMTXT/A16 = D_SEM | ' - ' | WHATSEM; +-* Create the dynamic html for the drop down list + SEMESTER/A50 = ''; +END +-* +TABLE FILE HOLD + SUM SEMESTER +-* Want the most recent semester first + BY HIGHEST SEMESTERY2K NOPRINT + ON TABLE SET HOLDLIST PRINTONLY + ON TABLE HOLD AS HOLDSEMS +END +-* +-RUN +-* +-*----------------------------------------------------------------------- +-* +-SET &SPACE = '&' || 'nbsp;'; +-* +-* 
Display the HTML form to allow users to select semester/cycle +-* +-HTMLFORM CSTERM diff --git a/tests/examplefiles/example.jcl b/tests/examplefiles/example.jcl new file mode 100644 index 00000000..51c5de1b --- /dev/null +++ b/tests/examplefiles/example.jcl @@ -0,0 +1,30 @@ +//IS198CPY JOB (PYGM-TEST-001),'PYGMENTS TEST JOB', +// CLASS=L, MSGCLASS=X, TIME=(00,10) +//* Copy 'OLDFILE' to 'NEWFILE'. +//COPY01 EXEC PGM=IEBGENER +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD DSN=OLDFILE,DISP=SHR +//SYSUT2 DD DSN=NEWFILE, +// DISP=(NEW,CATLG,DELETE), +// SPACE=(CYL,(40,5),RLSE), +// DCB=(LRECL=115,BLKSIZE=1150) +//SYSIN DD DUMMY +/* +//* Test line continuation in strings. +//CONT01 EXEC PGM=IEFBR14,PARM='THIS IS A LONG PARAMETER WITHIN APOST +// ROPHES, CONTINUED IN COLUMN 15 OF THE NEXT RECORD' +//* Sort a couple of lines and show the result in the job log. +//SORT01 EXEC PGM=IEFBR14 +//SORTIN DD * +spam +eggs +ham +/* +//SORTOUT DD SYSOUT=* +/* +//* Test line continuation with comment at the end of a line +//STP4 EXEC PROC=BILLING,COND.PAID=((20,LT),EVEN), +// COND.LATE=(60,GT,FIND), +// COND.BILL=((20,GE),(30,LT,CHGE)) THIS STATEMENT CALLS X +// THE BILLING PROCEDURE AND SPECIFIES RETURN CODE TESTS X +// FOR THREE PROCEDURE STEPS. diff --git a/tests/examplefiles/test.ezt b/tests/examplefiles/test.ezt deleted file mode 100644 index 8ef53089..00000000 --- a/tests/examplefiles/test.ezt +++ /dev/null @@ -1,18 +0,0 @@ -* EasyTrieve Plus Test Programm. - -* Environtment section. -PARM DEBUG(FLOW FLDCHK) - -* Library Section -FILE PERSNL FB(150 1800) - NAME 17 8 A - EMP# 9 5 N * <-- '#' is a valid character for names. - DEPT 98 3 N. GROSS 94 4 P 2 - * ^ 2 field definitions in 1 line. 
- -* Activity Section -JOB INPUT PERSNL NAME FIRST-PROGRAM - PRINT PAY-RPT -REPORT PAY-RPT LINESIZE 80 - TITLE 01 'PERSONNEL REPORT EXAMPLE-1' - LINE 01 DEPT NAME EMP# GROSS diff --git a/tests/examplefiles/test.fex b/tests/examplefiles/test.fex deleted file mode 100644 index 92d0b0f6..00000000 --- a/tests/examplefiles/test.fex +++ /dev/null @@ -1,225 +0,0 @@ --* WebFOCUS test source file for pygments. - --* Some basic commands --TYPE hello world! --SET &NAME='John'; --TYPE hello &NAME ! --SET &DATETMP = DATECVT((DATEADD((DATECVT(&DATEIN, 'I8YYMD', 'YYMD')),'M', -6)),'YYMD','I8YYMD'); - --GOTO LABEL1; --TYPE Skip me - --* Label using classic syntax. --LABEL1 - --* Label using modern syntax. --:LABEL2 - --* Use FIDEL with MODIFY. -MODIFY FILE EMPLOYEE -CRTFORM - "EMPLOYEE UPDATE" - "EMPLOYEE ID #: : BANK, EMPLOYEES AND SALARIES " - "<10 . --HTMLFORM CSTERM --SET &ECHO = ALL; --* --* Replaces IDCAMS SYSIN --* -DYNAM ALLOC FILE CATCTL NEW RECFM FB - - LRECL 80 BLKSIZE 3120 SPACE 1,1 TRK REUSE --* --* Replaces IDCAMS SYSPRINT --* -DYNAM ALLOC FILE CATLST NEW RECFM VBA - - LRECL 125 BLKSIZE 27998 SPACE 2,1 TRK REUSE --* --* PRINT80 is a MFD used to read an 80 byte field named CONTENTS --* -DYNAM ALLOC FILE PRNT80 NEW RECFM FB - - LRECL 80 BLKSIZE 3120 SPACE 1,1 TRK REUSE --* --RUN --* --* Create the tape dataset name mask (first five nodes) --* --SET &CAT1 = ' LISTC LVL(' || '''' || -- 'PN0100.A143200.P87.P87800.STERM' || '''' || ') VOL'; --* --WRITE CATCTL &CAT1 --* --RUN --* --* Call the user written subroutine (i.e., GETCAT) to retrieve a list of tape datasets --* --SET &X = ' '; --SET &X = GETCAT(&X,'A1'); --* --* TABLE the catalog listing of tape datasets to a HOLD file --* -DEFINE FILE PRNT80 - DSNAME/A44 = SUBSTR (80,CONTENTS,1,44,44,'A44'); -END --* -TABLE FILE PRNT80 - PRINT DSNAME - ON TABLE HOLD -END --* -DYNAM FREE FILE CATCTL -DYNAM FREE FILE CATLST -DYNAM FREE FILE PRNT80 --* --RUN --* 
--*----------------------------------------------------------------------- --* --* Create the HTML drop down list --* -DEFINE FILE HOLD - NODE_POS/I3 = POSIT (DSNAME,44,'.SEM',4,'I3'); - WHATSEM/A4 = SUBSTR (44,DSNAME,NODE_POS+4,44,4,'A4'); - SEMWK/A2 = EDIT (WHATSEM,'$$99'); - SEMYR/A2 = EDIT (WHATSEM,'99$$'); - SEMESTERY2K/A6 = IF SEMYR LT '50' THEN ('20' | WHATSEM) - ELSE ('19' | WHATSEM); - D_SEM/A8 = DECODE SEMWK (03 'Spring' - 20 'Summer 1' - 26 'Summer 2' - 36 'Fall' - ELSE 'ERROR'); - SEMTXT/A16 = D_SEM | ' - ' | WHATSEM; --* Create the dynamic html for the drop down list - SEMESTER/A50 = ''; -END --* -TABLE FILE HOLD - SUM SEMESTER --* Want the most recent semester first - BY HIGHEST SEMESTERY2K NOPRINT - ON TABLE SET HOLDLIST PRINTONLY - ON TABLE HOLD AS HOLDSEMS -END --* --RUN --* --*----------------------------------------------------------------------- --* --SET &SPACE = '&' || 'nbsp;'; --* --* Display the HTML form to allow users to select semester/cycle --* --HTMLFORM CSTERM diff --git a/tests/examplefiles/test.jcl b/tests/examplefiles/test.jcl deleted file mode 100644 index 51c5de1b..00000000 --- a/tests/examplefiles/test.jcl +++ /dev/null @@ -1,30 +0,0 @@ -//IS198CPY JOB (PYGM-TEST-001),'PYGMENTS TEST JOB', -// CLASS=L, MSGCLASS=X, TIME=(00,10) -//* Copy 'OLDFILE' to 'NEWFILE'. -//COPY01 EXEC PGM=IEBGENER -//SYSPRINT DD SYSOUT=* -//SYSUT1 DD DSN=OLDFILE,DISP=SHR -//SYSUT2 DD DSN=NEWFILE, -// DISP=(NEW,CATLG,DELETE), -// SPACE=(CYL,(40,5),RLSE), -// DCB=(LRECL=115,BLKSIZE=1150) -//SYSIN DD DUMMY -/* -//* Test line continuation in strings. -//CONT01 EXEC PGM=IEFBR14,PARM='THIS IS A LONG PARAMETER WITHIN APOST -// ROPHES, CONTINUED IN COLUMN 15 OF THE NEXT RECORD' -//* Sort a couple of lines and show the result in the job log. 
-//SORT01 EXEC PGM=IEFBR14 -//SORTIN DD * -spam -eggs -ham -/* -//SORTOUT DD SYSOUT=* -/* -//* Test line continuation with comment at the end of a line -//STP4 EXEC PROC=BILLING,COND.PAID=((20,LT),EVEN), -// COND.LATE=(60,GT,FIND), -// COND.BILL=((20,GE),(30,LT,CHGE)) THIS STATEMENT CALLS X -// THE BILLING PROCEDURE AND SPECIFIES RETURN CODE TESTS X -// FOR THREE PROCEDURE STEPS. -- cgit v1.2.1 From 117caaeb9d100b37f7a3f652f5c33cc1b62c30ba Mon Sep 17 00:00:00 2001 From: roskakori Date: Wed, 20 Feb 2013 03:22:35 +0100 Subject: Added support for option comments. --- pygments/lexers/other.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 18d3059d..701f98b9 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -365,7 +365,7 @@ class SmalltalkLexer(RegexLexer): include('literals'), ], 'afterobject' : [ - (r'! !$', Keyword , '#pop'), # squeak chunk delimeter + (r'! !$', Keyword , '#pop'), # squeak chunk delimiter include('whitespaces'), (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)', Name.Builtin, '#pop'), @@ -3773,7 +3773,7 @@ class JclLexer(RegexLexer): ], 'statement_command': [ (r'\s+(command|cntl|dd|endctl|endif|else|include|jcllib|' - r'output|pend|proc|set|then|xmit)', Keyword.Reserved, 'option'), + r'output|pend|proc|set|then|xmit)\s*', Keyword.Reserved, 'option'), include('option') ], 'jes2_statement': [ @@ -3792,13 +3792,18 @@ class JclLexer(RegexLexer): (r'\.[0-9]+', Number.Float), (r'[0-9]+', Number.Integer), (r"'", String, 'option_string'), - (r'\s+', Whitespace), + (r'\s+', Whitespace, 'option_comment'), + (r'\.', Punctuation), ], 'option_string': [ (r"(\n)(//)", bygroups(Text, Keyword.Pseudo)), (r"''", String), (r"[^']", String), (r"'", String, 'option'), + ], + 'option_comment': [ + (r'\n', Text, 'root'), + (r'.*', Comment.Single), ] } -- cgit v1.2.1 From b05cda5226e10dbfdfe9a939277db909dcf1624d Mon Sep 17 00:00:00 2001 From: roskakori 
Date: Wed, 20 Feb 2013 03:27:16 +0100 Subject: Cleaned up indentation. --- pygments/lexers/other.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 701f98b9..240becdc 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -1963,11 +1963,11 @@ class AsymptoteLexer(RegexLexer): from pygments.lexers._asybuiltins import ASYFUNCNAME, ASYVARNAME for index, token, value in \ RegexLexer.get_tokens_unprocessed(self, text): - if token is Name and value in ASYFUNCNAME: - token = Name.Function - elif token is Name and value in ASYVARNAME: - token = Name.Variable - yield index, token, value + if token is Name and value in ASYFUNCNAME: + token = Name.Function + elif token is Name and value in ASYVARNAME: + token = Name.Variable + yield index, token, value class PostScriptLexer(RegexLexer): -- cgit v1.2.1 From 2c3d320beee9e4dfa039e96cbc629af80ca0da08 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Wed, 20 Feb 2013 17:36:30 +0100 Subject: * Added example for JCL option comment. * Fixed broken white space in example JCL job header. --- tests/examplefiles/example.jcl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/examplefiles/example.jcl b/tests/examplefiles/example.jcl index 51c5de1b..fb006369 100644 --- a/tests/examplefiles/example.jcl +++ b/tests/examplefiles/example.jcl @@ -1,12 +1,12 @@ //IS198CPY JOB (PYGM-TEST-001),'PYGMENTS TEST JOB', -// CLASS=L, MSGCLASS=X, TIME=(00,10) +// CLASS=L,MSGCLASS=X,TIME=(00,10) //* Copy 'OLDFILE' to 'NEWFILE'. 
//COPY01 EXEC PGM=IEBGENER //SYSPRINT DD SYSOUT=* //SYSUT1 DD DSN=OLDFILE,DISP=SHR //SYSUT2 DD DSN=NEWFILE, // DISP=(NEW,CATLG,DELETE), -// SPACE=(CYL,(40,5),RLSE), +// SPACE=(CYL,(40,5),RLSE), Some comment // DCB=(LRECL=115,BLKSIZE=1150) //SYSIN DD DUMMY /* -- cgit v1.2.1 From 3260756274b472351f56d9ce0e8a227d5f98c9be Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Wed, 20 Feb 2013 17:38:28 +0100 Subject: Added EasyTrieve 'not' operator. --- pygments/lexers/other.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 240becdc..57598e1a 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3685,7 +3685,6 @@ class EasyTrieveLexer(RegexLexer): # TODO: Treat only the first 72 characters as source code and the rest as comment. # TODO: After some reserved words such as 'define', even keywords are names. # TODO: Consider continuation characters '+' and '-' - # TODO: Treat the 'not' character as operator. tokens = { 'root': [ @@ -3728,7 +3727,7 @@ class EasyTrieveLexer(RegexLexer): # treating them differently from names seems in order. # TODO: Fix: (r'(param|report)[ \'.,():]', Keyword), (r'[\[\](){}<>;,]', Punctuation), - (r'[-+/=&%]', Operator), + (ur'[-+/=&%¬]', Operator), (r'[0-9]+\.[0-9]*', Number.Float), (r'[0-9]+', Number.Integer), (r"'(''|[^'])*'", String), -- cgit v1.2.1 From 5b3aa5ce59965a6663e0451ca5600c16091ad31d Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Wed, 20 Feb 2013 17:44:16 +0100 Subject: Fixed endless loop in case a WebFOCUS FIDEL segment contained any non white space characters after the terminating double quote. 
--- pygments/lexers/other.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 57598e1a..9607e327 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3918,7 +3918,7 @@ class WebFocusLexer(RegexLexer): ], 'dialog_manager_fidel_end': [ (r'\n', Text, 'root'), - (r'\s*', Whitespace) + (r'\s+', Whitespace) ], 'focus_fidel': [ (r'"', String, 'focus_fidel_end'), @@ -3934,6 +3934,6 @@ class WebFocusLexer(RegexLexer): ], 'focus_fidel_end': [ (r'\n', Text, 'root'), - (r'\s*', Whitespace) + (r'\s+', Whitespace) ] } -- cgit v1.2.1 From 149dc5f44ec02a0ec06ff14a39262b97fa1d192b Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Fri, 22 Feb 2013 02:06:43 +0100 Subject: * Added guessing for EasyTrieve, JCL and (Web)FOCUS lexer. * Removed comment containing "<-" from EasyTrieve example so it won't be guessed to be a source code for the S language. * Improved guessing for Mason language so it does not interpret FOCUS FIDEL variable references as Mason calling components. * Cleaned up indentation. --- pygments/lexers/other.py | 154 ++++++++++++++++++++++++++++++++++++----- pygments/lexers/templates.py | 10 +-- tests/examplefiles/example.ezt | 2 +- tests/test_lexers_other.py | 53 ++++++++++++++ 4 files changed, 197 insertions(+), 22 deletions(-) create mode 100644 tests/test_lexers_other.py diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 9607e327..446e6eff 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3734,10 +3734,70 @@ class EasyTrieveLexer(RegexLexer): (r'\*.*\n', Comment.Single), (r'\.', Operator), (r'\s+', Whitespace), - (r'[^ \'.,():]+', Name) # Everything else just belongs to a name + (r'[^ \'.,():]+', Name) # Everything else just belongs to a name ] } + def analyse_text(text): + """ + Perform a structural analysis for basic EasyTrieve constructs. 
+ """ + result = 0.0 + hasEndProc = False + hasFile = False + hasJob = False + hasProc = False + hasParam = False + hasReport = False + isBroken = False + + # Scan the source for lines starting with indicators. + for line in text.split('\n'): + words = line.split() + if (len(words) >= 2): + first_word = words[0] + if not hasReport: + if not hasJob: + if not hasFile: + if not hasParam: + if first_word == 'PARAM': + hasParam = True + if first_word == 'FILE': + hasFile = True + if first_word == 'JOB': + hasJob = True + elif first_word == 'PROC': + hasProc = True + elif first_word == 'END-PROC': + hasEndProc = True + elif first_word == 'REPORT': + hasReport = True + + # Weight the findings. + if not isBroken and hasJob and (hasProc == hasEndProc): + if hasParam: + if hasProc: + # Found PARAM, JOB and PROC/END-PROC: + # pretty sure this is EasyTrieve. + result = 0.8 + else: + # Found PARAM and JOB: probably this is EasyTrieve + result = 0.5 + else: + # Found JOB and possibly other keywords: might be EasyTrieve + result = 0.11 + if hasParam: + # Note: PARAM is not a proper English word, so this is + # regarded a much better indicator for EasyTrieve than + # the other words. + result += 0.2 + if hasFile: + result += 0.01 + if hasReport: + result += 0.01 + assert 0.0 <= result <= 1.0 + return result + class JclLexer(RegexLexer): """ @@ -3760,7 +3820,7 @@ class JclLexer(RegexLexer): (r'//', Keyword.Pseudo, 'statement'), (r'/\*', Keyword.Pseudo, 'jes2_statement'), # TODO: JES3 statement - (r'.*\n', Other) # Input text or inline code in any language. + (r'.*\n', Other) # Input text or inline code in any language. ], 'statement': [ (r'\s*\n', Whitespace, 'root'), @@ -3806,6 +3866,20 @@ class JclLexer(RegexLexer): ] } + _JOB_HEADER_PATTERN = re.compile(r'^//[a-z#$@][a-z0-9#$@]{0,7}\s+job(\s+.*)?$', re.IGNORECASE) + + def analyse_text(text): + """ + Recognize JCL job by header. 
+ """ + result = 0.0 + lines = text.split('\n') + if len(lines) > 0: + if JclLexer._JOB_HEADER_PATTERN.match(lines[0]): + result = 1.0 + assert 0.0 <= result <= 1.0 + return result + class WebFocusLexer(RegexLexer): """ @@ -3865,8 +3939,8 @@ class WebFocusLexer(RegexLexer): (r'[0-9]+', Number.Integer), (r"'(''|[^'])*'", String), (r'\s+', Whitespace) - ], - 'dialog_manager': [ + ], + 'dialog_manager': [ # Detect possible labels in first word of dialog manager line. (r'\s*type\b', Keyword.Reserved, 'dialog_manager_type'), (r'[:][a-z_][a-z_0-9]*\s*\n', Name.Label, 'root'), @@ -3881,8 +3955,8 @@ class WebFocusLexer(RegexLexer): 'dialog_manager_others'), (r'[a-z_][a-z_0-9]*\s*\n', Name.Label, 'root'), include('dialog_manager_others'), - ], - 'dialog_manager_others': [ + ], + 'dialog_manager_others': [ (r'\n', Text, 'root'), (r'\s*type\b', Keyword.Reserved, 'dialog_manager_type'), (r'[:][a-z_][a-z_0-9]*\s*\n', Name.Label, 'root'), @@ -3903,24 +3977,24 @@ class WebFocusLexer(RegexLexer): (r'[0-9]+', Number.Integer), (r"'(''|[^'])*'", String), (r'\s+', Whitespace) - ], - 'dialog_manager_type': [ + ], + 'dialog_manager_type': [ # For -TYPE, render everything as ``String`` except variables. 
(r'\n', Text, 'root'), (r'[&]+[a-z_][a-z_0-9]*\.*', Name.Variable), (r'[^&\n]*', String) - ], - 'dialog_manager_fidel': [ + ], + 'dialog_manager_fidel': [ (r'"', String, 'dialog_manager_fidel_end'), (r'([<])([&][a-z][a-z_0-9]*)([/])([0-9]+)', bygroups(Keyword.Reserved, Name.Variable, Operator, Number.Integer)), (r'.', String) - ], - 'dialog_manager_fidel_end': [ + ], + 'dialog_manager_fidel_end': [ (r'\n', Text, 'root'), (r'\s+', Whitespace) - ], - 'focus_fidel': [ + ], + 'focus_fidel': [ (r'"', String, 'focus_fidel_end'), (r'[&]+[a-z][a-z_0-9]*', Name.Variable), (r'\>', Keyword.Reserved), @@ -3931,9 +4005,55 @@ class WebFocusLexer(RegexLexer): (r'([<])([+-/]?)([0-9]+)', bygroups(Keyword.Reserved, Operator, Number.Integer)), (r'.', String) - ], - 'focus_fidel_end': [ + ], + 'focus_fidel_end': [ (r'\n', Text, 'root'), (r'\s+', Whitespace) - ] + ] } + + def analyse_text(text): + """ + Perform a heuristic analysis for certain very common WebFOCUS + constructs. + """ + result = 0.0 + hasComment = False + hasExec = False + hasInclude = False + hasSet = False + hasTableFile = False + + # Scan the source lines for indicators. 
+ for line in text.lower().split('\n'): + if line.startswith('-'): + words = line[1:].split() + wordCount = len(words) + if wordCount > 0: + firstWord = words[0] + if firstWord.startswith('*'): + hasComment = True + elif wordCount > 1: + if firstWord == 'include': + hasInclude = True + elif (firstWord == 'set') and words[1].startswith('&'): + hasSet = True + else: + words = line.split() + wordCount = len(words) + if wordCount > 1: + if words[0] in ('ex', 'exec'): + hasExec = True + elif (words[0] in ('table', 'tablef')) \ + and (words[1] == 'file'): + hasTableFile = True + if hasComment: + result += 0.2 + if hasExec or hasInclude: + result += 0.1 + if hasTableFile: + result += 0.2 + if hasSet: + result += 0.1 + assert 0.0 <= result <= 1.0 + return result diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py index b3e70d05..55fb75dd 100644 --- a/pygments/lexers/templates.py +++ b/pygments/lexers/templates.py @@ -559,10 +559,12 @@ class MasonLexer(RegexLexer): } def analyse_text(text): - rv = 0.0 - if re.search('<&', text) is not None: - rv = 1.0 - return rv + result = 0.0 + if re.search(r'', text) is not None: + result = 1.0 + elif re.search(r'<&.+&>', text, re.DOTALL) is not None: + result = 0.11 + return result class MakoLexer(RegexLexer): diff --git a/tests/examplefiles/example.ezt b/tests/examplefiles/example.ezt index 8ef53089..68a849dd 100644 --- a/tests/examplefiles/example.ezt +++ b/tests/examplefiles/example.ezt @@ -6,7 +6,7 @@ PARM DEBUG(FLOW FLDCHK) * Library Section FILE PERSNL FB(150 1800) NAME 17 8 A - EMP# 9 5 N * <-- '#' is a valid character for names. + EMP# 9 5 N * Note: '#' is a valid character for names. DEPT 98 3 N. GROSS 94 4 P 2 * ^ 2 field definitions in 1 line. 
diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py new file mode 100644 index 00000000..3c64fd43 --- /dev/null +++ b/tests/test_lexers_other.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +""" + Tests for other lexers + ~~~~~~~~~~~~~~~~~~~~~~ + + :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" +import glob +import os +import unittest + +from pygments.lexers import guess_lexer +from pygments.lexers.other import EasyTrieveLexer, JclLexer, WebFocusLexer + + +def _exampleFilePath(filename): + return os.path.join(os.path.dirname(__file__), 'examplefiles', filename) + + +class _AnalyseTextTest(unittest.TestCase): + def setUp(self): + raise NotImplementedError('self.lexer must be set') + + def testCanRecognizeAndGuessExampleFiles(self): + for pattern in self.lexer.filenames: + exampleFilesPattern = _exampleFilePath(pattern) + for exampleFilePath in glob.glob(exampleFilesPattern): + exampleFile = open(exampleFilePath, 'rb') + try: + text = exampleFile.read() + probability = self.lexer.analyse_text(text) + self.assertTrue(probability > 0, + '%s must recognize %r' % (self.lexer.name, exampleFilePath)) + guessedLexer = guess_lexer(text) + self.assertEqual(guessedLexer.name, self.lexer.name) + finally: + exampleFile.close() + + +class EasyTrieveLexerTest(_AnalyseTextTest): + def setUp(self): + self.lexer = EasyTrieveLexer() + + +class JclLexerTest(_AnalyseTextTest): + def setUp(self): + self.lexer = JclLexer() + + +class WebFocusLexerTest(_AnalyseTextTest): + def setUp(self): + self.lexer = WebFocusLexer() -- cgit v1.2.1 From af2ddb5febe5d7f51cea3d9211c9099b99b586a5 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Fri, 22 Feb 2013 03:59:43 +0100 Subject: Fixed EasyTrieve lexer, which is now case sensitive and expects keywords in upper case. 
--- pygments/lexers/other.py | 53 ++++++++++++++++++++++++------------------------ 1 file changed, 26 insertions(+), 27 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 446e6eff..0b2a31e6 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3680,7 +3680,6 @@ class EasyTrieveLexer(RegexLexer): aliases = ['easytrieve'] filenames = ['*.ezt'] mimetypes = ['text/x-easytrieve'] - flags = re.IGNORECASE # TODO: Treat only the first 72 characters as source code and the rest as comment. # TODO: After some reserved words such as 'define', even keywords are names. @@ -3697,35 +3696,35 @@ class EasyTrieveLexer(RegexLexer): # * comma (,) # * paranthesis ( and ) # * colon (:) - (r'(after-break|after-line|after-screen|aim|and|attr|before|' - r'before-break|before-line|before-screen|bushu|by|call|case|' - r'checkpoint|chkp|chkp-status|clear|close|col|color|commit|' - r'control|copy|cursor|d|declare|default|define|delete|denwa|' - r'display|dli|do|duplicate|e|else|else-if|end|end-case|end-do|' - r'end-if|end-proc|endpage|endtable|enter|eof|eq|error|exit|' - r'external|ezlib|f1|f10|f11|f12|f13|f14|f15|f16|f17|f18|f19|f2|' - r'f20|f21|f22|f23|f24|f25|f26|f27|f28|f29|f3|f30|f31|f32|f33|' - r'f34|f35|f36|f4|f5|f6|f7|f8|f9|fetch|file|file-status|fill|' - r'final|first|first-dup|for|ge|get|go|goto|gq|gr|gt|heading|' - r'hex|high-values|idd|idms|if|in|insert|job|justify|kanji-date|' - r'kanji-date-long|kanji-time|key|key-pressed|kokugo|kun|' - r'last-dup|le|level|like|line|line-count|line-number|link|list|' - r'low-values|lq|ls|lt|mask|matched|mend|message|move|mstart|ne|' - r'newpage|nomask|noprint|not|note|noverify|nq|null|of|or|' - r'otherwise|pa1|pa2|pa3|page-count|page-number|parm-register|' - r'path-id|pattern|perform|point|pos|primary|print|proc|' - r'procedure|program|put|read|record|record-count|record-length|' - r'refresh|release|renum|repeat|report|report-input|reshow|' - 
r'restart|retrieve|return-code|rollback|row|s|screen|search|' - r'secondary|select|sequence|size|skip|sokaku|sort|sql|stop|sum|' - r'sysdate|sysdate-long|sysin|sysipt|syslst|sysprint|syssnap|' - r'systime|tally|term-columns|term-name|term-rows|termination|' - r'title|to|transfer|trc|unique|until|update|uppercase|user|' - r'userid|value|verify|w|when|while|work|write|x|xdm|xrst)[ \'.,():]', + (r'(AFTER-BREAK|AFTER-LINE|AFTER-SCREEN|AIM|AND|ATTR|BEFORE|' + r'BEFORE-BREAK|BEFORE-LINE|BEFORE-SCREEN|BUSHU|BY|CALL|CASE|' + r'CHECKPOINT|CHKP|CHKP-STATUS|CLEAR|CLOSE|COL|COLOR|COMMIT|' + r'CONTROL|COPY|CURSOR|D|DECLARE|DEFAULT|DEFINE|DELETE|DENWA|' + r'DISPLAY|DLI|DO|DUPLICATE|E|ELSE|ELSE-IF|END|END-CASE|END-DO|' + r'END-IF|END-PROC|ENDPAGE|ENDTABLE|ENTER|EOF|EQ|ERROR|EXIT|' + r'EXTERNAL|EZLIB|F1|F10|F11|F12|F13|F14|F15|F16|F17|F18|F19|F2|' + r'F20|F21|F22|F23|F24|F25|F26|F27|F28|F29|F3|F30|F31|F32|F33|' + r'F34|F35|F36|F4|F5|F6|F7|F8|F9|FETCH|FILE|FILE-STATUS|FILL|' + r'FINAL|FIRST|FIRST-DUP|FOR|GE|GET|GO|GOTO|GQ|GR|GT|HEADING|' + r'HEX|HIGH-VALUES|IDD|IDMS|IF|IN|INSERT|JOB|JUSTIFY|KANJI-DATE|' + r'KANJI-DATE-LONG|KANJI-TIME|KEY|KEY-PRESSED|KOKUGO|KUN|' + r'LAST-DUP|LE|LEVEL|LIKE|LINE|LINE-COUNT|LINE-NUMBER|LINK|LIST|' + r'LOW-VALUES|LQ|LS|LT|MASK|MATCHED|MEND|MESSAGE|MOVE|MSTART|NE|' + r'NEWPAGE|NOMASK|NOPRINT|NOT|NOTE|NOVERIFY|NQ|NULL|OF|OR|' + r'OTHERWISE|PA1|PA2|PA3|PAGE-COUNT|PAGE-NUMBER|PARM-REGISTER|' + r'PATH-ID|PATTERN|PERFORM|POINT|POS|PRIMARY|PRINT|PROC|' + r'PROCEDURE|PROGRAM|PUT|READ|RECORD|RECORD-COUNT|RECORD-LENGTH|' + r'REFRESH|RELEASE|RENUM|REPEAT|REPORT|REPORT-INPUT|RESHOW|' + r'RESTART|RETRIEVE|RETURN-CODE|ROLLBACK|ROW|S|SCREEN|SEARCH|' + r'SECONDARY|SELECT|SEQUENCE|SIZE|SKIP|SOKAKU|SORT|SQL|STOP|SUM|' + r'SYSDATE|SYSDATE-LONG|SYSIN|SYSIPT|SYSLST|SYSPRINT|SYSSNAP|' + r'SYSTIME|TALLY|TERM-COLUMNS|TERM-NAME|TERM-ROWS|TERMINATION|' + r'TITLE|TO|TRANSFER|TRC|UNIQUE|UNTIL|UPDATE|UPPERCASE|USER|' + r'USERID|VALUE|VERIFY|W|WHEN|WHILE|WORK|WRITE|X|XDM|XRST)[ 
\'.,():]', Keyword.Reserved), # These are not actually keywords but section separators so # treating them differently from names seems in order. - # TODO: Fix: (r'(param|report)[ \'.,():]', Keyword), + # TODO: Fix: (r'(PARAM|REPORT)[ \'.,():]', Keyword), (r'[\[\](){}<>;,]', Punctuation), (ur'[-+/=&%¬]', Operator), (r'[0-9]+\.[0-9]*', Number.Float), -- cgit v1.2.1 From 93f496e25dc1a60d620a3288e9efe7fb9418dcfc Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sat, 23 Feb 2013 02:17:43 +0100 Subject: Cleaned up punctuation in comments. --- tests/examplefiles/example.ezt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/examplefiles/example.ezt b/tests/examplefiles/example.ezt index 68a849dd..5ea20b99 100644 --- a/tests/examplefiles/example.ezt +++ b/tests/examplefiles/example.ezt @@ -3,14 +3,14 @@ * Environtment section. PARM DEBUG(FLOW FLDCHK) -* Library Section +* Library Section. FILE PERSNL FB(150 1800) NAME 17 8 A EMP# 9 5 N * Note: '#' is a valid character for names. DEPT 98 3 N. GROSS 94 4 P 2 * ^ 2 field definitions in 1 line. -* Activity Section +* Activity Section. JOB INPUT PERSNL NAME FIRST-PROGRAM PRINT PAY-RPT REPORT PAY-RPT LINESIZE 80 -- cgit v1.2.1 From 5cfaf8dfa2fec182f2b2c86f330e97185e8801db Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sat, 23 Feb 2013 02:22:44 +0100 Subject: Fixed Easytrieve lexer. * Fixed keywords following immediately after a new line. * Fixed PARM keyword, which read PARAM. * Fixed keyword tokens so that the delimiter gets an own token instead of being a part of the keyword token. 
* Changed some keywords from reserved --- pygments/lexers/other.py | 37 ++++++++++++++++++++----------------- 1 file changed, 20 insertions(+), 17 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 0b2a31e6..668e71b3 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3680,13 +3680,18 @@ class EasyTrieveLexer(RegexLexer): aliases = ['easytrieve'] filenames = ['*.ezt'] mimetypes = ['text/x-easytrieve'] + flags = 0 + # TODO: Fix capitalization: Easytrieve instead of EasyTrieve. + # TODO: Add support for MACRO and related calls. # TODO: Treat only the first 72 characters as source code and the rest as comment. # TODO: After some reserved words such as 'define', even keywords are names. # TODO: Consider continuation characters '+' and '-' tokens = { 'root': [ + (r'\*.*\n', Comment.Single), + (r'\n+', Whitespace), # Note: We cannot use r'\b' at the start and end of keywords # because EasyTrieve Plus delimiter characters are: # @@ -3696,6 +3701,8 @@ class EasyTrieveLexer(RegexLexer): # * comma (,) # * paranthesis ( and ) # * colon (:) + (r'(FILE|JOB|PARM|PROC|REPORT)([ \'.,():\n])', + bygroups(Keyword.Declaration, Operator)), (r'(AFTER-BREAK|AFTER-LINE|AFTER-SCREEN|AIM|AND|ATTR|BEFORE|' r'BEFORE-BREAK|BEFORE-LINE|BEFORE-SCREEN|BUSHU|BY|CALL|CASE|' r'CHECKPOINT|CHKP|CHKP-STATUS|CLEAR|CLOSE|COL|COLOR|COMMIT|' @@ -3704,15 +3711,15 @@ class EasyTrieveLexer(RegexLexer): r'END-IF|END-PROC|ENDPAGE|ENDTABLE|ENTER|EOF|EQ|ERROR|EXIT|' r'EXTERNAL|EZLIB|F1|F10|F11|F12|F13|F14|F15|F16|F17|F18|F19|F2|' r'F20|F21|F22|F23|F24|F25|F26|F27|F28|F29|F3|F30|F31|F32|F33|' - r'F34|F35|F36|F4|F5|F6|F7|F8|F9|FETCH|FILE|FILE-STATUS|FILL|' + r'F34|F35|F36|F4|F5|F6|F7|F8|F9|FETCH|FILE-STATUS|FILL|' r'FINAL|FIRST|FIRST-DUP|FOR|GE|GET|GO|GOTO|GQ|GR|GT|HEADING|' - r'HEX|HIGH-VALUES|IDD|IDMS|IF|IN|INSERT|JOB|JUSTIFY|KANJI-DATE|' + r'HEX|HIGH-VALUES|IDD|IDMS|IF|IN|INSERT|JUSTIFY|KANJI-DATE|' r'KANJI-DATE-LONG|KANJI-TIME|KEY|KEY-PRESSED|KOKUGO|KUN|' 
r'LAST-DUP|LE|LEVEL|LIKE|LINE|LINE-COUNT|LINE-NUMBER|LINK|LIST|' r'LOW-VALUES|LQ|LS|LT|MASK|MATCHED|MEND|MESSAGE|MOVE|MSTART|NE|' r'NEWPAGE|NOMASK|NOPRINT|NOT|NOTE|NOVERIFY|NQ|NULL|OF|OR|' r'OTHERWISE|PA1|PA2|PA3|PAGE-COUNT|PAGE-NUMBER|PARM-REGISTER|' - r'PATH-ID|PATTERN|PERFORM|POINT|POS|PRIMARY|PRINT|PROC|' + r'PATH-ID|PATTERN|PERFORM|POINT|POS|PRIMARY|PRINT|' r'PROCEDURE|PROGRAM|PUT|READ|RECORD|RECORD-COUNT|RECORD-LENGTH|' r'REFRESH|RELEASE|RENUM|REPEAT|REPORT|REPORT-INPUT|RESHOW|' r'RESTART|RETRIEVE|RETURN-CODE|ROLLBACK|ROW|S|SCREEN|SEARCH|' @@ -3720,20 +3727,16 @@ class EasyTrieveLexer(RegexLexer): r'SYSDATE|SYSDATE-LONG|SYSIN|SYSIPT|SYSLST|SYSPRINT|SYSSNAP|' r'SYSTIME|TALLY|TERM-COLUMNS|TERM-NAME|TERM-ROWS|TERMINATION|' r'TITLE|TO|TRANSFER|TRC|UNIQUE|UNTIL|UPDATE|UPPERCASE|USER|' - r'USERID|VALUE|VERIFY|W|WHEN|WHILE|WORK|WRITE|X|XDM|XRST)[ \'.,():]', - Keyword.Reserved), - # These are not actually keywords but section separators so - # treating them differently from names seems in order. 
- # TODO: Fix: (r'(PARAM|REPORT)[ \'.,():]', Keyword), + r'USERID|VALUE|VERIFY|W|WHEN|WHILE|WORK|WRITE|X|XDM|XRST)([ \'.,():\n])', + bygroups(Keyword.Reserved, Operator)), (r'[\[\](){}<>;,]', Punctuation), (ur'[-+/=&%¬]', Operator), (r'[0-9]+\.[0-9]*', Number.Float), (r'[0-9]+', Number.Integer), (r"'(''|[^'])*'", String), - (r'\*.*\n', Comment.Single), (r'\.', Operator), (r'\s+', Whitespace), - (r'[^ \'.,():]+', Name) # Everything else just belongs to a name + (r'[^ \'.,():\n]+', Name) # Everything else just belongs to a name ] } @@ -3746,7 +3749,7 @@ class EasyTrieveLexer(RegexLexer): hasFile = False hasJob = False hasProc = False - hasParam = False + hasParm = False hasReport = False isBroken = False @@ -3758,9 +3761,9 @@ class EasyTrieveLexer(RegexLexer): if not hasReport: if not hasJob: if not hasFile: - if not hasParam: - if first_word == 'PARAM': - hasParam = True + if not hasParm: + if first_word == 'PARM': + hasParm = True if first_word == 'FILE': hasFile = True if first_word == 'JOB': @@ -3774,9 +3777,9 @@ class EasyTrieveLexer(RegexLexer): # Weight the findings. if not isBroken and hasJob and (hasProc == hasEndProc): - if hasParam: + if hasParm: if hasProc: - # Found PARAM, JOB and PROC/END-PROC: + # Found PARM, JOB and PROC/END-PROC: # pretty sure this is EasyTrieve. result = 0.8 else: @@ -3785,7 +3788,7 @@ class EasyTrieveLexer(RegexLexer): else: # Found JOB and possibly other keywords: might be EasyTrieve result = 0.11 - if hasParam: + if hasParm: # Note: PARAM is not a proper English word, so this is # regarded a much better indicator for EasyTrieve than # the other words. -- cgit v1.2.1 From c26700a25284d7c7e88ecb2bba6a5358a5392d82 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sat, 23 Feb 2013 05:46:46 +0100 Subject: Added Easytrieve macros. 
--- pygments/lexers/_mapping.py | 2 +- pygments/lexers/other.py | 146 ++++++++++++++++++++++++----------------- tests/examplefiles/example.ezt | 4 ++ tests/examplefiles/example.mac | 6 ++ 4 files changed, 96 insertions(+), 62 deletions(-) create mode 100644 tests/examplefiles/example.mac diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 5c4275d0..f651c8da 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -93,7 +93,7 @@ LEXERS = { 'DylanLidLexer': ('pygments.lexers.compiled', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)), 'ECLLexer': ('pygments.lexers.other', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)), 'ECLexer': ('pygments.lexers.compiled', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')), - 'EasyTrieveLexer': ('pygments.lexers.other', 'EasyTrieve', ('easytrieve',), ('*.ezt',), ('text/x-easytrieve',)), + 'EasyTrieveLexer': ('pygments.lexers.other', 'EasyTrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)), 'ElixirConsoleLexer': ('pygments.lexers.functional', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), 'ElixirLexer': ('pygments.lexers.functional', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)), 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 668e71b3..4428605a 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3671,36 +3671,38 @@ class AutoItLexer(RegexLexer): class EasyTrieveLexer(RegexLexer): """ - EasyTrieve (Classic and Plus) are programming languages tailored to - generating reports and are mainly used in mainframe related environments. + Easytrieve Plus is a programming language for extracting, filtering and + converting sequential data. Furthermore it can layout data for reports. 
+ It is mainly used on mainframe platforms and can access several of the + mainframe's native file formats. It is somewhat comparable to awk. - This lexer is designed for EasyTrieve Plus 6.4. + *New in Pygments 1.x.* """ name = 'EasyTrieve' aliases = ['easytrieve'] - filenames = ['*.ezt'] + filenames = ['*.ezt', '*.mac'] mimetypes = ['text/x-easytrieve'] flags = 0 # TODO: Fix capitalization: Easytrieve instead of EasyTrieve. - # TODO: Add support for MACRO and related calls. - # TODO: Treat only the first 72 characters as source code and the rest as comment. - # TODO: After some reserved words such as 'define', even keywords are names. - # TODO: Consider continuation characters '+' and '-' tokens = { + # Note: We cannot use r'\b' at the start and end of keywords because + # EasyTrieve Plus delimiter characters are: + # + # * space ( ) + # * apostrophe (') + # * period (.) + # * comma (,) + # * paranthesis ( and ) + # * colon (:) 'root': [ (r'\*.*\n', Comment.Single), (r'\n+', Whitespace), - # Note: We cannot use r'\b' at the start and end of keywords - # because EasyTrieve Plus delimiter characters are: - # - # * space ( ) - # * apostrophe (') - # * period (.) 
- # * comma (,) - # * paranthesis ( and ) - # * colon (:) + # Macro argument + (r'&[^ *\'.,():\n]+\.', Name.Variable, 'after_macro_argument'), + # Macro call + (r'%[^ *\'.,():\n]+', Name.Variable), (r'(FILE|JOB|PARM|PROC|REPORT)([ \'.,():\n])', bygroups(Keyword.Declaration, Operator)), (r'(AFTER-BREAK|AFTER-LINE|AFTER-SCREEN|AIM|AND|ATTR|BEFORE|' @@ -3737,7 +3739,15 @@ class EasyTrieveLexer(RegexLexer): (r'\.', Operator), (r'\s+', Whitespace), (r'[^ \'.,():\n]+', Name) # Everything else just belongs to a name - ] + ], + 'after_macro_argument': [ + (r'\*.*\n', Comment.Single, 'root'), + (r'[\n\s]+', Whitespace, 'root'), + (r'[\[\](){}<>;,]', Punctuation, 'root'), + (ur'[.+/=&%¬]', Operator, 'root'), + (r"'(''|[^'])*'", String, 'root'), + (r'[^ \'.,():\n]+', Name) # Everything else just belongs to a name + ], } def analyse_text(text): @@ -3745,7 +3755,9 @@ class EasyTrieveLexer(RegexLexer): Perform a structural analysis for basic EasyTrieve constructs. """ result = 0.0 + lines = text.split('\n') hasEndProc = False + hasHeaderComment = False hasFile = False hasJob = False hasProc = False @@ -3753,50 +3765,62 @@ class EasyTrieveLexer(RegexLexer): hasReport = False isBroken = False - # Scan the source for lines starting with indicators. - for line in text.split('\n'): - words = line.split() - if (len(words) >= 2): - first_word = words[0] - if not hasReport: - if not hasJob: - if not hasFile: - if not hasParm: - if first_word == 'PARM': - hasParm = True - if first_word == 'FILE': - hasFile = True - if first_word == 'JOB': - hasJob = True - elif first_word == 'PROC': - hasProc = True - elif first_word == 'END-PROC': - hasEndProc = True - elif first_word == 'REPORT': - hasReport = True - - # Weight the findings. - if not isBroken and hasJob and (hasProc == hasEndProc): - if hasParm: - if hasProc: - # Found PARM, JOB and PROC/END-PROC: - # pretty sure this is EasyTrieve. 
- result = 0.8 - else: - # Found PARAM and JOB: probably this is EasyTrieve - result = 0.5 - else: - # Found JOB and possibly other keywords: might be EasyTrieve - result = 0.11 + # Skip possible header comments. + while len(lines) and lines[0].startswith('*'): + hasHeaderComment = True + del lines[0] + + firstLine = lines[0] + if firstLine[:6] in ('MACRO', 'MACRO '): + # Looks like an Easytrieve macro. + result = 0.4 + if hasHeaderComment: + result += 0.4 + else: + # Scan the source for lines starting with indicators. + for line in lines: + words = line.split() + if (len(words) >= 2): + first_word = words[0] + if not hasReport: + if not hasJob: + if not hasFile: + if not hasParm: + if first_word == 'PARM': + hasParm = True + if first_word == 'FILE': + hasFile = True + if first_word == 'JOB': + hasJob = True + elif first_word == 'PROC': + hasProc = True + elif first_word == 'END-PROC': + hasEndProc = True + elif first_word == 'REPORT': + hasReport = True + + # Weight the findings. + if not isBroken and hasJob and (hasProc == hasEndProc): if hasParm: - # Note: PARAM is not a proper English word, so this is - # regarded a much better indicator for EasyTrieve than - # the other words. - result += 0.2 - if hasFile: - result += 0.01 - if hasReport: - result += 0.01 + if hasProc: + # Found PARM, JOB and PROC/END-PROC: + # pretty sure this is EasyTrieve. + result = 0.8 + else: + # Found PARAM and JOB: probably this is EasyTrieve + result = 0.5 + else: + # Found JOB and possibly other keywords: might be EasyTrieve + result = 0.11 + if hasParm: + # Note: PARAM is not a proper English word, so this is + # regarded a much better indicator for EasyTrieve than + # the other words. 
+ result += 0.2 + if hasFile: + result += 0.01 + if hasReport: + result += 0.01 assert 0.0 <= result <= 1.0 return result diff --git a/tests/examplefiles/example.ezt b/tests/examplefiles/example.ezt index 5ea20b99..5e4dc1ef 100644 --- a/tests/examplefiles/example.ezt +++ b/tests/examplefiles/example.ezt @@ -10,6 +10,10 @@ FILE PERSNL FB(150 1800) DEPT 98 3 N. GROSS 94 4 P 2 * ^ 2 field definitions in 1 line. +FILE EXAMPLE FB(80 200) +%EXAMPLE SOMEFILE SOME + +* Macro declaration (to be valid, this would * Activity Section. JOB INPUT PERSNL NAME FIRST-PROGRAM PRINT PAY-RPT diff --git a/tests/examplefiles/example.mac b/tests/examplefiles/example.mac new file mode 100644 index 00000000..1c3831d1 --- /dev/null +++ b/tests/examplefiles/example.mac @@ -0,0 +1,6 @@ +* Example Easytrieve macro declaration. For an example on calling this +* macro, see example.ezt. +MACRO FILENAME PREFIX +&FILENAME. +&PREFIX.-LINE 1 80 A +&PREFIX.-KEY 1 8 A -- cgit v1.2.1 From 6f26840e0644b004c7d067ae7581a1cca0c62abc Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sat, 23 Feb 2013 07:06:01 +0100 Subject: Cleaned up naming of Easytrieve (instead of EasyTrieve) --- pygments/lexers/_mapping.py | 2 +- pygments/lexers/other.py | 20 +++++++++----------- tests/test_lexers_other.py | 6 +++--- 3 files changed, 13 insertions(+), 15 deletions(-) diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index f651c8da..47530fee 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -93,7 +93,7 @@ LEXERS = { 'DylanLidLexer': ('pygments.lexers.compiled', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)), 'ECLLexer': ('pygments.lexers.other', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)), 'ECLexer': ('pygments.lexers.compiled', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')), - 'EasyTrieveLexer': ('pygments.lexers.other', 'EasyTrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)), + 
'EasytrieveLexer': ('pygments.lexers.other', 'Easytrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)), 'ElixirConsoleLexer': ('pygments.lexers.functional', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), 'ElixirLexer': ('pygments.lexers.functional', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)), 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 4428605a..cee39a08 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -36,7 +36,7 @@ __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer', 'ECLLexer', 'UrbiscriptLexer', 'OpenEdgeLexer', 'BroLexer', 'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer', 'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer', - 'CbmBasicV2Lexer', 'AutoItLexer', 'EasyTrieveLexer', 'JclLexer', + 'CbmBasicV2Lexer', 'AutoItLexer', 'EasytrieveLexer', 'JclLexer', 'WebFocusLexer'] @@ -3669,7 +3669,7 @@ class AutoItLexer(RegexLexer): } -class EasyTrieveLexer(RegexLexer): +class EasytrieveLexer(RegexLexer): """ Easytrieve Plus is a programming language for extracting, filtering and converting sequential data. Furthermore it can layout data for reports. @@ -3678,17 +3678,15 @@ class EasyTrieveLexer(RegexLexer): *New in Pygments 1.x.* """ - name = 'EasyTrieve' + name = 'Easytrieve' aliases = ['easytrieve'] filenames = ['*.ezt', '*.mac'] mimetypes = ['text/x-easytrieve'] flags = 0 - # TODO: Fix capitalization: Easytrieve instead of EasyTrieve. - tokens = { # Note: We cannot use r'\b' at the start and end of keywords because - # EasyTrieve Plus delimiter characters are: + # Easytrieve Plus delimiter characters are: # # * space ( ) # * apostrophe (') @@ -3752,7 +3750,7 @@ class EasyTrieveLexer(RegexLexer): def analyse_text(text): """ - Perform a structural analysis for basic EasyTrieve constructs. 
+ Perform a structural analysis for basic Easytrieve constructs. """ result = 0.0 lines = text.split('\n') @@ -3804,17 +3802,17 @@ class EasyTrieveLexer(RegexLexer): if hasParm: if hasProc: # Found PARM, JOB and PROC/END-PROC: - # pretty sure this is EasyTrieve. + # pretty sure this is Easytrieve. result = 0.8 else: - # Found PARAM and JOB: probably this is EasyTrieve + # Found PARAM and JOB: probably this is Easytrieve result = 0.5 else: - # Found JOB and possibly other keywords: might be EasyTrieve + # Found JOB and possibly other keywords: might be Easytrieve result = 0.11 if hasParm: # Note: PARAM is not a proper English word, so this is - # regarded a much better indicator for EasyTrieve than + # regarded a much better indicator for Easytrieve than # the other words. result += 0.2 if hasFile: diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py index 3c64fd43..2cdb8e5f 100644 --- a/tests/test_lexers_other.py +++ b/tests/test_lexers_other.py @@ -11,7 +11,7 @@ import os import unittest from pygments.lexers import guess_lexer -from pygments.lexers.other import EasyTrieveLexer, JclLexer, WebFocusLexer +from pygments.lexers.other import EasytrieveLexer, JclLexer, WebFocusLexer def _exampleFilePath(filename): @@ -38,9 +38,9 @@ class _AnalyseTextTest(unittest.TestCase): exampleFile.close() -class EasyTrieveLexerTest(_AnalyseTextTest): +class EasytrieveLexerTest(_AnalyseTextTest): def setUp(self): - self.lexer = EasyTrieveLexer() + self.lexer = EasytrieveLexer() class JclLexerTest(_AnalyseTextTest): -- cgit v1.2.1 From 5877fc13f6af82b1ef1dd85c3afc9bf4243b5bfb Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sat, 23 Feb 2013 13:02:31 +0100 Subject: Cleaned up naming of Easytrieve (instead of EasyTrieve). 
--- tests/examplefiles/example.ezt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/examplefiles/example.ezt b/tests/examplefiles/example.ezt index 5e4dc1ef..b068fca3 100644 --- a/tests/examplefiles/example.ezt +++ b/tests/examplefiles/example.ezt @@ -1,4 +1,4 @@ -* EasyTrieve Plus Test Programm. +* Easytrieve Plus Test Programm. * Environtment section. PARM DEBUG(FLOW FLDCHK) -- cgit v1.2.1 From bb8536fc6083d0d8a288532472e13a03a2bc74c5 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sun, 24 Feb 2013 05:58:24 +0100 Subject: Added lexer for Rexx. --- pygments/lexers/_mapping.py | 1 + pygments/lexers/other.py | 69 ++++++++++++++++++++++++++++++++++++++++- tests/examplefiles/example.rexx | 31 ++++++++++++++++++ tests/test_lexers_other.py | 6 +++- 4 files changed, 105 insertions(+), 2 deletions(-) create mode 100644 tests/examplefiles/example.rexx diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 47530fee..22a8b935 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -245,6 +245,7 @@ LEXERS = { 'RebolLexer': ('pygments.lexers.other', 'REBOL', ('rebol',), ('*.r', '*.r3'), ('text/x-rebol',)), 'RedcodeLexer': ('pygments.lexers.other', 'Redcode', ('redcode',), ('*.cw',), ()), 'RegeditLexer': ('pygments.lexers.text', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)), + 'RexxLexer': ('pygments.lexers.other', 'REXX', ('rexx', 'ARexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)), 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)), 'RobotFrameworkLexer': ('pygments.lexers.other', 'RobotFramework', ('RobotFramework', 'robotframework'), ('*.txt', '*.robot'), ('text/x-robotframework',)), 'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), diff --git a/pygments/lexers/other.py 
b/pygments/lexers/other.py index cee39a08..744f2337 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -37,7 +37,7 @@ __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer', 'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer', 'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer', 'CbmBasicV2Lexer', 'AutoItLexer', 'EasytrieveLexer', 'JclLexer', - 'WebFocusLexer'] + 'RexxLexer', 'WebFocusLexer'] class ECLLexer(RegexLexer): @@ -4081,3 +4081,70 @@ class WebFocusLexer(RegexLexer): result += 0.1 assert 0.0 <= result <= 1.0 return result + + +class RexxLexer(RegexLexer): + """ + `REXX `_ is a scripting language available for + a wide range of different platforms with its roots found on mainframe + systems. It is popular for I/O- and data based tasks and can act as glue + language to bind different applications together. + """ + name = 'REXX' + aliases = ['rexx', 'ARexx', 'arexx'] + filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx'] + mimetypes = ['text/x-rexx'] + flags = re.IGNORECASE + + tokens = { + 'root': [ + (r'[\s\n]', Whitespace), + (r'/\*', Comment.Multiline, 'comment'), + (r'["]', String, 'string_double'), + (r"'", String, 'string_single'), + (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number), + (r'([a-z_][a-z0-9_]*)(\s*)(:)(\s*)(procedure)', + bygroups(Name.Function, Whitespace, Operator, Whitespace, Keyword.Declaration)), + (r'([a-z_][a-z0-9_]*)(\s*)(:)', + bygroups(Name.Label, Whitespace, Operator)), + include('keyword'), + include('operator'), + (r'[a-z_][a-z0-9_]*', Text), + ], + 'keyword': [ + (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|' + r'interpret|iterate|leave|nop|numeric|off|on|options|parse|' + r'pull|push|queue|return|say|select|signal|to|then|trace|until|' + r'while)', Keyword.Reserved), + ], + 'operator': [ + (ur'(-|//|/|\(|\)|\*\*|\*|\\|\\<<|\\<|\\==|\\=|\\>>|\\>|\|\||\||' + ur'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|' + ur'¬>>|¬>|¬|\.)', Operator), + 
], + 'string_double': [ + (r'""', String), + (r'"', String, 'root'), + (r'\n', Error, 'root'), + (r'[^"]', String), + ], + 'string_single': [ + (r'\'\'', String), + (r'\'', String, 'root'), + (r'\n', Error, 'root'), + (r'[^\']', String), + ], + 'comment': [ + (r'[^*/]', Comment.Multiline), + (r'\*/', Comment.Multiline, 'root'), + ] + } + + def analyse_text(text): + """ + Check for inital comment. + """ + result = 0.0 + if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE): + result = 1.0 + return result diff --git a/tests/examplefiles/example.rexx b/tests/examplefiles/example.rexx new file mode 100644 index 00000000..dbe260d1 --- /dev/null +++ b/tests/examplefiles/example.rexx @@ -0,0 +1,31 @@ +/* REXX example. */ + +/* Some basic constructs. */ +almost_pi = 0.1415 + 3 +if almost_pi < 3 then + say 'huh?' +else do + say 'ok, almost_pi=' almost_pi || " - done" +end +x = '"' || "'" || '''' || """" /* quotes */ + +/* A comment + spawning multiple + lines. */ + +half: procedure + parse arg some + return some / 2 + +some_label: /* ... ready to go to. */ + +/* Print a text file on MVS. */ +ADDRESS TSO +"ALLOC F(TEXTFILE) DSN('some.text.dsn') SHR REU" +"EXECIO * DISKR TEXTFILE ( FINIS STEM LINES." 
+"FREE F(TEXTFILE)" +I = 1 +DO WHILE I <= LINES.0 + SAY ' LINE ' I ' : ' LINES.I + I = I + 1 +END diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py index 2cdb8e5f..b04a9d0d 100644 --- a/tests/test_lexers_other.py +++ b/tests/test_lexers_other.py @@ -11,7 +11,8 @@ import os import unittest from pygments.lexers import guess_lexer -from pygments.lexers.other import EasytrieveLexer, JclLexer, WebFocusLexer +from pygments.lexers.other import EasytrieveLexer, JclLexer, RexxLexer, \ + WebFocusLexer def _exampleFilePath(filename): @@ -47,6 +48,9 @@ class JclLexerTest(_AnalyseTextTest): def setUp(self): self.lexer = JclLexer() +class RexxLexerTest(_AnalyseTextTest): + def setUp(self): + self.lexer = RexxLexer() class WebFocusLexerTest(_AnalyseTextTest): def setUp(self): -- cgit v1.2.1 From b636e1c671056d9e1aa4201322f5f9b337cdcfb1 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sun, 24 Feb 2013 14:59:41 +0100 Subject: Fixed Rexx lexer. * Fixed comma (,) as operator. * Fixed slash (/) and asterisk (*) in comments. 
--- pygments/lexers/other.py | 6 +++--- tests/examplefiles/example.rexx | 17 ++++++++++------- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 744f2337..08a9bcd8 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -4115,12 +4115,12 @@ class RexxLexer(RegexLexer): (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|' r'interpret|iterate|leave|nop|numeric|off|on|options|parse|' r'pull|push|queue|return|say|select|signal|to|then|trace|until|' - r'while)', Keyword.Reserved), + r'while)\b', Keyword.Reserved), ], 'operator': [ (ur'(-|//|/|\(|\)|\*\*|\*|\\|\\<<|\\<|\\==|\\=|\\>>|\\>|\|\||\||' ur'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|' - ur'¬>>|¬>|¬|\.)', Operator), + ur'¬>>|¬>|¬|\.|,)', Operator), ], 'string_double': [ (r'""', String), @@ -4135,8 +4135,8 @@ class RexxLexer(RegexLexer): (r'[^\']', String), ], 'comment': [ - (r'[^*/]', Comment.Multiline), (r'\*/', Comment.Multiline, 'root'), + (r'(.|\n)', Comment.Multiline), ] } diff --git a/tests/examplefiles/example.rexx b/tests/examplefiles/example.rexx index dbe260d1..648f595b 100644 --- a/tests/examplefiles/example.rexx +++ b/tests/examplefiles/example.rexx @@ -5,19 +5,22 @@ almost_pi = 0.1415 + 3 if almost_pi < 3 then say 'huh?' else do - say 'ok, almost_pi=' almost_pi || " - done" + say 'almost_pi=' almost_pi || " - ok" end x = '"' || "'" || '''' || """" /* quotes */ /* A comment - spawning multiple - lines. */ + * spawning multiple + lines. /* / */ -half: procedure - parse arg some - return some / 2 +/* Labels and procedures. */ +some_label : -some_label: /* ... ready to go to. */ +divide: procedure + parse arg some other + return some / other + +call divide(5, 2) /* Print a text file on MVS. 
*/ ADDRESS TSO -- cgit v1.2.1 From fa0364e432b55d6ef783950b48f05057d87d43e5 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Mon, 25 Feb 2013 02:33:01 +0100 Subject: Improved heuristic for guessing Rexx from text. If the text starts with "/*" but without the "Rexx" tag as first word in the comment, the lexer now analyzes the whole text for common Rexx constructs. --- pygments/lexers/other.py | 66 +++++++++++++++++++++++++++++++++++++++++++--- tests/test_lexers_other.py | 29 +++++++++++++++++++- 2 files changed, 90 insertions(+), 5 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 08a9bcd8..55dcf66c 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3626,7 +3626,7 @@ class AutoItLexer(RegexLexer): (r'[a-zA-Z_#@$][a-zA-Z0-9_#@$]*', Name), (r'\\|\'', Text), (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape), - (r'_\n', Text), # Line continuation + (r'_\n', Text), # Line continuation include('garbage'), ], 'commands': [ @@ -3674,7 +3674,7 @@ class EasytrieveLexer(RegexLexer): Easytrieve Plus is a programming language for extracting, filtering and converting sequential data. Furthermore it can layout data for reports. It is mainly used on mainframe platforms and can access several of the - mainframe's native file formats. It is somewhat comparable to awk. + mainframe's native file formats. It is somewhat comparable to awk. *New in Pygments 1.x.* """ @@ -3796,7 +3796,7 @@ class EasytrieveLexer(RegexLexer): hasEndProc = True elif first_word == 'REPORT': hasReport = True - + # Weight the findings. 
if not isBroken and hasJob and (hasProc == hasEndProc): if hasParm: @@ -4103,7 +4103,7 @@ class RexxLexer(RegexLexer): (r'["]', String, 'string_double'), (r"'", String, 'string_single'), (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number), - (r'([a-z_][a-z0-9_]*)(\s*)(:)(\s*)(procedure)', + (r'([a-z_][a-z0-9_]*)(\s*)(:)(\s*)(procedure)\b', bygroups(Name.Function, Whitespace, Operator, Whitespace, Keyword.Declaration)), (r'([a-z_][a-z0-9_]*)(\s*)(:)', bygroups(Name.Label, Whitespace, Operator)), @@ -4140,11 +4140,69 @@ class RexxLexer(RegexLexer): ] } + _ADDRESS_COMMAND_REGEX = re.compile(r'\s*address\s+command\b', re.IGNORECASE) + _ADDRESS_REGEX = re.compile(r'\s*address\s+', re.IGNORECASE) + _DO_WHILE_REGEX = re.compile(r'\s*do\s+while\b', re.IGNORECASE) + _IF_THEN_DO_REGEX = re.compile(r'\s*if\b.+\bthen\s+do\s*$', re.IGNORECASE) + _PROCEDURE_REGEX = re.compile(r'([a-z_][a-z0-9_]*)(\s*)(:)(\s*)(procedure)\b', re.IGNORECASE) + _ELSE_DO_REGEX = re.compile(r'\s*else\s+do\s*$', re.IGNORECASE) + _PARSE_ARG_REGEX = re.compile(r'\s*parse\s+(upper\s+)?(arg|value)\b', re.IGNORECASE) + _REGEXS = [ + _ADDRESS_COMMAND_REGEX, + _ADDRESS_REGEX, + _DO_WHILE_REGEX, + _ELSE_DO_REGEX, + _IF_THEN_DO_REGEX, + _PROCEDURE_REGEX, + _PARSE_ARG_REGEX, + ] + def analyse_text(text): """ Check for inital comment. """ result = 0.0 if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE): + # Header matches MVS Rexx requirements, this is certainly a Rexx + # script. result = 1.0 + elif text.startswith('/*'): + # Header matches general Rexx requirements; the source code might + # still be any language using C comments such as C++, C# or Java. + result = 0.01 + + # Check if lines match certain regular expressions and + # collect the respective counts in a dictionary. 
+ regexCount = len(RexxLexer._REGEXS) + regexToCountMap = {} + for regex in RexxLexer._REGEXS: + regexToCountMap[regex] = 0 + for line in (text.split('\n'))[1:]: + regexIndex = 0 + lineHasAnyRegex = False + while not lineHasAnyRegex and (regexIndex < regexCount): + regexToCheck = RexxLexer._REGEXS[regexIndex] + if regexToCheck.match(line) is not None: + regexToCountMap[regexToCheck] = \ + regexToCountMap[regexToCheck] + 1 + lineHasAnyRegex = True + else: + regexIndex += 1 + # Evaluate the findings. + if regexToCountMap[RexxLexer._PROCEDURE_REGEX] > 0: + result += 0.5 + elif regexToCountMap[RexxLexer._ADDRESS_COMMAND_REGEX] > 0: + result += 0.2 + elif regexToCountMap[RexxLexer._ADDRESS_REGEX] > 0: + result += 0.05 + if regexToCountMap[RexxLexer._DO_WHILE_REGEX] > 0: + result += 0.1 + if regexToCountMap[RexxLexer._ELSE_DO_REGEX] > 0: + result += 0.1 + if regexToCountMap[RexxLexer._PARSE_ARG_REGEX] > 0: + result += 0.2 + if regexToCountMap[RexxLexer._IF_THEN_DO_REGEX] > 0: + result += 0.1 + result = min(result, 1.0) + assert 0.0 <= result <= result return result diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py index b04a9d0d..feb62a1b 100644 --- a/tests/test_lexers_other.py +++ b/tests/test_lexers_other.py @@ -32,7 +32,8 @@ class _AnalyseTextTest(unittest.TestCase): text = exampleFile.read() probability = self.lexer.analyse_text(text) self.assertTrue(probability > 0, - '%s must recognize %r' % (self.lexer.name, exampleFilePath)) + '%s must recognize %r' % ( + self.lexer.name, exampleFilePath)) guessedLexer = guess_lexer(text) self.assertEqual(guessedLexer.name, self.lexer.name) finally: @@ -48,10 +49,36 @@ class JclLexerTest(_AnalyseTextTest): def setUp(self): self.lexer = JclLexer() + class RexxLexerTest(_AnalyseTextTest): def setUp(self): self.lexer = RexxLexer() + def testCanGuessFromText(self): + self.assertAlmostEqual(0.01, + self.lexer.analyse_text('/* */')) + self.assertAlmostEqual(1.0, + self.lexer.analyse_text('''/* Rexx */ + say "hello 
world"''')) + self.assertLess(0.5, + self.lexer.analyse_text('/* */\n' \ + + 'hello:pRoceduRe\n' \ + + ' say "hello world"')) + self.assertLess(0.2, + self.lexer.analyse_text('''/* */ + if 1 > 0 then do + say "ok" + end + else do + say "huh?" + end''')) + self.assertLess(0.2, + self.lexer.analyse_text('''/* */ + greeting = "hello world!" + parse value greeting "hello" name "!" + say name''')) + + class WebFocusLexerTest(_AnalyseTextTest): def setUp(self): self.lexer = WebFocusLexer() -- cgit v1.2.1 From 6afe2b06558c18fb886042720fefb172e21474ad Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Mon, 25 Feb 2013 02:49:29 +0100 Subject: Improved description of mainframe related languages. --- pygments/lexers/other.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 55dcf66c..e4b7cd41 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3825,12 +3825,12 @@ class EasytrieveLexer(RegexLexer): class JclLexer(RegexLexer): """ - Job Control Language (JCL) is a scripting language used on IBM mainframe - operating systems to instruct the system on how to run a batch job or - start a subsystem. + `Job Control Language (JCL) `_ + is a scripting language used on mainframe platforms to instruct the system + on how to run a batch job or start a subsystem. It is somewhat + comparable to MS DOS batch and Unix shell scripts. - For more information, refer to the - `MVS Job Control Language Reference `_ + *New in Pygments 1.x.* """ name = 'JCL' aliases = ['jcl'] @@ -3907,11 +3907,12 @@ class JclLexer(RegexLexer): class WebFocusLexer(RegexLexer): """ - WebFOCUS und FOCUS are business intelligence tools mainly used in - mainframe related environments. + `(Web)FOCUS `_ is + a language for business intelligence applications. It enables to + describe, query and modify data from various sources including mainframe + platforms. 
It also includes FIDEL, a language to describe input forms. - For more information, refer to the - `Information Builders product page `_. + *New in Pygments 1.x.* """ name = 'WebFOCUS' aliases = ['webfocus', 'FOCUS', 'focus'] @@ -4085,10 +4086,12 @@ class WebFocusLexer(RegexLexer): class RexxLexer(RegexLexer): """ - `REXX `_ is a scripting language available for + `Rexx `_ is a scripting language available for a wide range of different platforms with its roots found on mainframe systems. It is popular for I/O- and data based tasks and can act as glue language to bind different applications together. + + *New in Pygments 1.x.* """ name = 'REXX' aliases = ['rexx', 'ARexx', 'arexx'] -- cgit v1.2.1 From 072a208804b1f2f16d3553b325a05c3a0e4c00d8 Mon Sep 17 00:00:00 2001 From: Alex Gosse Date: Wed, 6 Mar 2013 14:12:54 -0800 Subject: Add support for Riverbed TrafficScript lexing. --- AUTHORS | 1 + pygments/lexers/_mapping.py | 1 + pygments/lexers/traffiscript.py | 50 +++++++++++++++++ tests/examplefiles/example.rts | 118 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 170 insertions(+) create mode 100644 pygments/lexers/traffiscript.py create mode 100644 tests/examplefiles/example.rts diff --git a/AUTHORS b/AUTHORS index 9447bd0f..a53f29ac 100644 --- a/AUTHORS +++ b/AUTHORS @@ -43,6 +43,7 @@ Other contributors, listed alphabetically, are: * Bertrand Goetzmann -- Groovy lexer * Krzysiek Goj -- Scala lexer * Matt Good -- Genshi, Cheetah lexers +* Alex Gosse -- TrafficScript lexer * Patrick Gotthardt -- PHP namespaces support * Olivier Guibe -- Asymptote lexer * Jordi Gutiérrez Hermoso -- Octave lexer diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 53e09176..37b82318 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -246,6 +246,7 @@ LEXERS = { 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)), 'RobotFrameworkLexer': 
('pygments.lexers.other', 'RobotFramework', ('RobotFramework', 'robotframework'), ('*.txt', '*.robot'), ('text/x-robotframework',)), 'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), + 'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('trafficscript', 'rts'), ('*.rts'), ('text/x-trafficscript', 'application/x-trafficscript')), 'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), 'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')), 'RustLexer': ('pygments.lexers.compiled', 'Rust', ('rust',), ('*.rs', '*.rc'), ('text/x-rustsrc',)), diff --git a/pygments/lexers/traffiscript.py b/pygments/lexers/traffiscript.py new file mode 100644 index 00000000..56b9e04c --- /dev/null +++ b/pygments/lexers/traffiscript.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +""" + + pygments.lexers.trafficscript + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + Lexer for RiverBed's TrafficScript (RTS) language. + + :copyright: Copyright 2013 by Alex Gosse + :license: BSD, see LICENSE for details. 
+""" +import re +from pygments.lexer import RegexLexer +from pygments.token import * + +__all__ = ['RtsLexer'] + +class RtsLexer(RegexLexer): + """ + For `Riverbed Stingray Traffic Manager + ` + """ + name = 'TrafficScript' + aliases = ['rts','trafficscript'] + filenames = ['*.rts'] + tokens = { + 'root' : [ + (r"'(\\\\|\\[^\\]|[^'\\])*'", String), + (r'"', String, 'escapable-string'), + (r'(0x[0-9a-fA-F]+|\d+)', Number), + (r'\d+\.\d+', Number.Float), + (r'\$[a-zA-Z](\w|_)*', Name.Variable), + (r'[a-zA-Z][\w.]*', Name.Function), + (r'[-+*/%=,;(){}<>^.!~|&\[\]\?\:]', Operator), + (r'(>=|<=|==|!=|' + r'&&|\|\||' + r'\+=|.=|-=|\*=|/=|%=|<<=|>>=|&=|\|=|\^=|' + r'>>|<<|' + r'\+\+|--|=>)', Operator), + (r'[ \t\r]+', Text), + (r'#[^\n]*', Comment), + ], + 'escapable-string' : [ + (r'\\[tsn]', String.Escape), + (r'[^"]', String), + (r'"', String, '#pop'), + ], + + } diff --git a/tests/examplefiles/example.rts b/tests/examplefiles/example.rts new file mode 100644 index 00000000..1f9cfc59 --- /dev/null +++ b/tests/examplefiles/example.rts @@ -0,0 +1,118 @@ +# Example of a Riverbed TrafficScript (*.rts) file. + +http.setHeader( "Host", "secure.mysite.com" ); +$body = http.getBody( ); # get the POST data +$single = 'Hello \ +world'; +$double = "Hello \ +world"; +$pi = 3.14157; +$message = "The URL path is " . http.GetPath(); +$four = 2 + 2; +# Sets $ratio to "75%" (for example) +$ratio = ( $a / ($a + $b) * 100 ) . 
"%"; +$contentLength = http.getHeader( "Content-Length" ); +if( $contentLength > 1024 * 1024 ) { + log.warn( "Large request body: ".$contentLength ); +} +4 + 7.5 * $a +-$b / $c - 1 +7 % 3 # Returns 1 +"foo" && !0 # true +( 1 < 2 ) && ( 3 < 4 ) # true +$a || $b # true if $a or $b is true +0x1234 & 255 # 0x34 +1|2|4 #7 +1^3 #2 +~1 & 0xffff # 65534 +1 << 2 # 4 +2 >> 1 # 1 +$foo *= 5 # Product equals ($foo = $foo * 5) +$foo /= 2 # Quotient equals ($foo = $foo / 5) +$foo %= 2 # Modulo equals ($foo = $foo % 5) +$foo <<= 2 # Bit-shift left equals ($foo = $foo << 2) +$foo >>= 2 # Bit-shift right equals ($foo = $foo >> 2) +$foo &= 2 # Bitwise AND equals ($foo = $foo & 2) +$foo |= 2 # Bitwise OR equals ($foo = $foo | 2) +$foo ^= 2 # Bitwise XOR equals ($foo = $foo ^ 2) +$int = 10; +$double = 2.71828; +string.len( $double ); # casts to string, returns 7 +# Convert $string to a number, and add 4: +$r = $string + 4; # $r is 14 +if( string.startsWith( $path, "/secure" ) ) { + pool.use( "secure pool" ); +} else { + pool.use( "non-secure pool" ); +} + +for( $count = 0; $count < 10; $count++ ) { + log.info( "In loop, count = " . $count ); +} + +i$count = 0; +while( $count < 10 ) { + log.info( "In loop, count = " . $count ); + $count = $count + 1; +} + +$count = 0; +do { + log.info( "In loop, count = " . $count ); + $count = $count + 1; +} while( $count < 10 ); + +$mime = http.getResponseHeader( "Content-Type" ); +if( !string.startsWith( $mime, "text/html" )) break; +$array = [ "Alex", "Matt", "Oliver", "Laurence" ]; +$someone = $array[0]; +$arraylen = array.length($array); +log.info("My array has " . $arraylen . " elements.\n"); + +for ( $i = 0; $i < $arraylen; $i++ ){ + log.info ( "Element #" . $i . " " . $array[$i]); +} + +$hash = [ "orange" => "fruit", + "apple" => "fruit", + "cabbage" => "vegetable", + "pear" => "fruit" ]; + +foreach ( $key in hash.keys($hash)){ + log.info("Key: " . $key . "; Value: " . $hash[$key] . 
+";"); } + +# Declare a subroutine to calculate factorials +sub factorial( $n ) { + if( $n == 0 ) return 1; + return $n*factorial( $n-1 ); +} +# Put entries into the array +$c = 0; +while( $c <= 10 ) { + $msg = "Did you know that ". $c ."! is ". factorial( $c ) +."?" ; + data.set( "myarray".$c, $msg ); +$c++; } +# Look up several entries. Note: the 1000th entry is empty +$msg = ""; +$msg .= "Index 1000: ".data.get( "myarray1000" )."\n"; +# delete the entire array (but no other data stored by data.set) +data.reset( "myarray" ); +http.sendResponse( "200 OK", "text/plain", $msg, "" ); +sub headbug(){ + # Prints each header to the event log. + $headers = http.listHeaderNames(); + foreach ($header in $headers){ + log.info( $header . ": " . http.getheader($header)); +} } + +import foo; +foo.headbug(); +# Sets the regex string as ^192\.168\. ; the two examples +# below have the same effect +$regex = "^(192)\\.168\\."; +$regex = '^192\.168\.'; +if ( string.regexMatch( $ip, $regex ) ) { + # IP is on 192.168.* network +} -- cgit v1.2.1 From 83a87b54d7a58b53478967ea1b439098d15fef40 Mon Sep 17 00:00:00 2001 From: Alex Gosse Date: Thu, 7 Mar 2013 02:23:53 -0800 Subject: Fixed incorrect filename for TrafficScript lexer module (oops!). --- pygments/lexers/trafficscript.py | 50 ++++++++++++++++++++++++++++++++++++++++ pygments/lexers/traffiscript.py | 50 ---------------------------------------- 2 files changed, 50 insertions(+), 50 deletions(-) create mode 100644 pygments/lexers/trafficscript.py delete mode 100644 pygments/lexers/traffiscript.py diff --git a/pygments/lexers/trafficscript.py b/pygments/lexers/trafficscript.py new file mode 100644 index 00000000..56b9e04c --- /dev/null +++ b/pygments/lexers/trafficscript.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +""" + + pygments.lexers.trafficscript + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + Lexer for RiverBed's TrafficScript (RTS) language. + + :copyright: Copyright 2013 by Alex Gosse + :license: BSD, see LICENSE for details. 
+""" +import re +from pygments.lexer import RegexLexer +from pygments.token import * + +__all__ = ['RtsLexer'] + +class RtsLexer(RegexLexer): + """ + For `Riverbed Stingray Traffic Manager + ` + """ + name = 'TrafficScript' + aliases = ['rts','trafficscript'] + filenames = ['*.rts'] + tokens = { + 'root' : [ + (r"'(\\\\|\\[^\\]|[^'\\])*'", String), + (r'"', String, 'escapable-string'), + (r'(0x[0-9a-fA-F]+|\d+)', Number), + (r'\d+\.\d+', Number.Float), + (r'\$[a-zA-Z](\w|_)*', Name.Variable), + (r'[a-zA-Z][\w.]*', Name.Function), + (r'[-+*/%=,;(){}<>^.!~|&\[\]\?\:]', Operator), + (r'(>=|<=|==|!=|' + r'&&|\|\||' + r'\+=|.=|-=|\*=|/=|%=|<<=|>>=|&=|\|=|\^=|' + r'>>|<<|' + r'\+\+|--|=>)', Operator), + (r'[ \t\r]+', Text), + (r'#[^\n]*', Comment), + ], + 'escapable-string' : [ + (r'\\[tsn]', String.Escape), + (r'[^"]', String), + (r'"', String, '#pop'), + ], + + } diff --git a/pygments/lexers/traffiscript.py b/pygments/lexers/traffiscript.py deleted file mode 100644 index 56b9e04c..00000000 --- a/pygments/lexers/traffiscript.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -""" - - pygments.lexers.trafficscript - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - Lexer for RiverBed's TrafficScript (RTS) language. - - :copyright: Copyright 2013 by Alex Gosse - :license: BSD, see LICENSE for details. 
-""" -import re -from pygments.lexer import RegexLexer -from pygments.token import * - -__all__ = ['RtsLexer'] - -class RtsLexer(RegexLexer): - """ - For `Riverbed Stingray Traffic Manager - ` - """ - name = 'TrafficScript' - aliases = ['rts','trafficscript'] - filenames = ['*.rts'] - tokens = { - 'root' : [ - (r"'(\\\\|\\[^\\]|[^'\\])*'", String), - (r'"', String, 'escapable-string'), - (r'(0x[0-9a-fA-F]+|\d+)', Number), - (r'\d+\.\d+', Number.Float), - (r'\$[a-zA-Z](\w|_)*', Name.Variable), - (r'[a-zA-Z][\w.]*', Name.Function), - (r'[-+*/%=,;(){}<>^.!~|&\[\]\?\:]', Operator), - (r'(>=|<=|==|!=|' - r'&&|\|\||' - r'\+=|.=|-=|\*=|/=|%=|<<=|>>=|&=|\|=|\^=|' - r'>>|<<|' - r'\+\+|--|=>)', Operator), - (r'[ \t\r]+', Text), - (r'#[^\n]*', Comment), - ], - 'escapable-string' : [ - (r'\\[tsn]', String.Escape), - (r'[^"]', String), - (r'"', String, '#pop'), - ], - - } -- cgit v1.2.1 From 09fde648539e051a77a793fae40469c46aa7a68d Mon Sep 17 00:00:00 2001 From: Alex Gosse Date: Fri, 8 Mar 2013 02:55:55 -0800 Subject: Added keywords to TrafficScript to align highlighting with that of the Stingray GUI editor and the http://splash.riverbed.com developer community site. --- pygments/lexers/trafficscript.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pygments/lexers/trafficscript.py b/pygments/lexers/trafficscript.py index 56b9e04c..ddc4aa50 100644 --- a/pygments/lexers/trafficscript.py +++ b/pygments/lexers/trafficscript.py @@ -31,6 +31,7 @@ class RtsLexer(RegexLexer): (r'(0x[0-9a-fA-F]+|\d+)', Number), (r'\d+\.\d+', Number.Float), (r'\$[a-zA-Z](\w|_)*', Name.Variable), + (r'(if|else|for(each)?|in|while|do|break|sub|return|import)', Keyword), (r'[a-zA-Z][\w.]*', Name.Function), (r'[-+*/%=,;(){}<>^.!~|&\[\]\?\:]', Operator), (r'(>=|<=|==|!=|' -- cgit v1.2.1 From 232c28383763473a44e2c5c2bbe139845efd5def Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Tue, 19 Mar 2013 23:47:06 +0100 Subject: #1: Add lexer for FOCUS master and access files. 
--- pygments/lexers/_mapping.py | 2 + pygments/lexers/other.py | 95 +++++++++++++++++++++++++++++++++++++++++- tests/examplefiles/example.acx | 3 ++ tests/examplefiles/example.mas | 21 ++++++++++ 4 files changed, 120 insertions(+), 1 deletion(-) create mode 100644 tests/examplefiles/example.acx create mode 100644 tests/examplefiles/example.mas diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 22a8b935..13a8b696 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -107,6 +107,8 @@ LEXERS = { 'FancyLexer': ('pygments.lexers.agile', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), 'FantomLexer': ('pygments.lexers.compiled', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), 'FelixLexer': ('pygments.lexers.compiled', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), + 'FocusAccessLexer': ('pygments.lexers.other', 'FOCUS access', (), ('*.acx',), ('text/x-focus-access',)), + 'FocusMasterLexer': ('pygments.lexers.other', 'FOCUS master', ('master',), ('*.mas',), ('text/x-focus-master',)), 'FortranLexer': ('pygments.lexers.compiled', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)), 'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('Clipper', 'XBase'), ('*.PRG', '*.prg'), ()), 'GLShaderLexer': ('pygments.lexers.compiled', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)), diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index e4b7cd41..b1cadc87 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -37,7 +37,8 @@ __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer', 'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer', 'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer', 'CbmBasicV2Lexer', 'AutoItLexer', 'EasytrieveLexer', 'JclLexer', - 'RexxLexer', 'WebFocusLexer'] + 'RexxLexer', 'WebFocusLexer', 'FocusAccessLexer', + 'FocusMasterLexer'] 
class ECLLexer(RegexLexer): @@ -4084,6 +4085,98 @@ class WebFocusLexer(RegexLexer): return result +class FocusMasterLexer(RegexLexer): + """ + FOCUS master files describes files and tables to be queried by FOCUS + programs. The are somewhat comparable to SQL ``create table`` statements. + + *New in Pygments 1.x.* + """ + name = 'FOCUS master' + aliases = ['master'] + filenames = ['*.mas'] + mimetypes = ['text/x-focus-master'] + flags = re.IGNORECASE + + tokens = { + 'root': [ + (r'\$.*\n', Comment.Single), + (r'\s+', Whitespace), + (r'[a-z_][a-z0-9_]*', Name), + (r'=', Punctuation, 'value'), + (r'\.', Punctuation), + ], + 'value': [ + (r'\$.*\n', Comment.Single, 'root'), + (r'\n', Text), + (r',', Punctuation, 'root'), + (r'\.!', Punctuation), + (r'["]', String, 'string_double'), + (r'\'', String, 'string_single'), + (r'[a-z_][a-z0-9_]*', Text), + (r'[0-9]+(\.[0-9]+)?', Number), + (r'.', Text), + ], + 'string_double': [ + (r'\'["]["]', String), + (r'["]', String, 'value'), + (r'[^"]', String), + ], + 'string_single': [ + (r'\'\'', String), + (r'\'', String, 'value'), + (r'[^\']', String), + ], + } + + _HEADER_PATTERN = re.compile( + r'^((\s*\$.*)\n)*file(name)?\s*=\s*[a-z_][a-z0-9_]+\s*,(\s*(\$.*)\n)*\s*suffix\s*=\s*.+', + re.IGNORECASE + ) + + def analyse_text(text): + """ + Check for ``FILE=..., SUFFIX=...`` while ignoring comments starting with ``$``. + """ + result = 0.0 + + if FocusMasterLexer._HEADER_PATTERN.match(text): + result = 0.8 + + assert 0.0 <= result <= 1.0 + return result + + +class FocusAccessLexer(FocusMasterLexer): + """ + FOCUS access files associate segments in FOCUS master files with actual + tables containing data. 
+ + *New in Pygments 1.x.* + """ + name = 'FOCUS access' + aliases = [] + filenames = ['*.acx'] + mimetypes = ['text/x-focus-access'] + + _HEADER_PATTERN = re.compile( + r'^((\s*\$.*)\n)*segment\s*=\s*[a-z_][a-z0-9_]+\s*,(\s*(\$.*)\n)*\s*tablename\s*=\s*.+', + re.IGNORECASE + ) + + def analyse_text(text): + """ + Check for ``SEGMENT=..., TABLENAME=...`` while ignoring comments starting with ``$``. + """ + result = 0.0 + + if FocusAccessLexer._HEADER_PATTERN.match(text): + result = 0.8 + + assert 0.0 <= result <= 1.0 + return result + + class RexxLexer(RegexLexer): """ `Rexx `_ is a scripting language available for diff --git a/tests/examplefiles/example.acx b/tests/examplefiles/example.acx new file mode 100644 index 00000000..ec80b9a2 --- /dev/null +++ b/tests/examplefiles/example.acx @@ -0,0 +1,3 @@ +$ Example FOCUS access file. +SEGNAME=CUSTOMERS, TABLENAME = "SOME"."CUSTOMERS", KEYS =1, + WRITE= NO, DBSPACE = SOME.WHERE0,$ diff --git a/tests/examplefiles/example.mas b/tests/examplefiles/example.mas new file mode 100644 index 00000000..fe653f07 --- /dev/null +++ b/tests/examplefiles/example.mas @@ -0,0 +1,21 @@ +$ Example FOCUS master file. +FILE=CENTGL, SUFFIX=FOC +SEGNAME=ACCOUNTS,SEGTYPE = S01 +FIELDNAME=GL_ACCOUNT, ALIAS=GLACCT, FORMAT=A7, + TITLE='Ledger,Account', FIELDTYPE=I, $ +FIELDNAME=GL_ACCOUNT_PARENT, ALIAS=GLPAR, FORMAT=A7, + TITLE=Parent, + PROPERTY=PARENT_OF, REFERENCE=GL_ACCOUNT, $ +FIELDNAME=GL_ACCOUNT_TYPE, ALIAS=GLTYPE, FORMAT=A1, + TITLE=Type,$ + +FIELDNAME=GL_ROLLUP_OP, ALIAS=GLROLL, FORMAT=A1, + TITLE=Op, $ Some comment. +FIELDNAME=GL_ACCOUNT_LEVEL, ALIAS=GLLEVEL, FORMAT=I3, + TITLE=Lev, $ + $ Another comment. 
+FIELDNAME=GL_ACCOUNT_CAPTION, ALIAS=GLCAP, FORMAT=A30, + TITLE=Caption, + PROPERTY=CAPTION, REFERENCE=GL_ACCOUNT, $ +FIELDNAME=SYS_ACCOUNT, ALIAS=ALINE, FORMAT=A6, + TITLE='System,Account,Line', MISSING=ON, $ -- cgit v1.2.1 From 7d2221b1aba7faa131b6ac660696fe543d388ca3 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sat, 30 Mar 2013 14:15:43 +0100 Subject: #6: Add highlightning of Rexx built in functions. --- pygments/lexers/other.py | 12 ++++++++++++ tests/examplefiles/example.rexx | 5 +++++ 2 files changed, 17 insertions(+) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index b1cadc87..a4cd2f95 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -4203,10 +4203,22 @@ class RexxLexer(RegexLexer): bygroups(Name.Function, Whitespace, Operator, Whitespace, Keyword.Declaration)), (r'([a-z_][a-z0-9_]*)(\s*)(:)', bygroups(Name.Label, Whitespace, Operator)), + include('function'), include('keyword'), include('operator'), (r'[a-z_][a-z0-9_]*', Text), ], + 'function': [ + (r'(abbrev|abs|address|arg|b2x|bitand|bitor|bitxor|c2d|c2x|' + r'center|charin|charout|chars|compare|condition|copies|d2c|' + r'd2x|datatype|date|delstr|delword|digits|errortext|form|' + r'format|fuzz|insert|lastpos|left|length|linein|lineout|lines|' + r'max|min|overlay|pos|queued|random|reverse|right|sign|' + r'sourceline|space|stream|strip|substr|subword|symbol|time|' + r'trace|translate|trunc|value|verify|word|wordindex|' + r'wordlength|wordpos|words|x2b|x2c|x2d|xrange)(\s*)([(])', + bygroups(Name.Builtin, Whitespace, Operator)), + ], 'keyword': [ (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|' r'interpret|iterate|leave|nop|numeric|off|on|options|parse|' diff --git a/tests/examplefiles/example.rexx b/tests/examplefiles/example.rexx index 648f595b..07b7bacf 100644 --- a/tests/examplefiles/example.rexx +++ b/tests/examplefiles/example.rexx @@ -13,6 +13,11 @@ x = '"' || "'" || '''' || """" /* quotes */ * spawning multiple lines. 
/* / */ +/* Built-in functions. */ +line = 'line containing some short text' +say WordPos(line, 'some') +say Word(line, 4) + /* Labels and procedures. */ some_label : -- cgit v1.2.1 From 19b60945c582853056da25946a090fd049f43fd1 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sat, 30 Mar 2013 17:09:09 +0100 Subject: * #7: Add lexer for WebFOCUS style sheets. * Added aliases for FOCUS access and master files. * Added support for FOCUS comments (starting with -*) in access and master files. * Changed names in FOCUS access and master files to be highlighted as built-in names. * Changed lexers for FOCUS access and master files to be more picky about NAME=VALUE syntax. --- pygments/lexers/_mapping.py | 5 ++-- pygments/lexers/other.py | 65 +++++++++++++++++++++++++++++++++++++++------ 2 files changed, 60 insertions(+), 10 deletions(-) diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 13a8b696..7498404c 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -107,8 +107,9 @@ LEXERS = { 'FancyLexer': ('pygments.lexers.agile', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), 'FantomLexer': ('pygments.lexers.compiled', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), 'FelixLexer': ('pygments.lexers.compiled', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), - 'FocusAccessLexer': ('pygments.lexers.other', 'FOCUS access', (), ('*.acx',), ('text/x-focus-access',)), - 'FocusMasterLexer': ('pygments.lexers.other', 'FOCUS master', ('master',), ('*.mas',), ('text/x-focus-master',)), + 'FocusAccessLexer': ('pygments.lexers.other', 'FOCUS access', ('focus-access',), ('*.acx',), ('text/x-focus-access',)), + 'FocusMasterLexer': ('pygments.lexers.other', 'FOCUS master', ('focus-master',), ('*.mas',), ('text/x-focus-master',)), + 'FocusStyleSheetLexer': ('pygments.lexers.other', 'FOCUS style sheet', ('focus-style',), ('*.sty',), ('text/x-focus-style',)), 'FortranLexer': 
('pygments.lexers.compiled', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)), 'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('Clipper', 'XBase'), ('*.PRG', '*.prg'), ()), 'GLShaderLexer': ('pygments.lexers.compiled', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)), diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index a4cd2f95..efa51b72 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -38,7 +38,7 @@ __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer', 'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer', 'CbmBasicV2Lexer', 'AutoItLexer', 'EasytrieveLexer', 'JclLexer', 'RexxLexer', 'WebFocusLexer', 'FocusAccessLexer', - 'FocusMasterLexer'] + 'FocusMasterLexer', 'FocusStyleSheetLexer'] class ECLLexer(RegexLexer): @@ -4093,23 +4093,31 @@ class FocusMasterLexer(RegexLexer): *New in Pygments 1.x.* """ name = 'FOCUS master' - aliases = ['master'] + aliases = ['focus-master'] filenames = ['*.mas'] mimetypes = ['text/x-focus-master'] flags = re.IGNORECASE tokens = { 'root': [ + (r'-\*.*\n', Comment.Single), + include('name'), + ], + 'name': [ (r'\$.*\n', Comment.Single), (r'\s+', Whitespace), - (r'[a-z_][a-z0-9_]*', Name), + (r'[a-z_][a-z0-9_]*', Name.Builtin, 'before_value'), + (r'(\\)(\n)', bygroups(Operator, Text)), + ], + 'before_value': [ (r'=', Punctuation, 'value'), - (r'\.', Punctuation), - ], + (r'\s+', Whitespace), + (r'.', Error, 'name') + ], 'value': [ (r'\$.*\n', Comment.Single, 'root'), (r'\n', Text), - (r',', Punctuation, 'root'), + (r',', Punctuation, 'name'), (r'\.!', Punctuation), (r'["]', String, 'string_double'), (r'\'', String, 'string_single'), @@ -4155,7 +4163,7 @@ class FocusAccessLexer(FocusMasterLexer): *New in Pygments 1.x.* """ name = 'FOCUS access' - aliases = [] + aliases = ['focus-access'] filenames = ['*.acx'] mimetypes = ['text/x-focus-access'] @@ -4166,7 +4174,8 @@ class 
FocusAccessLexer(FocusMasterLexer): def analyse_text(text): """ - Check for ``SEGMENT=..., TABLENAME=...`` while ignoring comments starting with ``$``. + Check for ``SEGMENT=..., TABLENAME=...`` while ignoring comments + starting with ``$``. """ result = 0.0 @@ -4177,6 +4186,46 @@ class FocusAccessLexer(FocusMasterLexer): return result +class FocusStyleSheetLexer(FocusMasterLexer): + """ + Style sheet to format reports written in FOCUS. + + *New in Pygments 1.x.* + """ + name = 'FOCUS style sheet' + aliases = ['focus-style'] + filenames = ['*.sty'] + mimetypes = ['text/x-focus-style'] + + _HEADER_TYPE_PATTERN = re.compile( + r'^(((\s*\$)|(-\*)).*\n)*type\s*=\s*[a-z]+\s*,.+', + re.IGNORECASE + ) + _HEADER_PAGE_DECLARATION_PATTERN = re.compile( + r'^(((\s*\$)|(-\*)).*\n)*(orientation|pagecolor|pagesize)\s*=\s*.+\s*,.+', + re.IGNORECASE + ) + + def analyse_text(text): + """ + Check for ``TYPE=...,...`` or page layout declaration while + ignoring comments starting with ``$`` or ``-*``. + """ + result = 0.0 + + if FocusStyleSheetLexer._HEADER_TYPE_PATTERN.match(text): + result = 0.7 + elif FocusStyleSheetLexer._HEADER_PAGE_DECLARATION_PATTERN.match(text): + result = 0.5 + if result > 0: + textStartsWithComment = text.startswith('-*') or text.lstrip().startswith('$') + if textStartsWithComment: + result += 0.2 + + assert 0.0 <= result <= 1.0 + return result + + class RexxLexer(RegexLexer): """ `Rexx `_ is a scripting language available for -- cgit v1.2.1 From 02051996e26d1f6cf80836073c9fd57ec44a0f80 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Mon, 1 Apr 2013 16:02:20 +0200 Subject: #7: Add lexer for WebFOCUS style sheets. Added example WebFOCUS style sheet. 
--- tests/examplefiles/example.sty | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 tests/examplefiles/example.sty diff --git a/tests/examplefiles/example.sty b/tests/examplefiles/example.sty new file mode 100644 index 00000000..0c113d0f --- /dev/null +++ b/tests/examplefiles/example.sty @@ -0,0 +1,10 @@ +$ Example WebFOCUS style sheet. +-* A FOCUS comment. +TYPE=REPORT, PAGESIZE=A4, Orientation = LANDSCAPE, $ +TYPE=HEADING, FONT='Helvetica', + SIZE=14, STYLE=BOLD, $ another comment + +PAGECOLOR=blue, $ + +TYPE=HEADING, OBJECT=TEXT, ITEM=2, COLOR=BLUE, SIZE=12, + STYLE=BOLD+UNDERLINE, $ -- cgit v1.2.1 From 09f6b72ec3c24d1b4955f39f0b1923d2f06fa052 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Thu, 18 Apr 2013 22:20:32 +0200 Subject: Cleaned up Easytrieve lexer: delimiters are no collected in constants to make the regular expressions easier to read. --- pygments/lexers/other.py | 38 +++++++++++++++++++++++--------------- 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 84c30bf9..3d5d5118 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3685,24 +3685,31 @@ class EasytrieveLexer(RegexLexer): mimetypes = ['text/x-easytrieve'] flags = 0 + # Note: We cannot use r'\b' at the start and end of keywords because + # Easytrieve Plus delimiter characters are: + # + # * space ( ) + # * apostrophe (') + # * period (.) + # * comma (,) + # * paranthesis ( and ) + # * colon (:) + # + # Additionally words end once a '*' appears, indicatins a comment. + _DELIMITERS = r' \'.,():\n' + _DELIMITERS_OR_COMENT = _DELIMITERS + '*' + _DELIMITER_PATTERN = '[' + _DELIMITERS + ']' + _NON_DELIMITER_OR_COMMENT_PATTERN = '[^' + _DELIMITERS_OR_COMENT + ']' + tokens = { - # Note: We cannot use r'\b' at the start and end of keywords because - # Easytrieve Plus delimiter characters are: - # - # * space ( ) - # * apostrophe (') - # * period (.) 
- # * comma (,) - # * paranthesis ( and ) - # * colon (:) 'root': [ (r'\*.*\n', Comment.Single), (r'\n+', Whitespace), # Macro argument - (r'&[^ *\'.,():\n]+\.', Name.Variable, 'after_macro_argument'), + (r'&' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+\.', Name.Variable, 'after_macro_argument'), # Macro call - (r'%[^ *\'.,():\n]+', Name.Variable), - (r'(FILE|JOB|PARM|PROC|REPORT)([ \'.,():\n])', + (r'%' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Variable), + (r'(FILE|JOB|PARM|PROC|REPORT)(' + _DELIMITER_PATTERN + r')', bygroups(Keyword.Declaration, Operator)), (r'(AFTER-BREAK|AFTER-LINE|AFTER-SCREEN|AIM|AND|ATTR|BEFORE|' r'BEFORE-BREAK|BEFORE-LINE|BEFORE-SCREEN|BUSHU|BY|CALL|CASE|' @@ -3728,7 +3735,8 @@ class EasytrieveLexer(RegexLexer): r'SYSDATE|SYSDATE-LONG|SYSIN|SYSIPT|SYSLST|SYSPRINT|SYSSNAP|' r'SYSTIME|TALLY|TERM-COLUMNS|TERM-NAME|TERM-ROWS|TERMINATION|' r'TITLE|TO|TRANSFER|TRC|UNIQUE|UNTIL|UPDATE|UPPERCASE|USER|' - r'USERID|VALUE|VERIFY|W|WHEN|WHILE|WORK|WRITE|X|XDM|XRST)([ \'.,():\n])', + r'USERID|VALUE|VERIFY|W|WHEN|WHILE|WORK|WRITE|X|XDM|XRST)' + r'(' + _DELIMITER_PATTERN + r')', bygroups(Keyword.Reserved, Operator)), (r'[\[\](){}<>;,]', Punctuation), (ur'[-+/=&%¬]', Operator), @@ -3737,7 +3745,7 @@ class EasytrieveLexer(RegexLexer): (r"'(''|[^'])*'", String), (r'\.', Operator), (r'\s+', Whitespace), - (r'[^ \'.,():\n]+', Name) # Everything else just belongs to a name + (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name) # Everything else just belongs to a name ], 'after_macro_argument': [ (r'\*.*\n', Comment.Single, 'root'), @@ -3745,7 +3753,7 @@ class EasytrieveLexer(RegexLexer): (r'[\[\](){}<>;,]', Punctuation, 'root'), (ur'[.+/=&%¬]', Operator, 'root'), (r"'(''|[^'])*'", String, 'root'), - (r'[^ \'.,():\n]+', Name) # Everything else just belongs to a name + (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name) # Everything else just belongs to a name ], } -- cgit v1.2.1 From a8c185e9ed83cc757582085a58c443c7f1ed31fa Mon Sep 17 00:00:00 2001 From: Thomas 
Aglassinger Date: Thu, 18 Apr 2013 22:34:50 +0200 Subject: Added version and author information for Easytrieve, FOCUS, JCL and Rexx lexers. Also cleaned up sort order of authors. --- AUTHORS | 3 ++- pygments/lexers/other.py | 14 +++++++------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/AUTHORS b/AUTHORS index 34b40db4..dc4485f3 100644 --- a/AUTHORS +++ b/AUTHORS @@ -6,8 +6,9 @@ Major developers are Tim Hatch and Armin Ronacher Other contributors, listed alphabetically, are: * Sam Aaron -- Ioke lexer -* Kumar Appaiah -- Debian control lexer * Ali Afshar -- image formatter +* Thomas Aglassinger -- Easytrieve, FOCUS, JCL and Rexx lexers +* Kumar Appaiah -- Debian control lexer * Andreas Amann -- AppleScript lexer * Timothy Armstrong -- Dart lexer fixes * Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 3d5d5118..88ccf35e 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3677,7 +3677,7 @@ class EasytrieveLexer(RegexLexer): It is mainly used on mainframe platforms and can access several of the mainframe's native file formats. It is somewhat comparable to awk. - *New in Pygments 1.x.* + *New in Pygments 1.7.* """ name = 'Easytrieve' aliases = ['easytrieve'] @@ -3839,7 +3839,7 @@ class JclLexer(RegexLexer): on how to run a batch job or start a subsystem. It is somewhat comparable to MS DOS batch and Unix shell scripts. - *New in Pygments 1.x.* + *New in Pygments 1.7.* """ name = 'JCL' aliases = ['jcl'] @@ -3921,7 +3921,7 @@ class WebFocusLexer(RegexLexer): describe, query and modify data from various sources including mainframe platforms. It also includes FIDEL, a language to describe input forms. - *New in Pygments 1.x.* + *New in Pygments 1.7.* """ name = 'WebFOCUS' aliases = ['webfocus', 'FOCUS', 'focus'] @@ -4098,7 +4098,7 @@ class FocusMasterLexer(RegexLexer): FOCUS master files describes files and tables to be queried by FOCUS programs. 
The are somewhat comparable to SQL ``create table`` statements. - *New in Pygments 1.x.* + *New in Pygments 1.7.* """ name = 'FOCUS master' aliases = ['focus-master'] @@ -4168,7 +4168,7 @@ class FocusAccessLexer(FocusMasterLexer): FOCUS access files associate segments in FOCUS master files with actual tables containing data. - *New in Pygments 1.x.* + *New in Pygments 1.7.* """ name = 'FOCUS access' aliases = ['focus-access'] @@ -4198,7 +4198,7 @@ class FocusStyleSheetLexer(FocusMasterLexer): """ Style sheet to format reports written in FOCUS. - *New in Pygments 1.x.* + *New in Pygments 1.7.* """ name = 'FOCUS style sheet' aliases = ['focus-style'] @@ -4241,7 +4241,7 @@ class RexxLexer(RegexLexer): systems. It is popular for I/O- and data based tasks and can act as glue language to bind different applications together. - *New in Pygments 1.x.* + *New in Pygments 1.7.* """ name = 'REXX' aliases = ['rexx', 'ARexx', 'arexx'] -- cgit v1.2.1 From ecd658f2c7e5cdcf2da94c2b8bb059b87a32a2d3 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Thu, 18 Apr 2013 22:57:08 +0200 Subject: Fixed spurious assertion. --- pygments/lexers/other.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 88ccf35e..c5ac6f16 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -4369,5 +4369,5 @@ class RexxLexer(RegexLexer): if regexToCountMap[RexxLexer._IF_THEN_DO_REGEX] > 0: result += 0.1 result = min(result, 1.0) - assert 0.0 <= result <= result + assert 0.0 <= result <= 1.0 return result -- cgit v1.2.1 From 18fd89a8bdc45000988aaec7717c3b5a0c88a32f Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Mon, 6 May 2013 22:47:02 +0200 Subject: * Fixed potential endless loops like '.*' by changing them to '.+'. * Fixed sort order with common prefixes like '(not|not-in)' to '(not-in|not)'. * Fixed quotes in double quoted strings in FOCUS master. * Changed FOCUS prefix functions to require '.' 
after function name. * Cleaned up redundant '[a-zA-Z]' in case insensitive expressions. * Cleaned up redundant keywords. * Cleaned up expressions like '[<]' to a simple '<'. --- pygments/lexers/other.py | 50 ++++++++++++++++++++++++------------------------ 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index c4371cb6..f6e97ea2 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3749,7 +3749,7 @@ class EasytrieveLexer(RegexLexer): ], 'after_macro_argument': [ (r'\*.*\n', Comment.Single, 'root'), - (r'[\n\s]+', Whitespace, 'root'), + (r'\s+', Whitespace, 'root'), (r'[\[\](){}<>;,]', Punctuation, 'root'), (ur'[.+/=&%¬]', Operator, 'root'), (r"'(''|[^'])*'", String, 'root'), @@ -3879,7 +3879,7 @@ class JclLexer(RegexLexer): (r'\*', Name.Builtin), (r'[\[\](){}<>;,]', Punctuation), (r'[-+*/=&%]', Operator), - (r'[a-zA-Z_][a-zA-Z_0-9]*', Name), + (r'[a-z_][a-z_0-9]*', Name), (r'[0-9]+\.[0-9]*', Number.Float), (r'\.[0-9]+', Number.Float), (r'[0-9]+', Number.Integer), @@ -3895,7 +3895,7 @@ class JclLexer(RegexLexer): ], 'option_comment': [ (r'\n', Text, 'root'), - (r'.*', Comment.Single), + (r'.+', Comment.Single), ] } @@ -3954,20 +3954,20 @@ class WebFocusLexer(RegexLexer): r'update|when|where|with|within)\b', Keyword.Reserved), (r'"', String, 'focus_fidel'), (r'\b(missing)\b', Name.Constant), - (r'\b(asq|ave|cnt|cnt|ct|dst|fst|lst|max|min|pct|rcpt|st|sum|' - r'tot)\.', Operator), + (r'\b(asq|ave|cnt|ct|dst|fst|lst|max|min|pct|rcpt|st|sum|tot)\.', + Operator), # FOCUS field declaration including display options. (r'([a-z][a-z_0-9]*)([/])([adfip]*[0-9]+(\.[0-9]+)[-%bcdelmnrsty]*)', bygroups(Name.Variable, Operator, Keyword.Type)), # Rules common to 'focus' and 'dialog_manager'. 
(r'\b(and|contains|div|eq|exceeds|excludes|from|ge|gt|in|' - r'includes|is|is-from|is-from|is-less-than|is-more-than|' - r'is-not-missing|le|like|lt|mod|ne|not|not-from|omits|or|to)\b', + r'includes|is-from|is-less-than|is-more-than|' + r'is-not-missing|is|le|like|lt|mod|ne|not-from|not|omits|or|to)\b', Operator), (r'[-+*/=|!]', Operator), (r'[(){}<>;,]', Punctuation), (r'[a-z_][a-z_0-9]*', Literal), - (r'[&]+[a-z_][a-z_0-9]*', Literal), + (r'&+[a-z_][a-z_0-9]*', Literal), (r'[0-9]+\.[0-9]*', Number.Float), (r'\.[0-9]+', Number.Float), (r'[0-9]+', Number.Integer), @@ -3977,13 +3977,13 @@ class WebFocusLexer(RegexLexer): 'dialog_manager': [ # Detect possible labels in first word of dialog manager line. (r'\s*type\b', Keyword.Reserved, 'dialog_manager_type'), - (r'[:][a-z_][a-z_0-9]*\s*\n', Name.Label, 'root'), + (r':[a-z_][a-z_0-9]*\s*\n', Name.Label, 'root'), (r'"', String, 'dialog_manager_fidel'), # TODO: Get rid of redundant dialog manager keyword rule which # already could be handled by the included # 'dialog_manager_others'. However, we currently need it to not # recognize classic labels without ':' too soon. - (r'\b([?]|close|cms|crtclear|crtform|default|defaults|else|exit|' + (r'\b(\?|close|cms|crtclear|crtform|default|defaults|else|exit|' r'goto|htmlform|if|include|mvs|pass|prompt|quit|read|repeat|' r'run|set|then|tso|type|window|write)\b', Keyword.Reserved, 'dialog_manager_others'), @@ -3993,8 +3993,8 @@ class WebFocusLexer(RegexLexer): 'dialog_manager_others': [ (r'\n', Text, 'root'), (r'\s*type\b', Keyword.Reserved, 'dialog_manager_type'), - (r'[:][a-z_][a-z_0-9]*\s*\n', Name.Label, 'root'), - (r'\b([?]|close|cms|crtclear|crtform|default|defaults|else|exit|' + (r':[a-z_][a-z_0-9]*\s*\n', Name.Label, 'root'), + (r'\b(\?|close|cms|crtclear|crtform|default|defaults|else|exit|' r'goto|htmlform|if|include|mvs|pass|prompt|quit|read|repeat|' r'run|set|then|tso|type|window|write)\b', Keyword.Reserved), # Rules common to 'focus' and 'dialog_manager'. 
@@ -4005,7 +4005,7 @@ class WebFocusLexer(RegexLexer): (r'[-+*/=|!]', Operator), (r'[(){}<>;,]', Punctuation), (r'[a-z_][a-z_0-9]*', Literal), - (r'[&]+[a-z_][a-z_0-9]*', Name.Variable), + (r'&+[a-z_][a-z_0-9]*', Name.Variable), (r'[0-9]+\.[0-9]*', Number.Float), (r'\.[0-9]+', Number.Float), (r'[0-9]+', Number.Integer), @@ -4015,12 +4015,12 @@ class WebFocusLexer(RegexLexer): 'dialog_manager_type': [ # For -TYPE, render everything as ``String`` except variables. (r'\n', Text, 'root'), - (r'[&]+[a-z_][a-z_0-9]*\.*', Name.Variable), - (r'[^&\n]*', String) + (r'&+[a-z_][a-z_0-9]*\.*', Name.Variable), + (r'[^&\n]+', String) ], 'dialog_manager_fidel': [ (r'"', String, 'dialog_manager_fidel_end'), - (r'([<])([&][a-z][a-z_0-9]*)([/])([0-9]+)', + (r'(<)(&[a-z][a-z_0-9]*)([/])([0-9]+)', bygroups(Keyword.Reserved, Name.Variable, Operator, Number.Integer)), (r'.', String) ], @@ -4030,13 +4030,13 @@ class WebFocusLexer(RegexLexer): ], 'focus_fidel': [ (r'"', String, 'focus_fidel_end'), - (r'[&]+[a-z][a-z_0-9]*', Name.Variable), + (r'&+[a-z][a-z_0-9]*', Name.Variable), (r'\>', Keyword.Reserved), # Line continuation. 
(r'\<0x\s*\n', Keyword.Reserved), - (r'([<])([a-z][a-z_0-9]*)', + (r'(<)([a-z][a-z_0-9]*)', bygroups(Keyword.Reserved, Name.Variable)), - (r'([<])([+-/]?)([0-9]+)', + (r'(<)(\+|-|/)?([0-9]+)', bygroups(Keyword.Reserved, Operator, Number.Integer)), (r'.', String) ], @@ -4127,15 +4127,15 @@ class FocusMasterLexer(RegexLexer): (r'\n', Text), (r',', Punctuation, 'name'), (r'\.!', Punctuation), - (r'["]', String, 'string_double'), + (r'"', String, 'string_double'), (r'\'', String, 'string_single'), (r'[a-z_][a-z0-9_]*', Text), (r'[0-9]+(\.[0-9]+)?', Number), (r'.', Text), ], 'string_double': [ - (r'\'["]["]', String), - (r'["]', String, 'value'), + (r'""', String), + (r'"', String, 'value'), (r'[^"]', String), ], 'string_single': [ @@ -4251,9 +4251,9 @@ class RexxLexer(RegexLexer): tokens = { 'root': [ - (r'[\s\n]', Whitespace), + (r'\s', Whitespace), (r'/\*', Comment.Multiline, 'comment'), - (r'["]', String, 'string_double'), + (r'"', String, 'string_double'), (r"'", String, 'string_single'), (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number), (r'([a-z_][a-z0-9_]*)(\s*)(:)(\s*)(procedure)\b', @@ -4283,7 +4283,7 @@ class RexxLexer(RegexLexer): r'while)\b', Keyword.Reserved), ], 'operator': [ - (ur'(-|//|/|\(|\)|\*\*|\*|\\|\\<<|\\<|\\==|\\=|\\>>|\\>|\|\||\||' + (ur'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||' ur'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|' ur'¬>>|¬>|¬|\.|,)', Operator), ], -- cgit v1.2.1 From 7208e79565fa2ad56d135f2144a776fece5ef61b Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Fri, 10 May 2013 00:17:55 +0200 Subject: Improved stack handling in Rexx lexer. 
--- pygments/lexers/other.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index f6e97ea2..ce70e3e5 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -4288,19 +4288,19 @@ class RexxLexer(RegexLexer): ur'¬>>|¬>|¬|\.|,)', Operator), ], 'string_double': [ + (r'[^"\n]', String), (r'""', String), - (r'"', String, 'root'), - (r'\n', Error, 'root'), - (r'[^"]', String), + (r'"', String, '#pop'), + (r'', Text, '#pop'), # Linefeed also terminates strings. ], 'string_single': [ + (r'[^\'\n]', String), (r'\'\'', String), - (r'\'', String, 'root'), - (r'\n', Error, 'root'), - (r'[^\']', String), + (r'\'', String, '#pop'), + (r'', Text, '#pop'), # Linefeed also terminates strings. ], 'comment': [ - (r'\*/', Comment.Multiline, 'root'), + (r'\*/', Comment.Multiline, '#pop'), (r'(.|\n)', Comment.Multiline), ] } -- cgit v1.2.1 From 677b3e86e68240d8964dd2ab818963dd8b25f8ef Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Fri, 10 May 2013 00:25:28 +0200 Subject: Added missing FOCUS keyword. 
--- pygments/lexers/other.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index ce70e3e5..2834b27e 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3950,8 +3950,8 @@ class WebFocusLexer(RegexLexer): r'multilines|newpage|nomatch|noprint|nototal|on|over|' r'page-break|print|printonly|ranked|recap|recompute|redefines|' r'reject|row-total|rows|savb|save|set|sub-total|subfoot|' - r'subhead|subtotal|sum|summarize|table|the|then|tiles|total|' - r'update|when|where|with|within)\b', Keyword.Reserved), + r'subhead|subtotal|sum|summarize|tablef|table|the|then|tiles|' + r'total|update|when|where|with|within)\b', Keyword.Reserved), (r'"', String, 'focus_fidel'), (r'\b(missing)\b', Name.Constant), (r'\b(asq|ave|cnt|ct|dst|fst|lst|max|min|pct|rcpt|st|sum|tot)\.', -- cgit v1.2.1 From 0dcac07fbf5b2c4e4aad0fe43f74b97677b0a3df Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sun, 19 May 2013 22:59:56 +0200 Subject: Cleaned up Easytrieve lexer. * Changed stacking 'root' to '#pop'. * Added constants for operators and punctuations. 
--- pygments/lexers/other.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 2834b27e..7837ebe1 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3700,6 +3700,9 @@ class EasytrieveLexer(RegexLexer): _DELIMITERS_OR_COMENT = _DELIMITERS + '*' _DELIMITER_PATTERN = '[' + _DELIMITERS + ']' _NON_DELIMITER_OR_COMMENT_PATTERN = '[^' + _DELIMITERS_OR_COMENT + ']' + _PUNCTUATIONS = r'[\[\](){}<>;,]' + _OPERATORS = ur'[+\-/=&%¬]' + tokens = { 'root': [ @@ -3738,21 +3741,20 @@ class EasytrieveLexer(RegexLexer): r'USERID|VALUE|VERIFY|W|WHEN|WHILE|WORK|WRITE|X|XDM|XRST)' r'(' + _DELIMITER_PATTERN + r')', bygroups(Keyword.Reserved, Operator)), - (r'[\[\](){}<>;,]', Punctuation), - (ur'[-+/=&%¬]', Operator), + (_PUNCTUATIONS, Punctuation), + (_OPERATORS, Operator), (r'[0-9]+\.[0-9]*', Number.Float), (r'[0-9]+', Number.Integer), (r"'(''|[^'])*'", String), - (r'\.', Operator), (r'\s+', Whitespace), (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name) # Everything else just belongs to a name ], 'after_macro_argument': [ - (r'\*.*\n', Comment.Single, 'root'), - (r'\s+', Whitespace, 'root'), - (r'[\[\](){}<>;,]', Punctuation, 'root'), - (ur'[.+/=&%¬]', Operator, 'root'), - (r"'(''|[^'])*'", String, 'root'), + (r'\*.*\n', Comment.Single, '#pop'), + (r'\s+', Whitespace, '#pop'), + (_PUNCTUATIONS, Punctuation, '#pop'), + (_OPERATORS, Operator, '#pop'), + (r"'(''|[^'])*'", String, '#pop'), (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name) # Everything else just belongs to a name ], } -- cgit v1.2.1 From a33b098546e31585b6ce854d3099683172e96c47 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Mon, 20 May 2013 00:41:25 +0200 Subject: Fixed unresolved conflict during previous merge. 
--- pygments/lexers/other.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 211f662c..3a9e5e59 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -14,11 +14,7 @@ import re from pygments.lexer import RegexLexer, include, bygroups, using, \ this, combined, ExtendedRegexLexer from pygments.token import Error, Punctuation, Literal, Token, \ -<<<<<<< local Text, Comment, Operator, Keyword, Name, String, Number, Generic, Other, \ -======= - Text, Comment, Operator, Keyword, Name, String, Number, Generic, \ ->>>>>>> other Whitespace from pygments.util import get_bool_opt from pygments.lexers.web import HtmlLexer -- cgit v1.2.1 From c4484b46914ba46dc5e4c254b1cbf28bddccfe46 Mon Sep 17 00:00:00 2001 From: S?bastien Bigaret Date: Thu, 23 May 2013 13:25:03 +0200 Subject: Added a lexer for QVT Operational --- AUTHORS | 1 + pygments/lexers/_mapping.py | 1 + pygments/lexers/other.py | 133 +++++++++++++++++++++++++++++++++++++++++++- 3 files changed, 134 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 1c4a9992..c99d2ca2 100644 --- a/AUTHORS +++ b/AUTHORS @@ -21,6 +21,7 @@ Other contributors, listed alphabetically, are: * Michael Bayer -- Myghty lexers * John Benediktsson -- Factor lexer * Christopher Bertels -- Fancy lexer +* Sébastien Bigaret -- QVT Operational lexer * Jarrett Billingsley -- MiniD lexer * Adam Blinkinsop -- Haskell, Redcode lexers * Frits van Bommel -- assembler lexers diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 969bdba5..dc7b966e 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -234,6 +234,7 @@ LEXERS = { 'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), 'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 
'application/x-python')), 'PythonTracebackLexer': ('pygments.lexers.agile', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)), + 'QVToLexer': ('pygments.lexers.other', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()), 'QmlLexer': ('pygments.lexers.web', 'QML', ('qml', 'Qt Meta Language', 'Qt modeling Language'), ('*.qml',), ('application/x-qml',)), 'RConsoleLexer': ('pygments.lexers.math', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()), 'RPMSpecLexer': ('pygments.lexers.other', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)), diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 10598fb4..10f47414 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -36,7 +36,7 @@ __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer', 'ECLLexer', 'UrbiscriptLexer', 'OpenEdgeLexer', 'BroLexer', 'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer', 'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer', - 'CbmBasicV2Lexer', 'AutoItLexer', 'RexxLexer'] + 'CbmBasicV2Lexer', 'AutoItLexer', 'RexxLexer', 'QVToLexer'] class ECLLexer(RegexLexer): @@ -3776,3 +3776,134 @@ class RexxLexer(RegexLexer): for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS if pattern.search(lowerText)) + 0.01 return min(result, 1.0) + +class QVToLexer(RegexLexer): + """ + For the `QVT Operational Mapping language `_. + + Reference for implementing this: «Meta Object Facility (MOF) 2.0 + Query/View/Transformation Specification», Version 1.1 - January 2011 + (http://www.omg.org/spec/QVT/1.1/), see §8.4, «Concrete Syntax» in + particular. + + Notable tokens assignments: + + - Name.Class is assigned to the identifier following any of the following + keywords: metamodel, class, exception, primitive, enum, transformation + or library + + - Name.Function is assigned to the names of mappings and queries + + - Name.Builtin.Pseudo is assigned to the pre-defined variables 'this', + 'self' and 'result'. 
+ """ + # With obvious borrowings & inspiration from the Java, Python and C lexers + + name = 'QVTO' + aliases = ['qvto', 'qvt'] + filenames = ['*.qvto'] + + tokens = { + 'root': [ + (r'\n', Text), + (r'[^\S\n]+', Text), + (r'(--|//)(\s*)(directive:)?(.*)$', + bygroups(Comment, Comment, Comment.Preproc, Comment)), + # Uncomment the following if you want to distinguish between + # '/*' and '/**', à la javadoc + #(r'/[*]{2}(.|\n)*?[*]/', Comment.Multiline), + (r'/[*](.|\n)*?[*]/', Comment.Multiline), + (r'\\\n', Text), + (r'(and|not|or|xor|##?)\b', Operator.Word), + (r'([:]{1-2}=|[-+]=)\b', Operator.Word), + (r'(@|<<|>>)\b', Keyword), # stereotypes + (r'!=|<>|=|==|!->|->|>=|<=|[.]{3}|[+/*%=<>&|.~]', Operator), + (r'[]{}:(),;[]', Punctuation), + (r'(true|false|unlimited|null)\b', Keyword.Constant), + (r'(this|self|result)\b', Name.Builtin.Pseudo), + (r'(var)\b', Keyword.Declaration), + (r'(from|import)\b', Keyword.Namespace, 'fromimport'), + (r'(metamodel|class|exception|primitive|enum|transformation|library)(\s+)([a-zA-Z0-9_]+)', bygroups(Keyword.Word, Text, Name.Class)), + (r'(exception)(\s+)([a-zA-Z0-9_]+)', bygroups(Keyword.Word, Text, Name.Exception)), + (r'(main)\b', Name.Function), + (r'(mapping|helper|query)(\s+)', bygroups(Keyword.Declaration, Text), 'operation'), + (r'(assert)(\s+)\b', bygroups(Keyword, Text), 'assert'), + (r'(Bag|Collection|Dict|OrderedSet|Sequence|Set|Tuple|List)\b', + Keyword.Type), + include('keywords'), + ('"', String, combined('stringescape', 'dqs')), + ("'", String, combined('stringescape', 'sqs')), + include('name'), + include('numbers'), + # (r'([a-zA-Z_][a-zA-Z0-9_]*)(::)([a-zA-Z_][a-zA-Z0-9_]*)', + # bygroups(Text, Text, Text)), + ], + + 'fromimport': [ + (r'(?:[ \t]|\\\n)+', Text), + (r'[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace), + (r'', Text, '#pop'), + ], + + 'operation': [ + (r'::', Text), + (r'(.*::)([a-zA-Z_][a-zA-Z0-9_]*)[ \t]*(\()', bygroups(Text,Name.Function, Text), '#pop') + ], + + 'assert': [ + 
(r'(warning|error|fatal)\b', Keyword, '#pop'), + (r'', Text, '#pop') # all else: go back + ], + + 'keywords': [ + (r'(abstract|access|any|assert|' + r'blackbox|break|case|collect|collectNested|' + r'collectOne|collectselect|collectselectOne|composes|' + r'compute|configuration|constructor|continue|datatype|' + r'default|derived|disjuncts|do|elif|else|end|' + r'endif|except|exists|extends|' + r'forAll|forEach|forOne|from|if|' + r'implies|in|inherits|init|inout|' + r'intermediate|invresolve|invresolveIn|invresolveone|' + r'invresolveoneIn|isUnique|iterate|late|let|' + r'literal|log|map|merges|' + r'modeltype|new|object|one|' + r'ordered|out|package|population|' + r'property|raise|readonly|references|refines|' + r'reject|resolve|resolveIn|resolveone|resolveoneIn|' + r'return|select|selectOne|sortedBy|static|switch|' + r'tag|then|try|typedef|' + r'unlimited|uses|when|where|while|with|' + r'xcollect|xmap|xselect)\b', Keyword), + ], + + # There is no need to distinguish between String.Single and + # String.Double: 'strings' is factorised for 'dqs' and 'sqs' + 'strings': [ + (r'[^\\\'"\n]+', String), + # quotes, percents and backslashes must be parsed one at a time + (r'[\'"\\]', String), + ], + 'stringescape': [ + (r'\\([\\btnfr"\']|u[0-3][0-7]{2}|u[0-7]{1,2})', String.Escape) + ], + 'dqs': [ # double-quoted string + (r'"', String, '#pop'), + (r'\\\\|\\"', String.Escape), + include('strings') + ], + 'sqs': [ # single-quoted string + (r"'", String, '#pop'), + (r"\\\\|\\'", String.Escape), + include('strings') + ], + 'name': [ + ('[a-zA-Z_][a-zA-Z0-9_]*', Name), + ], + # numbers: excerpt taken from the python lexer + 'numbers': [ + (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), + (r'\d+[eE][+-]?[0-9]+', Number.Float), + (r'\d+', Number.Integer) + ], + } -- cgit v1.2.1 From 9bc0a50e2563809f0347f379921a512e88b53770 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Thu, 30 May 2013 22:18:11 +0200 Subject: Cleaned up pattern for Easytrieve keywords. 
--- pygments/lexers/other.py | 73 ++++++++++++++++++++++++++---------------------- pygments/util.py | 15 ++++++++++ 2 files changed, 54 insertions(+), 34 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 3a9e5e59..c39e474c 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -26,6 +26,7 @@ from pygments.lexers._robotframeworklexer import RobotFrameworkLexer from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \ TcshLexer +from pygments.util import sorted_keywords_pattern __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer', 'SmalltalkLexer', 'LogtalkLexer', 'GnuplotLexer', 'PovrayLexer', @@ -3700,10 +3701,41 @@ class EasytrieveLexer(RegexLexer): _DELIMITERS_OR_COMENT = _DELIMITERS + '*' _DELIMITER_PATTERN = '[' + _DELIMITERS + ']' _NON_DELIMITER_OR_COMMENT_PATTERN = '[^' + _DELIMITERS_OR_COMENT + ']' - _PUNCTUATIONS = r'[\[\](){}<>;,]' - _OPERATORS = ur'[+\-/=&%¬]' - - + _OPERATORS_PATTERN = ur'[.+\-/=\[\](){}<>;,&%¬]' + _KEYWORDS_PATTERN = sorted_keywords_pattern([ + 'AFTER-BREAK', 'AFTER-LINE', 'AFTER-SCREEN', 'AIM', 'AND', 'ATTR', + 'BEFORE', 'BEFORE-BREAK', 'BEFORE-LINE', 'BEFORE-SCREEN', 'BUSHU', + 'BY', 'CALL', 'CASE', 'CHECKPOINT', 'CHKP', 'CHKP-STATUS', 'CLEAR', + 'CLOSE', 'COL', 'COLOR', 'COMMIT', 'CONTROL', 'COPY', 'CURSOR', 'D', + 'DECLARE', 'DEFAULT', 'DEFINE', 'DELETE', 'DENWA', 'DISPLAY', 'DLI', + 'DO', 'DUPLICATE', 'E', 'ELSE', 'ELSE-IF', 'END', 'END-CASE', + 'END-DO', 'END-IF', 'END-PROC', 'ENDPAGE', 'ENDTABLE', 'ENTER', 'EOF', + 'EQ', 'ERROR', 'EXIT', 'EXTERNAL', 'EZLIB', 'F1', 'F10', 'F11', 'F12', + 'F13', 'F14', 'F15', 'F16', 'F17', 'F18', 'F19', 'F2', 'F20', 'F21', + 'F22', 'F23', 'F24', 'F25', 'F26', 'F27', 'F28', 'F29', 'F3', 'F30', + 'F31', 'F32', 'F33', 'F34', 'F35', 'F36', 'F4', 'F5', 'F6', 'F7', + 'F8', 'F9', 'FETCH', 'FILE-STATUS', 'FILL', 'FINAL', 'FIRST', + 
'FIRST-DUP', 'FOR', 'GE', 'GET', 'GO', 'GOTO', 'GQ', 'GR', 'GT', + 'HEADING', 'HEX', 'HIGH-VALUES', 'IDD', 'IDMS', 'IF', 'IN', 'INSERT', + 'JUSTIFY', 'KANJI-DATE', 'KANJI-DATE-LONG', 'KANJI-TIME', 'KEY', + 'KEY-PRESSED', 'KOKUGO', 'KUN', 'LAST-DUP', 'LE', 'LEVEL', 'LIKE', + 'LINE', 'LINE-COUNT', 'LINE-NUMBER', 'LINK', 'LIST', 'LOW-VALUES', + 'LQ', 'LS', 'LT', 'MASK', 'MATCHED', 'MEND', 'MESSAGE', 'MOVE', + 'MSTART', 'NE', 'NEWPAGE', 'NOMASK', 'NOPRINT', 'NOT', 'NOTE', + 'NOVERIFY', 'NQ', 'NULL', 'OF', 'OR', 'OTHERWISE', 'PA1', 'PA2', + 'PA3', 'PAGE-COUNT', 'PAGE-NUMBER', 'PARM-REGISTER', 'PATH-ID', + 'PATTERN', 'PERFORM', 'POINT', 'POS', 'PRIMARY', 'PRINT', 'PROCEDURE', + 'PROGRAM', 'PUT', 'READ', 'RECORD', 'RECORD-COUNT', 'RECORD-LENGTH', + 'REFRESH', 'RELEASE', 'RENUM', 'REPEAT', 'REPORT', 'REPORT-INPUT', + 'RESHOW', 'RESTART', 'RETRIEVE', 'RETURN-CODE', 'ROLLBACK', 'ROW', + 'S', 'SCREEN', 'SEARCH', 'SECONDARY', 'SELECT', 'SEQUENCE', 'SIZE', + 'SKIP', 'SOKAKU', 'SORT', 'SQL', 'STOP', 'SUM', 'SYSDATE', + 'SYSDATE-LONG', 'SYSIN', 'SYSIPT', 'SYSLST', 'SYSPRINT', 'SYSSNAP', + 'SYSTIME', 'TALLY', 'TERM-COLUMNS', 'TERM-NAME', 'TERM-ROWS', + 'TERMINATION', 'TITLE', 'TO', 'TRANSFER', 'TRC', 'UNIQUE', 'UNTIL', + 'UPDATE', 'UPPERCASE', 'USER', 'USERID', 'VALUE', 'VERIFY', 'W', + 'WHEN', 'WHILE', 'WORK', 'WRITE', 'X', 'XDM', 'XRST' + ]) tokens = { 'root': [ (r'\*.*\n', Comment.Single), @@ -3714,35 +3746,9 @@ class EasytrieveLexer(RegexLexer): (r'%' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Variable), (r'(FILE|JOB|PARM|PROC|REPORT)(' + _DELIMITER_PATTERN + r')', bygroups(Keyword.Declaration, Operator)), - (r'(AFTER-BREAK|AFTER-LINE|AFTER-SCREEN|AIM|AND|ATTR|BEFORE|' - r'BEFORE-BREAK|BEFORE-LINE|BEFORE-SCREEN|BUSHU|BY|CALL|CASE|' - r'CHECKPOINT|CHKP|CHKP-STATUS|CLEAR|CLOSE|COL|COLOR|COMMIT|' - r'CONTROL|COPY|CURSOR|D|DECLARE|DEFAULT|DEFINE|DELETE|DENWA|' - r'DISPLAY|DLI|DO|DUPLICATE|E|ELSE|ELSE-IF|END|END-CASE|END-DO|' - 
r'END-IF|END-PROC|ENDPAGE|ENDTABLE|ENTER|EOF|EQ|ERROR|EXIT|' - r'EXTERNAL|EZLIB|F1|F10|F11|F12|F13|F14|F15|F16|F17|F18|F19|F2|' - r'F20|F21|F22|F23|F24|F25|F26|F27|F28|F29|F3|F30|F31|F32|F33|' - r'F34|F35|F36|F4|F5|F6|F7|F8|F9|FETCH|FILE-STATUS|FILL|' - r'FINAL|FIRST|FIRST-DUP|FOR|GE|GET|GO|GOTO|GQ|GR|GT|HEADING|' - r'HEX|HIGH-VALUES|IDD|IDMS|IF|IN|INSERT|JUSTIFY|KANJI-DATE|' - r'KANJI-DATE-LONG|KANJI-TIME|KEY|KEY-PRESSED|KOKUGO|KUN|' - r'LAST-DUP|LE|LEVEL|LIKE|LINE|LINE-COUNT|LINE-NUMBER|LINK|LIST|' - r'LOW-VALUES|LQ|LS|LT|MASK|MATCHED|MEND|MESSAGE|MOVE|MSTART|NE|' - r'NEWPAGE|NOMASK|NOPRINT|NOT|NOTE|NOVERIFY|NQ|NULL|OF|OR|' - r'OTHERWISE|PA1|PA2|PA3|PAGE-COUNT|PAGE-NUMBER|PARM-REGISTER|' - r'PATH-ID|PATTERN|PERFORM|POINT|POS|PRIMARY|PRINT|' - r'PROCEDURE|PROGRAM|PUT|READ|RECORD|RECORD-COUNT|RECORD-LENGTH|' - r'REFRESH|RELEASE|RENUM|REPEAT|REPORT|REPORT-INPUT|RESHOW|' - r'RESTART|RETRIEVE|RETURN-CODE|ROLLBACK|ROW|S|SCREEN|SEARCH|' - r'SECONDARY|SELECT|SEQUENCE|SIZE|SKIP|SOKAKU|SORT|SQL|STOP|SUM|' - r'SYSDATE|SYSDATE-LONG|SYSIN|SYSIPT|SYSLST|SYSPRINT|SYSSNAP|' - r'SYSTIME|TALLY|TERM-COLUMNS|TERM-NAME|TERM-ROWS|TERMINATION|' - r'TITLE|TO|TRANSFER|TRC|UNIQUE|UNTIL|UPDATE|UPPERCASE|USER|' - r'USERID|VALUE|VERIFY|W|WHEN|WHILE|WORK|WRITE|X|XDM|XRST)' - r'(' + _DELIMITER_PATTERN + r')', + (_KEYWORDS_PATTERN + r'(' + _DELIMITER_PATTERN + r')', bygroups(Keyword.Reserved, Operator)), - (_PUNCTUATIONS, Punctuation), - (_OPERATORS, Operator), + (_OPERATORS_PATTERN, Operator), (r'[0-9]+\.[0-9]*', Number.Float), (r'[0-9]+', Number.Integer), (r"'(''|[^'])*'", String), @@ -3752,8 +3758,7 @@ class EasytrieveLexer(RegexLexer): 'after_macro_argument': [ (r'\*.*\n', Comment.Single, '#pop'), (r'\s+', Whitespace, '#pop'), - (_PUNCTUATIONS, Punctuation, '#pop'), - (_OPERATORS, Operator, '#pop'), + (_OPERATORS_PATTERN, Operator, '#pop'), (r"'(''|[^'])*'", String, '#pop'), (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name) # Everything else just belongs to a name ], diff --git 
a/pygments/util.py b/pygments/util.py index caac1144..9a6c525c 100644 --- a/pygments/util.py +++ b/pygments/util.py @@ -251,6 +251,21 @@ def unirange(a, b): return u'(?:' + u'|'.join(buf) + u')' +def sorted_keywords_pattern(keywords): + ''' + Pattern for a regular expression that matches any word in ``keywords`` even if + some of them are prefixes of others. This is particular useful if '\b' cannot be + use as delimiter after a keyword. + + >>> sorted_keywords_pattern(['a', 'aa', 'aaa', 'b', 'cc']) + u'(aaa|aa|cc|a|b)' + ''' + assert keywords is not None + escaped_keywords = [re.escape(keyword) for keyword in keywords] + sort_key = lambda keyword: (-len(keyword), keyword) + sorted_keywords = sorted(escaped_keywords, key=sort_key) + return u'(' + '|'.join(sorted_keywords) + u')' + # Python 2/3 compatibility if sys.version_info < (3,0): -- cgit v1.2.1 From 26e8af5761aab8daeb85f287d7abf2fa42cb43e1 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Fri, 31 May 2013 01:26:37 +0200 Subject: Cleaned up handling of Rexx functions, keywords and operators. 
--- pygments/lexers/other.py | 51 ++++++++++++++++++++++++------------------------ pygments/util.py | 4 ++-- 2 files changed, 28 insertions(+), 27 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index c39e474c..72c6bc43 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -4256,6 +4256,28 @@ class RexxLexer(RegexLexer): mimetypes = ['text/x-rexx'] flags = re.IGNORECASE + _FUNCTIONS_PATTERN = sorted_keywords_pattern([ + 'abbrev', 'abs', 'address', 'arg', 'b2x', 'bitand', 'bitor', 'bitxor', + 'c2d', 'c2x', 'center', 'charin', 'charout', 'chars', 'compare', + 'condition', 'copies', 'd2c', 'd2x', 'datatype', 'date', 'delstr', + 'delword', 'digits', 'errortext', 'form', 'format', 'fuzz', 'insert', + 'lastpos', 'left', 'length', 'linein', 'lineout', 'lines', 'max', + 'min', 'overlay', 'pos', 'queued', 'random', 'reverse', 'right', + 'sign', 'sourceline', 'space', 'stream', 'strip', 'substr', 'subword', + 'symbol', 'time', 'trace', 'translate', 'trunc', 'value', 'verify', + 'word', 'wordindex', 'wordlength', 'wordpos', 'words', 'x2b', 'x2c', + 'x2d', 'xrange']) + _KEYWORDS_PATTERN = sorted_keywords_pattern([ + 'address', 'arg', 'by', 'call', 'do', 'drop', 'else', 'end', 'exit', + 'for', 'forever', 'if', 'interpret', 'iterate', 'leave', 'nop', + 'numeric', 'off', 'on', 'options', 'parse', 'pull', 'push', 'queue', + 'return', 'say', 'select', 'signal', 'then', 'to', 'trace', 'until', + 'while']) + _OPERATORS_PATTERN = sorted_keywords_pattern([ + '%', '&', '&&', '(', ')', '*', '**', '+', ',', '-', '.', '/', '//', + '<', '<<', '<<=', '<=', '<>', '=', '==', '>', '><', '>=', '>>', '>>=', + '\\', '\\<', '\\<<', '\\=', '\\==', '\\>', '\\>>', '|', '||', u'¬', + u'¬<', u'¬<<', u'¬=', u'¬==', u'¬>', u'¬>>']) tokens = { 'root': [ (r'\s', Whitespace), @@ -4268,32 +4290,11 @@ class RexxLexer(RegexLexer): Keyword.Declaration)), (r'([a-z_][a-z0-9_]*)(\s*)(:)', bygroups(Name.Label, Whitespace, Operator)), - include('function'), - 
include('keyword'), - include('operator'), - (r'[a-z_][a-z0-9_]*', Text), - ], - 'function': [ - (r'(abbrev|abs|address|arg|b2x|bitand|bitor|bitxor|c2d|c2x|' - r'center|charin|charout|chars|compare|condition|copies|d2c|' - r'd2x|datatype|date|delstr|delword|digits|errortext|form|' - r'format|fuzz|insert|lastpos|left|length|linein|lineout|lines|' - r'max|min|overlay|pos|queued|random|reverse|right|sign|' - r'sourceline|space|stream|strip|substr|subword|symbol|time|' - r'trace|translate|trunc|value|verify|word|wordindex|' - r'wordlength|wordpos|words|x2b|x2c|x2d|xrange)(\s*)(\()', + (_FUNCTIONS_PATTERN + r'(\s*)(\()', bygroups(Name.Builtin, Whitespace, Operator)), - ], - 'keyword': [ - (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|' - r'interpret|iterate|leave|nop|numeric|off|on|options|parse|' - r'pull|push|queue|return|say|select|signal|to|then|trace|until|' - r'while)\b', Keyword.Reserved), - ], - 'operator': [ - (ur'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||' - ur'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|' - ur'¬>>|¬>|¬|\.|,)', Operator), + (_KEYWORDS_PATTERN + r'\b', Keyword.Reserved), + (_OPERATORS_PATTERN, Operator), + (r'[a-z_][a-z0-9_]*', Text), ], 'string_double': [ (r'[^"\n]+', String), diff --git a/pygments/util.py b/pygments/util.py index 9a6c525c..67689ce4 100644 --- a/pygments/util.py +++ b/pygments/util.py @@ -255,7 +255,7 @@ def sorted_keywords_pattern(keywords): ''' Pattern for a regular expression that matches any word in ``keywords`` even if some of them are prefixes of others. This is particular useful if '\b' cannot be - use as delimiter after a keyword. + used as delimiter after a keyword. 
>>> sorted_keywords_pattern(['a', 'aa', 'aaa', 'b', 'cc']) u'(aaa|aa|cc|a|b)' @@ -264,7 +264,7 @@ def sorted_keywords_pattern(keywords): escaped_keywords = [re.escape(keyword) for keyword in keywords] sort_key = lambda keyword: (-len(keyword), keyword) sorted_keywords = sorted(escaped_keywords, key=sort_key) - return u'(' + '|'.join(sorted_keywords) + u')' + return u'(' + u'|'.join(sorted_keywords) + u')' # Python 2/3 compatibility -- cgit v1.2.1 From b3cbb1e20b74269deeb146513b6de353e5583a67 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sun, 2 Jun 2013 14:37:59 +0200 Subject: Improved lexers for Easytrieve and JCL * Added highlighting names of Easytrive file, macro, procedure and report declarations. * Added missing Easytrive keyword "MACRO". * Cleaned up JCL lexer stack by changing a few named target states to '#pop'. There are still several named states left though. --- pygments/lexers/other.py | 94 +++++++++++++++++++++++++----------------- tests/examplefiles/example.ezt | 16 +++++-- tests/test_lexers_other.py | 22 ++++++++-- 3 files changed, 87 insertions(+), 45 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 72c6bc43..6c094c7e 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3720,21 +3720,21 @@ class EasytrieveLexer(RegexLexer): 'JUSTIFY', 'KANJI-DATE', 'KANJI-DATE-LONG', 'KANJI-TIME', 'KEY', 'KEY-PRESSED', 'KOKUGO', 'KUN', 'LAST-DUP', 'LE', 'LEVEL', 'LIKE', 'LINE', 'LINE-COUNT', 'LINE-NUMBER', 'LINK', 'LIST', 'LOW-VALUES', - 'LQ', 'LS', 'LT', 'MASK', 'MATCHED', 'MEND', 'MESSAGE', 'MOVE', - 'MSTART', 'NE', 'NEWPAGE', 'NOMASK', 'NOPRINT', 'NOT', 'NOTE', - 'NOVERIFY', 'NQ', 'NULL', 'OF', 'OR', 'OTHERWISE', 'PA1', 'PA2', - 'PA3', 'PAGE-COUNT', 'PAGE-NUMBER', 'PARM-REGISTER', 'PATH-ID', - 'PATTERN', 'PERFORM', 'POINT', 'POS', 'PRIMARY', 'PRINT', 'PROCEDURE', - 'PROGRAM', 'PUT', 'READ', 'RECORD', 'RECORD-COUNT', 'RECORD-LENGTH', - 'REFRESH', 'RELEASE', 'RENUM', 'REPEAT', 'REPORT', 'REPORT-INPUT', 
- 'RESHOW', 'RESTART', 'RETRIEVE', 'RETURN-CODE', 'ROLLBACK', 'ROW', - 'S', 'SCREEN', 'SEARCH', 'SECONDARY', 'SELECT', 'SEQUENCE', 'SIZE', - 'SKIP', 'SOKAKU', 'SORT', 'SQL', 'STOP', 'SUM', 'SYSDATE', - 'SYSDATE-LONG', 'SYSIN', 'SYSIPT', 'SYSLST', 'SYSPRINT', 'SYSSNAP', - 'SYSTIME', 'TALLY', 'TERM-COLUMNS', 'TERM-NAME', 'TERM-ROWS', - 'TERMINATION', 'TITLE', 'TO', 'TRANSFER', 'TRC', 'UNIQUE', 'UNTIL', - 'UPDATE', 'UPPERCASE', 'USER', 'USERID', 'VALUE', 'VERIFY', 'W', - 'WHEN', 'WHILE', 'WORK', 'WRITE', 'X', 'XDM', 'XRST' + 'LQ', 'LS', 'LT', 'MACRO', 'MASK', 'MATCHED', 'MEND', 'MESSAGE', + 'MOVE', 'MSTART', 'NE', 'NEWPAGE', 'NOMASK', 'NOPRINT', 'NOT', + 'NOTE', 'NOVERIFY', 'NQ', 'NULL', 'OF', 'OR', 'OTHERWISE', 'PA1', + 'PA2', 'PA3', 'PAGE-COUNT', 'PAGE-NUMBER', 'PARM-REGISTER', + 'PATH-ID', 'PATTERN', 'PERFORM', 'POINT', 'POS', 'PRIMARY', 'PRINT', + 'PROCEDURE', 'PROGRAM', 'PUT', 'READ', 'RECORD', 'RECORD-COUNT', + 'RECORD-LENGTH', 'REFRESH', 'RELEASE', 'RENUM', 'REPEAT', 'REPORT', + 'REPORT-INPUT', 'RESHOW', 'RESTART', 'RETRIEVE', 'RETURN-CODE', + 'ROLLBACK', 'ROW', 'S', 'SCREEN', 'SEARCH', 'SECONDARY', 'SELECT', + 'SEQUENCE', 'SIZE', 'SKIP', 'SOKAKU', 'SORT', 'SQL', 'STOP', 'SUM', + 'SYSDATE', 'SYSDATE-LONG', 'SYSIN', 'SYSIPT', 'SYSLST', 'SYSPRINT', + 'SYSSNAP', 'SYSTIME', 'TALLY', 'TERM-COLUMNS', 'TERM-NAME', + 'TERM-ROWS', 'TERMINATION', 'TITLE', 'TO', 'TRANSFER', 'TRC', + 'UNIQUE', 'UNTIL', 'UPDATE', 'UPPERCASE', 'USER', 'USERID', 'VALUE', + 'VERIFY', 'W', 'WHEN', 'WHILE', 'WORK', 'WRITE', 'X', 'XDM', 'XRST' ]) tokens = { 'root': [ @@ -3744,17 +3744,26 @@ class EasytrieveLexer(RegexLexer): (r'&' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+\.', Name.Variable, 'after_macro_argument'), # Macro call (r'%' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Variable), - (r'(FILE|JOB|PARM|PROC|REPORT)(' + _DELIMITER_PATTERN + r')', + (r'(FILE|MACRO|REPORT)(\s+)', + bygroups(Keyword.Declaration, Whitespace), 'after_declaration'), + (r'(JOB|PARM)' + r'(' + _DELIMITER_PATTERN 
+ r')', bygroups(Keyword.Declaration, Operator)), (_KEYWORDS_PATTERN + r'(' + _DELIMITER_PATTERN + r')', bygroups(Keyword.Reserved, Operator)), (_OPERATORS_PATTERN, Operator), + # Procedure declaration + (r'(' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+)(\s*)(\.?)(\s*)(PROC)(\s*\n)', + bygroups(Name.Function, Whitespace, Operator, Whitespace, Keyword.Declaration, Whitespace)), (r'[0-9]+\.[0-9]*', Number.Float), (r'[0-9]+', Number.Integer), (r"'(''|[^'])*'", String), (r'\s+', Whitespace), (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name) # Everything else just belongs to a name ], + 'after_declaration': [ + (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Function), + ('', Whitespace, '#pop') + ], 'after_macro_argument': [ (r'\*.*\n', Comment.Single, '#pop'), (r'\s+', Whitespace, '#pop'), @@ -3763,6 +3772,8 @@ class EasytrieveLexer(RegexLexer): (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name) # Everything else just belongs to a name ], } + _COMMENT_LINE_REGEX = re.compile(r'^\s*\*') + _MACRO_HEADER_REGEX = re.compile(r'^\s*MACRO') def analyse_text(text): """ @@ -3777,15 +3788,20 @@ class EasytrieveLexer(RegexLexer): hasProc = False hasParm = False hasReport = False - isBroken = False - # Skip possible header comments. - while len(lines) and lines[0].startswith('*'): - hasHeaderComment = True + def isCommentLine(line): + return EasytrieveLexer._COMMENT_LINE_REGEX.match(lines[0]) is not None + + def isEmptyLine(line): + return not bool(line.strip()) + + # Remove possible empty lines and header comments. + while lines and (isEmptyLine(lines[0]) or isCommentLine(lines[0])): + if not isEmptyLine(lines[0]): + hasHeaderComment = True del lines[0] - firstLine = lines[0] - if firstLine[:6] in ('MACRO', 'MACRO '): + if EasytrieveLexer._MACRO_HEADER_REGEX.match(lines[0]): # Looks like an Easytrieve macro. 
result = 0.4 if hasHeaderComment: @@ -3795,37 +3811,39 @@ class EasytrieveLexer(RegexLexer): for line in lines: words = line.split() if (len(words) >= 2): - first_word = words[0] + firstWord = words[0] if not hasReport: if not hasJob: if not hasFile: if not hasParm: - if first_word == 'PARM': + if firstWord == 'PARM': hasParm = True - if first_word == 'FILE': + if firstWord == 'FILE': hasFile = True - if first_word == 'JOB': + if firstWord == 'JOB': hasJob = True - elif first_word == 'PROC': + elif firstWord == 'PROC': hasProc = True - elif first_word == 'END-PROC': + elif firstWord == 'END-PROC': hasEndProc = True - elif first_word == 'REPORT': + elif firstWord == 'REPORT': hasReport = True # Weight the findings. - if not isBroken and hasJob and (hasProc == hasEndProc): + if hasJob and (hasProc == hasEndProc): + if hasHeaderComment: + result += 0.1 if hasParm: if hasProc: # Found PARM, JOB and PROC/END-PROC: # pretty sure this is Easytrieve. - result = 0.8 + result += 0.8 else: # Found PARAM and JOB: probably this is Easytrieve - result = 0.5 + result += 0.5 else: # Found JOB and possibly other keywords: might be Easytrieve - result = 0.11 + result += 0.11 if hasParm: # Note: PARAM is not a proper English word, so this is # regarded a much better indicator for Easytrieve than @@ -3843,7 +3861,7 @@ class JclLexer(RegexLexer): """ `Job Control Language (JCL) `_ is a scripting language used on mainframe platforms to instruct the system - on how to run a batch job or start a subsystem. It is somewhat + on how to run a batch job or start a subsystem. It is somewhat comparable to MS DOS batch and Unix shell scripts. *New in Pygments 1.7.* @@ -3863,7 +3881,7 @@ class JclLexer(RegexLexer): (r'.*\n', Other) # Input text or inline code in any language. 
], 'statement': [ - (r'\s*\n', Whitespace, 'root'), + (r'\s*\n', Whitespace, '#pop'), (r'([a-z][a-z_0-9]*)(\s+)(exec|job)(\s*)', bygroups(Name.Label, Whitespace, Keyword.Reserved, Whitespace), 'option'), @@ -3872,11 +3890,11 @@ class JclLexer(RegexLexer): ], 'statement_command': [ (r'\s+(command|cntl|dd|endctl|endif|else|include|jcllib|' - r'output|pend|proc|set|then|xmit)\s*', Keyword.Reserved, 'option'), + r'output|pend|proc|set|then|xmit)\s+', Keyword.Reserved, 'option'), include('option') ], 'jes2_statement': [ - (r'\s*\n', Whitespace, 'root'), + (r'\s*\n', Whitespace, '#pop'), (r'\$', Keyword, 'option'), (r'\b(jobparam|message|netacct|notify|output|priority|route|' r'setup|signoff|xeq|xmit)\b', Keyword, 'option'), @@ -3898,7 +3916,7 @@ class JclLexer(RegexLexer): (r"(\n)(//)", bygroups(Text, Keyword.Pseudo)), (r"''", String), (r"[^']", String), - (r"'", String, 'option'), + (r"'", String, '#pop'), ], 'option_comment': [ (r'\n', Text, 'root'), diff --git a/tests/examplefiles/example.ezt b/tests/examplefiles/example.ezt index b068fca3..fec2aa4c 100644 --- a/tests/examplefiles/example.ezt +++ b/tests/examplefiles/example.ezt @@ -1,4 +1,4 @@ -* Easytrieve Plus Test Programm. +* Easytrieve Plus example programm. * Environtment section. PARM DEBUG(FLOW FLDCHK) @@ -10,13 +10,23 @@ FILE PERSNL FB(150 1800) DEPT 98 3 N. GROSS 94 4 P 2 * ^ 2 field definitions in 1 line. +* Call macro in example.mac. FILE EXAMPLE FB(80 200) %EXAMPLE SOMEFILE SOME -* Macro declaration (to be valid, this would * Activity Section. -JOB INPUT PERSNL NAME FIRST-PROGRAM +JOB INPUT PERSNL NAME FIRST-PROGRAM START AT-START FINISH AT_FINISH PRINT PAY-RPT REPORT PAY-RPT LINESIZE 80 TITLE 01 'PERSONNEL REPORT EXAMPLE-1' LINE 01 DEPT NAME EMP# GROSS + +* Procedure declarations. +AT-START. PROC + DISPLAY 'PROCESSING...' +END-PROC + +AT-FINISH +PROC + DISPLAY 'DONE.' 
+END-PROC diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py index 7936fe38..83330c90 100644 --- a/tests/test_lexers_other.py +++ b/tests/test_lexers_other.py @@ -39,10 +39,24 @@ class AnalyseTextTest(unittest.TestCase): exampleFile.close() def testCanRecognizeAndGuessExampleFiles(self): - self._testCanRecognizeAndGuessExampleFiles(EasytrieveLexer) - self._testCanRecognizeAndGuessExampleFiles(JclLexer) - self._testCanRecognizeAndGuessExampleFiles(RexxLexer) - self._testCanRecognizeAndGuessExampleFiles(WebFocusLexer) + LEXERS_TO_TEST = [ + EasytrieveLexer, + JclLexer, + RexxLexer, + ] + for lexerToTest in LEXERS_TO_TEST: + self._testCanRecognizeAndGuessExampleFiles(lexerToTest) + + +class EasyTrieveLexerTest(unittest.TestCase): + def testCanGuessFromText(self): + self.assertLess(0, EasytrieveLexer.analyse_text('MACRO')) + self.assertLess(0, EasytrieveLexer.analyse_text('\nMACRO')) + self.assertLess(0, EasytrieveLexer.analyse_text(' \nMACRO')) + self.assertLess(0, EasytrieveLexer.analyse_text(' \n MACRO')) + self.assertLess(0, EasytrieveLexer.analyse_text('*\nMACRO')) + self.assertLess(0, EasytrieveLexer.analyse_text( + '*\n *\n\n \n*\n MACRO')) class RexxLexerTest(unittest.TestCase): -- cgit v1.2.1 From bfe247e6d60733111dacdba25fd85b16c894b36b Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sun, 2 Jun 2013 14:43:21 +0200 Subject: Fixed example for JCL line continuation in comments. The continuation character now is properly placed in column 72. 
--- tests/examplefiles/example.jcl | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/examplefiles/example.jcl b/tests/examplefiles/example.jcl index fb006369..18d4ae37 100644 --- a/tests/examplefiles/example.jcl +++ b/tests/examplefiles/example.jcl @@ -22,9 +22,10 @@ ham /* //SORTOUT DD SYSOUT=* /* -//* Test line continuation with comment at the end of a line +//* Test line continuation with comment at end of line continued by a +//* character at column 72 (in this case 'X'). //STP4 EXEC PROC=BILLING,COND.PAID=((20,LT),EVEN), // COND.LATE=(60,GT,FIND), -// COND.BILL=((20,GE),(30,LT,CHGE)) THIS STATEMENT CALLS X -// THE BILLING PROCEDURE AND SPECIFIES RETURN CODE TESTS X -// FOR THREE PROCEDURE STEPS. +// COND.BILL=((20,GE),(30,LT,CHGE)) THIS STATEMENT CALLS THE X +// BILLING PROCEDURE AND SPECIFIES RETURN CODE TESTS FOR THREEX +// PROCEDURE STEPS. -- cgit v1.2.1 From c3e40e23a69ca3f3e48ae48b0f95123261aa79df Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sun, 2 Jun 2013 20:00:51 +0200 Subject: Moved FOCUS related lexers to separate branch in order to simplify pyments.main pull request #169. 
--- pygments/lexers/_mapping.py | 4 - pygments/lexers/other.py | 323 +---------------------------------------- tests/examplefiles/example.acx | 3 - tests/examplefiles/example.fex | 225 ---------------------------- tests/examplefiles/example.mas | 21 --- tests/examplefiles/example.sty | 10 -- tests/test_basic_api.py | 5 +- tests/test_lexers_other.py | 4 +- 8 files changed, 6 insertions(+), 589 deletions(-) delete mode 100644 tests/examplefiles/example.acx delete mode 100644 tests/examplefiles/example.fex delete mode 100644 tests/examplefiles/example.mas delete mode 100644 tests/examplefiles/example.sty diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 4a94c5d9..f59a6955 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -111,9 +111,6 @@ LEXERS = { 'FancyLexer': ('pygments.lexers.agile', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), 'FantomLexer': ('pygments.lexers.compiled', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), 'FelixLexer': ('pygments.lexers.compiled', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), - 'FocusAccessLexer': ('pygments.lexers.other', 'FOCUS access', ('focus-access',), ('*.acx',), ('text/x-focus-access',)), - 'FocusMasterLexer': ('pygments.lexers.other', 'FOCUS master', ('focus-master',), ('*.mas',), ('text/x-focus-master',)), - 'FocusStyleSheetLexer': ('pygments.lexers.other', 'FOCUS style sheet', ('focus-style',), ('*.sty',), ('text/x-focus-style',)), 'FortranLexer': ('pygments.lexers.compiled', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)), 'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('Clipper', 'XBase'), ('*.PRG', '*.prg'), ()), 'GLShaderLexer': ('pygments.lexers.compiled', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)), @@ -303,7 +300,6 @@ LEXERS = { 'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)), 
'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)), 'VimLexer': ('pygments.lexers.text', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)), - 'WebFocusLexer': ('pygments.lexers.other', 'WebFOCUS', ('webfocus', 'FOCUS', 'focus'), ('*.fex',), ('text/x-webfocus', 'text/x-focus')), 'XQueryLexer': ('pygments.lexers.web', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')), 'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')), 'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)), diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 6c094c7e..6b181a64 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -38,8 +38,7 @@ __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer', 'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer', 'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer', 'CbmBasicV2Lexer', 'AutoItLexer', 'EasytrieveLexer', 'JclLexer', - 'RexxLexer', 'WebFocusLexer', 'FocusAccessLexer', - 'FocusMasterLexer', 'FocusStyleSheetLexer'] + 'RexxLexer'] class ECLLexer(RegexLexer): @@ -3939,326 +3938,6 @@ class JclLexer(RegexLexer): return result -class WebFocusLexer(RegexLexer): - """ - `(Web)FOCUS `_ is - a language for business intelligence applications. It enables to - describe, query and modify data from various sources including mainframe - platforms. It also includes FIDEL, a language to describe input forms. 
- - *New in Pygments 1.7.* - """ - name = 'WebFOCUS' - aliases = ['webfocus', 'FOCUS', 'focus'] - filenames = ['*.fex'] - mimetypes = ['text/x-webfocus', 'text/x-focus'] - flags = re.IGNORECASE - - # TODO: Consolidate rules common to 'focus' and 'dialog_manager' with 'include' or something. - # TODO: Find out if FIDEL supports "" to escape " and if so implement it. - # TODO: Add support for backslash escapes in single quote strings (and maybe double quote too?). - # TODO: Support dialog manager FIDEL input modifiers such as '.nodisplay'. - # TODO: Highlight function name after DEFINE FUNCTION. - # TODO: Highlight field name for all field types, not only numeric ones. - tokens = { - 'root': [ - (r'-\*.*\n', Comment.Single), - (r'-', Punctuation, 'dialog_manager'), - include('focus') - ], - 'focus': [ - (r'\n', Text, 'root'), - (r'\s*(across|add|alloc|as|by|clear|column-total|compute|count|' - r'crtform|decode|define|dynam|else|end|ex|exceeds|exec|file|' - r'filter|footing|for|format|free|heading|highest|hold|if|' - r'in-groups-of|in-ranges-of|join|list|lowest|match|modify|' - r'multilines|newpage|nomatch|noprint|nototal|on|over|' - r'page-break|print|printonly|ranked|recap|recompute|redefines|' - r'reject|row-total|rows|savb|save|set|sub-total|subfoot|' - r'subhead|subtotal|sum|summarize|tablef|table|the|then|tiles|' - r'total|update|when|where|with|within)\b', Keyword.Reserved), - (r'"', String, 'focus_fidel'), - (r'\b(missing)\b', Name.Constant), - (r'\b(asq|ave|cnt|ct|dst|fst|lst|max|min|pct|rcpt|st|sum|tot)\.', - Operator), - # FOCUS field declaration including display options. - (r'([a-z][a-z_0-9]*)([/])([adfip]*[0-9]+(\.[0-9]+)[-%bcdelmnrsty]*)', - bygroups(Name.Variable, Operator, Keyword.Type)), - # Rules common to 'focus' and 'dialog_manager'. 
- (r'\b(and|contains|div|eq|exceeds|excludes|from|ge|gt|in|' - r'includes|is-from|is-less-than|is-more-than|' - r'is-not-missing|is|le|like|lt|mod|ne|not-from|not|omits|or|to)\b', - Operator), - (r'[-+*/=|!]', Operator), - (r'[(){}<>;,]', Punctuation), - (r'[a-z_][a-z_0-9]*', Literal), - (r'&+[a-z_][a-z_0-9]*', Literal), - (r'[0-9]+\.[0-9]*', Number.Float), - (r'\.[0-9]+', Number.Float), - (r'[0-9]+', Number.Integer), - (r"'(''|[^'])*'", String), - (r'\s+', Whitespace) - ], - 'dialog_manager': [ - # Detect possible labels in first word of dialog manager line. - (r'\s*type\b', Keyword.Reserved, 'dialog_manager_type'), - (r':[a-z_][a-z_0-9]*\s*\n', Name.Label, 'root'), - (r'"', String, 'dialog_manager_fidel'), - # TODO: Get rid of redundant dialog manager keyword rule which - # already could be handled by the included - # 'dialog_manager_others'. However, we currently need it to not - # recognize classic labels without ':' too soon. - (r'\b(\?|close|cms|crtclear|crtform|default|defaults|else|exit|' - r'goto|htmlform|if|include|mvs|pass|prompt|quit|read|repeat|' - r'run|set|then|tso|type|window|write)\b', Keyword.Reserved, - 'dialog_manager_others'), - (r'[a-z_][a-z_0-9]*\s*\n', Name.Label, 'root'), - include('dialog_manager_others'), - ], - 'dialog_manager_others': [ - (r'\n', Text, 'root'), - (r'\s*type\b', Keyword.Reserved, 'dialog_manager_type'), - (r':[a-z_][a-z_0-9]*\s*\n', Name.Label, 'root'), - (r'\b(\?|close|cms|crtclear|crtform|default|defaults|else|exit|' - r'goto|htmlform|if|include|mvs|pass|prompt|quit|read|repeat|' - r'run|set|then|tso|type|window|write)\b', Keyword.Reserved), - # Rules common to 'focus' and 'dialog_manager'. 
- (r'\b(and|contains|div|eq|exceeds|excludes|from|ge|gt|in|' - r'includes|is|is-from|is-from|is-less-than|is-more-than|' - r'is-not-missing|le|like|lt|mod|ne|not|not-from|omits|or|to)\b', - Operator), - (r'[-+*/=|!]', Operator), - (r'[(){}<>;,]', Punctuation), - (r'[a-z_][a-z_0-9]*', Literal), - (r'&+[a-z_][a-z_0-9]*', Name.Variable), - (r'[0-9]+\.[0-9]*', Number.Float), - (r'\.[0-9]+', Number.Float), - (r'[0-9]+', Number.Integer), - (r"'(''|[^'])*'", String), - (r'\s+', Whitespace) - ], - 'dialog_manager_type': [ - # For -TYPE, render everything as ``String`` except variables. - (r'\n', Text, 'root'), - (r'&+[a-z_][a-z_0-9]*\.*', Name.Variable), - (r'[^&\n]+', String) - ], - 'dialog_manager_fidel': [ - (r'"', String, 'dialog_manager_fidel_end'), - (r'(<)(&[a-z][a-z_0-9]*)([/])([0-9]+)', - bygroups(Keyword.Reserved, Name.Variable, Operator, Number.Integer)), - (r'.', String) - ], - 'dialog_manager_fidel_end': [ - (r'\n', Text, 'root'), - (r'\s+', Whitespace) - ], - 'focus_fidel': [ - (r'"', String, 'focus_fidel_end'), - (r'&+[a-z][a-z_0-9]*', Name.Variable), - (r'\>', Keyword.Reserved), - # Line continuation. - (r'\<0x\s*\n', Keyword.Reserved), - (r'(<)([a-z][a-z_0-9]*)', - bygroups(Keyword.Reserved, Name.Variable)), - (r'(<)(\+|-|/)?([0-9]+)', - bygroups(Keyword.Reserved, Operator, Number.Integer)), - (r'.', String) - ], - 'focus_fidel_end': [ - (r'\n', Text, 'root'), - (r'\s+', Whitespace) - ] - } - - def analyse_text(text): - """ - Perform a heuristic analysis for certain very common WebFOCUS - constructs. - """ - result = 0.0 - hasComment = False - hasExec = False - hasInclude = False - hasSet = False - hasTableFile = False - - # Scan the source lines for indicators. 
- for line in text.lower().split('\n'): - if line.startswith('-'): - words = line[1:].split() - wordCount = len(words) - if wordCount > 0: - firstWord = words[0] - if firstWord.startswith('*'): - hasComment = True - elif wordCount > 1: - if firstWord == 'include': - hasInclude = True - elif (firstWord == 'set') and words[1].startswith('&'): - hasSet = True - else: - words = line.split() - wordCount = len(words) - if wordCount > 1: - if words[0] in ('ex', 'exec'): - hasExec = True - elif (words[0] in ('table', 'tablef')) \ - and (words[1] == 'file'): - hasTableFile = True - if hasComment: - result += 0.2 - if hasExec or hasInclude: - result += 0.1 - if hasTableFile: - result += 0.2 - if hasSet: - result += 0.1 - assert 0.0 <= result <= 1.0 - return result - - -class FocusMasterLexer(RegexLexer): - """ - FOCUS master files describes files and tables to be queried by FOCUS - programs. The are somewhat comparable to SQL ``create table`` statements. - - *New in Pygments 1.7.* - """ - name = 'FOCUS master' - aliases = ['focus-master'] - filenames = ['*.mas'] - mimetypes = ['text/x-focus-master'] - flags = re.IGNORECASE - - tokens = { - 'root': [ - (r'-\*.*\n', Comment.Single), - include('name'), - ], - 'name': [ - (r'\$.*\n', Comment.Single), - (r'\s+', Whitespace), - (r'[a-z_][a-z0-9_]*', Name.Builtin, 'before_value'), - (r'(\\)(\n)', bygroups(Operator, Text)), - ], - 'before_value': [ - (r'=', Punctuation, 'value'), - (r'\s+', Whitespace), - (r'.', Error, 'name') - ], - 'value': [ - (r'\$.*\n', Comment.Single, 'root'), - (r'\n', Text), - (r',', Punctuation, 'name'), - (r'\.!', Punctuation), - (r'"', String, 'string_double'), - (r'\'', String, 'string_single'), - (r'[a-z_][a-z0-9_]*', Text), - (r'[0-9]+(\.[0-9]+)?', Number), - (r'.', Text), - ], - 'string_double': [ - (r'""', String), - (r'"', String, 'value'), - (r'[^"]', String), - ], - 'string_single': [ - (r'\'\'', String), - (r'\'', String, 'value'), - (r'[^\']', String), - ], - } - - _HEADER_PATTERN = re.compile( 
- r'^((\s*\$.*)\n)*file(name)?\s*=\s*[a-z_][a-z0-9_]+\s*,(\s*(\$.*)\n)*\s*suffix\s*=\s*.+', - re.IGNORECASE - ) - - def analyse_text(text): - """ - Check for ``FILE=..., SUFFIX=...`` while ignoring comments starting with ``$``. - """ - result = 0.0 - - if FocusMasterLexer._HEADER_PATTERN.match(text): - result = 0.8 - - assert 0.0 <= result <= 1.0 - return result - - -class FocusAccessLexer(FocusMasterLexer): - """ - FOCUS access files associate segments in FOCUS master files with actual - tables containing data. - - *New in Pygments 1.7.* - """ - name = 'FOCUS access' - aliases = ['focus-access'] - filenames = ['*.acx'] - mimetypes = ['text/x-focus-access'] - - _HEADER_PATTERN = re.compile( - r'^((\s*\$.*)\n)*segment\s*=\s*[a-z_][a-z0-9_]+\s*,(\s*(\$.*)\n)*\s*tablename\s*=\s*.+', - re.IGNORECASE - ) - - def analyse_text(text): - """ - Check for ``SEGMENT=..., TABLENAME=...`` while ignoring comments - starting with ``$``. - """ - result = 0.0 - - if FocusAccessLexer._HEADER_PATTERN.match(text): - result = 0.8 - - assert 0.0 <= result <= 1.0 - return result - - -class FocusStyleSheetLexer(FocusMasterLexer): - """ - Style sheet to format reports written in FOCUS. - - *New in Pygments 1.7.* - """ - name = 'FOCUS style sheet' - aliases = ['focus-style'] - filenames = ['*.sty'] - mimetypes = ['text/x-focus-style'] - - _HEADER_TYPE_PATTERN = re.compile( - r'^(((\s*\$)|(-\*)).*\n)*type\s*=\s*[a-z]+\s*,.+', - re.IGNORECASE - ) - _HEADER_PAGE_DECLARATION_PATTERN = re.compile( - r'^(((\s*\$)|(-\*)).*\n)*(orientation|pagecolor|pagesize)\s*=\s*.+\s*,.+', - re.IGNORECASE - ) - - def analyse_text(text): - """ - Check for ``TYPE=...,...`` or page layout declaration while - ignoring comments starting with ``$`` or ``-*``. 
- """ - result = 0.0 - - if FocusStyleSheetLexer._HEADER_TYPE_PATTERN.match(text): - result = 0.7 - elif FocusStyleSheetLexer._HEADER_PAGE_DECLARATION_PATTERN.match(text): - result = 0.5 - if result > 0: - textStartsWithComment = text.startswith('-*') or text.lstrip().startswith('$') - if textStartsWithComment: - result += 0.2 - - assert 0.0 <= result <= 1.0 - return result - - class RexxLexer(RegexLexer): """ `Rexx `_ is a scripting language available for diff --git a/tests/examplefiles/example.acx b/tests/examplefiles/example.acx deleted file mode 100644 index ec80b9a2..00000000 --- a/tests/examplefiles/example.acx +++ /dev/null @@ -1,3 +0,0 @@ -$ Example FOCUS access file. -SEGNAME=CUSTOMERS, TABLENAME = "SOME"."CUSTOMERS", KEYS =1, - WRITE= NO, DBSPACE = SOME.WHERE0,$ diff --git a/tests/examplefiles/example.fex b/tests/examplefiles/example.fex deleted file mode 100644 index 92d0b0f6..00000000 --- a/tests/examplefiles/example.fex +++ /dev/null @@ -1,225 +0,0 @@ --* WebFOCUS test source file for pygments. - --* Some basic commands --TYPE hello world! --SET &NAME='John'; --TYPE hello &NAME ! --SET &DATETMP = DATECVT((DATEADD((DATECVT(&DATEIN, 'I8YYMD', 'YYMD')),'M', -6)),'YYMD','I8YYMD'); - --GOTO LABEL1; --TYPE Skip me - --* Label using classic syntax. --LABEL1 - --* Label using modern syntax. --:LABEL2 - --* Use FIDEL with MODIFY. -MODIFY FILE EMPLOYEE -CRTFORM - "EMPLOYEE UPDATE" - "EMPLOYEE ID #: : BANK, EMPLOYEES AND SALARIES " - "<10 . 
--HTMLFORM CSTERM --SET &ECHO = ALL; --* --* Replaces IDCAMS SYSIN --* -DYNAM ALLOC FILE CATCTL NEW RECFM FB - - LRECL 80 BLKSIZE 3120 SPACE 1,1 TRK REUSE --* --* Replaces IDCAMS SYSPRINT --* -DYNAM ALLOC FILE CATLST NEW RECFM VBA - - LRECL 125 BLKSIZE 27998 SPACE 2,1 TRK REUSE --* --* PRINT80 is a MFD used to read an 80 byte field named CONTENTS --* -DYNAM ALLOC FILE PRNT80 NEW RECFM FB - - LRECL 80 BLKSIZE 3120 SPACE 1,1 TRK REUSE --* --RUN --* --* Create the tape dataset name mask (first five nodes) --* --SET &CAT1 = ' LISTC LVL(' || '''' || -- 'PN0100.A143200.P87.P87800.STERM' || '''' || ') VOL'; --* --WRITE CATCTL &CAT1 --* --RUN --* --* Call the user written subroutine (i.e., GETCAT) to retrieve a list of tape datasets --* --SET &X = ' '; --SET &X = GETCAT(&X,'A1'); --* --* TABLE the catalog listing of tape datasets to a HOLD file --* -DEFINE FILE PRNT80 - DSNAME/A44 = SUBSTR (80,CONTENTS,1,44,44,'A44'); -END --* -TABLE FILE PRNT80 - PRINT DSNAME - ON TABLE HOLD -END --* -DYNAM FREE FILE CATCTL -DYNAM FREE FILE CATLST -DYNAM FREE FILE PRNT80 --* --RUN --* --*----------------------------------------------------------------------- --* --* Create the HTML drop down list --* -DEFINE FILE HOLD - NODE_POS/I3 = POSIT (DSNAME,44,'.SEM',4,'I3'); - WHATSEM/A4 = SUBSTR (44,DSNAME,NODE_POS+4,44,4,'A4'); - SEMWK/A2 = EDIT (WHATSEM,'$$99'); - SEMYR/A2 = EDIT (WHATSEM,'99$$'); - SEMESTERY2K/A6 = IF SEMYR LT '50' THEN ('20' | WHATSEM) - ELSE ('19' | WHATSEM); - D_SEM/A8 = DECODE SEMWK (03 'Spring' - 20 'Summer 1' - 26 'Summer 2' - 36 'Fall' - ELSE 'ERROR'); - SEMTXT/A16 = D_SEM | ' - ' | WHATSEM; --* Create the dynamic html for the drop down list - SEMESTER/A50 = ''; -END --* -TABLE FILE HOLD - SUM SEMESTER --* Want the most recent semester first - BY HIGHEST SEMESTERY2K NOPRINT - ON TABLE SET HOLDLIST PRINTONLY - ON TABLE HOLD AS HOLDSEMS -END --* --RUN --* --*----------------------------------------------------------------------- --* --SET &SPACE = '&' || 'nbsp;'; --* --* 
Display the HTML form to allow users to select semester/cycle --* --HTMLFORM CSTERM diff --git a/tests/examplefiles/example.mas b/tests/examplefiles/example.mas deleted file mode 100644 index fe653f07..00000000 --- a/tests/examplefiles/example.mas +++ /dev/null @@ -1,21 +0,0 @@ -$ Example FOCUS master file. -FILE=CENTGL, SUFFIX=FOC -SEGNAME=ACCOUNTS,SEGTYPE = S01 -FIELDNAME=GL_ACCOUNT, ALIAS=GLACCT, FORMAT=A7, - TITLE='Ledger,Account', FIELDTYPE=I, $ -FIELDNAME=GL_ACCOUNT_PARENT, ALIAS=GLPAR, FORMAT=A7, - TITLE=Parent, - PROPERTY=PARENT_OF, REFERENCE=GL_ACCOUNT, $ -FIELDNAME=GL_ACCOUNT_TYPE, ALIAS=GLTYPE, FORMAT=A1, - TITLE=Type,$ - -FIELDNAME=GL_ROLLUP_OP, ALIAS=GLROLL, FORMAT=A1, - TITLE=Op, $ Some comment. -FIELDNAME=GL_ACCOUNT_LEVEL, ALIAS=GLLEVEL, FORMAT=I3, - TITLE=Lev, $ - $ Another comment. -FIELDNAME=GL_ACCOUNT_CAPTION, ALIAS=GLCAP, FORMAT=A30, - TITLE=Caption, - PROPERTY=CAPTION, REFERENCE=GL_ACCOUNT, $ -FIELDNAME=SYS_ACCOUNT, ALIAS=ALINE, FORMAT=A6, - TITLE='System,Account,Line', MISSING=ON, $ diff --git a/tests/examplefiles/example.sty b/tests/examplefiles/example.sty deleted file mode 100644 index 0c113d0f..00000000 --- a/tests/examplefiles/example.sty +++ /dev/null @@ -1,10 +0,0 @@ -$ Example WebFOCUS style sheet. --* A FOCUS comment. 
-TYPE=REPORT, PAGESIZE=A4, Orientation = LANDSCAPE, $ -TYPE=HEADING, FONT='Helvetica', - SIZE=14, STYLE=BOLD, $ another comment - -PAGECOLOR=blue, $ - -TYPE=HEADING, OBJECT=TEXT, ITEM=2, COLOR=BLUE, SIZE=12, - STYLE=BOLD+UNDERLINE, $ diff --git a/tests/test_basic_api.py b/tests/test_basic_api.py index 18ed8d64..19e1988b 100644 --- a/tests/test_basic_api.py +++ b/tests/test_basic_api.py @@ -60,7 +60,10 @@ def test_lexer_classes(): if cls.name in ['XQuery', 'Opa']: # XXX temporary return - tokens = list(inst.get_tokens(test_content)) + try: + tokens = list(inst.get_tokens(test_content)) + except KeyboardInterrupt: + raise KeyboardInterrupt('interrupted %s.get_tokens(): test_content=%r' % (cls.__name__, test_content)) txt = "" for token in tokens: assert isinstance(token, tuple) diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py index 83330c90..1c8604bb 100644 --- a/tests/test_lexers_other.py +++ b/tests/test_lexers_other.py @@ -11,9 +11,7 @@ import os import unittest from pygments.lexers import guess_lexer -from pygments.lexers.other import EasytrieveLexer, JclLexer, RexxLexer, \ - WebFocusLexer - +from pygments.lexers.other import EasytrieveLexer, JclLexer, RexxLexer def _exampleFilePath(filename): return os.path.join(os.path.dirname(__file__), 'examplefiles', filename) -- cgit v1.2.1 From ab4ebd8a63601dac49d7172bc2576a411c975524 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sun, 2 Jun 2013 20:33:57 +0200 Subject: Reverted code already submitted in pull request #207. 
--- tests/test_basic_api.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/test_basic_api.py b/tests/test_basic_api.py index 19e1988b..18ed8d64 100644 --- a/tests/test_basic_api.py +++ b/tests/test_basic_api.py @@ -60,10 +60,7 @@ def test_lexer_classes(): if cls.name in ['XQuery', 'Opa']: # XXX temporary return - try: - tokens = list(inst.get_tokens(test_content)) - except KeyboardInterrupt: - raise KeyboardInterrupt('interrupted %s.get_tokens(): test_content=%r' % (cls.__name__, test_content)) + tokens = list(inst.get_tokens(test_content)) txt = "" for token in tokens: assert isinstance(token, tuple) -- cgit v1.2.1 From ca8974329cc95a0d2eedc9948ed7030704557118 Mon Sep 17 00:00:00 2001 From: Thomas Aglassinger Date: Sun, 2 Jun 2013 20:35:58 +0200 Subject: Removed mention of FOCUS as it submitted in a separate pull request. --- AUTHORS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index f4ac499b..045eb9a6 100644 --- a/AUTHORS +++ b/AUTHORS @@ -7,7 +7,7 @@ Other contributors, listed alphabetically, are: * Sam Aaron -- Ioke lexer * Ali Afshar -- image formatter -* Thomas Aglassinger -- Easytrieve, FOCUS, JCL and Rexx lexers +* Thomas Aglassinger -- Easytrieve, JCL and Rexx lexers * Kumar Appaiah -- Debian control lexer * Andreas Amann -- AppleScript lexer * Timothy Armstrong -- Dart lexer fixes -- cgit v1.2.1 From 0fbfa5ccda9dc40c6a569d351f81a07d238733a1 Mon Sep 17 00:00:00 2001 From: chebee7i Date: Mon, 11 Nov 2013 14:46:40 -0600 Subject: Add additional comments to Python lexers. Handle more doctest corner cases. Add another traceback header type. 
--- pygments/lexers/agile.py | 67 ++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 57 insertions(+), 10 deletions(-) diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py index 1f81365e..76c7f640 100644 --- a/pygments/lexers/agile.py +++ b/pygments/lexers/agile.py @@ -333,18 +333,21 @@ class PythonConsoleLexer(Lexer): for match in line_re.finditer(text): line = match.group() if line.startswith(u'>>> ') or line.startswith(u'... '): + # The line begins or continues user input. tb = 0 insertions.append((len(curcode), [(0, Generic.Prompt, line[:4])])) curcode += line[4:] elif line.rstrip() == u'...' and not tb: - # only a new >>> prompt can end an exception block - # otherwise an ellipsis in place of the traceback frames - # will be mishandled + # The line ends multiline user input. However, we cannot + # end the block of input until a new >>> prompt. The reason + # is that an ellipsis can appear in a traceback (though it + # is usually perpended by whitespace---hence the rstrip). insertions.append((len(curcode), [(0, Generic.Prompt, u'...')])) curcode += line[3:] else: + # The line is not input---is either output or part of traceback if curcode: for item in do_insertions(insertions, pylexer.get_tokens_unprocessed(curcode)): @@ -360,7 +363,31 @@ class PythonConsoleLexer(Lexer): yield match.start(), Name.Class, line elif tb: curtb += line - if not (line.startswith(' ') or line.strip() == u'...'): + # Any line beginning with ' ' is indented and assumed to be + # part of the traceback stack, and so we continue adding + # to curtb. For doctests, none of the traceback stack is + # considered. So, it is customary to replace the actual + # stack with an ellipsis instead. Usually the ellipsis is + # indented, and thus, is covered by the startswith(' '). + # However, the line can also begin with a non-alphanumeric + # character. This means that the ellipsis need not be + # indented and also that even crazier lines can be part + # of the stack as well. 
+ # http://docs.python.org/library/doctest.html#what-about-exceptions + stripped = line.strip() + if not stripped: + # Line was empty and should mark the end of the tb. + break_tb = True + elif line.startswith(' '): + # Indented lines are assumed to be part of stack + break_tb = False + elif stripped[0].isalnum(): + # Handles stripped == '...' as + break_tb = True + else: + break_tb = False + + if break_tb: tb = 0 for i, t, v in tblexer.get_tokens_unprocessed(curtb): yield tbindex+i, t, v @@ -388,6 +415,8 @@ class PythonTracebackLexer(RegexLexer): 'root': [ (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'), + (r'^Traceback \(innermost last\):\n', + Generic.Traceback, 'intb'), # SyntaxError starts with this. (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'), (r'^.*\n', Other), @@ -399,12 +428,20 @@ class PythonTracebackLexer(RegexLexer): bygroups(Text, Name.Builtin, Text, Number, Text)), (r'^( )(.+)(\n)', bygroups(Text, using(PythonLexer), Text)), + # If there are not exactly 4 spaces, then any number of spaces + # and tabs followed by 3 dots will be tokenized as a comment. + # This is added as a special rule to hide the stack for doctests. (r'^([ \t]*)(\.\.\.)(\n)', - bygroups(Text, Comment, Text)), # for doctests... - (r'^([^:]+)(: )(.+)(\n)', + bygroups(Text, Comment, Text)), + (r'^([^:\n]+)(: )(.+)(\n)', bygroups(Generic.Error, Text, Name, Text), '#pop'), (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)', - bygroups(Generic.Error, Text), '#pop') + bygroups(Generic.Error, Text), '#pop'), + # Doctests also allow the line to begin with any non-alphanumeric + # character. We do a minimal (non-greedy) match to process each + # line on its own. 
+ (r'^(\W.*)(\n)', + bygroups(Comment, Text)), ], } @@ -425,10 +462,12 @@ class Python3TracebackLexer(RegexLexer): 'root': [ (r'\n', Text), (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'), + (r'^Traceback \(innermost last\):\n', Generic.Traecback, 'intb'), (r'^During handling of the above exception, another ' r'exception occurred:\n\n', Generic.Traceback), (r'^The above exception was the direct cause of the ' r'following exception:\n\n', Generic.Traceback), + # SyntaxError starts with this. (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'), ], 'intb': [ @@ -438,12 +477,20 @@ class Python3TracebackLexer(RegexLexer): bygroups(Text, Name.Builtin, Text, Number, Text)), (r'^( )(.+)(\n)', bygroups(Text, using(Python3Lexer), Text)), + # If there are not exactly 4 spaces, then any number of spaces + # and tabs followed by 3 dots will be tokenized as a comment. + # This is added as a special rule to hide the stack for doctests. (r'^([ \t]*)(\.\.\.)(\n)', - bygroups(Text, Comment, Text)), # for doctests... - (r'^([^:]+)(: )(.+)(\n)', + bygroups(Text, Comment, Text)), + (r'^([^:\n]+)(: )(.+)(\n)', bygroups(Generic.Error, Text, Name, Text), '#pop'), (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)', - bygroups(Generic.Error, Text), '#pop') + bygroups(Generic.Error, Text), '#pop'), + # Doctests also allow the line to begin with any non-alphanumeric + # character. We do a minimal (non-greedy) match to process each + # line on its own. + (r'^(\W.*)(\n)', + bygroups(Comment, Text)), ], } -- cgit v1.2.1 From a703d66026fdce4b1547bd15521fba3912579cd0 Mon Sep 17 00:00:00 2001 From: chebee7i Date: Mon, 11 Nov 2013 16:11:18 -0600 Subject: Add fix for bare prompt lines. 
--- pygments/lexers/agile.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py index 76c7f640..341ad813 100644 --- a/pygments/lexers/agile.py +++ b/pygments/lexers/agile.py @@ -332,6 +332,10 @@ class PythonConsoleLexer(Lexer): tb = 0 for match in line_re.finditer(text): line = match.group() + if line == u'>>>\n': + # Catch common problem: editors which remove trailing + # whitespace from bare prompt lines. + line = u'>>> \n' if line.startswith(u'>>> ') or line.startswith(u'... '): # The line begins or continues user input. tb = 0 -- cgit v1.2.1 From 8f43f535dbdd8404672d0a5ff4d411bffd1ff31e Mon Sep 17 00:00:00 2001 From: chebee7i Date: Mon, 11 Nov 2013 16:18:34 -0600 Subject: Typo. --- pygments/lexers/agile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py index 341ad813..7cc46147 100644 --- a/pygments/lexers/agile.py +++ b/pygments/lexers/agile.py @@ -346,7 +346,7 @@ class PythonConsoleLexer(Lexer): # The line ends multiline user input. However, we cannot # end the block of input until a new >>> prompt. The reason # is that an ellipsis can appear in a traceback (though it - # is usually perpended by whitespace---hence the rstrip). + # is usually prepended by whitespace---hence the rstrip). insertions.append((len(curcode), [(0, Generic.Prompt, u'...')])) curcode += line[3:] -- cgit v1.2.1 From d17346b2b8a5fe1c40e9b4b81d216254b59d1c55 Mon Sep 17 00:00:00 2001 From: chebee7i Date: Mon, 11 Nov 2013 16:24:25 -0600 Subject: More comment cleanup. 
--- pygments/lexers/agile.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py index 7cc46147..0d2010b8 100644 --- a/pygments/lexers/agile.py +++ b/pygments/lexers/agile.py @@ -333,8 +333,8 @@ class PythonConsoleLexer(Lexer): for match in line_re.finditer(text): line = match.group() if line == u'>>>\n': - # Catch common problem: editors which remove trailing - # whitespace from bare prompt lines. + # Catch common problem: when editors remove trailing whitespace + # from bare prompt lines. line = u'>>> \n' if line.startswith(u'>>> ') or line.startswith(u'... '): # The line begins or continues user input. @@ -386,7 +386,7 @@ class PythonConsoleLexer(Lexer): # Indented lines are assumed to be part of stack break_tb = False elif stripped[0].isalnum(): - # Handles stripped == '...' as + # Properly covers the case when stripped == '...'. break_tb = True else: break_tb = False @@ -441,9 +441,7 @@ class PythonTracebackLexer(RegexLexer): bygroups(Generic.Error, Text, Name, Text), '#pop'), (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)', bygroups(Generic.Error, Text), '#pop'), - # Doctests also allow the line to begin with any non-alphanumeric - # character. We do a minimal (non-greedy) match to process each - # line on its own. + # doctests: the line can begin with any non-alphanumeric character. (r'^(\W.*)(\n)', bygroups(Comment, Text)), ], @@ -490,9 +488,7 @@ class Python3TracebackLexer(RegexLexer): bygroups(Generic.Error, Text, Name, Text), '#pop'), (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)', bygroups(Generic.Error, Text), '#pop'), - # Doctests also allow the line to begin with any non-alphanumeric - # character. We do a minimal (non-greedy) match to process each - # line on its own. + # doctests: the line can begin with any non-alphanumeric character. 
(r'^(\W.*)(\n)', bygroups(Comment, Text)), ], -- cgit v1.2.1 From b0ddc2eac6622304ecd1dc07eea130f358f0ac57 Mon Sep 17 00:00:00 2001 From: chebee7i Date: Wed, 13 Nov 2013 16:38:12 -0600 Subject: Improve handling of KeyboardInterrupt exceptions in Python lexers. --- pygments/lexers/agile.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py index 0d2010b8..283cf72e 100644 --- a/pygments/lexers/agile.py +++ b/pygments/lexers/agile.py @@ -358,13 +358,13 @@ class PythonConsoleLexer(Lexer): yield item curcode = '' insertions = [] - if (line.startswith(u'Traceback (most recent call last):') or + if (re.match(u'(\^C)?Traceback.*$', line) or re.match(ur' File "[^"]+", line \d+\n$', line)): tb = 1 curtb = line tbindex = match.start() - elif line == 'KeyboardInterrupt\n': - yield match.start(), Name.Class, line + #elif line == 'KeyboardInterrupt\n': + # yield match.start(), Name.Class, line elif tb: curtb += line # Any line beginning with ' ' is indented and assumed to be @@ -417,10 +417,10 @@ class PythonTracebackLexer(RegexLexer): tokens = { 'root': [ - (r'^Traceback \(most recent call last\):\n', - Generic.Traceback, 'intb'), - (r'^Traceback \(innermost last\):\n', - Generic.Traceback, 'intb'), + # Cover both (most recent call last) and (innermost last) + # The optional ^C allows us to catch keyboard interrupt signals. + (r'^(\^C)?(Traceback.*\n)', + bygroups(Error, Generic.Traceback), 'intb'), # SyntaxError starts with this. (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'), (r'^.*\n', Other), -- cgit v1.2.1 From a81c58dcdc05123174ec762191e2d2d95bffb84a Mon Sep 17 00:00:00 2001 From: chebee7i Date: Wed, 13 Nov 2013 16:40:53 -0600 Subject: Remove commented lines, as they are no longer necessary. 
--- pygments/lexer.py | 2 ++ pygments/lexers/agile.py | 2 -- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexer.py b/pygments/lexer.py index 662c059e..5504fc1b 100644 --- a/pygments/lexer.py +++ b/pygments/lexer.py @@ -604,6 +604,8 @@ class RegexLexer(Lexer): statetokens = tokendefs[statestack[-1]] break else: + # We are here only if all state tokens have been considered + # and there was not a match on any of them. try: if text[pos] == '\n': # at EOL, reset state to "root" diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py index 283cf72e..42a13881 100644 --- a/pygments/lexers/agile.py +++ b/pygments/lexers/agile.py @@ -363,8 +363,6 @@ class PythonConsoleLexer(Lexer): tb = 1 curtb = line tbindex = match.start() - #elif line == 'KeyboardInterrupt\n': - # yield match.start(), Name.Class, line elif tb: curtb += line # Any line beginning with ' ' is indented and assumed to be -- cgit v1.2.1 From 677face1df72e42fdadb6c498d3668aa93aed1f2 Mon Sep 17 00:00:00 2001 From: chebee7i Date: Thu, 14 Nov 2013 23:43:45 -0600 Subject: Completely refactored PythonConsoleLexer. --- pygments/lexers/agile.py | 251 +++++++++++++++++++++++++---------------------- 1 file changed, 135 insertions(+), 116 deletions(-) diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py index 42a13881..cb5a8652 100644 --- a/pygments/lexers/agile.py +++ b/pygments/lexers/agile.py @@ -289,118 +289,6 @@ class Python3Lexer(RegexLexer): return shebang_matches(text, r'pythonw?3(\.\d)?') -class PythonConsoleLexer(Lexer): - """ - For Python console output or doctests, such as: - - .. sourcecode:: pycon - - >>> a = 'foo' - >>> print a - foo - >>> 1 / 0 - Traceback (most recent call last): - File "", line 1, in - ZeroDivisionError: integer division or modulo by zero - - Additional options: - - `python3` - Use Python 3 lexer for code. Default is ``False``. 
- *New in Pygments 1.0.* - """ - name = 'Python console session' - aliases = ['pycon'] - mimetypes = ['text/x-python-doctest'] - - def __init__(self, **options): - self.python3 = get_bool_opt(options, 'python3', False) - Lexer.__init__(self, **options) - - def get_tokens_unprocessed(self, text): - if self.python3: - pylexer = Python3Lexer(**self.options) - tblexer = Python3TracebackLexer(**self.options) - else: - pylexer = PythonLexer(**self.options) - tblexer = PythonTracebackLexer(**self.options) - - curcode = '' - insertions = [] - curtb = '' - tbindex = 0 - tb = 0 - for match in line_re.finditer(text): - line = match.group() - if line == u'>>>\n': - # Catch common problem: when editors remove trailing whitespace - # from bare prompt lines. - line = u'>>> \n' - if line.startswith(u'>>> ') or line.startswith(u'... '): - # The line begins or continues user input. - tb = 0 - insertions.append((len(curcode), - [(0, Generic.Prompt, line[:4])])) - curcode += line[4:] - elif line.rstrip() == u'...' and not tb: - # The line ends multiline user input. However, we cannot - # end the block of input until a new >>> prompt. The reason - # is that an ellipsis can appear in a traceback (though it - # is usually prepended by whitespace---hence the rstrip). - insertions.append((len(curcode), - [(0, Generic.Prompt, u'...')])) - curcode += line[3:] - else: - # The line is not input---is either output or part of traceback - if curcode: - for item in do_insertions(insertions, - pylexer.get_tokens_unprocessed(curcode)): - yield item - curcode = '' - insertions = [] - if (re.match(u'(\^C)?Traceback.*$', line) or - re.match(ur' File "[^"]+", line \d+\n$', line)): - tb = 1 - curtb = line - tbindex = match.start() - elif tb: - curtb += line - # Any line beginning with ' ' is indented and assumed to be - # part of the traceback stack, and so we continue adding - # to curtb. For doctests, none of the traceback stack is - # considered. 
So, it is customary to replace the actual - # stack with an ellipsis instead. Usually the ellipsis is - # indented, and thus, is covered by the startswith(' '). - # However, the line can also begin with a non-alphanumeric - # character. This means that the ellipsis need not be - # indented and also that even crazier lines can be part - # of the stack as well. - # http://docs.python.org/library/doctest.html#what-about-exceptions - stripped = line.strip() - if not stripped: - # Line was empty and should mark the end of the tb. - break_tb = True - elif line.startswith(' '): - # Indented lines are assumed to be part of stack - break_tb = False - elif stripped[0].isalnum(): - # Properly covers the case when stripped == '...'. - break_tb = True - else: - break_tb = False - - if break_tb: - tb = 0 - for i, t, v in tblexer.get_tokens_unprocessed(curtb): - yield tbindex+i, t, v - else: - yield match.start(), Generic.Output, line - if curcode: - for item in do_insertions(insertions, - pylexer.get_tokens_unprocessed(curcode)): - yield item - - class PythonTracebackLexer(RegexLexer): """ For Python tracebacks. @@ -461,12 +349,14 @@ class Python3TracebackLexer(RegexLexer): tokens = { 'root': [ (r'\n', Text), - (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'), - (r'^Traceback \(innermost last\):\n', Generic.Traecback, 'intb'), + # Cover both (most recent call last) and (innermost last) + # The optional ^C allows us to catch keyboard interrupt signals. + (r'^(\^C)?(Traceback.*\n)', + bygroups(Error, Generic.Traceback), 'intb'), (r'^During handling of the above exception, another ' - r'exception occurred:\n\n', Generic.Traceback), + r'exception occurred:\n', Generic.Traceback), (r'^The above exception was the direct cause of the ' - r'following exception:\n\n', Generic.Traceback), + r'following exception:\n', Generic.Traceback), # SyntaxError starts with this. 
(r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'), ], @@ -493,6 +383,135 @@ class Python3TracebackLexer(RegexLexer): } +class PythonConsoleLexer(Lexer): + """ + For Python console output or doctests, such as: + + .. sourcecode:: pycon + + >>> a = 'foo' + >>> print a + foo + >>> 1 / 0 + Traceback (most recent call last): + File "", line 1, in + ZeroDivisionError: integer division or modulo by zero + + Additional options: + + `python3` + Use Python 3 lexer for code. Default is ``False``. + *New in Pygments 1.0.* + + """ + name = 'Python console session' + aliases = ['pycon'] + mimetypes = ['text/x-python-doctest'] + + def __init__(self, **options): + self.python3 = get_bool_opt(options, 'python3', False) + Lexer.__init__(self, **options) + + if self.python3: + pylexer = Python3Lexer + tblexer = Python3TracebackLexer + else: + pylexer = PythonLexer + tblexer = PythonTracebackLexer + + self.pylexer = pylexer(**options) + self.tblexer = tblexer(**options) + + self.tbregex = re.compile(u'(\^C)?Traceback.*$') + self.stregex = re.compile(ur' File "[^"]+", line \d+\n$') + + self.reset() + + def reset(self): + self.mode = 'output' + self.index = 0 + self.buffer = '' + self.insertions = [] + + def buffered_tokens(self): + """ + Generator of unprocessed tokens after doing insertions and before + changing to a new state. + + """ + if self.mode == 'output': + tokens = [(0, Generic.Output, self.buffer)] + elif self.mode == 'input': + tokens = self.pylexer.get_tokens_unprocessed(self.buffer) + else: # traceback + tokens = self.tblexer.get_tokens_unprocessed(self.buffer) + + for i, t, v in do_insertions(self.insertions, tokens): + # All token indexes are relative to the buffer. + yield self.index + i, t, v + + # Clear it all + self.index += len(self.buffer) + self.buffer = '' + self.insertions = [] + + def get_modecode(self, line): + """ + Returns the next mode and code to be added to the next mode's buffer. + + The next mode depends on current mode and contents of line. 
+ + """ + if line.strip() == u'...' and self.mode != 'tb': + # Tail end of an input, except when in tb. + mode = 'output' + code = '' + insertion = (0, Generic.Prompt, '...\n') + elif line.startswith('>>>') or \ + (line.startswith('...') and self.mode != 'tb'): + # New input or when not in tb, continued input. + # We do not check for continued input when in tb since it is + # allowable to replace a long stack with an ellipsis. + mode = 'input' + code = line[4:] + insertion = (0, Generic.Prompt, line[:4]) + elif self.tbregex.match(line) or self.stregex.match(line): + mode = 'tb' + code = line + insertion = None + else: + if self.mode in ('input', 'output'): + # We assume all other text is output. Multiline input via + # an open string does not have a continuation marker (...), + # so these are erroneously tokened as output. Doing this + # right is tricky and perhaps not worth it. + mode = 'output' + else: + mode = 'tb' + code = line + insertion = None + + if insertion: + self.insertions.append((len(self.buffer), [insertion])) + + return mode, code + + def get_tokens_unprocessed(self, text): + self.reset() + for match in line_re.finditer(text): + line = match.group() + mode, code = self.get_modecode(line) + if mode != self.mode: + # Yield buffered tokens before transitioning to new mode. + for token in self.buffered_tokens(): + yield token + self.mode = mode + self.buffer += code + else: + for token in self.buffered_tokens(): + yield token + + class RubyLexer(ExtendedRegexLexer): """ For `Ruby `_ source code. -- cgit v1.2.1 From 25ca6b3250d8e4405f8c170eed5c0591275a0874 Mon Sep 17 00:00:00 2001 From: chebee7i Date: Fri, 15 Nov 2013 02:09:46 -0600 Subject: Removing incorrect comment. 
--- pygments/lexers/agile.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py index cb5a8652..c433c3ab 100644 --- a/pygments/lexers/agile.py +++ b/pygments/lexers/agile.py @@ -481,10 +481,7 @@ class PythonConsoleLexer(Lexer): insertion = None else: if self.mode in ('input', 'output'): - # We assume all other text is output. Multiline input via - # an open string does not have a continuation marker (...), - # so these are erroneously tokened as output. Doing this - # right is tricky and perhaps not worth it. + # We assume all other text is output. mode = 'output' else: mode = 'tb' -- cgit v1.2.1 From 88a97a17686eb9b0a0d08c6526fdd7df99267919 Mon Sep 17 00:00:00 2001 From: chebee7i Date: Fri, 15 Nov 2013 17:00:30 -0600 Subject: Fix a few unicode issues. --- pygments/lexers/agile.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py index c433c3ab..125aeb82 100644 --- a/pygments/lexers/agile.py +++ b/pygments/lexers/agile.py @@ -430,7 +430,7 @@ class PythonConsoleLexer(Lexer): def reset(self): self.mode = 'output' self.index = 0 - self.buffer = '' + self.buffer = u'' self.insertions = [] def buffered_tokens(self): @@ -450,14 +450,14 @@ class PythonConsoleLexer(Lexer): # All token indexes are relative to the buffer. yield self.index + i, t, v - # Clear it all - self.index += len(self.buffer) - self.buffer = '' + # Update the index into the main text, include insertions. + self.index += len(self.buffer) + 4 * len(self.insertions) + self.buffer = u'' self.insertions = [] def get_modecode(self, line): """ - Returns the next mode and code to be added to the next mode's buffer. + Returns the next mode, code, and insertion. The next mode depends on current mode and contents of line. @@ -465,8 +465,8 @@ class PythonConsoleLexer(Lexer): if line.strip() == u'...' 
and self.mode != 'tb': # Tail end of an input, except when in tb. mode = 'output' - code = '' - insertion = (0, Generic.Prompt, '...\n') + code = u'' + insertion = (0, Generic.Prompt, u'...\n') elif line.startswith('>>>') or \ (line.startswith('...') and self.mode != 'tb'): # New input or when not in tb, continued input. @@ -488,21 +488,22 @@ class PythonConsoleLexer(Lexer): code = line insertion = None - if insertion: - self.insertions.append((len(self.buffer), [insertion])) - - return mode, code + return mode, code, insertion def get_tokens_unprocessed(self, text): self.reset() for match in line_re.finditer(text): line = match.group() - mode, code = self.get_modecode(line) + mode, code, insertion = self.get_modecode(line) + if mode != self.mode: # Yield buffered tokens before transitioning to new mode. for token in self.buffered_tokens(): yield token self.mode = mode + + if insertion: + self.insertions.append((len(self.buffer), [insertion])) self.buffer += code else: for token in self.buffered_tokens(): -- cgit v1.2.1 From 38b6a0a59123cee5db94adcf961331ad8b24c840 Mon Sep 17 00:00:00 2001 From: Justin Hendrick Date: Mon, 7 Jul 2014 14:02:01 -0400 Subject: Added very basic ParaSail lexer --- pygments/lexers/_mapping.py | 1 + pygments/lexers/compiled.py | 58 +++++++++++++++-- tests/examplefiles/test.psl | 151 ++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 204 insertions(+), 6 deletions(-) create mode 100644 tests/examplefiles/test.psl diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index f8454357..9953ae71 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -251,6 +251,7 @@ LEXERS = { 'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), 'OpenEdgeLexer': ('pygments.lexers.other', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), 'PanLexer': ('pygments.lexers.other', 'Pan', ('pan',), ('*.pan',), ()), + 
'ParaSailLexer': ('pygments.lexers.compiled', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)), 'PawnLexer': ('pygments.lexers.other', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), 'Perl6Lexer': ('pygments.lexers.agile', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')), 'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')), diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 25c7a4d8..dc9b7d50 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -27,12 +27,12 @@ __all__ = ['CLexer', 'CppLexer', 'DLexer', 'DelphiLexer', 'ECLexer', 'NesCLexer', 'DylanLexer', 'ObjectiveCLexer', 'ObjectiveCppLexer', 'FortranLexer', 'GLShaderLexer', 'PrologLexer', 'CythonLexer', 'ValaLexer', 'OocLexer', 'GoLexer', 'FelixLexer', 'AdaLexer', - 'Modula2Lexer', 'BlitzMaxLexer', 'BlitzBasicLexer', 'NimrodLexer', - 'FantomLexer', 'RustLexer', 'CudaLexer', 'MonkeyLexer', 'SwigLexer', - 'DylanLidLexer', 'DylanConsoleLexer', 'CobolLexer', - 'CobolFreeformatLexer', 'LogosLexer', 'ClayLexer', 'PikeLexer', - 'ChapelLexer', 'EiffelLexer', 'Inform6Lexer', 'Inform7Lexer', - 'Inform6TemplateLexer', 'MqlLexer', 'SwiftLexer'] + 'ParaSailLexer', 'Modula2Lexer', 'BlitzMaxLexer', 'BlitzBasicLexer', + 'NimrodLexer', 'FantomLexer', 'RustLexer', 'CudaLexer', + 'MonkeyLexer', 'SwigLexer', 'DylanLidLexer', 'DylanConsoleLexer', + 'CobolLexer', 'CobolFreeformatLexer', 'LogosLexer', 'ClayLexer', + 'PikeLexer', 'ChapelLexer', 'EiffelLexer', 'Inform6Lexer', + 'Inform7Lexer', 'Inform6TemplateLexer', 'MqlLexer', 'SwiftLexer'] class CFamilyLexer(RegexLexer): @@ -2560,6 +2560,52 @@ class AdaLexer(RegexLexer): ], } +class ParaSailLexer(RegexLexer): + """ + For ParaSail source code. + + .. 
versionadded:: TODO + """ + + name = 'ParaSail' + aliases = ['parasail'] + filenames = ['*.psi', '*.psl'] + mimetypes = ['text/x-parasail'] + + flags = re.MULTILINE + + tokens = { + 'root': [ + (r'[^\S\n]+', Text), + (r'//.*?\n', Comment.Single), + (r'[^\S\n]+', Text), + (r'abstract', Keyword.Declaration), + (r'(interface|class)', Keyword.Declaration), + (r'\b(abs|abstract|all|and|block|concurrent|const|continue|each|' + r'end|exit|extends|exports|forward|func|global|implements|in|' + r'interface|import|class|is|and=|or=|xor=' + r'lambda|locked|mod|new|not|null|of|optional|op|or|private|queued|' + r'ref|rem|return|reverse|seperate|some|type|until|var|with|xor|if|' + r'then|else|elsif|case|for|while|loop)\b', + Keyword.Reserved), + (r'"[^"]*"', String), + include('numbers'), + (r'#[a-zA-Z][_a-zA-Z]*', Keyword.Constant), + (r"'[^']'", String.Character), + (r'([a-zA-Z0-9_]+)', Name), + (r'(<|>|:=|\[|\]|\(|\)|\||:|;|,|.|\{|\})' + , Punctuation), + (r'(<==|==>|<=>|\*\*=|<\|=|<<=|>>=|==|!=|=\?|<=|>=|' + r'\*\*|<<|>>|=>|:=|\+=|-=|\*=|\||\|=|/=|->|\+|-|\*|/)', Operator), + (r'\n+', Text), + ], + 'numbers' : [ + (r'[0-9_]+#[0-9a-fA-F]+#', Number.Hex), + (r'0x[0-9a-fA-F]+', Number.Hex), + (r'[0-9_]+\.[0-9_]*', Number.Float), + (r'[0-9_]+', Number.Integer), + ], + } class Modula2Lexer(RegexLexer): """ diff --git a/tests/examplefiles/test.psl b/tests/examplefiles/test.psl new file mode 100644 index 00000000..ee427843 --- /dev/null +++ b/tests/examplefiles/test.psl @@ -0,0 +1,151 @@ +// This is a comment + +// 1. Basics + +// Functions +func Add(X : Univ_Integer; Y : Univ_Integer) -> Univ_Integer is + // End of line semi-colons are optional + return X + Y; +end func Add; + +// If you find Univ_Integer to be too verbose you can import Short_Names +// which defines aliases like Int for Univ_Integer and String for Univ_String +import PSL::Short_Names::*, * + +func Greetings() is + // All declarations are 'const', 'var', or 'ref' + const S : String := "Hello, World!" 
+ Println(S) +end func Greetings + +func Fib(N : Int) {N >= 0} -> Int is + // '{N >= 0}' is a precondition to this function + // Preconditions are built in to the language and checked by the compiler + if N <= 1 then + return N + else + // Left and right side of '+' are computed in Parallel here + return Fib(N - 1) + Fib(N - 2) + end if +end func Fib + +// ParaSail does not have mutable global variables +// Instead, use 'var' parameters +func Increment_All(var Nums : Vector) is + // This function takes a 'var' parameter. + // The modifications made here will be seen by caller + for each Elem of Nums concurrent loop + // The 'concurrent' keyword tells the compiler that + // iterations of the loop can happen in any order. + // It will choose the most optimal number of picothreads to use. + // Other options are 'forward' and 'reverse'. + Elem += 1 + end loop +end func Increment_All + +func Sum_Of_Squares(N : Int) -> Int is + // Built-in and inherently parallel map-reduce + // Initial value is enclosed with angle brackets + return (for I in 1 .. N => <0> + I ** 2) +end func Sum_Of_Squares + +func Sum_Of(N : Int; F : func (Int) -> Int) -> Int is + // It has functional aspects as well + // Here, we're taking an (Int) -> Int function as a parameter + return (for I in 1 .. N => <0> + F(I)) +end func Sum_Of + +func main(Args : Basic_Array) is + Greetings() + Println(Fib(5)); + var Vec : Vector := [0, 1, 2] + Increment_All(Vec) + // '|' is an overloaded operator. Here used for building strings + Println(Vec[1] | ", " | Vec[2] | ", " | Vec[3]) + Println(Sum_Of_Squares(3)) + + // Sum of fibs! + Println(Sum_Of(10, Fib)) +end func main + +// Preceding a type with 'optional' allows it to take the value 'null' +func Divide(A, B : Int) -> optional Int is + if B == 0 then + return null; + else + return A / B; + end if; +end func Divide; + +// 2. 
Modules +// Modules are composed of an interface and a class +// ParaSail has object orientation + +concurrent interface Locked_Box> is + // Create a box with the given content + func Create(C : optional Content_Type) -> Locked_Box; + + // Put something into the box + func Put(locked var B : Locked_Box; C : Content_Type); + + // Get a copy of current content + func Content(locked B : Locked_Box) -> optional Content_Type; + + // Remove current content, leaving it null + func Remove(locked var B : Locked_Box) -> optional Content_Type; + + // Wait until content is non-null, then return it, leaving it null. + func Get(queued var B : Locked_Box) -> Content_Type; +end interface Locked_Box; + +concurrent class Locked_Box is + var Content : optional Content_Type; + exports + func Create(C : optional Content_Type) -> Locked_Box is + return (Content => C); + end func Create; + + func Put(locked var B : Locked_Box; C : Content_Type) is + B.Content := C; + end func Put; + + func Content(locked B : Locked_Box) -> optional Content_Type is + return B.Content; + end func Content; + + func Remove(locked var B : Locked_Box) -> Result : optional Content_Type is + // '<==' is the move operator + // It moves the right operand into the left operand, + // leaving the right null. + Result <== B.Content; + end func Remove; + + func Get(queued var B : Locked_Box) -> Result : Content_Type is + queued until B.Content not null then + Result <== B.Content; + end func Get; +end class Locked_Box; + +func Use_Box(Seed : Univ_Integer) is + var U_Box : Locked_Box := Create(null); + // Type Inference. 
The type of 'Ran' can be left out because + // it is inferred from the return type of Random::Start + var Ran := Random::Start(Seed); + + Println("Starting 100 pico-threads trying to put something in the box"); + Println(" or take something out."); + for I in 1..100 concurrent loop + if I < 30 then + Println("Getting out " | Get(U_Box)); + else + Println("Putting in " | I); + U_Box.Put(I); + + // The first parameter can be moved to the front with a dot + // X.Foo(Y) is equivalent to Foo(X, Y) + end if; + end loop; + + Println("And the winner is: " | Remove(U_Box)); + Println("And the box is now " | Content(U_Box)); +end func Use_Box; -- cgit v1.2.1 From ce0b8f96d87f0d20401a76733d15d415b973c0ce Mon Sep 17 00:00:00 2001 From: Justin Hendrick Date: Tue, 8 Jul 2014 11:19:42 -0400 Subject: ParaSail Lexer: Fix operators and numbers. Reorganize keywords. Link to website --- pygments/lexers/compiled.py | 50 ++++++++----- tests/examplefiles/test.psl | 175 +++++++++++++++++++++++++------------------- 2 files changed, 131 insertions(+), 94 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index dc9b7d50..55b12109 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -2562,7 +2562,7 @@ class AdaLexer(RegexLexer): class ParaSailLexer(RegexLexer): """ - For ParaSail source code. + For `ParaSail `_ source code. .. 
versionadded:: TODO """ @@ -2578,32 +2578,42 @@ class ParaSailLexer(RegexLexer): 'root': [ (r'[^\S\n]+', Text), (r'//.*?\n', Comment.Single), - (r'[^\S\n]+', Text), - (r'abstract', Keyword.Declaration), - (r'(interface|class)', Keyword.Declaration), - (r'\b(abs|abstract|all|and|block|concurrent|const|continue|each|' - r'end|exit|extends|exports|forward|func|global|implements|in|' - r'interface|import|class|is|and=|or=|xor=' - r'lambda|locked|mod|new|not|null|of|optional|op|or|private|queued|' - r'ref|rem|return|reverse|seperate|some|type|until|var|with|xor|if|' - r'then|else|elsif|case|for|while|loop)\b', + # matching and=, or=, and xor= doesn't work yet + (r'\b(and(=|\sthen)?|or(=|\selse)?|xor=?|rem|mod|' + r'(is|not)\snull)\b', + Operator.Word), + # Keywords + (r'\b(abs|abstract|all|block|class|concurrent|const|continue|' + r'each|end|exit|extends|exports|forward|func|global|implements|' + r'import|in|interface|is|lambda|locked|new|not|null|of|op|' + r'optional|private|queued|ref|return|reverse|separate|some|' + r'type|until|var|with|' + # Control flow + r'if|then|else|elsif|case|for|while|loop)\b', Keyword.Reserved), + (r'[abstract]?(interface|class|op|func|type)', Keyword.Declaration), (r'"[^"]*"', String), + (r'\\[\'ntrf"0]', String.Escape), + (r'#[a-zA-Z]\w*', Literal), include('numbers'), - (r'#[a-zA-Z][_a-zA-Z]*', Keyword.Constant), - (r"'[^']'", String.Character), - (r'([a-zA-Z0-9_]+)', Name), - (r'(<|>|:=|\[|\]|\(|\)|\||:|;|,|.|\{|\})' - , Punctuation), + (r"'[^']'", String.Char), + (r'[a-zA-Z]\w*', Name), (r'(<==|==>|<=>|\*\*=|<\|=|<<=|>>=|==|!=|=\?|<=|>=|' - r'\*\*|<<|>>|=>|:=|\+=|-=|\*=|\||\|=|/=|->|\+|-|\*|/)', Operator), + r'\*\*|<<|>>|=>|:=|\+=|-=|\*=|\||\|=|/=|\+|-|\*|/|' + r'\.\.|<\.\.|\.\.<|<\.\.<)', + Operator), + (r'(<|>|\[|\]|\(|\)|\||:|;|,|.|\{|\}|->)', + Punctuation), (r'\n+', Text), ], 'numbers' : [ - (r'[0-9_]+#[0-9a-fA-F]+#', Number.Hex), - (r'0x[0-9a-fA-F]+', Number.Hex), - (r'[0-9_]+\.[0-9_]*', Number.Float), - (r'[0-9_]+', 
Number.Integer), + (r'\d[0-9_]*#[0-9a-fA-F][0-9a-fA-F_]*#', Number.Hex), # any base + (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex), # C-like hex + (r'0[bB][01][01_]*', Number.Bin), # C-like bin + (r'\d[0-9_]*\.\d[0-9_]*[eE][+-]\d[0-9_]*', # float exp + Number.Float), + (r'\d[0-9_]*\.\d[0-9_]*', Number.Float), # float + (r'\d[0-9_]*', Number.Integer), # integer ], } diff --git a/tests/examplefiles/test.psl b/tests/examplefiles/test.psl index ee427843..422cbcc7 100644 --- a/tests/examplefiles/test.psl +++ b/tests/examplefiles/test.psl @@ -4,23 +4,34 @@ // Functions func Add(X : Univ_Integer; Y : Univ_Integer) -> Univ_Integer is - // End of line semi-colons are optional return X + Y; end func Add; +// End of line semi-colons are optional +// +, +=, -, -=, *, *=, /, /= +// all do what you'd expect (/ is integer division) // If you find Univ_Integer to be too verbose you can import Short_Names // which defines aliases like Int for Univ_Integer and String for Univ_String import PSL::Short_Names::*, * func Greetings() is - // All declarations are 'const', 'var', or 'ref' const S : String := "Hello, World!" 
Println(S) end func Greetings +// All declarations are 'const', 'var', or 'ref' +// Assignment is :=, equality checks are ==, and != is not equals + +func Boolean_Examples(B : Bool) is + const And := B and #true // Parallel execution of operands + const And_Then := B and then #true // Short-Circuit + const Or := B or #false // Parallel execution of operands + const Or_Else := B or else #false // Short-Cirtuit + const Xor := B xor #true +end func Boolean_Examples +// Booleans are a special type of enumeration +// All enumerations are preceded by a sharp '#' func Fib(N : Int) {N >= 0} -> Int is - // '{N >= 0}' is a precondition to this function - // Preconditions are built in to the language and checked by the compiler if N <= 1 then return N else @@ -28,40 +39,50 @@ func Fib(N : Int) {N >= 0} -> Int is return Fib(N - 1) + Fib(N - 2) end if end func Fib +// '{N >= 0}' is a precondition to this function +// Preconditions are built in to the language and checked by the compiler // ParaSail does not have mutable global variables // Instead, use 'var' parameters func Increment_All(var Nums : Vector) is - // This function takes a 'var' parameter. - // The modifications made here will be seen by caller for each Elem of Nums concurrent loop - // The 'concurrent' keyword tells the compiler that - // iterations of the loop can happen in any order. - // It will choose the most optimal number of picothreads to use. - // Other options are 'forward' and 'reverse'. Elem += 1 end loop end func Increment_All +// The 'concurrent' keyword in the loop header tells the compiler that +// iterations of the loop can happen in any order. +// It will choose the most optimal number of threads to use. +// Other options are 'forward' and 'reverse'. func Sum_Of_Squares(N : Int) -> Int is - // Built-in and inherently parallel map-reduce - // Initial value is enclosed with angle brackets - return (for I in 1 .. N => <0> + I ** 2) + // The type of Sum is inferred + var Sum := 0 + for I in 1 .. 
N forward loop + Sum += I ** 2 // ** is exponentiation + end loop end func Sum_Of_Squares -func Sum_Of(N : Int; F : func (Int) -> Int) -> Int is - // It has functional aspects as well - // Here, we're taking an (Int) -> Int function as a parameter - return (for I in 1 .. N => <0> + F(I)) +func Sum_Of(N : Int; Map : func (Int) -> Int) -> Int is + return (for I in 1 .. N => <0> + Map(I)) end func Sum_Of +// It has functional aspects as well +// Here, we're taking an (Int) -> Int function as a parameter +// and using the inherently parallel map-reduce. +// Initial value is enclosed with angle brackets func main(Args : Basic_Array) is - Greetings() - Println(Fib(5)); - var Vec : Vector := [0, 1, 2] + Greetings() // Hello World + Println(Fib(5)) // 5 + // Container Comprehension + var Vec : Vector := [for I in 0 .. 10 {I mod 2 == 0} => I ** 2] + // Vec = [0, 4, 16, 36, 64, 100] Increment_All(Vec) - // '|' is an overloaded operator. Here used for building strings - Println(Vec[1] | ", " | Vec[2] | ", " | Vec[3]) + // Vec = [1, 5, 17, 37, 65, 101] + // '|' is an overloaded operator. + // It's usually used for concatenation or adding to a container + Println("First: " | Vec[1] | ", Last: " | Vec[Length(Vec)]); + // Vectors are 1 indexed, 0 indexed ZVectors are also available + Println(Sum_Of_Squares(3)) // Sum of fibs! @@ -69,18 +90,24 @@ func main(Args : Basic_Array) is end func main // Preceding a type with 'optional' allows it to take the value 'null' -func Divide(A, B : Int) -> optional Int is - if B == 0 then - return null; +func Divide(A, B, C : Real) -> optional Real is + // Real is the floating point type + const Epsilon := 1.0e-6; + if B in -Epsilon .. Epsilon then + return null + elsif C in -Epsilon .. Epsilon then + return null else - return A / B; - end if; -end func Divide; + return A / B + A / C + end if +end func Divide // 2. 
Modules // Modules are composed of an interface and a class -// ParaSail has object orientation +// ParaSail has object orientation features +// modules can be defined as 'concurrent' +// which allows 'locked' and 'queued' parameters concurrent interface Locked_Box> is // Create a box with the given content func Create(C : optional Content_Type) -> Locked_Box; @@ -99,53 +126,53 @@ concurrent interface Locked_Box> is end interface Locked_Box; concurrent class Locked_Box is - var Content : optional Content_Type; - exports - func Create(C : optional Content_Type) -> Locked_Box is - return (Content => C); - end func Create; - - func Put(locked var B : Locked_Box; C : Content_Type) is - B.Content := C; - end func Put; - - func Content(locked B : Locked_Box) -> optional Content_Type is - return B.Content; - end func Content; - - func Remove(locked var B : Locked_Box) -> Result : optional Content_Type is - // '<==' is the move operator - // It moves the right operand into the left operand, - // leaving the right null. - Result <== B.Content; - end func Remove; - - func Get(queued var B : Locked_Box) -> Result : Content_Type is + var Content : optional Content_Type; +exports + func Create(C : optional Content_Type) -> Locked_Box is + return (Content => C); + end func Create; + + func Put(locked var B : Locked_Box; C : Content_Type) is + B.Content := C; + end func Put; + + func Content(locked B : Locked_Box) -> optional Content_Type is + return B.Content; + end func Content; + + func Remove(locked var B : Locked_Box) -> Result : optional Content_Type is + // '<==' is the move operator + // It moves the right operand into the left operand, + // leaving the right null. 
+ Result <== B.Content; + end func Remove; + + func Get(queued var B : Locked_Box) -> Result : Content_Type is queued until B.Content not null then - Result <== B.Content; - end func Get; + Result <== B.Content; + end func Get; end class Locked_Box; func Use_Box(Seed : Univ_Integer) is - var U_Box : Locked_Box := Create(null); - // Type Inference. The type of 'Ran' can be left out because - // it is inferred from the return type of Random::Start - var Ran := Random::Start(Seed); - - Println("Starting 100 pico-threads trying to put something in the box"); - Println(" or take something out."); - for I in 1..100 concurrent loop - if I < 30 then - Println("Getting out " | Get(U_Box)); - else - Println("Putting in " | I); - U_Box.Put(I); - - // The first parameter can be moved to the front with a dot - // X.Foo(Y) is equivalent to Foo(X, Y) - end if; - end loop; - - Println("And the winner is: " | Remove(U_Box)); - Println("And the box is now " | Content(U_Box)); + var U_Box : Locked_Box := Create(null); + // The type of 'Ran' can be left out because + // it is inferred from the return type of Random::Start + var Ran := Random::Start(Seed); + + Println("Starting 100 pico-threads trying to put something in the box"); + Println(" or take something out."); + for I in 1..100 concurrent loop + if I < 30 then + Println("Getting out " | Get(U_Box)); + else + Println("Putting in " | I); + U_Box.Put(I); + + // The first parameter can be moved to the front with a dot + // X.Foo(Y) is equivalent to Foo(X, Y) + end if; + end loop; + + Println("And the winner is: " | Remove(U_Box)); + Println("And the box is now " | Content(U_Box)); end func Use_Box; -- cgit v1.2.1 From 1f2829086f0640b36149f87b7de5c122f9905c9e Mon Sep 17 00:00:00 2001 From: Justin Hendrick Date: Tue, 8 Jul 2014 12:41:23 -0400 Subject: ParaSailLexer: match abstract declarations correctly --- pygments/lexers/compiled.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git 
a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 55b12109..d96f884c 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -2581,7 +2581,7 @@ class ParaSailLexer(RegexLexer): # matching and=, or=, and xor= doesn't work yet (r'\b(and(=|\sthen)?|or(=|\selse)?|xor=?|rem|mod|' r'(is|not)\snull)\b', - Operator.Word), + Operator.Word), # Keywords (r'\b(abs|abstract|all|block|class|concurrent|const|continue|' r'each|end|exit|extends|exports|forward|func|global|implements|' @@ -2590,8 +2590,9 @@ class ParaSailLexer(RegexLexer): r'type|until|var|with|' # Control flow r'if|then|else|elsif|case|for|while|loop)\b', - Keyword.Reserved), - (r'[abstract]?(interface|class|op|func|type)', Keyword.Declaration), + Keyword.Reserved), + (r'(abstract\s+)?(interface|class|op|func|type)', + Keyword.Declaration), (r'"[^"]*"', String), (r'\\[\'ntrf"0]', String.Escape), (r'#[a-zA-Z]\w*', Literal), -- cgit v1.2.1 From 0d4f84c72fb5a008aaf0f76aed8e0de6f0cc73ee Mon Sep 17 00:00:00 2001 From: Justin Hendrick Date: Tue, 8 Jul 2014 13:40:05 -0400 Subject: ParaSailLexer: and=, or=, and xor= work. 
A few more comments --- pygments/lexers/compiled.py | 10 ++++++---- tests/examplefiles/test.psl | 4 ++++ 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index d96f884c..d8354ec6 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -2578,9 +2578,9 @@ class ParaSailLexer(RegexLexer): 'root': [ (r'[^\S\n]+', Text), (r'//.*?\n', Comment.Single), - # matching and=, or=, and xor= doesn't work yet - (r'\b(and(=|\sthen)?|or(=|\selse)?|xor=?|rem|mod|' - r'(is|not)\snull)\b', + (r'\b(and|or|xor)=', Operator.Word), + (r'\b(and(\s+then)?|or(\s+else)?|xor|rem|mod|' + r'(is|not)\s+null)\b', Operator.Word), # Keywords (r'\b(abs|abstract|all|block|class|concurrent|const|continue|' @@ -2593,12 +2593,14 @@ class ParaSailLexer(RegexLexer): Keyword.Reserved), (r'(abstract\s+)?(interface|class|op|func|type)', Keyword.Declaration), + # Literals (r'"[^"]*"', String), (r'\\[\'ntrf"0]', String.Escape), - (r'#[a-zA-Z]\w*', Literal), + (r'#[a-zA-Z]\w*', Literal), #Enumeration include('numbers'), (r"'[^']'", String.Char), (r'[a-zA-Z]\w*', Name), + # Operators and Punctuation (r'(<==|==>|<=>|\*\*=|<\|=|<<=|>>=|==|!=|=\?|<=|>=|' r'\*\*|<<|>>|=>|:=|\+=|-=|\*=|\||\|=|/=|\+|-|\*|/|' r'\.\.|<\.\.|\.\.<|<\.\.<)', diff --git a/tests/examplefiles/test.psl b/tests/examplefiles/test.psl index 422cbcc7..3ac99498 100644 --- a/tests/examplefiles/test.psl +++ b/tests/examplefiles/test.psl @@ -27,6 +27,10 @@ func Boolean_Examples(B : Bool) is const Or := B or #false // Parallel execution of operands const Or_Else := B or else #false // Short-Cirtuit const Xor := B xor #true + var Result : Bool := #true; + Result and= #false; + Result or= #true; + Result xor= #false; end func Boolean_Examples // Booleans are a special type of enumeration // All enumerations are preceded by a sharp '#' -- cgit v1.2.1 From 9e2c4c0d5927d1b6a04c4701ad8114b058b6c28f Mon Sep 17 00:00:00 2001 From: Bruno Deferrari Date: Mon, 28 Jul 2014 
12:12:38 -0300 Subject: Add lexer for the Shen programming language. --- pygments/lexers/_mapping.py | 1 + pygments/lexers/functional.py | 162 +++++++++++++++++++++++++++++++++++++++++- 2 files changed, 162 insertions(+), 1 deletion(-) diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index f8454357..26b234a5 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -310,6 +310,7 @@ LEXERS = { 'ScilabLexer': ('pygments.lexers.math', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)), 'ScssLexer': ('pygments.lexers.web', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)), 'ShellSessionLexer': ('pygments.lexers.shell', 'Shell Session', ('shell-session',), ('*.shell-session',), ('application/x-sh-session',)), + 'ShenLexer': ('pygments.lexers.functional', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')), 'SlimLexer': ('pygments.lexers.web', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)), 'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)), 'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)), diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index a22c4f55..bc952981 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -22,7 +22,7 @@ __all__ = ['RacketLexer', 'SchemeLexer', 'CommonLispLexer', 'CryptolLexer', 'OcamlLexer', 'ErlangLexer', 'ErlangShellLexer', 'OpaLexer', 'CoqLexer', 'NewLispLexer', 'NixLexer', 'ElixirLexer', 'ElixirConsoleLexer', 'KokaLexer', 'IdrisLexer', - 'LiterateIdrisLexer'] + 'LiterateIdrisLexer', 'ShenLexer'] line_re = re.compile('.*?\n') @@ -3669,3 +3669,163 @@ class KokaLexer(RegexLexer): (r'\\U[0-9a-fA-F]{6}', String.Escape) ] } + +class ShenLexer(RegexLexer): + """ + Lexer for `Shen `_ source code. 
+ """ + name = 'Shen' + aliases = ['shen'] + filenames = ['*.shen'] + mimetypes = ['text/x-shen', 'application/x-shen'] + + DECLARATIONS = re.findall(r'\S+', """ + datatype define defmacro defprolog defcc synonyms declare package + type function + """) + + SPECIAL_FORMS = re.findall(r'\S+', """ + lambda get let if cases cond put time freeze value load $ + protect or and not do output prolog? trap-error error + make-string /. set @p @s @v + """) + + BUILTINS = re.findall(r'\S+', """ + == = * + - / < > >= <= <-address <-vector abort absvector + absvector? address-> adjoin append arity assoc bind boolean? + bound? call cd close cn compile concat cons cons? cut destroy + difference element? empty? enable-type-theory error-to-string + eval eval-kl exception explode external fail fail-if file + findall fix fst fwhen gensym get-time hash hd hdstr hdv head + identical implementation in include include-all-but inferences + input input+ integer? intern intersection is kill language + length limit lineread loaded macro macroexpand map mapcan + maxinferences mode n->string nl nth null number? occurrences + occurs-check open os out port porters pos pr preclude + preclude-all-but print profile profile-results ps quit read + read+ read-byte read-file read-file-as-bytelist + read-file-as-string read-from-string release remove return + reverse run save set simple-error snd specialise spy step + stinput stoutput str string->n string->symbol string? subst + symbol? systemf tail tc tc? thaw tl tlstr tlv track tuple? + undefmacro unify unify! union unprofile unspecialise untrack + variable? vector vector-> vector? verified version warn when + write-byte write-to-file y-or-n? + """) + + BUILTINS_ANYWHERE = re.findall(r'\S+', """ + where skip >> _ ! 
+ """) + + MAPPINGS = dict((s, Keyword) for s in DECLARATIONS) + MAPPINGS.update((s, Name.Builtin) for s in BUILTINS) + MAPPINGS.update((s, Keyword) for s in SPECIAL_FORMS) + + valid_symbol_chars = r'[\w!$%*+,<=>?/.\'@&#:_-]' + valid_name = '%s+' % valid_symbol_chars + symbol_name = r'[a-z!$%%*+,<=>?/.\'@&#_-]%s*' % valid_symbol_chars + variable = r'[A-Z]%s*' % valid_symbol_chars + + tokens = { + 'string': [ + (r'"', String, '#pop'), + (r'c#\d{1,3};', String.Escape), + (r'~[ARS%]', String.Interpol), + (r'(?s).', String), + ], + + 'root' : [ + (r'(?ms)\\\*.*?\*\\', Comment.Multiline), # \* ... *\ + (r'\\\\.*', Comment.Single), # \\ ... + (r'(?ms)\s+', Text), + (r'_{5,}', Punctuation), + (r'={5,}', Punctuation), + (r'(;|:=|\||--?>|<--?)', Punctuation), + (r'(:-|:|\{|\})', Literal), + (r'[+-]*\d*\.\d+(e[+-]?\d+)?', Number.Float), + (r'[+-]*\d+', Number.Integer), + (r'"', String, 'string'), + (variable, Name.Variable), + (r'(true|false|<>|\[\])', Keyword.Pseudo), + (symbol_name, Literal), + (r'(\[|\]|\(|\))', Punctuation), + ], + } + + def get_tokens_unprocessed(self, text): + tokens = RegexLexer.get_tokens_unprocessed(self, text) + tokens = self._process_symbols(tokens) + tokens = self._process_declarations(tokens) + return tokens + + def _relevant(self, token): + return token not in (Text, Comment.Single, Comment.Multiline) + + def _process_declarations(self, tokens): + opening_paren = False + for index, token, value in tokens: + yield index, token, value + if self._relevant(token): + if opening_paren and token == Keyword and value in self.DECLARATIONS: + declaration = value + for index, token, value \ + in self._process_declaration(declaration, tokens): + yield index, token, value + opening_paren = value == '(' and token == Punctuation + + def _process_symbols(self, tokens): + opening_paren = False + for index, token, value in tokens: + if opening_paren and token in (Literal, Name.Variable): + token = self.MAPPINGS.get(value, Name.Function) + elif token == Literal 
and value in self.BUILTINS_ANYWHERE: + token = Name.Builtin + opening_paren = value == '(' and token == Punctuation + yield index, token, value + + def _process_declaration(self, declaration, tokens): + for index, token, value in tokens: + if self._relevant(token): + break + yield index, token, value + + if declaration == 'datatype': + prev_was_colon = False + token = Keyword.Type if token == Literal else token + yield index, token, value + for index, token, value in tokens: + if prev_was_colon and token == Literal: + token = Keyword.Type + yield index, token, value + if self._relevant(token): + prev_was_colon = token == Literal and value == ':' + elif declaration == 'package': + token = Name.Namespace if token == Literal else token + yield index, token, value + elif declaration == 'define': + token = Name.Function if token == Literal else token + yield index, token , value + for index, token, value in tokens: + if self._relevant(token): + break + yield index, token, value + if value == '{' and token == Literal: + yield index, Punctuation, value + for index, token, value in self._process_signature(tokens): + yield index, token, value + else: + yield index, token, value + else: + token = Name.Function if token == Literal else token + yield index, token , value + + raise StopIteration + + def _process_signature(self, tokens): + for index, token, value in tokens: + if token == Literal and value == '}': + yield index, Punctuation, value + raise StopIteration + elif token in (Literal, Name.Function): + token = Name.Variable if value.istitle() else Keyword.Type + yield index, token, value -- cgit v1.2.1 From ae99918058d955d2155bf861fee6a128494d7d83 Mon Sep 17 00:00:00 2001 From: Bruno Deferrari Date: Mon, 28 Jul 2014 12:13:07 -0300 Subject: Add example file for the Shen programming language lexer. 
--- tests/examplefiles/test.shen | 137 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 137 insertions(+) create mode 100644 tests/examplefiles/test.shen diff --git a/tests/examplefiles/test.shen b/tests/examplefiles/test.shen new file mode 100644 index 00000000..7a254334 --- /dev/null +++ b/tests/examplefiles/test.shen @@ -0,0 +1,137 @@ +(package pygments-test [some symbols] + +\* multiline + comment +*\ + +\\ With vars as functions + +(define super + [Value Succ End] Action Combine Zero -> + (if (End Value) + Zero + (Combine (Action Value) + (super [(Succ Value) Succ End] + Action Combine Zero)))) + +(define for + Stream Action -> (super Stream Action (function do) 0)) + +(define filter + Stream Condition -> + (super Stream + (/. Val (if (Condition Val) [Val] [])) + (function append) + [])) + +(for [0 (+ 1) (= 10)] (function print)) + +(filter [0 (+ 1) (= 100)] + (/. X (integer? (/ X 3)))) + + +\\ Typed functions + +(define typed-map + { (A --> B) --> (list A) --> (list B) } + F X -> (typed-map-h F X [])) + +(define typed-map-h + { (A --> B) --> (list A) --> (list B) \\ comment + --> (list B) } + _ [] X -> (reverse X) + F [X | Y] Z -> (typed-map-h F Y [(F X) | Z])) + +(define append-string + { string --> string \* comment *\ --> string } + S1 S2 -> (cn S1 S2)) + +(let X 1 + Y 2 + (+ (type X number) (type Y number))) + +\\ Yacc + +(defcc + + := (package-macro (macroexpand ) ); + := [{ | ]; + := [} | ]; + := [bar! | ]; + := [; | ]; + := [:= | ]; + := [:- | ]; + := [: | ]; + := [(intern ",") | ]; + := [];) + +(defcc + 91 := skip;) + +\\ Pattern matching + +(define matches + 1 X 3 -> X + X Y Z -> Y where (and (= X 1) (= Z 3)) + true false _ -> true + (@p a X c) (@s X "abc") (@v 1 2 3 <>) -> true + [X | Rest] [] [a b c] -> true + [(@p a b)] [[[1] 2] X] "string" -> true + _ _ _ -> false) + + +\\ Prolog + +(defprolog th* + X A Hyps <-- (show [X : A] Hyps) (when false); + X A _ <-- (fwhen (typedf? 
X)) (bind F (sigf X)) (call [F A]); + (mode [F] -) A Hyp <-- (th* F [--> A] Hyp); + (mode [cons X Y] -) [list A] Hyp <-- (th* X A Hyp) (th* Y [list A] Hyp); + (mode [@s X Y] -) string Hyp <-- (th* X string Hyp) (th* Y string Hyp); + (mode [lambda X Y] -) [A --> B] Hyp <-- ! + (bind X&& (placeholder)) + (bind Z (ebr X&& X Y)) + (th* Z B [[X&& : A] | Hyp]); + (mode [type X A] -) B Hyp <-- ! (unify A B) (th* X A Hyp);) + +\\ Macros + +(defmacro log-macro + [log N] -> [log N 10]) + +\\ Sequent calculus + +(datatype rank + + if (element? X [ace 2 3 4 5 6 7 8 9 10 jack queen king]) + ________ + X : rank;) + +(datatype suit + + if (element? Suit [spades hearts diamonds clubs]) + _________ + Suit : suit;) + +(datatype card + + Rank : rank; Suit : suit; + _________________ + [Rank Suit] : card; + + Rank : rank, Suit : suit >> P; + _____________________ + [Rank Suit] : card >> P;) + +(datatype card + + Rank : rank; Suit : suit; + ================== + [Rank Suit] : card;) + +\\ String interpolation and escape sequences + +"abc~A ~S~R ~% blah + c#30;c#31;blah" + +) -- cgit v1.2.1 From 99dd023556090612f8fca8c0973b450260528452 Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Mon, 10 Nov 2014 15:11:24 +0000 Subject: Add XQuery 3.0 Simple Map Operator --- pygments/lexers/webmisc.py | 2 +- tests/examplefiles/example3.xq | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 tests/examplefiles/example3.xq diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py index 331d78d2..c68090be 100644 --- a/pygments/lexers/webmisc.py +++ b/pygments/lexers/webmisc.py @@ -339,7 +339,7 @@ class XQueryLexer(ExtendedRegexLexer): Operator.Word, 'root'), (r'return|satisfies|to|union|where|preserve\s+strip', Keyword, 'root'), - (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\|\||\||:=|=)', + (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\|\||\||:=|=|!)', operator_root_callback), (r'(::|;|\[|//|/|,)', punctuation_root_callback), diff --git a/tests/examplefiles/example3.xq 
b/tests/examplefiles/example3.xq new file mode 100644 index 00000000..12fbba9f --- /dev/null +++ b/tests/examplefiles/example3.xq @@ -0,0 +1,9 @@ +xquery version "3.0"; + +declare function local:word-count($elms as element()*) as xs:integer { + sum($elms ! count(tokenize(., '\s+'))) +}; + +declare function local:add($a, $b) { + $a + $b +}; \ No newline at end of file -- cgit v1.2.1 From 9afe3b56e692812dcd311c2a9f530295c64b8fb3 Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Mon, 10 Nov 2014 16:36:15 +0000 Subject: Add XQuery 3.0 Switch Expression --- pygments/lexers/webmisc.py | 5 +++++ tests/examplefiles/example3.xq | 25 +++++++++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py index c68090be..08c0f1ee 100644 --- a/pygments/lexers/webmisc.py +++ b/pygments/lexers/webmisc.py @@ -349,6 +349,8 @@ class XQueryLexer(ExtendedRegexLexer): bygroups(Keyword, Text, Keyword), 'itemtype'), (r'(treat)(\s+)(as)\b', bygroups(Keyword, Text, Keyword), 'itemtype'), + (r'(case)(\s+)(' + stringdouble + ')', bygroups(Keyword, Text, String.Double), 'itemtype'), + (r'(case)(\s+)(' + stringsingle + ')', bygroups(Keyword, Text, String.Single), 'itemtype'), (r'(case|as)\b', Keyword, 'itemtype'), (r'(\))(\s*)(as)', bygroups(Punctuation, Text, Keyword), 'itemtype'), @@ -437,6 +439,8 @@ class XQueryLexer(ExtendedRegexLexer): bygroups(Keyword, Text, Keyword), 'singletype'), (r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)), (r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)), + (r'(case)(\s+)(' + stringdouble + ')', bygroups(Keyword, Text, String.Double), 'itemtype'), + (r'(case)(\s+)(' + stringsingle + ')', bygroups(Keyword, Text, String.Single), 'itemtype'), (r'case|as', Keyword, 'itemtype'), (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'), (ncname + r':\*', Keyword.Type, 'operator'), @@ -649,6 +653,7 @@ class XQueryLexer(ExtendedRegexLexer): pushstate_operator_root_validate_withmode), 
(r'(validate)(\s*)(\{)', pushstate_operator_root_validate), (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)), + (r'(switch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)), (r'(element|attribute)(\s*)(\{)', pushstate_operator_root_construct_callback), diff --git a/tests/examplefiles/example3.xq b/tests/examplefiles/example3.xq index 12fbba9f..5d1ff635 100644 --- a/tests/examplefiles/example3.xq +++ b/tests/examplefiles/example3.xq @@ -1,9 +1,34 @@ xquery version "3.0"; +(: Simple Map Operator example :) declare function local:word-count($elms as element()*) as xs:integer { sum($elms ! count(tokenize(., '\s+'))) }; declare function local:add($a, $b) { $a + $b +}; + +declare function local:dispatch($node as node()) as item()* { + typeswitch($node) + case text() return $node + case comment() return $node + case element(bill) return local:bill($node) + case element(btitle) return local:btitle($node) + case element(section-id) return local:section-id($node) + case element(bill-text) return local:bill-text($node) + case element(strike) return local:strike($node) + default return local:passthru($node) +}; + +(: Switch expression example :) +declare function local:noise($animal) { + let $duck := "Duck", + $quack := "Quack" + return + switch ($animal) + case "Cow" return "Moo" + case 'Cat' return 'Meow' + case $duck return $quack + default return "What's that odd noise?" 
}; \ No newline at end of file -- cgit v1.2.1 From b14e5834c478e228f90a80b6450945d85ba136d9 Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Mon, 10 Nov 2014 17:00:07 +0000 Subject: Add XQuery 3.0 Group By Expression --- pygments/lexers/webmisc.py | 3 +- tests/examplefiles/example3.xq | 65 ++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 65 insertions(+), 3 deletions(-) diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py index 08c0f1ee..dc01ef5e 100644 --- a/pygments/lexers/webmisc.py +++ b/pygments/lexers/webmisc.py @@ -333,6 +333,7 @@ class XQueryLexer(ExtendedRegexLexer): (r'(\{)', pushstate_root_callback), (r'then|else|external|at|div|except', Keyword, 'root'), (r'order by', Keyword, 'root'), + (r'group by', Keyword, 'root'), (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'), (r'and|or', Operator.Word, 'root'), (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)', @@ -653,7 +654,7 @@ class XQueryLexer(ExtendedRegexLexer): pushstate_operator_root_validate_withmode), (r'(validate)(\s*)(\{)', pushstate_operator_root_validate), (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)), - (r'(switch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)), + (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)), (r'(element|attribute)(\s*)(\{)', pushstate_operator_root_construct_callback), diff --git a/tests/examplefiles/example3.xq b/tests/examplefiles/example3.xq index 5d1ff635..d7f027ad 100644 --- a/tests/examplefiles/example3.xq +++ b/tests/examplefiles/example3.xq @@ -21,7 +21,7 @@ declare function local:dispatch($node as node()) as item()* { default return local:passthru($node) }; -(: Switch expression example :) +(: `switch` expression example :) declare function local:noise($animal) { let $duck := "Duck", $quack := "Quack" @@ -31,4 +31,65 @@ declare function local:noise($animal) { case 'Cat' return 'Meow' case $duck return $quack default return "What's that odd noise?" 
-}; \ No newline at end of file +}; + +(: `group by` expression with binding example :) +declare function local:a-to-z() { + let $data as element()* := ( + Apples, + Bananas, + Apricots, + Pears, + Brambles + ) return + { + for $item in $data + group by $key := upper-case(substring($item, 1, 1)) + order by $key + return + {$item} + } +}; + +(: `group by` expression example :) +declare function local:plays-by-character() { + let $plays := ( + document { + + Hamlet + + Hamlet + Claudius + Polonius + Rosencrantz + Guildenstern + Francisco + Reynaldo + + + }, + document { + + Rosenkrantz and Guildenstern are Dead + + Alfred + Rosencrantz + Guildenstern + Hamlet + Claudius + + + } + ) return + + for $play in $plays/play + let $title := $play/title + for $character in $play/characters/character + group by $character + return + + { + $title ! { . } + } + +}; -- cgit v1.2.1 From 863a23ddeb34466dea4520074d2b31ee600e0558 Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Mon, 10 Nov 2014 17:27:26 +0000 Subject: Add XQuery 3.0 Annotations for variables --- pygments/lexers/webmisc.py | 6 ++++++ tests/examplefiles/example3.xq | 11 +++++++++++ 2 files changed, 17 insertions(+) diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py index dc01ef5e..5c01c7f3 100644 --- a/pygments/lexers/webmisc.py +++ b/pygments/lexers/webmisc.py @@ -397,6 +397,11 @@ class XQueryLexer(ExtendedRegexLexer): (r'preserve|no-preserve', Keyword), (r',', Punctuation), ], + 'annotationname':[ + (r'\(:', Comment, 'comment'), + (qname, Name.Decorator), + (r'(\s+)(variable)(\s+)(\$)', bygroups(Text, Keyword, Text, Name.Variable), 'varname') + ], 'varname': [ (r'\(:', Comment, 'comment'), (qname, Name.Variable, 'operator'), @@ -625,6 +630,7 @@ class XQueryLexer(ExtendedRegexLexer): (r'(for|let|some|every)(\s+)(\$)', bygroups(Keyword, Text, Name.Variable), 'varname'), (r'\$', Name.Variable, 'varname'), + (r'(declare)(\s+)(\%)', bygroups(Keyword, Text, Name.Decorator), 'annotationname'), 
(r'(declare)(\s+)(variable)(\s+)(\$)', bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'), diff --git a/tests/examplefiles/example3.xq b/tests/examplefiles/example3.xq index d7f027ad..a399e1dd 100644 --- a/tests/examplefiles/example3.xq +++ b/tests/examplefiles/example3.xq @@ -1,5 +1,16 @@ xquery version "3.0"; +declare namespace other = "http://other"; + +declare variable $local:straight-var1 := 'one'; + +declare %private variable $local:private-var := 'secret'; +declare %public variable $local:public-var := 'not-secret'; +declare %other:annotation variable $local:some-var := 'anything'; + +declare variable $local:straight-var2 := 'two'; + + (: Simple Map Operator example :) declare function local:word-count($elms as element()*) as xs:integer { sum($elms ! count(tokenize(., '\s+'))) -- cgit v1.2.1 From dc5148df4042d143b2bf55dcb1304ec3f6a38a1e Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Mon, 10 Nov 2014 17:55:51 +0000 Subject: Add XQuery 3.0 Annotations for functions and allow parameters to annotations --- pygments/lexers/webmisc.py | 13 +++++++++++-- tests/examplefiles/example3.xq | 19 ++++++++++++++++++- 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py index 5c01c7f3..62f88386 100644 --- a/pygments/lexers/webmisc.py +++ b/pygments/lexers/webmisc.py @@ -400,7 +400,14 @@ class XQueryLexer(ExtendedRegexLexer): 'annotationname':[ (r'\(:', Comment, 'comment'), (qname, Name.Decorator), - (r'(\s+)(variable)(\s+)(\$)', bygroups(Text, Keyword, Text, Name.Variable), 'varname') + (r'(\()(' + stringdouble + ')', bygroups(Punctuation, String.Double)), + (r'(\()(' + stringsingle + ')', bygroups(Punctuation, String.Single)), + (r'(\,)(\s+)(' + stringdouble + ')', bygroups(Punctuation, Text, String.Double)), + (r'(\,)(\s+)(' + stringsingle + ')', bygroups(Punctuation, Text, String.Single)), + (r'\)', Punctuation), + (r'(\s+)(\%)', bygroups(Text, Name.Decorator), 'annotationname'), + 
(r'(\s+)(variable)(\s+)(\$)', bygroups(Text, Keyword, Text, Name.Variable), 'varname'), + (r'(\s+)(function)(\s+)', bygroups(Text, Keyword, Text), 'root') ], 'varname': [ (r'\(:', Comment, 'comment'), @@ -630,10 +637,12 @@ class XQueryLexer(ExtendedRegexLexer): (r'(for|let|some|every)(\s+)(\$)', bygroups(Keyword, Text, Name.Variable), 'varname'), (r'\$', Name.Variable, 'varname'), - (r'(declare)(\s+)(\%)', bygroups(Keyword, Text, Name.Decorator), 'annotationname'), (r'(declare)(\s+)(variable)(\s+)(\$)', bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'), + # ANNOTATED GLOBAL VARIABLES AND FUNCTIONS + (r'(declare)(\s+)(\%)', bygroups(Keyword, Text, Name.Decorator), 'annotationname'), + # ITEMTYPE (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'), diff --git a/tests/examplefiles/example3.xq b/tests/examplefiles/example3.xq index a399e1dd..f2dee2c1 100644 --- a/tests/examplefiles/example3.xq +++ b/tests/examplefiles/example3.xq @@ -6,7 +6,7 @@ declare variable $local:straight-var1 := 'one'; declare %private variable $local:private-var := 'secret'; declare %public variable $local:public-var := 'not-secret'; -declare %other:annotation variable $local:some-var := 'anything'; +declare %other:annotation('param1', "param2") variable $local:some-var := 'anything'; declare variable $local:straight-var2 := 'two'; @@ -104,3 +104,20 @@ declare function local:plays-by-character() { } }; + +declare + %other:a + %private + %other:b('1') + %other:c("1", "2", "3", "4") +function local:very-annotated() { + let $thing := "thing" + return + $thing +}; + +declare %public function local:slightly-annotated() { + let $nothing := () + return + $nothing +}; -- cgit v1.2.1 From 93b0c88cd992cf3c29b8dd142d39af0bbd2c9de1 Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Mon, 10 Nov 2014 21:30:48 +0000 Subject: Add XQuery 3.0 Higher Order Functions --- pygments/lexers/webmisc.py | 8 +++++++- tests/examplefiles/example3.xq | 34 ++++++++++++++++++++++++++++++++++ 2 
files changed, 41 insertions(+), 1 deletion(-) diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py index 62f88386..1445e38c 100644 --- a/pygments/lexers/webmisc.py +++ b/pygments/lexers/webmisc.py @@ -364,6 +364,10 @@ class XQueryLexer(ExtendedRegexLexer): (r'ascending|descending|default', Keyword, '#push'), (r'external', Keyword), (r'collation', Keyword, 'uritooperator'), + + # support for current context on rhs of Simple Map Operator + (r'\.', Operator), + # finally catch all string literals and stay in operator state (stringdouble, String.Double), (stringsingle, String.Single), @@ -411,7 +415,7 @@ class XQueryLexer(ExtendedRegexLexer): ], 'varname': [ (r'\(:', Comment, 'comment'), - (qname, Name.Variable, 'operator'), + (r'(' + qname + ')(\()?', bygroups(Name.Variable, Punctuation), 'operator'), ], 'singletype': [ (r'\(:', Comment, 'comment'), @@ -457,6 +461,7 @@ class XQueryLexer(ExtendedRegexLexer): (r'case|as', Keyword, 'itemtype'), (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'), (ncname + r':\*', Keyword.Type, 'operator'), + (r'(function)(\()', bygroups(Keyword.Type, Punctuation)), (qname, Keyword.Type, 'occurrenceindicator'), ], 'kindtest': [ @@ -736,6 +741,7 @@ class XQueryLexer(ExtendedRegexLexer): # STANDALONE QNAMES (qname + r'(?=\s*\{)', Name.Tag, 'qname_braren'), (qname + r'(?=\s*\([^:])', Name.Function, 'qname_braren'), + (r'(' + qname + ')(#)([0-9]+)', bygroups(Name.Function, Keyword.Type, Number.Integer)), (qname, Name.Tag, 'operator'), ] } diff --git a/tests/examplefiles/example3.xq b/tests/examplefiles/example3.xq index f2dee2c1..047b7904 100644 --- a/tests/examplefiles/example3.xq +++ b/tests/examplefiles/example3.xq @@ -121,3 +121,37 @@ declare %public function local:slightly-annotated() { return $nothing }; + +declare function local:merge-simple($a as xs:string+, $b as xs:string+) as xs:string+ { + ($a, $b) +}; + +(: higher order function example 1 :) +declare function local:apply($func, $value) { + 
$func($value) +}; + +(: higher order function example 2 :) +declare function local:apply-all($func, $list) { + $list ! $func(.) +}; + +(: higher order function example 3 :) +declare function local:apply-all-long($func as function(xs:string) as xs:string, $list) { + $list ! $func(.) +}; + +(: higher order function example 4 :) +declare function local:merge($func as function(xs:string+, xs:string+) as xs:string+, $a as xs:string+, $b as xs:string+) as xs:string+ { + $func($a, $b) +}; + +let $to-upper := upper-case#1 +let $to-upper-long as function(xs:string) as xs:string := upper-case#1 +return + + { + local:apply-all($to-upper, ("Hello", "world!")) ! {.}, + local:apply-all-long(lower-case#1, ("Hello", "world!")) ! {.} + } + -- cgit v1.2.1 From ba6e868551542aea732fc2a83daacff683098d4e Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Mon, 10 Nov 2014 21:40:07 +0000 Subject: Added further order by example --- tests/examplefiles/example3.xq | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/examplefiles/example3.xq b/tests/examplefiles/example3.xq index 047b7904..433d91fe 100644 --- a/tests/examplefiles/example3.xq +++ b/tests/examplefiles/example3.xq @@ -122,6 +122,16 @@ declare %public function local:slightly-annotated() { $nothing }; +declare function local:ordered() { + for $hit in doc("/db/doc-with-indexes.xml")//tei:p[other:query(., $search-expression)] + let $score as xs:float := other:score($hit) + order by $score descending + return ( +

Score: {$score}:

, + other:summarize($hit, ) + ) +}; + declare function local:merge-simple($a as xs:string+, $b as xs:string+) as xs:string+ { ($a, $b) }; -- cgit v1.2.1 From c062ddb4221babc7d5e5daa499d21b4fa37b740d Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Mon, 10 Nov 2014 21:53:48 +0000 Subject: Added further XQuery 3.0 String Conatentaion Expression tests --- tests/examplefiles/example3.xq | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/tests/examplefiles/example3.xq b/tests/examplefiles/example3.xq index 433d91fe..8dd9b311 100644 --- a/tests/examplefiles/example3.xq +++ b/tests/examplefiles/example3.xq @@ -132,6 +132,24 @@ declare function local:ordered() { ) }; +declare function local:concat-expr($postfix) { + + let $concatenated := other:uri() || "/" || $postfix + return + $concatenated +}; + +declare function local:human-units($bytes) { + let $unit := if($bytes > math:pow(1024, 3)) then + (math:pow(1024, 3), "GB") + else if($bytes > math:pow(1024, 2)) then + (math:pow(1024, 2), "MB") + else + (1024, "KB") + return + format-number($bytes div $unit[1], ".00") || " " || $unit[2] +}; + declare function local:merge-simple($a as xs:string+, $b as xs:string+) as xs:string+ { ($a, $b) }; -- cgit v1.2.1 From 630a49b792be6eca5a5a06f70dd98bde624b838d Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Mon, 10 Nov 2014 21:54:50 +0000 Subject: Renamed example to test --- tests/examplefiles/example3.xq | 185 ----------------------------------------- tests/examplefiles/test-3.0.xq | 185 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 185 insertions(+), 185 deletions(-) delete mode 100644 tests/examplefiles/example3.xq create mode 100644 tests/examplefiles/test-3.0.xq diff --git a/tests/examplefiles/example3.xq b/tests/examplefiles/example3.xq deleted file mode 100644 index 8dd9b311..00000000 --- a/tests/examplefiles/example3.xq +++ /dev/null @@ -1,185 +0,0 @@ -xquery version "3.0"; - -declare namespace other = "http://other"; - -declare variable 
$local:straight-var1 := 'one'; - -declare %private variable $local:private-var := 'secret'; -declare %public variable $local:public-var := 'not-secret'; -declare %other:annotation('param1', "param2") variable $local:some-var := 'anything'; - -declare variable $local:straight-var2 := 'two'; - - -(: Simple Map Operator example :) -declare function local:word-count($elms as element()*) as xs:integer { - sum($elms ! count(tokenize(., '\s+'))) -}; - -declare function local:add($a, $b) { - $a + $b -}; - -declare function local:dispatch($node as node()) as item()* { - typeswitch($node) - case text() return $node - case comment() return $node - case element(bill) return local:bill($node) - case element(btitle) return local:btitle($node) - case element(section-id) return local:section-id($node) - case element(bill-text) return local:bill-text($node) - case element(strike) return local:strike($node) - default return local:passthru($node) -}; - -(: `switch` expression example :) -declare function local:noise($animal) { - let $duck := "Duck", - $quack := "Quack" - return - switch ($animal) - case "Cow" return "Moo" - case 'Cat' return 'Meow' - case $duck return $quack - default return "What's that odd noise?" 
-}; - -(: `group by` expression with binding example :) -declare function local:a-to-z() { - let $data as element()* := ( - Apples, - Bananas, - Apricots, - Pears, - Brambles - ) return - { - for $item in $data - group by $key := upper-case(substring($item, 1, 1)) - order by $key - return - {$item} - } -}; - -(: `group by` expression example :) -declare function local:plays-by-character() { - let $plays := ( - document { - - Hamlet - - Hamlet - Claudius - Polonius - Rosencrantz - Guildenstern - Francisco - Reynaldo - - - }, - document { - - Rosenkrantz and Guildenstern are Dead - - Alfred - Rosencrantz - Guildenstern - Hamlet - Claudius - - - } - ) return - - for $play in $plays/play - let $title := $play/title - for $character in $play/characters/character - group by $character - return - - { - $title ! { . } - } - -}; - -declare - %other:a - %private - %other:b('1') - %other:c("1", "2", "3", "4") -function local:very-annotated() { - let $thing := "thing" - return - $thing -}; - -declare %public function local:slightly-annotated() { - let $nothing := () - return - $nothing -}; - -declare function local:ordered() { - for $hit in doc("/db/doc-with-indexes.xml")//tei:p[other:query(., $search-expression)] - let $score as xs:float := other:score($hit) - order by $score descending - return ( -

Score: {$score}:

, - other:summarize($hit, ) - ) -}; - -declare function local:concat-expr($postfix) { - - let $concatenated := other:uri() || "/" || $postfix - return - $concatenated -}; - -declare function local:human-units($bytes) { - let $unit := if($bytes > math:pow(1024, 3)) then - (math:pow(1024, 3), "GB") - else if($bytes > math:pow(1024, 2)) then - (math:pow(1024, 2), "MB") - else - (1024, "KB") - return - format-number($bytes div $unit[1], ".00") || " " || $unit[2] -}; - -declare function local:merge-simple($a as xs:string+, $b as xs:string+) as xs:string+ { - ($a, $b) -}; - -(: higher order function example 1 :) -declare function local:apply($func, $value) { - $func($value) -}; - -(: higher order function example 2 :) -declare function local:apply-all($func, $list) { - $list ! $func(.) -}; - -(: higher order function example 3 :) -declare function local:apply-all-long($func as function(xs:string) as xs:string, $list) { - $list ! $func(.) -}; - -(: higher order function example 4 :) -declare function local:merge($func as function(xs:string+, xs:string+) as xs:string+, $a as xs:string+, $b as xs:string+) as xs:string+ { - $func($a, $b) -}; - -let $to-upper := upper-case#1 -let $to-upper-long as function(xs:string) as xs:string := upper-case#1 -return - - { - local:apply-all($to-upper, ("Hello", "world!")) ! {.}, - local:apply-all-long(lower-case#1, ("Hello", "world!")) ! 
{.} - } - diff --git a/tests/examplefiles/test-3.0.xq b/tests/examplefiles/test-3.0.xq new file mode 100644 index 00000000..8dd9b311 --- /dev/null +++ b/tests/examplefiles/test-3.0.xq @@ -0,0 +1,185 @@ +xquery version "3.0"; + +declare namespace other = "http://other"; + +declare variable $local:straight-var1 := 'one'; + +declare %private variable $local:private-var := 'secret'; +declare %public variable $local:public-var := 'not-secret'; +declare %other:annotation('param1', "param2") variable $local:some-var := 'anything'; + +declare variable $local:straight-var2 := 'two'; + + +(: Simple Map Operator example :) +declare function local:word-count($elms as element()*) as xs:integer { + sum($elms ! count(tokenize(., '\s+'))) +}; + +declare function local:add($a, $b) { + $a + $b +}; + +declare function local:dispatch($node as node()) as item()* { + typeswitch($node) + case text() return $node + case comment() return $node + case element(bill) return local:bill($node) + case element(btitle) return local:btitle($node) + case element(section-id) return local:section-id($node) + case element(bill-text) return local:bill-text($node) + case element(strike) return local:strike($node) + default return local:passthru($node) +}; + +(: `switch` expression example :) +declare function local:noise($animal) { + let $duck := "Duck", + $quack := "Quack" + return + switch ($animal) + case "Cow" return "Moo" + case 'Cat' return 'Meow' + case $duck return $quack + default return "What's that odd noise?" 
+}; + +(: `group by` expression with binding example :) +declare function local:a-to-z() { + let $data as element()* := ( + Apples, + Bananas, + Apricots, + Pears, + Brambles + ) return + { + for $item in $data + group by $key := upper-case(substring($item, 1, 1)) + order by $key + return + {$item} + } +}; + +(: `group by` expression example :) +declare function local:plays-by-character() { + let $plays := ( + document { + + Hamlet + + Hamlet + Claudius + Polonius + Rosencrantz + Guildenstern + Francisco + Reynaldo + + + }, + document { + + Rosenkrantz and Guildenstern are Dead + + Alfred + Rosencrantz + Guildenstern + Hamlet + Claudius + + + } + ) return + + for $play in $plays/play + let $title := $play/title + for $character in $play/characters/character + group by $character + return + + { + $title ! { . } + } + +}; + +declare + %other:a + %private + %other:b('1') + %other:c("1", "2", "3", "4") +function local:very-annotated() { + let $thing := "thing" + return + $thing +}; + +declare %public function local:slightly-annotated() { + let $nothing := () + return + $nothing +}; + +declare function local:ordered() { + for $hit in doc("/db/doc-with-indexes.xml")//tei:p[other:query(., $search-expression)] + let $score as xs:float := other:score($hit) + order by $score descending + return ( +

Score: {$score}:

, + other:summarize($hit, ) + ) +}; + +declare function local:concat-expr($postfix) { + + let $concatenated := other:uri() || "/" || $postfix + return + $concatenated +}; + +declare function local:human-units($bytes) { + let $unit := if($bytes > math:pow(1024, 3)) then + (math:pow(1024, 3), "GB") + else if($bytes > math:pow(1024, 2)) then + (math:pow(1024, 2), "MB") + else + (1024, "KB") + return + format-number($bytes div $unit[1], ".00") || " " || $unit[2] +}; + +declare function local:merge-simple($a as xs:string+, $b as xs:string+) as xs:string+ { + ($a, $b) +}; + +(: higher order function example 1 :) +declare function local:apply($func, $value) { + $func($value) +}; + +(: higher order function example 2 :) +declare function local:apply-all($func, $list) { + $list ! $func(.) +}; + +(: higher order function example 3 :) +declare function local:apply-all-long($func as function(xs:string) as xs:string, $list) { + $list ! $func(.) +}; + +(: higher order function example 4 :) +declare function local:merge($func as function(xs:string+, xs:string+) as xs:string+, $a as xs:string+, $b as xs:string+) as xs:string+ { + $func($a, $b) +}; + +let $to-upper := upper-case#1 +let $to-upper-long as function(xs:string) as xs:string := upper-case#1 +return + + { + local:apply-all($to-upper, ("Hello", "world!")) ! {.}, + local:apply-all-long(lower-case#1, ("Hello", "world!")) ! 
{.} + } + -- cgit v1.2.1 From bb7085af4e133e44e8031b5c0b0a91a8960d93f0 Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Mon, 10 Nov 2014 22:37:07 +0000 Subject: Improve XQuery highlighting of declare statements --- pygments/lexers/webmisc.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py index 1445e38c..2d581b86 100644 --- a/pygments/lexers/webmisc.py +++ b/pygments/lexers/webmisc.py @@ -410,8 +410,8 @@ class XQueryLexer(ExtendedRegexLexer): (r'(\,)(\s+)(' + stringsingle + ')', bygroups(Punctuation, Text, String.Single)), (r'\)', Punctuation), (r'(\s+)(\%)', bygroups(Text, Name.Decorator), 'annotationname'), - (r'(\s+)(variable)(\s+)(\$)', bygroups(Text, Keyword, Text, Name.Variable), 'varname'), - (r'(\s+)(function)(\s+)', bygroups(Text, Keyword, Text), 'root') + (r'(\s+)(variable)(\s+)(\$)', bygroups(Text, Keyword.Declaration, Text, Name.Variable), 'varname'), + (r'(\s+)(function)(\s+)', bygroups(Text, Keyword.Declaration, Text), 'root') ], 'varname': [ (r'\(:', Comment, 'comment'), @@ -611,9 +611,9 @@ class XQueryLexer(ExtendedRegexLexer): (r'(\d+)', Number.Integer, 'operator'), (r'(\.\.|\.|\))', Punctuation, 'operator'), (r'(declare)(\s+)(construction)', - bygroups(Keyword, Text, Keyword), 'operator'), + bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'operator'), (r'(declare)(\s+)(default)(\s+)(order)', - bygroups(Keyword, Text, Keyword, Text, Keyword), 'operator'), + bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'), (ncname + ':\*', Name, 'operator'), ('\*:'+ncname, Name.Tag, 'operator'), ('\*', Name.Tag, 'operator'), @@ -624,29 +624,29 @@ class XQueryLexer(ExtendedRegexLexer): # NAMESPACE DECL (r'(declare)(\s+)(default)(\s+)(collation)', - bygroups(Keyword, Text, Keyword, Text, Keyword)), + bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration)), 
(r'(module|declare)(\s+)(namespace)', - bygroups(Keyword, Text, Keyword), 'namespacedecl'), + bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacedecl'), (r'(declare)(\s+)(base-uri)', - bygroups(Keyword, Text, Keyword), 'namespacedecl'), + bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacedecl'), # NAMESPACE KEYWORD (r'(declare)(\s+)(default)(\s+)(element|function)', - bygroups(Keyword, Text, Keyword, Text, Keyword), 'namespacekeyword'), + bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'namespacekeyword'), (r'(import)(\s+)(schema|module)', bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'), (r'(declare)(\s+)(copy-namespaces)', - bygroups(Keyword, Text, Keyword), 'namespacekeyword'), + bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacekeyword'), # VARNAMEs (r'(for|let|some|every)(\s+)(\$)', bygroups(Keyword, Text, Name.Variable), 'varname'), (r'\$', Name.Variable, 'varname'), (r'(declare)(\s+)(variable)(\s+)(\$)', - bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'), + bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Name.Variable), 'varname'), # ANNOTATED GLOBAL VARIABLES AND FUNCTIONS - (r'(declare)(\s+)(\%)', bygroups(Keyword, Text, Name.Decorator), 'annotationname'), + (r'(declare)(\s+)(\%)', bygroups(Keyword.Declaration, Text, Name.Decorator), 'annotationname'), # ITEMTYPE (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'), @@ -668,7 +668,7 @@ class XQueryLexer(ExtendedRegexLexer): (r'(<)', pushstate_operator_starttag_callback), (r'(declare)(\s+)(boundary-space)', - bygroups(Keyword, Text, Keyword), 'xmlspace_decl'), + bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'xmlspace_decl'), (r'(validate)(\s+)(lax|strict)', pushstate_operator_root_validate_withmode), @@ -692,7 +692,7 @@ class XQueryLexer(ExtendedRegexLexer): 'operator'), (r'(declare|define)(\s+)(function)', - bygroups(Keyword, Text, Keyword)), 
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration)), (r'(\{)', pushstate_operator_root_callback), @@ -700,7 +700,7 @@ class XQueryLexer(ExtendedRegexLexer): pushstate_operator_order_callback), (r'(declare)(\s+)(ordering)', - bygroups(Keyword, Text, Keyword), 'declareordering'), + bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'declareordering'), (r'(xquery)(\s+)(version)', bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'), @@ -710,7 +710,7 @@ class XQueryLexer(ExtendedRegexLexer): # sometimes return can occur in root state (r'return', Keyword), - (r'(declare)(\s+)(option)', bygroups(Keyword, Text, Keyword), + (r'(declare)(\s+)(option)', bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'option'), # URI LITERALS - single and double quoted -- cgit v1.2.1 From 3be0c87d202bbe810a12f30e2d4c07c8800d2339 Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Mon, 10 Nov 2014 22:49:36 +0000 Subject: Improvements to XQuery variable tokenization --- pygments/lexers/webmisc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py index 2d581b86..ef97a254 100644 --- a/pygments/lexers/webmisc.py +++ b/pygments/lexers/webmisc.py @@ -415,7 +415,7 @@ class XQueryLexer(ExtendedRegexLexer): ], 'varname': [ (r'\(:', Comment, 'comment'), - (r'(' + qname + ')(\()?', bygroups(Name.Variable, Punctuation), 'operator'), + (r'(' + qname + ')(\()?', bygroups(Name, Punctuation), 'operator'), ], 'singletype': [ (r'\(:', Comment, 'comment'), @@ -425,7 +425,7 @@ class XQueryLexer(ExtendedRegexLexer): 'itemtype': [ include('whitespace'), (r'\(:', Comment, 'comment'), - (r'\$', Punctuation, 'varname'), + (r'\$', Name.Variable, 'varname'), (r'(void)(\s*)(\()(\s*)(\))', bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'), (r'(element|attribute|schema-element|schema-attribute|comment|text|' @@ -726,7 +726,7 @@ class XQueryLexer(ExtendedRegexLexer): (r'then|else', Keyword), - 
# ML specific + # Marklogic specific (r'(try)(\s*)', bygroups(Keyword, Text), 'root'), (r'(catch)(\s*)(\()(\$)', bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'), -- cgit v1.2.1 From 1d8cfafffbe130b51a2f887c919a5c9b9b10b8f5 Mon Sep 17 00:00:00 2001 From: EricFromCanada Date: Fri, 14 Nov 2014 10:00:07 -0500 Subject: don't allow line breaks in hashbang or escapes --- pygments/lexers/javascript.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py index 8e258f9a..3d976904 100644 --- a/pygments/lexers/javascript.py +++ b/pygments/lexers/javascript.py @@ -522,7 +522,7 @@ class LassoLexer(RegexLexer): tokens = { 'root': [ - (r'^#!.+lasso9\b', Comment.Preproc, 'lasso'), + (r'^#![ \S]+lasso9\b', Comment.Preproc, 'lasso'), (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'), (r'\[noprocess\]', Comment.Preproc, ('delimiters', 'noprocess')), (r'\[', Comment.Preproc, ('delimiters', 'squarebrackets')), @@ -672,7 +672,7 @@ class LassoLexer(RegexLexer): (r'\\', String.Double), ], 'escape': [ - (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:]+:|' + (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:\n\r]+:|' r'[abefnrtv?"\'\\]|$)', String.Escape), ], 'signature': [ -- cgit v1.2.1 From eba8428f44e1c6ecbd0d2917305cf277f3300ba9 Mon Sep 17 00:00:00 2001 From: EricFromCanada Date: Fri, 14 Nov 2014 10:06:36 -0500 Subject: allow space after dot operators --- pygments/lexers/javascript.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py index 3d976904..c899f08c 100644 --- a/pygments/lexers/javascript.py +++ b/pygments/lexers/javascript.py @@ -588,7 +588,7 @@ class LassoLexer(RegexLexer): bygroups(Name.Builtin.Pseudo, Name.Variable.Class)), (r"(self)(\s*->\s*)('[a-z_][\w.]*')", bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)), - (r'(\.\.?)([a-z_][\w.]*(=(?!=))?)', + 
(r'(\.\.?\s*)([a-z_][\w.]*(=(?!=))?)', bygroups(Name.Builtin.Pseudo, Name.Other.Member)), (r'(->\\?\s*|&\s*)([a-z_][\w.]*(=(?!=))?)', bygroups(Operator, Name.Other.Member)), -- cgit v1.2.1 From a9b47e1898cda6e45eab709c6ea1daff161a93bf Mon Sep 17 00:00:00 2001 From: EricFromCanada Date: Fri, 14 Nov 2014 10:08:08 -0500 Subject: -infinity is valid, -nan is not --- pygments/lexers/javascript.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py index c899f08c..b08f66d4 100644 --- a/pygments/lexers/javascript.py +++ b/pygments/lexers/javascript.py @@ -576,7 +576,7 @@ class LassoLexer(RegexLexer): (r'\d*\.\d+(e[+-]?\d+)?', Number.Float), (r'0x[\da-f]+', Number.Hex), (r'\d+', Number.Integer), - (r'([+-]?)(infinity|NaN)\b', bygroups(Operator, Number)), + (r'(infinity|NaN)\b', Number), (r"'", String.Single, 'singlestring'), (r'"', String.Double, 'doublestring'), (r'`[^`]*`', String.Backtick), @@ -593,7 +593,7 @@ class LassoLexer(RegexLexer): (r'(->\\?\s*|&\s*)([a-z_][\w.]*(=(?!=))?)', bygroups(Operator, Name.Other.Member)), (r'(self|inherited)\b', Name.Builtin.Pseudo), - (r'-[a-z_][\w.]*', Name.Attribute), + (r'-(?!infinity)[a-z_][\w.]*', Name.Attribute), (r'::\s*[a-z_][\w.]*', Name.Label), (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|' r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|' -- cgit v1.2.1 From 4f5c646e14b5cee644a4fd87f413f08a3d02b042 Mon Sep 17 00:00:00 2001 From: EricFromCanada Date: Fri, 14 Nov 2014 10:10:40 -0500 Subject: [noprocess] should still work after [no_square_brackets] --- pygments/lexers/javascript.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py index b08f66d4..0cd68d24 100644 --- a/pygments/lexers/javascript.py +++ b/pygments/lexers/javascript.py @@ -541,9 +541,11 @@ class LassoLexer(RegexLexer): (r'[^[<]+', Other), ], 'nosquarebrackets': [ + 
(r'\[noprocess\]', Comment.Preproc, 'noprocess'), + (r'\[', Other), (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'), - (r'<', Other), - (r'[^<]+', Other), + (r'<(!--.*?-->)?', Other), + (r'[^[<]+', Other), ], 'noprocess': [ (r'\[/noprocess\]', Comment.Preproc, '#pop'), -- cgit v1.2.1 From 141850d80c4c5f5ca25a2a508da0355eb98f2f8a Mon Sep 17 00:00:00 2001 From: EricFromCanada Date: Fri, 14 Nov 2014 10:11:58 -0500 Subject: allow space in data member accessor --- pygments/lexers/javascript.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py index 0cd68d24..3e47821a 100644 --- a/pygments/lexers/javascript.py +++ b/pygments/lexers/javascript.py @@ -586,7 +586,7 @@ class LassoLexer(RegexLexer): # names (r'\$[a-z_][\w.]*', Name.Variable), (r'#([a-z_][\w.]*|\d+)', Name.Variable.Instance), - (r"(\.)('[a-z_][\w.]*')", + (r"(\.\s*)('[a-z_][\w.]*')", bygroups(Name.Builtin.Pseudo, Name.Variable.Class)), (r"(self)(\s*->\s*)('[a-z_][\w.]*')", bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)), -- cgit v1.2.1 From 79824687b0b2d1555ff4f13e454cc89d421a62ba Mon Sep 17 00:00:00 2001 From: EricFromCanada Date: Fri, 14 Nov 2014 10:21:22 -0500 Subject: improved list of built-in Lasso types --- pygments/lexers/javascript.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py index 3e47821a..178d63de 100644 --- a/pygments/lexers/javascript.py +++ b/pygments/lexers/javascript.py @@ -625,7 +625,8 @@ class LassoLexer(RegexLexer): (r'(true|false|none|minimal|full|all|void)\b', Keyword.Constant), (r'(local|var|variable|global|data(?=\s))\b', Keyword.Declaration), (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|' - r'null|bytes|list|queue|set|stack|staticarray|tie)\b', Keyword.Type), + r'null|boolean|bytes|keyword|list|locale|queue|set|stack|' + r'staticarray)\b', Keyword.Type), 
(r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)), (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)), (r'require\b', Keyword, 'requiresection'), -- cgit v1.2.1 From 5cf6a34c119184b68877fd479a86196389457470 Mon Sep 17 00:00:00 2001 From: Dejan Muhamedagic Date: Mon, 17 Nov 2014 14:44:50 +0100 Subject: Add a lexer for crmsh --- pygments/lexers/dsls.py | 72 +++++++++++++++++++++++++++++++++++++++-- tests/examplefiles/example.pcmk | 40 +++++++++++++++++++++++ 2 files changed, 110 insertions(+), 2 deletions(-) create mode 100644 tests/examplefiles/example.pcmk diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py index 433287d4..8d27726c 100644 --- a/pygments/lexers/dsls.py +++ b/pygments/lexers/dsls.py @@ -13,10 +13,11 @@ import re from pygments.lexer import RegexLexer, bygroups, words, include, default from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Literal + Number, Punctuation, Literal, Whitespace __all__ = ['ProtoBufLexer', 'BroLexer', 'PuppetLexer', 'RslLexer', - 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer'] + 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer', + 'CrmshLexer'] class ProtoBufLexer(RegexLexer): @@ -512,3 +513,70 @@ class PanLexer(RegexLexer): include('root'), ], } + + +class CrmshLexer(RegexLexer): + """ + Lexer for `crmsh `_ configuration files + for Pacemaker clusters. + + .. 
versionadded:: 1.0 + """ + name = 'Crmsh' + aliases = ['crmsh', 'pcmk'] + filenames = ['*.crmsh', '*.pcmk'] + mimetypes = [] + + elem = (r'node|primitive|group|clone|ms|location|colocation|order|' + r'fencing_topology|' + r'rsc_ticket|rsc_template|property|rsc_defaults|op_defaults|' + r'acl_target|acl_group|user|role') + sub = (r'params|meta|operations|op|rule|attributes|utilization') + acl = (r'read|write|deny') + acl_mod = (r'tag|ref|xpath') + bin_rel=(r'and|or') + un_ops=(r'defined|not_defined') + bin_ops=(r'lt|gt|lte|gte|eq|ne') + val_qual=(r'string|version|number') + date_exp=(r'in_range|date|spec') + rsc_role_action=(r'Master|Started|Slave|Stopped|' + r'start|promote|demote|stop') + + tokens = { + 'root': [ + # attr=value (nvpair) + (r'([\w#$-]+)(=)("(?:""|[^"])*"|\S+)', + bygroups(Name.Attribute, Punctuation, String)), + # need this construct, otherwise numeric node ids + # are matched as scores + # elem id: + (r'(%s)(\s+)([\w#$-]+)(:)' % elem, + bygroups(Keyword, Whitespace, Name, Punctuation)), + # scores + (r'([0-9]+|[+-]?inf):', Number), + # keywords (elements and other) + (r'(%s|%s|%s)(?![\w#$-])' % (elem,sub,acl), Keyword), + # binary operators + (r'(?:%s:)?%s(?![\w#$-])' % (val_qual,bin_ops), \ + Operator.Word), + # other operators + (r'(%s|%s|%s)(?![\w#$-])' % (bin_rel,un_ops,date_exp), \ + Operator.Word), + # builtin attributes (e.g. #uname) + (r'#[a-z]+(?![\w#$-])', Name.Builtin), + # rsc_id[:(role|action)] + (r'([\w#$-]+)(?:(:)(%s))?' 
% rsc_role_action, \ + bygroups(Name, Punctuation, Operator.Word)), + # acl_mod:blah + (r'(%s)(:)("(?:""|[^"])*"|\S+)' % acl_mod, \ + bygroups(Operator.Word, Punctuation, Name)), + # ids, and everything else not matched above + (r'([\w#$-]+)(?![\w#$-])', Name), + # punctuation + (r'(\\(?=\n)|[[\](){}/:])', Punctuation), + (r'#.*\n', Comment), + (r'\s+|\n', Whitespace), + ], + } + +# vim:ts=4:sw=4:et: diff --git a/tests/examplefiles/example.pcmk b/tests/examplefiles/example.pcmk new file mode 100644 index 00000000..7ec10686 --- /dev/null +++ b/tests/examplefiles/example.pcmk @@ -0,0 +1,40 @@ +node 167906355: sle12-a +node 167906357: sle12-c +primitive fs1 Filesystem \ + params device="/dev/nfs-vg/fs1" directory="/srv/nfs" fstype=ext3 \ + op monitor interval=10s +primitive nfs-server nfsserver \ + params nfs_shared_infodir="/srv/nfs/state" nfs_ip=10.2.12.100 \ + op monitor interval=0 trace_ra=1 +primitive nfs-vg LVM \ + params volgrpname=nfs-vg +primitive p_drbd_nfs ocf:linbit:drbd \ + params drbd_resource=nfs \ + op monitor interval=15 role=Master \ + op monitor interval=30 role=Slave \ + op start interval=0 timeout=300 \ + op stop interval=0 timeout=120 +primitive s-libvirt stonith:external/libvirt \ + params hostlist="sle12-a sle12-c" hypervisor_uri="qemu+ssh://hex-10.suse.de/system?keyfile=/root/.ssh/xen" reset_method=reboot \ + op monitor interval=5m timeout=60s +primitive virtual-ip IPaddr2 \ + params ip=10.2.12.100 +group nfs-disk nfs-vg fs1 +group nfs-srv virtual-ip nfs-server +ms ms_drbd_nfs p_drbd_nfs \ + meta notify=true clone-max=2 +location nfs-pref virtual-ip 100: sle12-a +colocation c-nfs inf: nfs-server fs1 +colocation vg-with-drbd inf: nfs-vg ms_drbd_nfs:Master +order drbd-before-vg inf: ms_drbd_nfs:promote nfs-vg:start +order o-nfs inf: fs1 nfs-server +property cib-bootstrap-options: \ + dc-version=1.1.12-ad083a8 \ + cluster-infrastructure=corosync \ + cluster-name=sle12-test3l-public \ + no-quorum-policy=ignore \ + startup-fencing=false \ + 
last-lrm-refresh=1415877622 \ + maintenance-mode=false +op_defaults op-options: \ + timeout=120s -- cgit v1.2.1 From 5b43bc04ba5e7287bc36d6e0f50b2e7797c58d3e Mon Sep 17 00:00:00 2001 From: EricFromCanada Date: Mon, 17 Nov 2014 10:34:42 -0500 Subject: improvements for Lasso builtins generator updating & sorting Lasso builtins file --- external/lasso-builtins-generator-9.lasso | 116 +- pygments/lexers/_lasso_builtins.py | 9115 +++++++++++++++-------------- 2 files changed, 4626 insertions(+), 4605 deletions(-) diff --git a/external/lasso-builtins-generator-9.lasso b/external/lasso-builtins-generator-9.lasso index 6a619106..d199ed50 100755 --- a/external/lasso-builtins-generator-9.lasso +++ b/external/lasso-builtins-generator-9.lasso @@ -4,9 +4,9 @@ Builtins Generator for Lasso 9 This is the shell script that was used to extract Lasso 9's built-in keywords - and generate most of the _lassobuiltins.py file. When run, it creates a file - named "lassobuiltins-9.py" containing the types, traits, methods, and members - of the currently-installed version of Lasso 9. + and generate most of the _lasso_builtins.py file. When run, it creates a file + containing the types, traits, methods, and members of the currently-installed + version of Lasso 9. A list of tags in Lasso 8 can be generated with this code: @@ -29,14 +29,14 @@ */ output("This output statement is required for a complete list of methods.") -local(f) = file("lassobuiltins-9.py") +local(f) = file("_lasso_builtins-9.py") #f->doWithClose => { -#f->openWrite +#f->openTruncate #f->writeString('# -*- coding: utf-8 -*- """ - pygments.lexers._lassobuiltins - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + pygments.lexers._lasso_builtins + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Built-in Lasso types, traits, methods, and members. 
@@ -46,18 +46,19 @@ local(f) = file("lassobuiltins-9.py") ') -lcapi_loadModules +// Load and register contents of $LASSO9_MASTER_HOME/LassoModules/ +database_initialize // Load all of the libraries from builtins and lassoserver // This forces all possible available types and methods to be registered local(srcs = - tie( + (: dir(sys_masterHomePath + 'LassoLibraries/builtins/')->eachFilePath, dir(sys_masterHomePath + 'LassoLibraries/lassoserver/')->eachFilePath ) ) -with topLevelDir in #srcs +with topLevelDir in delve(#srcs) where not #topLevelDir->lastComponent->beginsWith('.') do protect => { handle_error => { @@ -67,38 +68,53 @@ do protect => { stdoutnl('Loaded: ' + #topLevelDir) } +email_initialize +log_initialize +session_initialize + local( - typesList = list(), - traitsList = list(), - unboundMethodsList = list(), - memberMethodsList = list() + typesList = set(), + traitsList = set(), + unboundMethodsList = set(), + memberMethodsList = set() ) // types with type in sys_listTypes -where #typesList !>> #type +where not #type->asString->endsWith('$') // skip threads do { #typesList->insert(#type) - with method in #type->getType->listMethods - let name = #method->methodName - where not #name->asString->endsWith('=') // skip setter methods - where #name->asString->isAlpha(1) // skip unpublished methods - where #memberMethodsList !>> #name - do #memberMethodsList->insert(#name) } // traits with trait in sys_listTraits where not #trait->asString->beginsWith('$') // skip combined traits -where #traitsList !>> #trait do { #traitsList->insert(#trait) +} + +// member methods +with type in #typesList +do { + with method in #type->getType->listMethods + where #method->typeName == #type // skip inherited methods + let name = #method->methodName + where not #name->asString->endsWith('=') // skip setter methods + where #name->asString->isAlpha(1) // skip unpublished methods + do { + #memberMethodsList->insert(#name) + } +} +with trait in #traitsList +do { with method in 
tie(#trait->getType->provides, #trait->getType->requires) + where #method->typeName == #trait // skip inherited methods let name = #method->methodName where not #name->asString->endsWith('=') // skip setter methods where #name->asString->isAlpha(1) // skip unpublished methods - where #memberMethodsList !>> #name - do #memberMethodsList->insert(#name) + do { + #memberMethodsList->insert(#name) + } } // unbound methods @@ -108,36 +124,38 @@ where not #name->asString->endsWith('=') // skip setter methods where #name->asString->isAlpha(1) // skip unpublished methods where #typesList !>> #name where #traitsList !>> #name -where #unboundMethodsList !>> #name -do #unboundMethodsList->insert(#name) - -#f->writeString("BUILTINS = { - 'Types': [ -") -with t in #typesList -do !#t->asString->endsWith('$') ? #f->writeString(" '"+string_lowercase(#t->asString)+"',\n") - -#f->writeString(" ], - 'Traits': [ -") -with t in #traitsList -do #f->writeString(" '"+string_lowercase(#t->asString)+"',\n") - -#f->writeString(" ], - 'Unbound Methods': [ -") -with t in #unboundMethodsList -do #f->writeString(" '"+string_lowercase(#t->asString)+"',\n") +do { + #unboundMethodsList->insert(#name) +} -#f->writeString(" ] +// write to file +with i in (: + pair(#typesList, "BUILTINS = { + 'Types': ( +"), + pair(#traitsList, " ), + 'Traits': ( +"), + pair(#unboundMethodsList, " ), + 'Unbound Methods': ( +"), + pair(#memberMethodsList, " ) } MEMBERS = { - 'Member Methods': [ + 'Member Methods': ( ") -with t in #memberMethodsList -do #f->writeString(" '"+string_lowercase(#t->asString)+"',\n") +) +do { + #f->writeString(#i->second) + with t in (#i->first) + let ts = #t->asString + do { + #ts->lowercase + #f->writeString(" '"+#ts+"',\n") + } +} -#f->writeString(" ] +#f->writeString(" ) } ") diff --git a/pygments/lexers/_lasso_builtins.py b/pygments/lexers/_lasso_builtins.py index f7413fce..bda47d87 100644 --- a/pygments/lexers/_lasso_builtins.py +++ b/pygments/lexers/_lasso_builtins.py @@ -11,4354 
+11,4214 @@ BUILTINS = { 'Types': ( - 'null', - 'void', - 'tag', - 'trait', - 'integer', - 'decimal', + 'portal_impl', + 'array', + 'atbegin', 'boolean', - 'capture', - 'string', + 'bytes_document_body', 'bytes', - 'keyword', - 'custom', - 'staticarray', - 'signature', - 'memberstream', - 'dsinfo', - 'sourcefile', - 'array', - 'pair', - 'opaque', - 'filedesc', - 'dirdesc', - 'locale', - 'ucal', - 'xml_domimplementation', - 'xml_node', - 'xml_characterdata', - 'xml_document', - 'xml_element', - 'xml_attr', - 'xml_text', - 'xml_cdatasection', - 'xml_entityreference', - 'xml_entity', - 'xml_processinginstruction', - 'xml_comment', - 'xml_documenttype', - 'xml_documentfragment', - 'xml_notation', - 'xml_nodelist', - 'xml_namednodemap', - 'xml_namednodemap_ht', - 'xml_namednodemap_attr', - 'xmlstream', - 'sqlite3', - 'sqlite3_stmt', - 'mime_reader', + 'cache_server_element', + 'cache_server', + 'capture', + 'client_address', + 'client_ip', + 'component_container', + 'component_render_state', + 'component', 'curltoken', - 'regexp', - 'zip_impl', - 'zip_file_impl', - 'library_thread_loader', - 'generateforeachunkeyed', - 'generateforeachkeyed', - 'eacher', - 'queriable_where', - 'queriable_select', - 'queriable_selectmany', - 'queriable_groupby', - 'queriable_join', - 'queriable_groupjoin', - 'queriable_orderby', - 'queriable_orderbydescending', - 'queriable_thenby', - 'queriable_thenbydescending', - 'queriable_skip', - 'queriable_take', - 'queriable_grouping', - 'generateseries', - 'tie', - 'pairup', - 'delve', - 'repeat', - 'pair_compare', - 'serialization_object_identity_compare', - 'serialization_element', - 'serialization_writer_standin', - 'serialization_writer_ref', - 'serialization_writer', - 'serialization_reader', - 'tree_nullnode', - 'tree_node', - 'tree_base', - 'map_node', - 'map', - 'file', - 'date', - 'dir', - 'magick_image', - 'ldap', - 'os_process', - 'java_jnienv', - 'jobject', - 'jmethodid', - 'jfieldid', - 'database_registry', - 'sqlite_db', - 
'sqlite_results', - 'sqlite_currentrow', - 'sqlite_table', - 'sqlite_column', 'curl', - 'debugging_stack', - 'dbgp_server', - 'dbgp_packet', - 'duration', - 'inline_type', - 'json_literal', - 'json_object', - 'list_node', - 'list', - 'jchar', - 'jchararray', - 'jbyte', - 'jbytearray', - 'jfloat', - 'jint', - 'jshort', 'currency', - 'scientific', - 'percent', + 'custom', + 'data_document', + 'database_registry', 'dateandtime', - 'timeonly', - 'net_tcp', - 'net_tcpssl', - 'net_tcp_ssl', - 'net_named_pipe', - 'net_udppacket', - 'net_udp_packet', - 'net_udp', - 'pdf_typebase', - 'pdf_doc', - 'pdf_color', - 'pdf_barcode', - 'pdf_font', - 'pdf_image', - 'pdf_list', - 'pdf_read', - 'pdf_table', - 'pdf_text', - 'pdf_hyphenator', - 'pdf_chunk', - 'pdf_phrase', - 'pdf_paragraph', - 'queue', - 'set', - 'sys_process', - 'worker_pool', - 'zip_file', - 'zip', - 'cache_server_element', - 'cache_server', + 'date', + 'dbgp_packet', + 'dbgp_server', + 'debugging_stack', + 'decimal', + 'delve', + 'dirdesc', + 'dir', 'dns_response', - 'component_render_state', - 'component', - 'component_container', 'document_base', 'document_body', 'document_header', - 'text_document', - 'data_document', + 'dsinfo', + 'duration', + 'eacher', 'email_compose', - 'email_pop', 'email_parse', + 'email_pop', 'email_queue_impl_base', + 'email_queue_impl', + 'email_smtp', 'email_stage_impl_base', - 'fcgi_record', - 'web_request_impl', - 'fcgi_request', - 'include_cache', - 'atbegin', + 'email_stage_impl', 'fastcgi_each_fcgi_param', 'fastcgi_server', + 'fcgi_record', + 'fcgi_request', + 'filedesc', 'filemaker_datasource', - 'http_document', - 'http_document_header', - 'http_header_field', - 'html_document_head', - 'html_document_body', - 'raw_document_body', - 'bytes_document_body', - 'html_attr', + 'file', + 'generateforeachkeyed', + 'generateforeachunkeyed', + 'generateseries', 'html_atomic_element', - 'html_container_element', - 'http_error', - 'html_script', - 'html_text', - 'html_raw', + 'html_attr', + 
'html_base', 'html_binary', - 'html_json', + 'html_br', 'html_cdata', - 'html_eol', + 'html_container_element', 'html_div', - 'html_span', - 'html_br', - 'html_hr', + 'html_document_body', + 'html_document_head', + 'html_eol', + 'html_fieldset', + 'html_form', 'html_h1', 'html_h2', 'html_h3', 'html_h4', 'html_h5', 'html_h6', - 'html_meta', + 'html_hr', + 'html_img', + 'html_input', + 'html_json', + 'html_label', + 'html_legend', 'html_link', + 'html_meta', 'html_object', + 'html_option', + 'html_raw', + 'html_script', + 'html_select', + 'html_span', 'html_style', - 'html_base', 'html_table', - 'html_tr', 'html_td', + 'html_text', 'html_th', - 'html_img', - 'html_form', - 'html_fieldset', - 'html_legend', - 'html_input', - 'html_label', - 'html_option', - 'html_select', + 'html_tr', + 'http_document_header', + 'http_document', + 'http_error', + 'http_header_field', + 'http_server_connection_handler_globals', + 'http_server_connection_handler', + 'http_server_request_logger_thread', 'http_server_web_connection', 'http_server', - 'http_server_connection_handler', 'image', - 'lassoapp_installer', - 'lassoapp_content_rep_halt', - 'lassoapp_dirsrc_fileresource', - 'lassoapp_dirsrc_appsource', - 'lassoapp_livesrc_fileresource', - 'lassoapp_livesrc_appsource', - 'lassoapp_long_expiring_bytes', - 'lassoapp_zip_file_server', - 'lassoapp_zipsrc_fileresource', - 'lassoapp_zipsrc_appsource', - 'lassoapp_compiledsrc_fileresource', + 'include_cache', + 'inline_type', + 'integer', + 'java_jnienv', + 'jbytearray', + 'jbyte', + 'jchararray', + 'jchar', + 'jfieldid', + 'jfloat', + 'jint', + 'jmethodid', + 'jobject', + 'jshort', + 'json_literal', + 'json_object', + 'keyword', 'lassoapp_compiledsrc_appsource', + 'lassoapp_compiledsrc_fileresource', + 'lassoapp_content_rep_halt', + 'lassoapp_dirsrc_appsource', + 'lassoapp_dirsrc_fileresource', + 'lassoapp_installer', + 'lassoapp_livesrc_appsource', + 'lassoapp_livesrc_fileresource', + 'lassoapp_long_expiring_bytes', 
'lassoapp_manualsrc_appsource', + 'lassoapp_zip_file_server', + 'lassoapp_zipsrc_appsource', + 'lassoapp_zipsrc_fileresource', + 'ldap', + 'library_thread_loader', + 'list_node', + 'list', + 'locale', 'log_impl_base', - 'portal_impl', - 'security_registry', + 'log_impl', + 'magick_image', + 'map_node', + 'map', + 'memberstream', 'memory_session_driver_impl_entry', 'memory_session_driver_impl', - 'sqlite_session_driver_impl_entry', - 'sqlite_session_driver_impl', + 'memory_session_driver', + 'mime_reader', 'mysql_session_driver_impl', + 'mysql_session_driver', + 'net_named_pipe', + 'net_tcp_ssl', + 'net_tcp', + 'net_udp_packet', + 'net_udp', + 'null', 'odbc_session_driver_impl', + 'odbc_session_driver', + 'opaque', + 'os_process', + 'pairup', + 'pair_compare', + 'pair', + 'pdf_barcode', + 'pdf_chunk', + 'pdf_color', + 'pdf_doc', + 'pdf_font', + 'pdf_hyphenator', + 'pdf_image', + 'pdf_list', + 'pdf_paragraph', + 'pdf_phrase', + 'pdf_read', + 'pdf_table', + 'pdf_text', + 'pdf_typebase', + 'percent', + 'queriable_groupby', + 'queriable_groupjoin', + 'queriable_grouping', + 'queriable_join', + 'queriable_orderbydescending', + 'queriable_orderby', + 'queriable_selectmany', + 'queriable_select', + 'queriable_skip', + 'queriable_take', + 'queriable_thenbydescending', + 'queriable_thenby', + 'queriable_where', + 'queue', + 'raw_document_body', + 'regexp', + 'repeat', + 'scientific', + 'security_registry', + 'serialization_element', + 'serialization_object_identity_compare', + 'serialization_reader', + 'serialization_writer_ref', + 'serialization_writer_standin', + 'serialization_writer', 'session_delete_expired_thread', - 'email_smtp', - 'client_address', - 'client_ip', + 'set', + 'signature', + 'sourcefile', + 'sqlite3_stmt', + 'sqlite3', + 'sqlite_column', + 'sqlite_currentrow', + 'sqlite_db', + 'sqlite_results', + 'sqlite_session_driver_impl_entry', + 'sqlite_session_driver_impl', + 'sqlite_session_driver', + 'sqlite_table', + 'staticarray', + 'string', + 'sys_process', 
+ 'tag', + 'text_document', + 'tie', + 'timeonly', + 'trait', + 'tree_base', + 'tree_node', + 'tree_nullnode', + 'ucal', + 'void', + 'web_error_atend', 'web_node_base', - 'web_node_root', - 'web_node_content_representation_xhr_container', - 'web_node_content_representation_html_specialized', 'web_node_content_representation_css_specialized', + 'web_node_content_representation_html_specialized', 'web_node_content_representation_js_specialized', + 'web_node_content_representation_xhr_container', 'web_node_echo', - 'web_error_atend', + 'web_node_root', + 'web_request_impl', + 'web_request', 'web_response_impl', - 'web_router' + 'web_response', + 'web_router', + 'worker_pool', + 'xml_domimplementation', + 'xml_attr', + 'xml_cdatasection', + 'xml_characterdata', + 'xml_comment', + 'xml_documentfragment', + 'xml_documenttype', + 'xml_document', + 'xml_element', + 'xml_entityreference', + 'xml_entity', + 'xml_namednodemap_attr', + 'xml_namednodemap_ht', + 'xml_namednodemap', + 'xml_nodelist', + 'xml_node', + 'xml_notation', + 'xml_processinginstruction', + 'xml_text', + 'xmlstream', + 'zip_file_impl', + 'zip_file', + 'zip_impl', + 'zip', ), 'Traits': ( - 'trait_asstring', 'any', - 'trait_generator', - 'trait_decompose_assignment', - 'trait_foreach', - 'trait_generatorcentric', - 'trait_foreachtextelement', - 'trait_finite', - 'trait_finiteforeach', - 'trait_keyed', - 'trait_keyedfinite', - 'trait_keyedforeach', - 'trait_frontended', + 'formattingbase', + 'html_attributed', + 'html_element_coreattrs', + 'html_element_eventsattrs', + 'html_element_i18nattrs', + 'lassoapp_capabilities', + 'lassoapp_resource', + 'lassoapp_source', + 'queriable_asstring', + 'session_driver', + 'trait_array', + 'trait_asstring', + 'trait_backcontractible', 'trait_backended', - 'trait_doubleended', - 'trait_positionallykeyed', - 'trait_expandable', - 'trait_frontexpandable', 'trait_backexpandable', + 'trait_close', 'trait_contractible', + 'trait_decompose_assignment', + 'trait_doubleended', + 
'trait_each_sub', + 'trait_encodeurl', + 'trait_endedfullymutable', + 'trait_expandable', + 'trait_file', + 'trait_finiteforeach', + 'trait_finite', + 'trait_foreachtextelement', + 'trait_foreach', 'trait_frontcontractible', - 'trait_backcontractible', + 'trait_frontended', + 'trait_frontexpandable', 'trait_fullymutable', - 'trait_keyedmutable', - 'trait_endedfullymutable', - 'trait_setoperations', - 'trait_searchable', - 'trait_positionallysearchable', - 'trait_pathcomponents', - 'trait_readbytes', - 'trait_writebytes', - 'trait_setencoding', - 'trait_readstring', - 'trait_writestring', + 'trait_generatorcentric', + 'trait_generator', 'trait_hashable', - 'trait_each_sub', - 'trait_stack', + 'trait_json_serialize', + 'trait_keyedfinite', + 'trait_keyedforeach', + 'trait_keyedmutable', + 'trait_keyed', 'trait_list', - 'trait_array', 'trait_map', - 'trait_close', - 'trait_file', - 'trait_scalar', + 'trait_net', + 'trait_pathcomponents', + 'trait_positionallykeyed', + 'trait_positionallysearchable', 'trait_queriablelambda', 'trait_queriable', - 'queriable_asstring', + 'trait_readbytes', + 'trait_readstring', + 'trait_scalar', + 'trait_searchable', 'trait_serializable', + 'trait_setencoding', + 'trait_setoperations', + 'trait_stack', 'trait_treenode', - 'trait_json_serialize', - 'formattingbase', - 'trait_net', + 'trait_writebytes', + 'trait_writestring', 'trait_xml_elementcompat', 'trait_xml_nodecompat', 'web_connection', - 'html_element_coreattrs', - 'html_element_i18nattrs', - 'html_element_eventsattrs', - 'html_attributed', - 'lassoapp_resource', - 'lassoapp_source', - 'lassoapp_capabilities', - 'session_driver', - 'web_node_content_json_specialized', - 'web_node', 'web_node_container', + 'web_node_content_css_specialized', + 'web_node_content_document', + 'web_node_content_html_specialized', + 'web_node_content_js_specialized', + 'web_node_content_json_specialized', 'web_node_content_representation', 'web_node_content', - 'web_node_content_document', 
'web_node_postable', - 'web_node_content_html_specialized', - 'web_node_content_css_specialized', - 'web_node_content_js_specialized' + 'web_node', ), 'Unbound Methods': ( - 'fail_now', - 'register', - 'register_thread', - 'escape_tag', - 'handle', - 'handle_failure', - 'protect_now', - 'threadvar_get', - 'threadvar_set', - 'threadvar_set_asrt', - 'threadvar_find', - 'abort_now', - 'abort_clear', - 'failure_clear', - 'var_keys', - 'var_values', - 'staticarray_join', - 'suspend', - 'main_thread_only', - 'split_thread', - 'capture_nearestloopcount', - 'capture_nearestloopcontinue', - 'capture_nearestloopabort', - 'io_file_o_rdonly', - 'io_file_o_wronly', - 'io_file_o_rdwr', - 'io_file_o_nonblock', - 'io_file_o_sync', - 'io_file_o_shlock', - 'io_file_o_exlock', - 'io_file_o_async', - 'io_file_o_fsync', - 'io_file_o_nofollow', - 'io_file_s_irwxu', - 'io_file_s_irusr', - 'io_file_s_iwusr', - 'io_file_s_ixusr', - 'io_file_s_irwxg', - 'io_file_s_irgrp', - 'io_file_s_iwgrp', - 'io_file_s_ixgrp', - 'io_file_s_irwxo', - 'io_file_s_iroth', - 'io_file_s_iwoth', - 'io_file_s_ixoth', - 'io_file_s_isuid', - 'io_file_s_isgid', - 'io_file_s_isvtx', - 'io_file_s_ifmt', - 'io_file_s_ifchr', - 'io_file_s_ifdir', - 'io_file_s_ifreg', - 'io_file_o_append', - 'io_file_o_creat', - 'io_file_o_trunc', - 'io_file_o_excl', - 'io_file_seek_set', - 'io_file_seek_cur', - 'io_file_seek_end', - 'io_file_s_ififo', - 'io_file_s_ifblk', - 'io_file_s_iflnk', - 'io_file_s_ifsock', - 'io_net_shut_rd', - 'io_net_shut_wr', - 'io_net_shut_rdwr', - 'io_net_sock_stream', - 'io_net_sock_dgram', - 'io_net_sock_raw', - 'io_net_sock_rdm', - 'io_net_sock_seqpacket', - 'io_net_so_debug', - 'io_net_so_acceptconn', - 'io_net_so_reuseaddr', - 'io_net_so_keepalive', - 'io_net_so_dontroute', - 'io_net_so_broadcast', - 'io_net_so_useloopback', - 'io_net_so_linger', - 'io_net_so_oobinline', - 'io_net_so_timestamp', - 'io_net_so_sndbuf', - 'io_net_so_rcvbuf', - 'io_net_so_sndlowat', - 'io_net_so_rcvlowat', - 
'io_net_so_sndtimeo', - 'io_net_so_rcvtimeo', - 'io_net_so_error', - 'io_net_so_type', - 'io_net_sol_socket', - 'io_net_af_unix', - 'io_net_af_inet', - 'io_net_af_inet6', - 'io_net_ipproto_ip', - 'io_net_ipproto_udp', - 'io_net_msg_peek', - 'io_net_msg_oob', - 'io_net_msg_waitall', - 'io_file_fioclex', - 'io_file_fionclex', - 'io_file_fionread', - 'io_file_fionbio', - 'io_file_fioasync', - 'io_file_fiosetown', - 'io_file_fiogetown', - 'io_file_fiodtype', - 'io_file_f_dupfd', - 'io_file_f_getfd', - 'io_file_f_setfd', - 'io_file_f_getfl', - 'io_file_f_setfl', - 'io_file_f_getlk', - 'io_file_f_setlk', - 'io_file_f_setlkw', - 'io_file_fd_cloexec', - 'io_file_f_rdlck', - 'io_file_f_unlck', - 'io_file_f_wrlck', - 'io_dir_dt_unknown', - 'io_dir_dt_fifo', - 'io_dir_dt_chr', - 'io_dir_dt_blk', - 'io_dir_dt_reg', - 'io_dir_dt_sock', - 'io_dir_dt_wht', - 'io_dir_dt_lnk', - 'io_dir_dt_dir', - 'io_file_access', - 'io_file_chdir', - 'io_file_getcwd', - 'io_file_chown', - 'io_file_lchown', - 'io_file_truncate', - 'io_file_link', - 'io_file_pipe', - 'io_file_rmdir', - 'io_file_symlink', - 'io_file_unlink', - 'io_file_remove', - 'io_file_rename', - 'io_file_tempnam', - 'io_file_mkstemp', - 'io_file_dirname', - 'io_file_realpath', - 'io_file_chmod', - 'io_file_mkdir', - 'io_file_mkfifo', - 'io_file_umask', - 'io_net_socket', - 'io_net_bind', - 'io_net_connect', - 'io_net_listen', - 'io_net_recv', - 'io_net_recvfrom', - 'io_net_accept', - 'io_net_send', - 'io_net_sendto', - 'io_net_shutdown', - 'io_net_getpeername', - 'io_net_getsockname', - 'io_net_ssl_begin', - 'io_net_ssl_end', - 'io_net_ssl_shutdown', - 'io_net_ssl_setverifylocations', - 'io_net_ssl_usecertificatechainfile', - 'io_net_ssl_useprivatekeyfile', - 'io_net_ssl_connect', - 'io_net_ssl_accept', - 'io_net_ssl_error', - 'io_net_ssl_errorstring', - 'io_net_ssl_liberrorstring', - 'io_net_ssl_funcerrorstring', - 'io_net_ssl_reasonerrorstring', - 'io_net_ssl_setconnectstate', - 'io_net_ssl_setacceptstate', - 
'io_net_ssl_read', - 'io_net_ssl_write', - 'io_file_stat_size', - 'io_file_stat_mode', - 'io_file_stat_mtime', - 'io_file_stat_atime', - 'io_file_lstat_size', - 'io_file_lstat_mode', - 'io_file_lstat_mtime', - 'io_file_lstat_atime', - 'io_file_readlink', - 'io_file_lockf', - 'io_file_f_ulock', - 'io_file_f_tlock', - 'io_file_f_test', - 'io_file_stdin', - 'io_file_stdout', - 'io_file_stderr', - 'uchar_alphabetic', - 'uchar_ascii_hex_digit', - 'uchar_bidi_control', - 'uchar_bidi_mirrored', - 'uchar_dash', - 'uchar_default_ignorable_code_point', - 'uchar_deprecated', - 'uchar_diacritic', - 'uchar_extender', - 'uchar_full_composition_exclusion', - 'uchar_grapheme_base', - 'uchar_grapheme_extend', - 'uchar_grapheme_link', - 'uchar_hex_digit', - 'uchar_hyphen', - 'uchar_id_continue', - 'uchar_ideographic', - 'uchar_ids_binary_operator', - 'uchar_ids_trinary_operator', - 'uchar_join_control', - 'uchar_logical_order_exception', - 'uchar_lowercase', - 'uchar_math', - 'uchar_noncharacter_code_point', - 'uchar_quotation_mark', - 'uchar_radical', - 'uchar_soft_dotted', - 'uchar_terminal_punctuation', - 'uchar_unified_ideograph', - 'uchar_uppercase', - 'uchar_white_space', - 'uchar_xid_continue', - 'uchar_case_sensitive', - 'uchar_s_term', - 'uchar_variation_selector', - 'uchar_nfd_inert', - 'uchar_nfkd_inert', - 'uchar_nfc_inert', - 'uchar_nfkc_inert', - 'uchar_segment_starter', - 'uchar_pattern_syntax', - 'uchar_pattern_white_space', - 'uchar_posix_alnum', - 'uchar_posix_blank', - 'uchar_posix_graph', - 'uchar_posix_print', - 'uchar_posix_xdigit', - 'uchar_bidi_class', - 'uchar_block', - 'uchar_canonical_combining_class', - 'uchar_decomposition_type', - 'uchar_east_asian_width', - 'uchar_general_category', - 'uchar_joining_group', - 'uchar_joining_type', - 'uchar_line_break', - 'uchar_numeric_type', - 'uchar_script', - 'uchar_hangul_syllable_type', - 'uchar_nfd_quick_check', - 'uchar_nfkd_quick_check', - 'uchar_nfc_quick_check', - 'uchar_nfkc_quick_check', - 
'uchar_lead_canonical_combining_class', - 'uchar_trail_canonical_combining_class', - 'uchar_grapheme_cluster_break', - 'uchar_sentence_break', - 'uchar_word_break', - 'uchar_general_category_mask', - 'uchar_numeric_value', - 'uchar_age', - 'uchar_bidi_mirroring_glyph', - 'uchar_case_folding', - 'uchar_iso_comment', - 'uchar_lowercase_mapping', - 'uchar_name', - 'uchar_simple_case_folding', - 'uchar_simple_lowercase_mapping', - 'uchar_simple_titlecase_mapping', - 'uchar_simple_uppercase_mapping', - 'uchar_titlecase_mapping', - 'uchar_unicode_1_name', - 'uchar_uppercase_mapping', - 'u_wb_other', - 'u_wb_aletter', - 'u_wb_format', - 'u_wb_katakana', - 'u_wb_midletter', - 'u_wb_midnum', - 'u_wb_numeric', - 'u_wb_extendnumlet', - 'u_sb_other', - 'u_sb_aterm', - 'u_sb_close', - 'u_sb_format', - 'u_sb_lower', - 'u_sb_numeric', - 'u_sb_oletter', - 'u_sb_sep', - 'u_sb_sp', - 'u_sb_sterm', - 'u_sb_upper', - 'u_lb_unknown', - 'u_lb_ambiguous', - 'u_lb_alphabetic', - 'u_lb_break_both', - 'u_lb_break_after', - 'u_lb_break_before', - 'u_lb_mandatory_break', - 'u_lb_contingent_break', - 'u_lb_close_punctuation', - 'u_lb_combining_mark', - 'u_lb_carriage_return', - 'u_lb_exclamation', - 'u_lb_glue', - 'u_lb_hyphen', - 'u_lb_ideographic', - 'u_lb_inseparable', - 'u_lb_infix_numeric', - 'u_lb_line_feed', - 'u_lb_nonstarter', - 'u_lb_numeric', - 'u_lb_open_punctuation', - 'u_lb_postfix_numeric', - 'u_lb_prefix_numeric', - 'u_lb_quotation', - 'u_lb_complex_context', - 'u_lb_surrogate', - 'u_lb_space', - 'u_lb_break_symbols', - 'u_lb_zwspace', - 'u_lb_next_line', - 'u_lb_word_joiner', - 'u_lb_h2', - 'u_lb_h3', - 'u_lb_jl', - 'u_lb_jt', - 'u_lb_jv', - 'u_nt_none', - 'u_nt_decimal', - 'u_nt_digit', - 'u_nt_numeric', - 'locale_english', - 'locale_french', - 'locale_german', - 'locale_italian', - 'locale_japanese', - 'locale_korean', - 'locale_chinese', - 'locale_simplifiedchinese', - 'locale_traditionalchinese', - 'locale_france', - 'locale_germany', - 'locale_italy', - 'locale_japan', - 
'locale_korea', - 'locale_china', - 'locale_prc', - 'locale_taiwan', - 'locale_uk', - 'locale_us', - 'locale_canada', - 'locale_canadafrench', - 'locale_default', - 'locale_setdefault', - 'locale_isocountries', - 'locale_isolanguages', - 'locale_availablelocales', - 'ucal_listtimezones', - 'ucal_era', - 'ucal_year', - 'ucal_month', - 'ucal_weekofyear', - 'ucal_weekofmonth', - 'ucal_dayofmonth', - 'ucal_dayofyear', - 'ucal_dayofweek', - 'ucal_dayofweekinmonth', - 'ucal_ampm', - 'ucal_hour', - 'ucal_hourofday', - 'ucal_minute', - 'ucal_second', - 'ucal_millisecond', - 'ucal_zoneoffset', - 'ucal_dstoffset', - 'ucal_yearwoy', - 'ucal_dowlocal', - 'ucal_extendedyear', - 'ucal_julianday', - 'ucal_millisecondsinday', - 'ucal_lenient', - 'ucal_firstdayofweek', - 'ucal_daysinfirstweek', - 'sys_sigalrm', - 'sys_sighup', - 'sys_sigkill', - 'sys_sigpipe', - 'sys_sigquit', - 'sys_sigusr1', - 'sys_sigusr2', - 'sys_sigchld', - 'sys_sigcont', - 'sys_sigstop', - 'sys_sigtstp', - 'sys_sigttin', - 'sys_sigttou', - 'sys_sigbus', - 'sys_sigprof', - 'sys_sigsys', - 'sys_sigtrap', - 'sys_sigurg', - 'sys_sigvtalrm', - 'sys_sigxcpu', - 'sys_sigxfsz', - 'sys_wcontinued', - 'sys_wnohang', - 'sys_wuntraced', - 'sys_sigabrt', - 'sys_sigfpe', - 'sys_sigill', - 'sys_sigint', - 'sys_sigsegv', - 'sys_sigterm', - 'sys_exit', - 'sys_fork', - 'sys_kill', - 'sys_waitpid', - 'sys_getegid', - 'sys_geteuid', - 'sys_getgid', - 'sys_getlogin', - 'sys_getpid', - 'sys_getppid', - 'sys_getuid', - 'sys_setuid', - 'sys_setgid', - 'sys_setsid', - 'sys_errno', - 'sys_strerror', - 'sys_time', - 'sys_difftime', - 'sys_getpwuid', - 'sys_getpwnam', - 'sys_getgrnam', - 'sys_drand48', - 'sys_erand48', - 'sys_jrand48', - 'sys_lcong48', - 'sys_lrand48', - 'sys_mrand48', - 'sys_nrand48', - 'sys_srand48', - 'sys_random', - 'sys_srandom', - 'sys_seed48', - 'sys_rand', - 'sys_srand', - 'sys_environ', - 'sys_getenv', - 'sys_setenv', - 'sys_unsetenv', - 'sys_uname', - 'uuid_compare', - 'uuid_copy', - 'uuid_generate', - 
'uuid_generate_random', - 'uuid_generate_time', - 'uuid_is_null', - 'uuid_parse', - 'uuid_unparse', - 'uuid_unparse_lower', - 'uuid_unparse_upper', - 'sys_credits', - 'sleep', - 'sys_dll_ext', - 'sys_listtypes', - 'sys_listtraits', - 'sys_listunboundmethods', - 'sys_getthreadcount', - 'sys_growheapby', - 'sys_getheapsize', - 'sys_getheapfreebytes', - 'sys_getbytessincegc', - 'sys_garbagecollect', - 'sys_clock', - 'sys_getstartclock', - 'sys_clockspersec', - 'sys_pointersize', - 'sys_loadlibrary', - 'sys_getchar', - 'sys_chroot', - 'sys_exec', - 'sys_kill_exec', - 'sys_wait_exec', - 'sys_test_exec', - 'sys_detach_exec', - 'sys_pid_exec', - 'wifexited', - 'wexitstatus', - 'wifsignaled', - 'wtermsig', - 'wifstopped', - 'wstopsig', - 'wifcontinued', - 'sys_eol', - 'sys_iswindows', - 'sys_is_windows', - 'sys_isfullpath', - 'sys_is_full_path', - 'lcapi_loadmodule', - 'lcapi_listdatasources', - 'encrypt_blowfish', - 'decrypt_blowfish', - 'cipher_digest', - 'cipher_encrypt', - 'cipher_decrypt', - 'cipher_list', - 'cipher_keylength', - 'cipher_hmac', - 'cipher_seal', - 'cipher_open', - 'cipher_sign', - 'cipher_verify', - 'cipher_decrypt_private', - 'cipher_decrypt_public', - 'cipher_encrypt_private', - 'cipher_encrypt_public', - 'cipher_generate_key', - 'tag_exists', - 'curl_easy_init', - 'curl_easy_duphandle', - 'curl_easy_cleanup', - 'curl_easy_getinfo', - 'curl_multi_perform', - 'curl_multi_result', - 'curl_easy_reset', - 'curl_easy_setopt', - 'curl_easy_strerror', - 'curl_getdate', - 'curl_version', - 'curl_version_info', - 'curlinfo_effective_url', - 'curlinfo_content_type', - 'curlinfo_response_code', - 'curlinfo_header_size', - 'curlinfo_request_size', - 'curlinfo_ssl_verifyresult', - 'curlinfo_filetime', - 'curlinfo_redirect_count', - 'curlinfo_http_connectcode', - 'curlinfo_httpauth_avail', - 'curlinfo_proxyauth_avail', - 'curlinfo_os_errno', - 'curlinfo_num_connects', - 'curlinfo_total_time', - 'curlinfo_namelookup_time', - 'curlinfo_connect_time', - 
'curlinfo_pretransfer_time', - 'curlinfo_size_upload', - 'curlinfo_size_download', - 'curlinfo_speed_download', - 'curlinfo_speed_upload', - 'curlinfo_content_length_download', - 'curlinfo_content_length_upload', - 'curlinfo_starttransfer_time', - 'curlinfo_redirect_time', - 'curlinfo_ssl_engines', - 'curlopt_url', - 'curlopt_postfields', - 'curlopt_cainfo', - 'curlopt_capath', - 'curlopt_cookie', - 'curlopt_cookiefile', - 'curlopt_cookiejar', - 'curlopt_customrequest', - 'curlopt_egdsocket', - 'curlopt_encoding', - 'curlopt_ftp_account', - 'curlopt_ftpport', - 'curlopt_interface', - 'curlopt_krb4level', - 'curlopt_netrc_file', - 'curlopt_proxy', - 'curlopt_proxyuserpwd', - 'curlopt_random_file', - 'curlopt_range', - 'curlopt_readdata', - 'curlopt_referer', - 'curlopt_ssl_cipher_list', - 'curlopt_sslcert', - 'curlopt_sslcerttype', - 'curlopt_sslengine', - 'curlopt_sslkey', - 'curlopt_sslkeypasswd', - 'curlopt_sslkeytype', - 'curlopt_useragent', - 'curlopt_userpwd', - 'curlopt_postfieldsize', - 'curlopt_autoreferer', - 'curlopt_buffersize', - 'curlopt_connecttimeout', - 'curlopt_cookiesession', - 'curlopt_crlf', - 'curlopt_dns_use_global_cache', - 'curlopt_failonerror', - 'curlopt_filetime', - 'curlopt_followlocation', - 'curlopt_forbid_reuse', - 'curlopt_fresh_connect', - 'curlopt_ftp_create_missing_dirs', - 'curlopt_ftp_response_timeout', - 'curlopt_ftp_ssl', - 'curlopt_use_ssl', - 'curlopt_ftp_use_eprt', - 'curlopt_ftp_use_epsv', - 'curlopt_ftpappend', - 'curlopt_ftplistonly', - 'curlopt_ftpsslauth', - 'curlopt_header', - 'curlopt_http_version', - 'curlopt_httpauth', - 'curlopt_httpget', - 'curlopt_httpproxytunnel', - 'curlopt_infilesize', - 'curlopt_ipresolve', - 'curlopt_low_speed_limit', - 'curlopt_low_speed_time', - 'curlopt_maxconnects', - 'curlopt_maxfilesize', - 'curlopt_maxredirs', - 'curlopt_netrc', - 'curlopt_nobody', - 'curlopt_noprogress', - 'curlopt_port', - 'curlopt_post', - 'curlopt_proxyauth', - 'curlopt_proxyport', - 'curlopt_proxytype', - 
'curlopt_put', - 'curlopt_resume_from', - 'curlopt_ssl_verifyhost', - 'curlopt_ssl_verifypeer', - 'curlopt_sslengine_default', - 'curlopt_sslversion', - 'curlopt_tcp_nodelay', - 'curlopt_timecondition', - 'curlopt_timeout', - 'curlopt_timevalue', - 'curlopt_transfertext', - 'curlopt_unrestricted_auth', - 'curlopt_upload', - 'curlopt_verbose', - 'curlopt_infilesize_large', - 'curlopt_maxfilesize_large', - 'curlopt_postfieldsize_large', - 'curlopt_resume_from_large', - 'curlopt_http200aliases', - 'curlopt_httpheader', - 'curlopt_postquote', - 'curlopt_prequote', - 'curlopt_quote', - 'curlopt_httppost', - 'curlopt_writedata', - 'curl_version_ipv6', - 'curl_version_kerberos4', - 'curl_version_ssl', - 'curl_version_libz', - 'curl_version_ntlm', - 'curl_version_gssnegotiate', - 'curl_version_debug', - 'curl_version_asynchdns', - 'curl_version_spnego', - 'curl_version_largefile', - 'curl_version_idn', - 'curl_netrc_ignored', - 'curl_netrc_optional', - 'curl_netrc_required', - 'curl_http_version_none', - 'curl_http_version_1_0', - 'curl_http_version_1_1', - 'curl_ipresolve_whatever', - 'curl_ipresolve_v4', - 'curl_ipresolve_v6', - 'curlftpssl_none', - 'curlftpssl_try', - 'curlftpssl_control', - 'curlftpssl_all', - 'curlftpssl_last', - 'curlftpauth_default', - 'curlftpauth_ssl', - 'curlftpauth_tls', - 'curlauth_none', - 'curlauth_basic', - 'curlauth_digest', - 'curlauth_gssnegotiate', - 'curlauth_ntlm', - 'curlauth_any', - 'curlauth_anysafe', - 'curlproxy_http', - 'curlproxy_socks4', - 'curlproxy_socks5', - 'curle_ok', - 'curle_unsupported_protocol', - 'curle_failed_init', - 'curle_url_malformat', - 'curle_url_malformat_user', - 'curle_couldnt_resolve_proxy', - 'curle_couldnt_resolve_host', - 'curle_couldnt_connect', - 'curle_ftp_weird_server_reply', - 'curle_ftp_access_denied', - 'curle_ftp_user_password_incorrect', - 'curle_ftp_weird_pass_reply', - 'curle_ftp_weird_user_reply', - 'curle_ftp_weird_pasv_reply', - 'curle_ftp_weird_227_format', - 'curle_ftp_cant_get_host', - 
'curle_ftp_cant_reconnect', - 'curle_ftp_couldnt_set_binary', - 'curle_partial_file', - 'curle_ftp_couldnt_retr_file', - 'curle_ftp_write_error', - 'curle_ftp_quote_error', - 'curle_http_returned_error', - 'curle_write_error', - 'curle_malformat_user', - 'curle_read_error', - 'curle_out_of_memory', - 'curle_operation_timeouted', - 'curle_ftp_couldnt_set_ascii', - 'curle_ftp_port_failed', - 'curle_ftp_couldnt_use_rest', - 'curle_ftp_couldnt_get_size', - 'curle_http_range_error', - 'curle_http_post_error', - 'curle_ssl_connect_error', - 'curle_bad_download_resume', - 'curle_file_couldnt_read_file', - 'curle_ldap_cannot_bind', - 'curle_ldap_search_failed', - 'curle_library_not_found', - 'curle_function_not_found', - 'curle_aborted_by_callback', - 'curle_bad_function_argument', - 'curle_bad_calling_order', - 'curle_interface_failed', - 'curle_bad_password_entered', - 'curle_too_many_redirects', - 'curle_unknown_telnet_option', - 'curle_telnet_option_syntax', - 'curle_obsolete', - 'curle_ssl_peer_certificate', - 'curle_got_nothing', - 'curle_ssl_engine_notfound', - 'curle_ssl_engine_setfailed', - 'curle_send_error', - 'curle_recv_error', - 'curle_share_in_use', - 'curle_ssl_certproblem', - 'curle_ssl_cipher', - 'curle_ssl_cacert', - 'curle_bad_content_encoding', - 'curle_ldap_invalid_url', - 'curle_filesize_exceeded', - 'curle_ftp_ssl_failed', - 'curle_send_fail_rewind', - 'curle_ssl_engine_initfailed', - 'curle_login_denied', - 'curlmsg_done', - 'zip_open', - 'zip_name_locate', - 'zip_fopen', - 'zip_fopen_index', - 'zip_fread', - 'zip_fclose', - 'zip_close', - 'zip_stat', - 'zip_stat_index', - 'zip_get_archive_comment', - 'zip_get_file_comment', - 'zip_get_name', - 'zip_get_num_files', - 'zip_add', - 'zip_replace', - 'zip_add_dir', - 'zip_set_file_comment', - 'zip_rename', - 'zip_delete', - 'zip_unchange', - 'zip_unchange_all', - 'zip_unchange_archive', - 'zip_set_archive_comment', - 'zip_error_to_str', - 'zip_file_strerror', - 'zip_strerror', - 'zip_error_get', - 
'zip_file_error_get', - 'zip_error_get_sys_type', - 'zlib_version', - 'fastcgi_initiate_request', - 'debugging_enabled', - 'debugging_stop', - 'evdns_resolve_ipv4', - 'evdns_resolve_ipv6', - 'evdns_resolve_reverse', - 'evdns_resolve_reverse_ipv6', - 'stdout', - 'stdoutnl', - 'fail', - 'fail_if', - 'fail_ifnot', - 'error_code', - 'error_msg', - 'error_obj', - 'error_stack', - 'error_push', - 'error_pop', - 'error_reset', - 'error_msg_invalidparameter', - 'error_code_invalidparameter', - 'error_msg_networkerror', - 'error_code_networkerror', - 'error_msg_runtimeassertion', - 'error_code_runtimeassertion', - 'error_msg_methodnotfound', - 'error_code_methodnotfound', - 'error_msg_resnotfound', - 'error_code_resnotfound', - 'error_msg_filenotfound', - 'error_code_filenotfound', - 'error_msg_aborted', - 'error_code_aborted', - 'error_msg_dividebyzero', - 'error_code_dividebyzero', - 'error_msg_noerror', - 'error_code_noerror', - 'abort', - 'protect', - 'generateforeach', - 'method_name', - 'queriable_do', - 'queriable_sum', - 'queriable_average', - 'queriable_min', - 'queriable_max', - 'queriable_internal_combinebindings', - 'queriable_defaultcompare', - 'queriable_reversecompare', - 'queriable_qsort', - 'timer', - 'thread_var_push', - 'thread_var_pop', - 'thread_var_get', - 'loop_value', - 'loop_value_push', - 'loop_value_pop', - 'loop_key', - 'loop_key_push', - 'loop_key_pop', - 'loop_push', - 'loop_pop', - 'loop_count', - 'loop_continue', - 'loop_abort', - 'loop', - 'sys_while', - 'sys_iterate', - 'string_validcharset', - 'eol', - 'encoding_utf8', - 'encoding_iso88591', - 'integer_random', - 'integer_bitor', - 'millis', - 'micros', - 'max', - 'min', - 'range', - 'median', - 'decimal_random', - 'pi', - 'lcapi_datasourceinit', - 'lcapi_datasourceterm', - 'lcapi_datasourcenames', - 'lcapi_datasourcetablenames', - 'lcapi_datasourcesearch', - 'lcapi_datasourceadd', - 'lcapi_datasourceupdate', - 'lcapi_datasourcedelete', - 'lcapi_datasourceinfo', - 
'lcapi_datasourceexecsql', - 'lcapi_datasourcerandom', - 'lcapi_datasourceschemanames', - 'lcapi_datasourcecloseconnection', - 'lcapi_datasourcetickle', - 'lcapi_datasourceduplicate', - 'lcapi_datasourcescripts', - 'lcapi_datasourceimage', - 'lcapi_datasourcefindall', - 'lcapi_datasourcematchesname', - 'lcapi_datasourcepreparesql', - 'lcapi_datasourceunpreparesql', - 'lcapi_datasourcenothing', - 'lcapi_fourchartointeger', - 'lcapi_datasourcetypestring', - 'lcapi_datasourcetypeinteger', - 'lcapi_datasourcetypeboolean', - 'lcapi_datasourcetypeblob', - 'lcapi_datasourcetypedecimal', - 'lcapi_datasourcetypedate', - 'lcapi_datasourceprotectionnone', - 'lcapi_datasourceprotectionreadonly', - 'lcapi_datasourceopgt', - 'lcapi_datasourceopgteq', - 'lcapi_datasourceopeq', - 'lcapi_datasourceopneq', - 'lcapi_datasourceoplt', - 'lcapi_datasourceoplteq', - 'lcapi_datasourceopbw', - 'lcapi_datasourceopew', - 'lcapi_datasourceopct', - 'lcapi_datasourceopnct', - 'lcapi_datasourceopnbw', - 'lcapi_datasourceopnew', - 'lcapi_datasourceopand', - 'lcapi_datasourceopor', - 'lcapi_datasourceopnot', - 'lcapi_datasourceopno', - 'lcapi_datasourceopany', - 'lcapi_datasourceopin', - 'lcapi_datasourceopnin', - 'lcapi_datasourceopft', - 'lcapi_datasourceoprx', - 'lcapi_datasourceopnrx', - 'lcapi_datasourcesortascending', - 'lcapi_datasourcesortdescending', - 'lcapi_datasourcesortcustom', - 'lcapi_updatedatasourceslist', - 'lcapi_loadmodules', - 'lasso_version', - 'lasso_uniqueid', - 'usage', - 'file_defaultencoding', - 'file_copybuffersize', - 'file_modeline', - 'file_modechar', - 'file_forceroot', - 'file_tempfile', - 'file_stdin', - 'file_stdout', - 'file_stderr', - 'lasso_tagexists', - 'lasso_methodexists', - 'output', - 'if_empty', - 'if_null', - 'if_true', - 'if_false', - 'process', - 'treemap', - 'locale_format', - 'compress', - 'uncompress', - 'decompress', - 'tag_name', - 'series', - 'nslookup', - 'all', - 'bw', - 'cn', - 'eq', - 'ew', - 'ft', - 'gt', - 'gte', - 'lt', - 'lte', - 'neq', 
- 'nrx', - 'rx', - 'none', - 'minimal', - 'full', - 'output_none', - 'lasso_executiontimelimit', - 'namespace_global', - 'namespace_using', - 'namespace_import', - 'site_id', - 'site_name', - 'sys_homepath', - 'sys_masterhomepath', - 'sys_supportpath', - 'sys_librariespath', - 'sys_databasespath', - 'sys_usercapimodulepath', - 'sys_appspath', - 'sys_userstartuppath', - 'ldap_scope_base', - 'ldap_scope_onelevel', - 'ldap_scope_subtree', - 'mysqlds', - 'odbc', - 'sqliteconnector', - 'sqlite_createdb', - 'sqlite_setsleepmillis', - 'sqlite_setsleeptries', - 'java_jvm_getenv', - 'java_jvm_create', - 'java_jdbc_load', - 'database_database', - 'database_table_datasources', - 'database_table_datasource_hosts', - 'database_table_datasource_databases', - 'database_table_database_tables', - 'database_table_table_fields', - 'database_qs', - 'database_initialize', - 'database_util_cleanpath', - 'database_adddefaultsqlitehost', - 'sqlite_ok', - 'sqlite_error', - 'sqlite_internal', - 'sqlite_perm', - 'sqlite_abort', - 'sqlite_busy', - 'sqlite_locked', - 'sqlite_nomem', - 'sqlite_readonly', - 'sqlite_interrupt', - 'sqlite_ioerr', - 'sqlite_corrupt', - 'sqlite_notfound', - 'sqlite_full', - 'sqlite_cantopen', - 'sqlite_protocol', - 'sqlite_empty', - 'sqlite_schema', - 'sqlite_toobig', - 'sqlite_constraint', - 'sqlite_mismatch', - 'sqlite_misuse', - 'sqlite_nolfs', - 'sqlite_auth', - 'sqlite_format', - 'sqlite_range', - 'sqlite_notadb', - 'sqlite_row', - 'sqlite_done', - 'sqlite_integer', - 'sqlite_float', - 'sqlite_blob', - 'sqlite_null', - 'sqlite_text', - 'bom_utf16be', - 'bom_utf16le', - 'bom_utf32be', - 'bom_utf32le', - 'bom_utf8', - 'include_url', - 'ftp_getdata', - 'ftp_getfile', - 'ftp_getlisting', - 'ftp_putdata', - 'ftp_putfile', - 'ftp_deletefile', - 'debugging_step_in', - 'debugging_get_stack', - 'debugging_get_context', - 'debugging_detach', - 'debugging_step_over', - 'debugging_step_out', - 'debugging_run', - 'debugging_break', - 'debugging_breakpoint_set', - 
'debugging_breakpoint_get', - 'debugging_breakpoint_remove', - 'debugging_breakpoint_list', - 'debugging_breakpoint_update', - 'debugging_terminate', - 'debugging_context_locals', - 'debugging_context_vars', - 'debugging_context_self', - 'dbgp_stop_stack_name', - 'encrypt_md5', - 'inline_columninfo_pos', - 'inline_resultrows_pos', - 'inline_foundcount_pos', - 'inline_colinfo_name_pos', - 'inline_colinfo_valuelist_pos', - 'inline_scopeget', - 'inline_scopepush', - 'inline_scopepop', - 'inline_namedget', - 'inline_namedput', - 'inline', - 'resultset_count', - 'resultset', - 'resultsets', - 'rows', - 'rows_impl', - 'records', - 'column', - 'field', - 'column_names', - 'field_names', - 'column_name', - 'field_name', - 'found_count', - 'shown_count', - 'shown_first', - 'shown_last', - 'action_statement', - 'lasso_currentaction', - 'maxrecords_value', - 'skiprecords_value', - 'action_param', - 'action_params', - 'admin_authorization', - 'admin_currentgroups', - 'admin_currentuserid', - 'admin_currentusername', - 'database_name', - 'table_name', - 'layout_name', - 'schema_name', - 'keycolumn_name', - 'keyfield_name', - 'keycolumn_value', - 'keyfield_value', - 'inline_colinfo_type_pos', - 'column_type', - 'rows_array', - 'records_array', - 'records_map', - 'json_serialize', - 'json_consume_string', - 'json_consume_token', - 'json_consume_array', - 'json_consume_object', - 'json_deserialize', - 'json_rpccall', - 'ljapi_initialize', - 'locale_format_style_full', - 'locale_format_style_long', - 'locale_format_style_medium', - 'locale_format_style_short', - 'locale_format_style_default', - 'locale_format_style_none', - 'locale_format_style_date_time', - 'net_connectinprogress', - 'net_connectok', - 'net_typessl', - 'net_typessltcp', - 'net_typessludp', - 'net_typetcp', - 'net_typeudp', - 'net_waitread', - 'net_waittimeout', - 'net_waitwrite', - 'admin_initialize', - 'admin_getpref', - 'admin_setpref', - 'admin_removepref', - 'admin_userexists', - 'admin_lassoservicepath', - 
'pdf_package', - 'pdf_rectangle', - 'pdf_serve', - 'random_seed', - 'xml', - 'xml_transform', - 'zip_create', - 'zip_excl', - 'zip_checkcons', - 'zip_fl_nocase', - 'zip_fl_nodir', - 'zip_fl_compressed', - 'zip_fl_unchanged', - 'zip_er_ok', - 'zip_er_multidisk', - 'zip_er_rename', - 'zip_er_close', - 'zip_er_seek', - 'zip_er_read', - 'zip_er_write', - 'zip_er_crc', - 'zip_er_zipclosed', - 'zip_er_noent', - 'zip_er_exists', - 'zip_er_open', - 'zip_er_tmpopen', - 'zip_er_zlib', - 'zip_er_memory', - 'zip_er_changed', - 'zip_er_compnotsupp', - 'zip_er_eof', - 'zip_er_inval', - 'zip_er_nozip', - 'zip_er_internal', - 'zip_er_incons', - 'zip_er_remove', - 'zip_er_deleted', - 'zip_et_none', - 'zip_et_sys', - 'zip_et_zlib', - 'zip_cm_default', - 'zip_cm_store', - 'zip_cm_shrink', - 'zip_cm_reduce_1', - 'zip_cm_reduce_2', - 'zip_cm_reduce_3', - 'zip_cm_reduce_4', - 'zip_cm_implode', - 'zip_cm_deflate', - 'zip_cm_deflate64', - 'zip_cm_pkware_implode', - 'zip_cm_bzip2', - 'zip_em_none', - 'zip_em_trad_pkware', - 'zip_em_des', - 'zip_em_rc2_old', - 'zip_em_3des_168', - 'zip_em_3des_112', - 'zip_em_aes_128', - 'zip_em_aes_192', - 'zip_em_aes_256', - 'zip_em_rc2', - 'zip_em_rc4', - 'zip_em_unknown', - 'dns_lookup', - 'dns_default', - 'string_charfromname', - 'string_concatenate', - 'string_endswith', - 'string_extract', - 'string_findposition', - 'string_findregexp', - 'string_getunicodeversion', - 'string_insert', - 'string_isalpha', - 'string_isalphanumeric', - 'string_isdigit', - 'string_ishexdigit', - 'string_islower', - 'string_isnumeric', - 'string_ispunctuation', - 'string_isspace', - 'string_isupper', - 'string_length', - 'string_remove', - 'string_removeleading', - 'string_removetrailing', - 'string_replace', - 'string_replaceregexp', - 'string_todecimal', - 'string_tointeger', - 'string_uppercase', - 'string_lowercase', - 'document', - 'email_attachment_mime_type', - 'email_translatebreakstocrlf', - 'email_findemails', - 'email_fix_address', - 'email_fix_address_list', - 
'encode_qheader', - 'email_send', - 'email_queue', - 'email_immediate', - 'email_result', - 'email_status', - 'email_token', - 'email_merge', - 'email_batch', - 'email_safeemail', - 'email_extract', - 'email_pop_priv_substring', - 'email_pop_priv_extract', - 'email_digestchallenge', - 'email_pop_priv_quote', - 'email_digestresponse', - 'encrypt_hmac', - 'encrypt_crammd5', - 'email_fs_error_clean', - 'email_initialize', - 'email_mxlookup', - 'lasso_errorreporting', - 'fcgi_version_1', - 'fcgi_null_request_id', - 'fcgi_begin_request', - 'fcgi_abort_request', - 'fcgi_end_request', - 'fcgi_params', - 'fcgi_stdin', - 'fcgi_stdout', - 'fcgi_stderr', - 'fcgi_data', - 'fcgi_get_values', - 'fcgi_get_values_result', - 'fcgi_unknown_type', - 'fcgi_keep_conn', - 'fcgi_responder', - 'fcgi_authorize', - 'fcgi_filter', - 'fcgi_request_complete', - 'fcgi_cant_mpx_conn', - 'fcgi_overloaded', - 'fcgi_unknown_role', - 'fcgi_max_conns', - 'fcgi_max_reqs', - 'fcgi_mpxs_conns', - 'fcgi_read_timeout_seconds', - 'fcgi_makeendrequestbody', - 'fcgi_bodychunksize', - 'fcgi_makestdoutbody', - 'fcgi_readparam', - 'web_request', - 'include_cache_compare', - 'fastcgi_initialize', - 'fastcgi_handlecon', - 'fastcgi_handlereq', - 'fastcgi_createfcgirequest', - 'web_handlefcgirequest', - 'filemakerds_initialize', - 'filemakerds', - 'value_listitem', - 'valuelistitem', - 'selected', - 'checked', - 'value_list', - 'http_char_space', - 'http_char_htab', - 'http_char_cr', - 'http_char_lf', - 'http_char_question', - 'http_char_colon', - 'http_read_timeout_secs', - 'http_default_files', - 'http_server_apps_path', - 'jdbc_initialize', - 'lassoapp_settingsdb', - 'lassoapp_format_mod_date', - 'lassoapp_include_current', - 'lassoapp_include', - 'lassoapp_find_missing_file', - 'lassoapp_get_capabilities_name', - 'lassoapp_exists', - 'lassoapp_path_to_method_name', - 'lassoapp_invoke_resource', - 'lassoapp_initialize_db', - 'lassoapp_initialize', - 'lassoapp_issourcefileextension', - 'lassoapp_current_include', 
- 'lassoapp_current_app', - 'lassoapp_do_with_include', - 'lassoapp_link', - 'lassoapp_load_module', - 'lassoapp_mime_type_html', - 'lassoapp_mime_type_lasso', - 'lassoapp_mime_type_xml', - 'lassoapp_mime_type_ppt', - 'lassoapp_mime_type_js', - 'lassoapp_mime_type_txt', - 'lassoapp_mime_type_jpg', - 'lassoapp_mime_type_png', - 'lassoapp_mime_type_gif', - 'lassoapp_mime_type_css', - 'lassoapp_mime_type_csv', - 'lassoapp_mime_type_tif', - 'lassoapp_mime_type_ico', - 'lassoapp_mime_type_rss', - 'lassoapp_mime_type_xhr', - 'lassoapp_mime_type_pdf', - 'lassoapp_mime_type_docx', - 'lassoapp_mime_type_doc', - 'lassoapp_mime_type_zip', - 'lassoapp_mime_type_svg', - 'lassoapp_mime_type_ttf', - 'lassoapp_mime_type_woff', - 'lassoapp_mime_type_swf', - 'lassoapp_mime_get', - 'log_level_critical', - 'log_level_warning', - 'log_level_detail', - 'log_level_sql', - 'log_level_deprecated', - 'log_destination_console', - 'log_destination_file', - 'log_destination_database', - 'log', - 'log_setdestination', - 'log_always', - 'log_critical', - 'log_warning', - 'log_detail', - 'log_sql', - 'log_deprecated', - 'log_max_file_size', - 'log_trim_file_size', - 'log_initialize', - 'portal', - 'security_database', - 'security_table_groups', - 'security_table_users', - 'security_table_ug_map', - 'security_default_realm', - 'security_initialize', - 'session_initialize', - 'session_getdefaultdriver', - 'session_setdefaultdriver', - 'session_start', - 'session_addvar', - 'session_removevar', - 'session_end', - 'session_id', - 'session_abort', - 'session_result', - 'session_deleteexpired', - 'odbc_session_driver_mssql', - 'session_decorate', - 'auth_admin', - 'auth_check', - 'auth_custom', - 'auth_group', - 'auth_prompt', - 'auth_user', - 'client_addr', - 'client_authorization', - 'client_browser', - 'client_contentlength', - 'client_contenttype', - 'client_cookielist', - 'client_cookies', - 'client_encoding', - 'client_formmethod', - 'client_getargs', - 'client_getparams', - 'client_getparam', - 
'client_headers', - 'client_integertoip', - 'client_iptointeger', - 'client_password', - 'client_postargs', - 'client_postparams', - 'client_postparam', - 'client_type', - 'client_username', - 'client_url', - 'referer_url', - 'referrer_url', - 'content_type', - 'content_encoding', - 'cookie', - 'cookie_set', - 'include', - 'include_currentpath', - 'include_filepath', - 'include_localpath', - 'include_once', - 'include_path', - 'include_raw', - 'includes', - 'library', - 'library_once', - 'response_filepath', - 'response_localpath', - 'response_path', - 'response_realm', - 'response_root', - 'redirect_url', - 'server_admin', - 'server_name', - 'server_ip', - 'server_port', - 'server_protocol', - 'server_signature', - 'server_software', - 'server_push', - 'token_value', - 'wap_isenabled', - 'wap_maxbuttons', - 'wap_maxhorzpixels', - 'wap_maxvertpixels', - 'wap_maxcolumns', - 'wap_maxrows', - 'define_atbegin', - 'define_atend', - 'content_header', - 'content_addheader', - 'content_replaceheader', - 'content_body', - 'html_comment', - 'web_node_forpath', - 'web_nodes_requesthandler', - 'web_nodes_normalizeextension', - 'web_nodes_processcontentnode', - 'web_nodes_initialize', - 'web_node_content_representation_xhr', - 'web_node_content_representation_html', - 'web_node_content_representation_css', - 'web_node_content_representation_js', - 'web_response_nodesentry', - 'web_response', - 'web_router_database', - 'web_router_initialize' - ), - 'Lasso 8 Tags': ( - '__char', - '__sync_timestamp__', - '_admin_addgroup', - '_admin_adduser', - '_admin_defaultconnector', - '_admin_defaultconnectornames', - '_admin_defaultdatabase', - '_admin_defaultfield', - '_admin_defaultgroup', - '_admin_defaulthost', - '_admin_defaulttable', - '_admin_defaultuser', - '_admin_deleteconnector', - '_admin_deletedatabase', - '_admin_deletefield', - '_admin_deletegroup', - '_admin_deletehost', - '_admin_deletetable', - '_admin_deleteuser', - '_admin_duplicategroup', - '_admin_internaldatabase', - 
'_admin_listconnectors', - '_admin_listdatabases', - '_admin_listfields', - '_admin_listgroups', - '_admin_listhosts', - '_admin_listtables', - '_admin_listusers', - '_admin_refreshconnector', - '_admin_refreshsecurity', - '_admin_servicepath', - '_admin_updateconnector', - '_admin_updatedatabase', - '_admin_updatefield', - '_admin_updategroup', - '_admin_updatehost', - '_admin_updatetable', - '_admin_updateuser', - '_chartfx_activation_string', - '_chartfx_getchallengestring', - '_chop_args', - '_chop_mimes', - '_client_addr_old', - '_client_address_old', - '_client_ip_old', - '_database_names', - '_datasource_reload', - '_date_current', - '_date_format', - '_date_msec', - '_date_parse', - '_execution_timelimit', - '_file_chmod', - '_initialize', - '_jdbc_acceptsurl', - '_jdbc_debug', - '_jdbc_deletehost', - '_jdbc_driverclasses', - '_jdbc_driverinfo', - '_jdbc_metainfo', - '_jdbc_propertyinfo', - '_jdbc_setdriver', - '_lasso_param', - '_log_helper', - '_proc_noparam', - '_proc_withparam', - '_recursion_limit', - '_request_param', - '_security_binaryexpiration', - '_security_flushcaches', - '_security_isserialized', - '_security_serialexpiration', - '_srand', - '_strict_literals', - '_substring', - '_xmlrpc_exconverter', - '_xmlrpc_inconverter', - '_xmlrpc_xmlinconverter', - 'abort', - 'action_addinfo', - 'action_addrecord', - 'action_param', - 'action_params', - 'action_setfoundcount', - 'action_setrecordid', - 'action_settotalcount', - 'action_statement', - 'admin_allowedfileroots', - 'admin_changeuser', - 'admin_createuser', - 'admin_currentgroups', - 'admin_currentuserid', - 'admin_currentusername', - 'admin_getpref', - 'admin_groupassignuser', - 'admin_grouplistusers', - 'admin_groupremoveuser', - 'admin_lassoservicepath', - 'admin_listgroups', - 'admin_refreshlicensing', - 'admin_refreshsecurity', - 'admin_reloaddatasource', - 'admin_removepref', - 'admin_setpref', - 'admin_userexists', - 'admin_userlistgroups', - 'all', - 'and', - 'array', - 
'array_iterator', - 'auth', - 'auth_admin', - 'auth_auth', - 'auth_custom', - 'auth_group', - 'auth_prompt', - 'auth_user', - 'base64', - 'bean', - 'bigint', - 'bom_utf16be', - 'bom_utf16le', - 'bom_utf32be', - 'bom_utf32le', - 'bom_utf8', - 'boolean', - 'bw', - 'bytes', - 'cache', - 'cache_delete', - 'cache_empty', - 'cache_exists', - 'cache_fetch', - 'cache_internal', - 'cache_maintenance', - 'cache_object', - 'cache_preferences', - 'cache_store', - 'case', - 'chartfx', - 'chartfx_records', - 'chartfx_serve', - 'checked', - 'choice_list', - 'choice_listitem', - 'choicelistitem', - 'cipher_decrypt', - 'cipher_digest', - 'cipher_encrypt', - 'cipher_hmac', - 'cipher_keylength', - 'cipher_list', - 'click_text', - 'client_addr', - 'client_address', - 'client_authorization', - 'client_browser', - 'client_contentlength', - 'client_contenttype', - 'client_cookielist', - 'client_cookies', - 'client_encoding', - 'client_formmethod', - 'client_getargs', - 'client_getparams', - 'client_headers', - 'client_ip', - 'client_ipfrominteger', - 'client_iptointeger', - 'client_password', - 'client_postargs', - 'client_postparams', - 'client_type', - 'client_url', - 'client_username', - 'cn', - 'column', - 'column_name', - 'column_names', - 'compare_beginswith', - 'compare_contains', - 'compare_endswith', - 'compare_equalto', - 'compare_greaterthan', - 'compare_greaterthanorequals', - 'compare_greaterthanorequls', - 'compare_lessthan', - 'compare_lessthanorequals', - 'compare_notbeginswith', - 'compare_notcontains', - 'compare_notendswith', - 'compare_notequalto', - 'compare_notregexp', - 'compare_regexp', - 'compare_strictequalto', - 'compare_strictnotequalto', - 'compiler_removecacheddoc', - 'compiler_setdefaultparserflags', - 'compress', - 'content_body', - 'content_encoding', - 'content_header', - 'content_type', - 'cookie', - 'cookie_set', - 'curl_ftp_getfile', - 'curl_ftp_getlisting', - 'curl_ftp_putfile', - 'curl_include_url', - 'currency', - 'database_changecolumn', - 
'database_changefield', - 'database_createcolumn', - 'database_createfield', - 'database_createtable', - 'database_fmcontainer', - 'database_hostinfo', - 'database_inline', - 'database_name', - 'database_nameitem', - 'database_names', - 'database_realname', - 'database_removecolumn', - 'database_removefield', - 'database_removetable', - 'database_repeating', - 'database_repeating_valueitem', - 'database_repeatingvalueitem', - 'database_schemanameitem', - 'database_schemanames', - 'database_tablecolumn', - 'database_tablenameitem', - 'database_tablenames', - 'datasource_name', - 'datasource_register', - 'date', - 'date__date_current', - 'date__date_format', - 'date__date_msec', - 'date__date_parse', - 'date_add', - 'date_date', - 'date_difference', - 'date_duration', - 'date_format', - 'date_getcurrentdate', - 'date_getday', - 'date_getdayofweek', - 'date_gethour', - 'date_getlocaltimezone', - 'date_getminute', - 'date_getmonth', - 'date_getsecond', - 'date_gettime', - 'date_getyear', - 'date_gmttolocal', - 'date_localtogmt', - 'date_maximum', - 'date_minimum', - 'date_msec', - 'date_setformat', - 'date_subtract', - 'db_layoutnameitem', - 'db_layoutnames', - 'db_nameitem', - 'db_names', - 'db_tablenameitem', - 'db_tablenames', - 'dbi_column_names', - 'dbi_field_names', - 'decimal', - 'decimal_setglobaldefaultprecision', - 'decode_base64', - 'decode_bheader', - 'decode_hex', - 'decode_html', - 'decode_json', - 'decode_qheader', - 'decode_quotedprintable', - 'decode_quotedprintablebytes', - 'decode_url', - 'decode_xml', - 'decompress', - 'decrypt_blowfish', - 'decrypt_blowfish2', - 'default', - 'define_atbegin', - 'define_atend', - 'define_constant', - 'define_prototype', - 'define_tag', - 'define_tagp', - 'define_type', - 'define_typep', - 'deserialize', - 'directory_directorynameitem', - 'directory_lister', - 'directory_nameitem', - 'directorynameitem', - 'dns_default', - 'dns_lookup', - 'dns_response', - 'duration', - 'else', - 'email_batch', - 'email_compose', - 
'email_digestchallenge', - 'email_digestresponse', - 'email_extract', - 'email_findemails', - 'email_immediate', - 'email_merge', - 'email_mxerror', - 'email_mxlookup', - 'email_parse', - 'email_pop', - 'email_queue', - 'email_result', - 'email_safeemail', - 'email_send', - 'email_smtp', - 'email_status', - 'email_token', - 'email_translatebreakstocrlf', - 'encode_base64', - 'encode_bheader', - 'encode_break', - 'encode_breaks', - 'encode_crc32', - 'encode_hex', - 'encode_html', - 'encode_htmltoxml', - 'encode_json', - 'encode_qheader', - 'encode_quotedprintable', - 'encode_quotedprintablebytes', - 'encode_set', - 'encode_smart', - 'encode_sql', - 'encode_sql92', - 'encode_stricturl', - 'encode_url', - 'encode_xml', - 'encrypt_blowfish', - 'encrypt_blowfish2', - 'encrypt_crammd5', - 'encrypt_hmac', - 'encrypt_md5', - 'eq', - 'error_adderror', - 'error_code', - 'error_code_aborted', - 'error_code_assert', - 'error_code_bof', - 'error_code_connectioninvalid', - 'error_code_couldnotclosefile', - 'error_code_couldnotcreateoropenfile', - 'error_code_couldnotdeletefile', - 'error_code_couldnotdisposememory', - 'error_code_couldnotlockmemory', - 'error_code_couldnotreadfromfile', - 'error_code_couldnotunlockmemory', - 'error_code_couldnotwritetofile', - 'error_code_criterianotmet', - 'error_code_datasourceerror', - 'error_code_directoryfull', - 'error_code_diskfull', - 'error_code_dividebyzero', - 'error_code_eof', - 'error_code_failure', - 'error_code_fieldrestriction', - 'error_code_file', - 'error_code_filealreadyexists', - 'error_code_filecorrupt', - 'error_code_fileinvalid', - 'error_code_fileinvalidaccessmode', - 'error_code_fileisclosed', - 'error_code_fileisopen', - 'error_code_filelocked', - 'error_code_filenotfound', - 'error_code_fileunlocked', - 'error_code_httpfilenotfound', - 'error_code_illegalinstruction', - 'error_code_illegaluseoffrozeninstance', - 'error_code_invaliddatabase', - 'error_code_invalidfilename', - 'error_code_invalidmemoryobject', - 
'error_code_invalidparameter', - 'error_code_invalidpassword', - 'error_code_invalidpathname', - 'error_code_invalidusername', - 'error_code_ioerror', - 'error_code_loopaborted', - 'error_code_memory', - 'error_code_network', - 'error_code_nilpointer', - 'error_code_noerr', - 'error_code_nopermission', - 'error_code_outofmemory', - 'error_code_outofstackspace', - 'error_code_overflow', - 'error_code_postconditionfailed', - 'error_code_preconditionfailed', - 'error_code_resnotfound', - 'error_code_resource', - 'error_code_streamreaderror', - 'error_code_streamwriteerror', - 'error_code_syntaxerror', - 'error_code_tagnotfound', - 'error_code_unknownerror', - 'error_code_varnotfound', - 'error_code_volumedoesnotexist', - 'error_code_webactionnotsupported', - 'error_code_webadderror', - 'error_code_webdeleteerror', - 'error_code_webmodulenotfound', - 'error_code_webnosuchobject', - 'error_code_webrepeatingrelatedfield', - 'error_code_webrequiredfieldmissing', - 'error_code_webtimeout', - 'error_code_webupdateerror', - 'error_columnrestriction', - 'error_currenterror', - 'error_databaseconnectionunavailable', - 'error_databasetimeout', - 'error_deleteerror', - 'error_fieldrestriction', - 'error_filenotfound', - 'error_invaliddatabase', - 'error_invalidpassword', - 'error_invalidusername', - 'error_modulenotfound', - 'error_msg', - 'error_msg_aborted', - 'error_msg_assert', - 'error_msg_bof', - 'error_msg_connectioninvalid', - 'error_msg_couldnotclosefile', - 'error_msg_couldnotcreateoropenfile', - 'error_msg_couldnotdeletefile', - 'error_msg_couldnotdisposememory', - 'error_msg_couldnotlockmemory', - 'error_msg_couldnotreadfromfile', - 'error_msg_couldnotunlockmemory', - 'error_msg_couldnotwritetofile', - 'error_msg_criterianotmet', - 'error_msg_datasourceerror', - 'error_msg_directoryfull', - 'error_msg_diskfull', - 'error_msg_dividebyzero', - 'error_msg_eof', - 'error_msg_failure', - 'error_msg_fieldrestriction', - 'error_msg_file', - 'error_msg_filealreadyexists', - 
'error_msg_filecorrupt', - 'error_msg_fileinvalid', - 'error_msg_fileinvalidaccessmode', - 'error_msg_fileisclosed', - 'error_msg_fileisopen', - 'error_msg_filelocked', - 'error_msg_filenotfound', - 'error_msg_fileunlocked', - 'error_msg_httpfilenotfound', - 'error_msg_illegalinstruction', - 'error_msg_illegaluseoffrozeninstance', - 'error_msg_invaliddatabase', - 'error_msg_invalidfilename', - 'error_msg_invalidmemoryobject', - 'error_msg_invalidparameter', - 'error_msg_invalidpassword', - 'error_msg_invalidpathname', - 'error_msg_invalidusername', - 'error_msg_ioerror', - 'error_msg_loopaborted', - 'error_msg_memory', - 'error_msg_network', - 'error_msg_nilpointer', - 'error_msg_noerr', - 'error_msg_nopermission', - 'error_msg_outofmemory', - 'error_msg_outofstackspace', - 'error_msg_overflow', - 'error_msg_postconditionfailed', - 'error_msg_preconditionfailed', - 'error_msg_resnotfound', - 'error_msg_resource', - 'error_msg_streamreaderror', - 'error_msg_streamwriteerror', - 'error_msg_syntaxerror', - 'error_msg_tagnotfound', - 'error_msg_unknownerror', - 'error_msg_varnotfound', - 'error_msg_volumedoesnotexist', - 'error_msg_webactionnotsupported', - 'error_msg_webadderror', - 'error_msg_webdeleteerror', - 'error_msg_webmodulenotfound', - 'error_msg_webnosuchobject', - 'error_msg_webrepeatingrelatedfield', - 'error_msg_webrequiredfieldmissing', - 'error_msg_webtimeout', - 'error_msg_webupdateerror', - 'error_noerror', - 'error_nopermission', - 'error_norecordsfound', - 'error_outofmemory', - 'error_pop', - 'error_push', - 'error_reqcolumnmissing', - 'error_reqfieldmissing', - 'error_requiredcolumnmissing', - 'error_requiredfieldmissing', - 'error_reset', - 'error_seterrorcode', - 'error_seterrormessage', - 'error_updateerror', - 'euro', - 'event_schedule', - 'ew', - 'fail', - 'fail_if', - 'false', - 'field', - 'field_name', - 'field_names', - 'file', - 'file_autoresolvefullpaths', - 'file_chmod', - 'file_control', - 'file_copy', - 'file_create', - 
'file_creationdate', - 'file_currenterror', - 'file_delete', - 'file_exists', - 'file_getlinecount', - 'file_getsize', - 'file_isdirectory', - 'file_listdirectory', - 'file_moddate', - 'file_modechar', - 'file_modeline', - 'file_move', - 'file_openread', - 'file_openreadwrite', - 'file_openwrite', - 'file_openwriteappend', - 'file_openwritetruncate', - 'file_probeeol', - 'file_processuploads', - 'file_read', - 'file_readline', - 'file_rename', - 'file_serve', - 'file_setsize', - 'file_stream', - 'file_streamcopy', - 'file_uploads', - 'file_waitread', - 'file_waittimeout', - 'file_waitwrite', - 'file_write', - 'find_soap_ops', - 'form_param', - 'found_count', - 'ft', - 'ftp_getfile', - 'ftp_getlisting', - 'ftp_putfile', - 'full', - 'global', - 'global_defined', - 'global_remove', - 'global_reset', - 'globals', - 'gt', - 'gte', - 'handle', - 'handle_error', - 'header', - 'html_comment', - 'http_getfile', - 'ical_alarm', - 'ical_attribute', - 'ical_calendar', - 'ical_daylight', - 'ical_event', - 'ical_freebusy', - 'ical_item', - 'ical_journal', - 'ical_parse', - 'ical_standard', - 'ical_timezone', - 'ical_todo', - 'if', - 'if_empty', - 'if_false', - 'if_null', - 'if_true', - 'image', - 'image_url', - 'img', - 'include', - 'include_cgi', - 'include_currentpath', - 'include_once', - 'include_raw', - 'include_url', - 'inline', - 'integer', - 'iterate', - 'iterator', - 'java', - 'java_bean', - 'json_records', - 'json_rpccall', - 'keycolumn_name', - 'keycolumn_value', - 'keyfield_name', - 'keyfield_value', - 'lasso_comment', - 'lasso_currentaction', - 'lasso_datasourceis', - 'lasso_datasourceis4d', - 'lasso_datasourceisfilemaker', - 'lasso_datasourceisfilemaker7', - 'lasso_datasourceisfilemaker9', - 'lasso_datasourceisfilemakersa', - 'lasso_datasourceisjdbc', - 'lasso_datasourceislassomysql', - 'lasso_datasourceismysql', - 'lasso_datasourceisodbc', - 'lasso_datasourceisopenbase', - 'lasso_datasourceisoracle', - 'lasso_datasourceispostgresql', - 
'lasso_datasourceisspotlight', - 'lasso_datasourceissqlite', - 'lasso_datasourceissqlserver', - 'lasso_datasourcemodulename', - 'lasso_datatype', - 'lasso_disableondemand', - 'lasso_errorreporting', - 'lasso_executiontimelimit', - 'lasso_parser', - 'lasso_process', - 'lasso_sessionid', - 'lasso_siteid', - 'lasso_siteisrunning', - 'lasso_sitename', - 'lasso_siterestart', - 'lasso_sitestart', - 'lasso_sitestop', - 'lasso_tagexists', - 'lasso_tagmodulename', - 'lasso_uniqueid', - 'lasso_updatecheck', - 'lasso_uptime', - 'lasso_version', - 'lassoapp_create', - 'lassoapp_dump', - 'lassoapp_flattendir', - 'lassoapp_getappdata', - 'lassoapp_link', - 'lassoapp_list', - 'lassoapp_process', - 'lassoapp_unitize', - 'layout_name', - 'ldap', - 'ldap_scope_base', - 'ldap_scope_onelevel', - 'ldap_scope_subtree', - 'ldml', - 'ldml_ldml', - 'library', - 'library_once', - 'link', - 'link_currentaction', - 'link_currentactionparams', - 'link_currentactionurl', - 'link_currentgroup', - 'link_currentgroupparams', - 'link_currentgroupurl', - 'link_currentrecord', - 'link_currentrecordparams', - 'link_currentrecordurl', - 'link_currentsearch', - 'link_currentsearchparams', - 'link_currentsearchurl', - 'link_detail', - 'link_detailparams', - 'link_detailurl', - 'link_firstgroup', - 'link_firstgroupparams', - 'link_firstgroupurl', - 'link_firstrecord', - 'link_firstrecordparams', - 'link_firstrecordurl', - 'link_lastgroup', - 'link_lastgroupparams', - 'link_lastgroupurl', - 'link_lastrecord', - 'link_lastrecordparams', - 'link_lastrecordurl', - 'link_nextgroup', - 'link_nextgroupparams', - 'link_nextgroupurl', - 'link_nextrecord', - 'link_nextrecordparams', - 'link_nextrecordurl', - 'link_params', - 'link_prevgroup', - 'link_prevgroupparams', - 'link_prevgroupurl', - 'link_prevrecord', - 'link_prevrecordparams', - 'link_prevrecordurl', - 'link_setformat', - 'link_url', - 'list', - 'list_additem', - 'list_fromlist', - 'list_fromstring', - 'list_getitem', - 'list_itemcount', - 
'list_iterator', - 'list_removeitem', - 'list_replaceitem', - 'list_reverseiterator', - 'list_tostring', - 'literal', - 'ljax_end', - 'ljax_hastarget', - 'ljax_include', - 'ljax_start', - 'ljax_target', - 'local', - 'local_defined', - 'local_remove', - 'local_reset', - 'locale_format', - 'locals', - 'log', - 'log_always', - 'log_critical', - 'log_deprecated', - 'log_destination_console', - 'log_destination_database', - 'log_destination_file', - 'log_detail', - 'log_level_critical', - 'log_level_deprecated', - 'log_level_detail', - 'log_level_sql', - 'log_level_warning', - 'log_setdestination', - 'log_sql', - 'log_warning', - 'logicalop_value', - 'logicaloperator_value', - 'loop', - 'loop_abort', - 'loop_continue', - 'loop_count', - 'lt', - 'lte', - 'magick_image', - 'map', - 'map_iterator', - 'match_comparator', - 'match_notrange', - 'match_notregexp', - 'match_range', - 'match_regexp', - 'math_abs', - 'math_acos', - 'math_add', - 'math_asin', - 'math_atan', - 'math_atan2', - 'math_ceil', - 'math_converteuro', - 'math_cos', - 'math_div', - 'math_exp', - 'math_floor', - 'math_internal_rand', - 'math_internal_randmax', - 'math_internal_srand', - 'math_ln', - 'math_log', - 'math_log10', - 'math_max', - 'math_min', - 'math_mod', - 'math_mult', - 'math_pow', - 'math_random', - 'math_range', - 'math_rint', - 'math_roman', - 'math_round', - 'math_sin', - 'math_sqrt', - 'math_sub', - 'math_tan', - 'maxrecords_value', - 'memory_session_driver', - 'mime_type', - 'minimal', - 'misc__srand', - 'misc_randomnumber', - 'misc_roman', - 'misc_valid_creditcard', - 'mysql_session_driver', - 'named_param', - 'namespace_current', - 'namespace_delimiter', - 'namespace_exists', - 'namespace_file_fullpathexists', - 'namespace_global', - 'namespace_import', - 'namespace_load', - 'namespace_page', - 'namespace_unload', - 'namespace_using', - 'neq', - 'net', - 'net_connectinprogress', - 'net_connectok', - 'net_typessl', - 'net_typessltcp', - 'net_typessludp', - 'net_typetcp', - 
'net_typeudp', - 'net_waitread', - 'net_waittimeout', - 'net_waitwrite', - 'no_default_output', - 'none', - 'noprocess', - 'not', - 'nrx', - 'nslookup', - 'null', - 'object', - 'once', - 'oneoff', - 'op_logicalvalue', - 'operator_logicalvalue', - 'option', - 'or', - 'os_process', - 'output', - 'output_none', - 'pair', - 'params_up', - 'pdf_barcode', - 'pdf_color', - 'pdf_doc', - 'pdf_font', - 'pdf_image', - 'pdf_list', - 'pdf_read', - 'pdf_serve', - 'pdf_table', - 'pdf_text', - 'percent', - 'portal', - 'postcondition', - 'precondition', - 'prettyprintingnsmap', - 'prettyprintingtypemap', - 'priorityqueue', - 'private', - 'proc_convert', - 'proc_convertbody', - 'proc_convertone', - 'proc_extract', - 'proc_extractone', - 'proc_find', - 'proc_first', - 'proc_foreach', - 'proc_get', - 'proc_join', - 'proc_lasso', - 'proc_last', - 'proc_map_entry', - 'proc_null', - 'proc_regexp', - 'proc_xml', - 'proc_xslt', - 'process', - 'protect', - 'queue', - 'rand', - 'randomnumber', - 'raw', - 'recid_value', - 'record_count', - 'recordcount', - 'recordid_value', - 'records', - 'records_array', - 'records_map', - 'redirect_url', - 'reference', - 'referer', - 'referer_url', - 'referrer', - 'referrer_url', - 'regexp', - 'repeating', - 'repeating_valueitem', - 'repeatingvalueitem', - 'repetition', - 'req_column', - 'req_field', - 'required_column', - 'required_field', - 'response_fileexists', - 'response_filepath', - 'response_localpath', - 'response_path', - 'response_realm', - 'resultset', - 'resultset_count', - 'return', - 'return_value', - 'reverseiterator', - 'roman', - 'row_count', - 'rows', - 'rows_array', - 'run_children', - 'rx', - 'schema_name', - 'scientific', - 'search_args', - 'search_arguments', - 'search_columnitem', - 'search_fielditem', - 'search_operatoritem', - 'search_opitem', - 'search_valueitem', - 'searchfielditem', - 'searchoperatoritem', - 'searchopitem', - 'searchvalueitem', - 'select', - 'selected', - 'self', - 'serialize', - 'series', - 'server_date', - 
'server_day', - 'server_ip', - 'server_name', - 'server_port', - 'server_push', - 'server_siteisrunning', - 'server_sitestart', - 'server_sitestop', - 'server_time', - 'session_abort', - 'session_addoutputfilter', - 'session_addvar', - 'session_addvariable', - 'session_deleteexpired', - 'session_driver', - 'session_end', - 'session_id', - 'session_removevar', - 'session_removevariable', - 'session_result', - 'session_setdriver', - 'session_start', - 'set', - 'set_iterator', - 'set_reverseiterator', - 'shown_count', - 'shown_first', - 'shown_last', - 'site_atbegin', - 'site_id', - 'site_name', - 'site_restart', - 'skiprecords_value', - 'sleep', - 'soap_convertpartstopairs', - 'soap_definetag', - 'soap_info', - 'soap_lastrequest', - 'soap_lastresponse', - 'soap_stub', - 'sort_args', - 'sort_arguments', - 'sort_columnitem', - 'sort_fielditem', - 'sort_orderitem', - 'sortcolumnitem', - 'sortfielditem', - 'sortorderitem', - 'sqlite_createdb', - 'sqlite_session_driver', - 'sqlite_setsleepmillis', - 'sqlite_setsleeptries', - 'srand', - 'stack', - 'stock_quote', - 'string', - 'string_charfromname', - 'string_concatenate', - 'string_countfields', - 'string_endswith', - 'string_extract', - 'string_findposition', - 'string_findregexp', - 'string_fordigit', - 'string_getfield', - 'string_getunicodeversion', - 'string_insert', - 'string_isalpha', - 'string_isalphanumeric', - 'string_isdigit', - 'string_ishexdigit', - 'string_islower', - 'string_isnumeric', - 'string_ispunctuation', - 'string_isspace', - 'string_isupper', - 'string_length', - 'string_lowercase', - 'string_remove', - 'string_removeleading', - 'string_removetrailing', - 'string_replace', - 'string_replaceregexp', - 'string_todecimal', - 'string_tointeger', - 'string_uppercase', - 'string_validcharset', - 'table_name', - 'table_realname', - 'tag', - 'tag_name', - 'tags', - 'tags_find', - 'tags_list', - 'tcp_close', - 'tcp_open', - 'tcp_send', - 'tcp_tcp_close', - 'tcp_tcp_open', - 'tcp_tcp_send', - 'thread_abort', 
- 'thread_atomic', - 'thread_event', - 'thread_exists', - 'thread_getcurrentid', - 'thread_getpriority', - 'thread_info', - 'thread_list', - 'thread_lock', - 'thread_pipe', - 'thread_priority_default', - 'thread_priority_high', - 'thread_priority_low', - 'thread_rwlock', - 'thread_semaphore', - 'thread_setpriority', - 'token_value', - 'total_records', - 'treemap', - 'treemap_iterator', - 'true', - 'url_rewrite', - 'valid_creditcard', - 'valid_date', - 'valid_email', - 'valid_url', - 'value_list', - 'value_listitem', - 'valuelistitem', - 'var', - 'var_defined', - 'var_remove', - 'var_reset', - 'var_set', - 'variable', - 'variable_defined', - 'variable_set', - 'variables', - 'variant_count', - 'vars', - 'wap_isenabled', - 'wap_maxbuttons', - 'wap_maxcolumns', - 'wap_maxhorzpixels', - 'wap_maxrows', - 'wap_maxvertpixels', - 'while', - 'wsdl_extract', - 'wsdl_getbinding', - 'wsdl_getbindingforoperation', - 'wsdl_getbindingoperations', - 'wsdl_getmessagenamed', - 'wsdl_getmessageparts', - 'wsdl_getmessagetriofromporttype', - 'wsdl_getopbodystyle', - 'wsdl_getopbodyuse', - 'wsdl_getoperation', - 'wsdl_getoplocation', - 'wsdl_getopmessagetypes', - 'wsdl_getopsoapaction', - 'wsdl_getportaddress', - 'wsdl_getportsforservice', - 'wsdl_getporttype', - 'wsdl_getporttypeoperation', - 'wsdl_getservicedocumentation', - 'wsdl_getservices', - 'wsdl_gettargetnamespace', - 'wsdl_issoapoperation', - 'wsdl_listoperations', - 'wsdl_maketest', - 'xml', - 'xml_extract', - 'xml_rpc', - 'xml_rpccall', - 'xml_rw', - 'xml_serve', - 'xml_transform', - 'xml_xml', - 'xml_xmlstream', - 'xmlstream', - 'xsd_attribute', - 'xsd_blankarraybase', - 'xsd_blankbase', - 'xsd_buildtype', - 'xsd_cache', - 'xsd_checkcardinality', - 'xsd_continueall', - 'xsd_continueannotation', - 'xsd_continueany', - 'xsd_continueanyattribute', - 'xsd_continueattribute', - 'xsd_continueattributegroup', - 'xsd_continuechoice', - 'xsd_continuecomplexcontent', - 'xsd_continuecomplextype', - 'xsd_continuedocumentation', - 
'xsd_continueextension', - 'xsd_continuegroup', - 'xsd_continuekey', - 'xsd_continuelist', - 'xsd_continuerestriction', - 'xsd_continuesequence', - 'xsd_continuesimplecontent', - 'xsd_continuesimpletype', - 'xsd_continueunion', - 'xsd_deserialize', - 'xsd_fullyqualifyname', - 'xsd_generate', - 'xsd_generateblankfromtype', - 'xsd_generateblanksimpletype', - 'xsd_generatetype', - 'xsd_getschematype', - 'xsd_issimpletype', - 'xsd_loadschema', - 'xsd_lookupnamespaceuri', - 'xsd_lookuptype', - 'xsd_processany', - 'xsd_processattribute', - 'xsd_processattributegroup', - 'xsd_processcomplextype', - 'xsd_processelement', - 'xsd_processgroup', - 'xsd_processimport', - 'xsd_processinclude', - 'xsd_processschema', - 'xsd_processsimpletype', - 'xsd_ref', - 'xsd_type' - ) -} -MEMBERS = { - 'Member Methods': ( - 'escape_member', - 'oncompare', - 'sameas', - 'isa', - 'ascopy', - 'asstring', - 'ascopydeep', - 'type', - 'trait', - 'parent', - 'settrait', - 'oncreate', - 'listmethods', - 'hasmethod', - 'invoke', - 'addtrait', - 'isnota', - 'isallof', - 'isanyof', - 'size', - 'gettype', - 'istype', - 'doccomment', - 'requires', - 'provides', - 'name', - 'subtraits', - 'description', - 'hash', - 'hosttonet16', - 'hosttonet32', - 'nettohost16', - 'nettohost32', - 'nettohost64', - 'hosttonet64', - 'bitset', - 'bittest', - 'bitflip', - 'bitclear', - 'bitor', - 'bitand', - 'bitxor', - 'bitnot', - 'bitshiftleft', - 'bitshiftright', - 'bytes', - 'abs', - 'div', - 'dereferencepointer', - 'asdecimal', - 'serializationelements', - 'acceptdeserializedelement', - 'serialize', - 'deg2rad', - 'asstringhex', - 'asstringoct', - 'acos', - 'asin', - 'atan', - 'atan2', - 'ceil', - 'cos', - 'cosh', - 'exp', - 'fabs', - 'floor', - 'frexp', - 'ldexp', - 'log', - 'log10', - 'modf', - 'pow', - 'sin', - 'sinh', - 'sqrt', - 'tan', - 'tanh', - 'erf', - 'erfc', - 'gamma', - 'hypot', - 'j0', - 'j1', - 'jn', - 'lgamma', - 'y0', - 'y1', - 'yn', - 'isnan', - 'acosh', - 'asinh', - 'atanh', - 'cbrt', - 'expm1', - 
'nextafter', - 'scalb', - 'ilogb', - 'log1p', - 'logb', - 'remainder', - 'rint', - 'asinteger', - 'self', - 'detach', - 'restart', - 'resume', - 'continuation', - 'home', - 'callsite_file', - 'callsite_line', - 'callsite_col', - 'callstack', - 'splitthread', - 'threadreaddesc', - 'givenblock', - 'autocollectbuffer', - 'calledname', - 'methodname', - 'invokeuntil', - 'invokewhile', - 'invokeautocollect', - 'asasync', - 'append', - 'appendchar', - 'private_find', - 'private_findlast', - 'length', - 'chardigitvalue', - 'private_compare', - 'remove', - 'charname', - 'chartype', - 'decompose', - 'normalize', - 'digit', - 'foldcase', - 'sub', - 'integer', - 'private_merge', - 'unescape', - 'trim', - 'titlecase', - 'reverse', - 'getisocomment', - 'getnumericvalue', - 'totitle', - 'toupper', - 'tolower', - 'lowercase', - 'uppercase', - 'isalnum', - 'isalpha', - 'isbase', - 'iscntrl', - 'isdigit', - 'isxdigit', - 'islower', - 'isprint', - 'isspace', - 'istitle', - 'ispunct', - 'isgraph', - 'isblank', - 'isualphabetic', - 'isulowercase', - 'isupper', - 'isuuppercase', - 'isuwhitespace', - 'iswhitespace', - 'encodehtml', - 'decodehtml', - 'encodexml', - 'decodexml', - 'encodehtmltoxml', - 'getpropertyvalue', - 'hasbinaryproperty', - 'asbytes', - 'find', - 'findlast', - 'contains', - 'get', - 'equals', - 'compare', - 'comparecodepointorder', - 'padleading', - 'padtrailing', - 'merge', - 'split', - 'removeleading', - 'removetrailing', - 'beginswith', - 'endswith', - 'replace', - 'values', - 'foreachcharacter', - 'foreachlinebreak', - 'foreachwordbreak', - 'eachwordbreak', - 'eachcharacter', - 'foreachmatch', - 'eachmatch', - 'encodesql92', - 'encodesql', - 'keys', - 'decomposeassignment', - 'firstcomponent', - 'ifempty', - 'eachsub', - 'stripfirstcomponent', - 'isnotempty', - 'first', - 'lastcomponent', - 'foreachpathcomponent', - 'isfullpath', - 'back', - 'second', - 'componentdelimiter', - 'isempty', - 'foreachsub', - 'front', - 'striplastcomponent', - 'eachcomponent', - 
'eachline', - 'splitextension', - 'hastrailingcomponent', - 'last', - 'ifnotempty', - 'extensiondelimiter', - 'eachword', - 'substring', - 'setsize', - 'reserve', - 'getrange', - 'private_setrange', - 'importas', - 'import8bits', - 'import32bits', - 'import64bits', - 'import16bits', - 'importbytes', - 'importpointer', - 'export8bits', - 'export16bits', - 'export32bits', - 'export64bits', - 'exportbytes', - 'exportsigned8bits', - 'exportsigned16bits', - 'exportsigned32bits', - 'exportsigned64bits', - 'marker', - 'swapbytes', - 'encodeurl', - 'decodeurl', - 'encodebase64', - 'decodebase64', - 'encodeqp', - 'decodeqp', - 'encodemd5', - 'encodehex', - 'decodehex', - 'uncompress', - 'compress', - 'detectcharset', - 'bestcharset', - 'crc', - 'importstring', - 'setrange', - 'exportas', - 'exportstring', - 'exportpointerbits', - 'foreachbyte', - 'eachbyte', - 'setposition', - 'position', - 'value', - 'join', - 'asstaticarray', - 'foreach', - 'findposition', - 'min', - 'groupjoin', - 'orderbydescending', - 'average', - 'take', - 'do', - 'selectmany', - 'skip', - 'select', - 'sum', - 'max', - 'asarray', - 'thenbydescending', - 'aslist', - 'orderby', - 'thenby', - 'where', - 'groupby', - 'asgenerator', - 'typename', - 'returntype', - 'restname', - 'paramdescs', - 'action', - 'statement', - 'inputcolumns', - 'keycolumns', - 'returncolumns', - 'sortcolumns', - 'skiprows', - 'maxrows', - 'rowsfound', - 'statementonly', - 'lop', - 'databasename', - 'tablename', - 'schemaname', - 'hostid', - 'hostdatasource', - 'hostname', - 'hostport', - 'hostusername', - 'hostpassword', - 'hostschema', - 'hosttableencoding', - 'hostextra', - 'hostisdynamic', - 'refobj', - 'connection', - 'prepared', - 'getset', - 'addset', - 'numsets', - 'addrow', - 'addcolumninfo', - 'forcedrowid', - 'makeinheritedcopy', - 'filename', - 'expose', - 'recover', - 'insert', - 'removeall', - 'count', - 'exchange', - 'findindex', - 'foreachpair', - 'foreachkey', - 'sort', - 'insertfirst', - 'difference', - 
'removeback', - 'insertback', - 'removelast', - 'removefront', - 'insertfrom', - 'intersection', - 'top', - 'insertlast', - 'push', - 'union', - 'removefirst', - 'insertfront', - 'pop', - 'fd', - 'family', - 'isvalid', - 'isssl', - 'open', - 'close', - 'read', - 'write', - 'ioctl', - 'seek', - 'mode', - 'mtime', - 'atime', - 'dup', - 'dup2', - 'fchdir', - 'fchown', - 'fsync', - 'ftruncate', - 'fchmod', - 'sendfd', - 'receivefd', - 'readobject', - 'tryreadobject', - 'writeobject', - 'leaveopen', - 'rewind', - 'tell', - 'language', - 'script', - 'country', - 'variant', - 'displaylanguage', - 'displayscript', - 'displaycountry', - 'displayvariant', - 'displayname', - 'basename', - 'keywords', - 'iso3language', - 'iso3country', - 'formatas', - 'formatnumber', - 'parsenumber', - 'parseas', - 'format', - 'parse', - 'add', - 'roll', - 'set', - 'getattr', - 'setattr', - 'clear', - 'isset', - 'settimezone', - 'timezone', - 'time', - 'indaylighttime', - 'createdocument', - 'parsedocument', - 'hasfeature', - 'createdocumenttype', - 'nodename', - 'nodevalue', - 'nodetype', - 'parentnode', - 'childnodes', - 'firstchild', - 'lastchild', - 'previoussibling', - 'nextsibling', - 'attributes', - 'ownerdocument', - 'namespaceuri', - 'prefix', - 'localname', - 'insertbefore', - 'replacechild', - 'removechild', - 'appendchild', - 'haschildnodes', - 'clonenode', - 'issupported', - 'hasattributes', - 'extract', - 'extractone', - 'extractfast', - 'transform', - 'foreachchild', - 'eachchild', - 'extractfastone', - 'data', - 'substringdata', - 'appenddata', - 'insertdata', - 'deletedata', - 'replacedata', - 'doctype', - 'implementation', - 'documentelement', - 'createelement', - 'createdocumentfragment', - 'createtextnode', - 'createcomment', - 'createcdatasection', - 'createprocessinginstruction', - 'createattribute', - 'createentityreference', - 'getelementsbytagname', - 'importnode', - 'createelementns', - 'createattributens', - 'getelementsbytagnamens', - 'getelementbyid', - 'tagname', 
- 'getattribute', - 'setattribute', - 'removeattribute', - 'getattributenode', - 'setattributenode', - 'removeattributenode', - 'getattributens', - 'setattributens', - 'removeattributens', - 'getattributenodens', - 'setattributenodens', - 'hasattribute', - 'hasattributens', - 'setname', - 'contents', - 'specified', - 'ownerelement', - 'splittext', - 'notationname', - 'publicid', - 'systemid', - 'target', - 'entities', - 'notations', - 'internalsubset', - 'item', - 'getnameditem', - 'getnameditemns', - 'setnameditem', - 'setnameditemns', - 'removenameditem', - 'removenameditemns', - 'askeyedgenerator', - 'eachpair', - 'eachkey', - 'next', - 'readstring', - 'readattributevalue', - 'attributecount', - 'baseuri', - 'depth', - 'hasvalue', - 'isemptyelement', - 'xmllang', - 'getattributenamespace', - 'lookupnamespace', - 'movetoattribute', - 'movetoattributenamespace', - 'movetofirstattribute', - 'movetonextattribute', - 'movetoelement', - 'prepare', - 'last_insert_rowid', - 'total_changes', - 'interrupt', - 'errcode', - 'errmsg', - 'addmathfunctions', - 'finalize', - 'step', - 'bind_blob', - 'bind_double', - 'bind_int', - 'bind_null', - 'bind_text', - 'bind_parameter_index', - 'reset', - 'column_count', + 'curlauth_anysafe', + 'curlauth_any', + 'curlauth_basic', + 'curlauth_digest', + 'curlauth_gssnegotiate', + 'curlauth_none', + 'curlauth_ntlm', + 'curle_aborted_by_callback', + 'curle_bad_calling_order', + 'curle_bad_content_encoding', + 'curle_bad_download_resume', + 'curle_bad_function_argument', + 'curle_bad_password_entered', + 'curle_couldnt_connect', + 'curle_couldnt_resolve_host', + 'curle_couldnt_resolve_proxy', + 'curle_failed_init', + 'curle_filesize_exceeded', + 'curle_file_couldnt_read_file', + 'curle_ftp_access_denied', + 'curle_ftp_cant_get_host', + 'curle_ftp_cant_reconnect', + 'curle_ftp_couldnt_get_size', + 'curle_ftp_couldnt_retr_file', + 'curle_ftp_couldnt_set_ascii', + 'curle_ftp_couldnt_set_binary', + 'curle_ftp_couldnt_use_rest', + 
'curle_ftp_port_failed', + 'curle_ftp_quote_error', + 'curle_ftp_ssl_failed', + 'curle_ftp_user_password_incorrect', + 'curle_ftp_weird_227_format', + 'curle_ftp_weird_pass_reply', + 'curle_ftp_weird_pasv_reply', + 'curle_ftp_weird_server_reply', + 'curle_ftp_weird_user_reply', + 'curle_ftp_write_error', + 'curle_function_not_found', + 'curle_got_nothing', + 'curle_http_post_error', + 'curle_http_range_error', + 'curle_http_returned_error', + 'curle_interface_failed', + 'curle_ldap_cannot_bind', + 'curle_ldap_invalid_url', + 'curle_ldap_search_failed', + 'curle_library_not_found', + 'curle_login_denied', + 'curle_malformat_user', + 'curle_obsolete', + 'curle_ok', + 'curle_operation_timeouted', + 'curle_out_of_memory', + 'curle_partial_file', + 'curle_read_error', + 'curle_recv_error', + 'curle_send_error', + 'curle_send_fail_rewind', + 'curle_share_in_use', + 'curle_ssl_cacert', + 'curle_ssl_certproblem', + 'curle_ssl_cipher', + 'curle_ssl_connect_error', + 'curle_ssl_engine_initfailed', + 'curle_ssl_engine_notfound', + 'curle_ssl_engine_setfailed', + 'curle_ssl_peer_certificate', + 'curle_telnet_option_syntax', + 'curle_too_many_redirects', + 'curle_unknown_telnet_option', + 'curle_unsupported_protocol', + 'curle_url_malformat_user', + 'curle_url_malformat', + 'curle_write_error', + 'curlftpauth_default', + 'curlftpauth_ssl', + 'curlftpauth_tls', + 'curlftpssl_all', + 'curlftpssl_control', + 'curlftpssl_last', + 'curlftpssl_none', + 'curlftpssl_try', + 'curlinfo_connect_time', + 'curlinfo_content_length_download', + 'curlinfo_content_length_upload', + 'curlinfo_content_type', + 'curlinfo_effective_url', + 'curlinfo_filetime', + 'curlinfo_header_size', + 'curlinfo_httpauth_avail', + 'curlinfo_http_connectcode', + 'curlinfo_namelookup_time', + 'curlinfo_num_connects', + 'curlinfo_os_errno', + 'curlinfo_pretransfer_time', + 'curlinfo_proxyauth_avail', + 'curlinfo_redirect_count', + 'curlinfo_redirect_time', + 'curlinfo_request_size', + 'curlinfo_response_code', + 
'curlinfo_size_download', + 'curlinfo_size_upload', + 'curlinfo_speed_download', + 'curlinfo_speed_upload', + 'curlinfo_ssl_engines', + 'curlinfo_ssl_verifyresult', + 'curlinfo_starttransfer_time', + 'curlinfo_total_time', + 'curlmsg_done', + 'curlopt_autoreferer', + 'curlopt_buffersize', + 'curlopt_cainfo', + 'curlopt_capath', + 'curlopt_connecttimeout', + 'curlopt_cookiefile', + 'curlopt_cookiejar', + 'curlopt_cookiesession', + 'curlopt_cookie', + 'curlopt_crlf', + 'curlopt_customrequest', + 'curlopt_dns_use_global_cache', + 'curlopt_egdsocket', + 'curlopt_encoding', + 'curlopt_failonerror', + 'curlopt_filetime', + 'curlopt_followlocation', + 'curlopt_forbid_reuse', + 'curlopt_fresh_connect', + 'curlopt_ftpappend', + 'curlopt_ftplistonly', + 'curlopt_ftpport', + 'curlopt_ftpsslauth', + 'curlopt_ftp_account', + 'curlopt_ftp_create_missing_dirs', + 'curlopt_ftp_response_timeout', + 'curlopt_ftp_ssl', + 'curlopt_ftp_use_eprt', + 'curlopt_ftp_use_epsv', + 'curlopt_header', + 'curlopt_http200aliases', + 'curlopt_httpauth', + 'curlopt_httpget', + 'curlopt_httpheader', + 'curlopt_httppost', + 'curlopt_httpproxytunnel', + 'curlopt_http_version', + 'curlopt_infilesize_large', + 'curlopt_infilesize', + 'curlopt_interface', + 'curlopt_ipresolve', + 'curlopt_krb4level', + 'curlopt_low_speed_limit', + 'curlopt_low_speed_time', + 'curlopt_maxconnects', + 'curlopt_maxfilesize_large', + 'curlopt_maxfilesize', + 'curlopt_maxredirs', + 'curlopt_netrc_file', + 'curlopt_netrc', + 'curlopt_nobody', + 'curlopt_noprogress', + 'curlopt_port', + 'curlopt_postfieldsize_large', + 'curlopt_postfieldsize', + 'curlopt_postfields', + 'curlopt_postquote', + 'curlopt_post', + 'curlopt_prequote', + 'curlopt_proxyauth', + 'curlopt_proxyport', + 'curlopt_proxytype', + 'curlopt_proxyuserpwd', + 'curlopt_proxy', + 'curlopt_put', + 'curlopt_quote', + 'curlopt_random_file', + 'curlopt_range', + 'curlopt_readdata', + 'curlopt_referer', + 'curlopt_resume_from_large', + 'curlopt_resume_from', + 
'curlopt_sslcerttype', + 'curlopt_sslcert', + 'curlopt_sslengine_default', + 'curlopt_sslengine', + 'curlopt_sslkeypasswd', + 'curlopt_sslkeytype', + 'curlopt_sslkey', + 'curlopt_sslversion', + 'curlopt_ssl_cipher_list', + 'curlopt_ssl_verifyhost', + 'curlopt_ssl_verifypeer', + 'curlopt_tcp_nodelay', + 'curlopt_timecondition', + 'curlopt_timeout', + 'curlopt_timevalue', + 'curlopt_transfertext', + 'curlopt_unrestricted_auth', + 'curlopt_upload', + 'curlopt_url', + 'curlopt_useragent', + 'curlopt_userpwd', + 'curlopt_use_ssl', + 'curlopt_verbose', + 'curlopt_writedata', + 'curlproxy_http', + 'curlproxy_socks4', + 'curlproxy_socks5', + 'curl_http_version_1_0', + 'curl_http_version_1_1', + 'curl_http_version_none', + 'curl_ipresolve_v4', + 'curl_ipresolve_v6', + 'curl_ipresolve_whatever', + 'curl_netrc_ignored', + 'curl_netrc_optional', + 'curl_netrc_required', + 'curl_version_asynchdns', + 'curl_version_debug', + 'curl_version_gssnegotiate', + 'curl_version_idn', + 'curl_version_ipv6', + 'curl_version_kerberos4', + 'curl_version_largefile', + 'curl_version_libz', + 'curl_version_ntlm', + 'curl_version_spnego', + 'curl_version_ssl', + 'define_atend', + 'email_immediate', + 'email_queue', + 'fcgi_abort_request', + 'fcgi_authorize', + 'fcgi_begin_request', + 'fcgi_cant_mpx_conn', + 'fcgi_data', + 'fcgi_end_request', + 'fcgi_filter', + 'fcgi_get_values_result', + 'fcgi_get_values', + 'fcgi_keep_conn', + 'fcgi_max_conns', + 'fcgi_max_reqs', + 'fcgi_mpxs_conns', + 'fcgi_null_request_id', + 'fcgi_overloaded', + 'fcgi_params', + 'fcgi_request_complete', + 'fcgi_responder', + 'fcgi_stderr', + 'fcgi_stdin', + 'fcgi_stdout', + 'fcgi_unknown_role', + 'fcgi_unknown_type', + 'fcgi_version_1', + 'fcgi_x_stdin', + 'inline_colinfo_name_pos', + 'inline_colinfo_type_pos', + 'inline_colinfo_valuelist_pos', + 'inline_columninfo_pos', + 'inline_foundcount_pos', + 'inline_resultrows_pos', + 'mysqlds', + 'net_connectinprogress', + 'net_connectok', + 'net_typessltcp', + 'net_typessludp', + 
'net_typessl', + 'net_typetcp', + 'net_typeudp', + 'net_waitread', + 'net_waittimeout', + 'net_waitwrite', + 'odbc', + 'portal', + 'postgresql', + 'server_admin', + 'server_name', + 'server_port', + 'server_protocol', + 'server_signature', + 'server_software', + 'sqlite_abort', + 'sqlite_auth', + 'sqlite_blob', + 'sqlite_busy', + 'sqlite_cantopen', + 'sqlite_constraint', + 'sqlite_corrupt', + 'sqlite_done', + 'sqlite_empty', + 'sqlite_error', + 'sqlite_float', + 'sqlite_format', + 'sqlite_full', + 'sqlite_integer', + 'sqlite_internal', + 'sqlite_interrupt', + 'sqlite_ioerr', + 'sqlite_locked', + 'sqlite_mismatch', + 'sqlite_misuse', + 'sqlite_nolfs', + 'sqlite_nomem', + 'sqlite_notadb', + 'sqlite_notfound', + 'sqlite_null', + 'sqlite_ok', + 'sqlite_perm', + 'sqlite_protocol', + 'sqlite_range', + 'sqlite_readonly', + 'sqlite_row', + 'sqlite_schema', + 'sqlite_text', + 'sqlite_toobig', + 'sqliteconnector', + 'string_validcharset', + 'uchar_age', + 'uchar_alphabetic', + 'uchar_ascii_hex_digit', + 'uchar_bidi_class', + 'uchar_bidi_control', + 'uchar_bidi_mirrored', + 'uchar_bidi_mirroring_glyph', + 'uchar_block', + 'uchar_canonical_combining_class', + 'uchar_case_folding', + 'uchar_case_sensitive', + 'uchar_dash', + 'uchar_decomposition_type', + 'uchar_default_ignorable_code_point', + 'uchar_deprecated', + 'uchar_diacritic', + 'uchar_east_asian_width', + 'uchar_extender', + 'uchar_full_composition_exclusion', + 'uchar_general_category_mask', + 'uchar_general_category', + 'uchar_grapheme_base', + 'uchar_grapheme_cluster_break', + 'uchar_grapheme_extend', + 'uchar_grapheme_link', + 'uchar_hangul_syllable_type', + 'uchar_hex_digit', + 'uchar_hyphen', + 'uchar_ideographic', + 'uchar_ids_binary_operator', + 'uchar_ids_trinary_operator', + 'uchar_id_continue', + 'uchar_iso_comment', + 'uchar_joining_group', + 'uchar_joining_type', + 'uchar_join_control', + 'uchar_lead_canonical_combining_class', + 'uchar_line_break', + 'uchar_logical_order_exception', + 
'uchar_lowercase_mapping', + 'uchar_lowercase', + 'uchar_math', + 'uchar_name', + 'uchar_nfc_inert', + 'uchar_nfc_quick_check', + 'uchar_nfd_inert', + 'uchar_nfd_quick_check', + 'uchar_nfkc_inert', + 'uchar_nfkc_quick_check', + 'uchar_nfkd_inert', + 'uchar_nfkd_quick_check', + 'uchar_noncharacter_code_point', + 'uchar_numeric_type', + 'uchar_numeric_value', + 'uchar_pattern_syntax', + 'uchar_pattern_white_space', + 'uchar_posix_alnum', + 'uchar_posix_blank', + 'uchar_posix_graph', + 'uchar_posix_print', + 'uchar_posix_xdigit', + 'uchar_quotation_mark', + 'uchar_radical', + 'uchar_script', + 'uchar_segment_starter', + 'uchar_sentence_break', + 'uchar_simple_case_folding', + 'uchar_simple_lowercase_mapping', + 'uchar_simple_titlecase_mapping', + 'uchar_simple_uppercase_mapping', + 'uchar_soft_dotted', + 'uchar_s_term', + 'uchar_terminal_punctuation', + 'uchar_titlecase_mapping', + 'uchar_trail_canonical_combining_class', + 'uchar_unicode_1_name', + 'uchar_unified_ideograph', + 'uchar_uppercase_mapping', + 'uchar_uppercase', + 'uchar_variation_selector', + 'uchar_white_space', + 'uchar_word_break', + 'uchar_xid_continue', + 'u_lb_alphabetic', + 'u_lb_ambiguous', + 'u_lb_break_after', + 'u_lb_break_before', + 'u_lb_break_both', + 'u_lb_break_symbols', + 'u_lb_carriage_return', + 'u_lb_close_punctuation', + 'u_lb_combining_mark', + 'u_lb_complex_context', + 'u_lb_contingent_break', + 'u_lb_exclamation', + 'u_lb_glue', + 'u_lb_h2', + 'u_lb_h3', + 'u_lb_hyphen', + 'u_lb_ideographic', + 'u_lb_infix_numeric', + 'u_lb_inseparable', + 'u_lb_jl', + 'u_lb_jt', + 'u_lb_jv', + 'u_lb_line_feed', + 'u_lb_mandatory_break', + 'u_lb_next_line', + 'u_lb_nonstarter', + 'u_lb_numeric', + 'u_lb_open_punctuation', + 'u_lb_postfix_numeric', + 'u_lb_prefix_numeric', + 'u_lb_quotation', + 'u_lb_space', + 'u_lb_surrogate', + 'u_lb_unknown', + 'u_lb_word_joiner', + 'u_lb_zwspace', + 'u_nt_decimal', + 'u_nt_digit', + 'u_nt_none', + 'u_nt_numeric', + 'u_sb_aterm', + 'u_sb_close', + 'u_sb_format', 
+ 'u_sb_lower', + 'u_sb_numeric', + 'u_sb_oletter', + 'u_sb_other', + 'u_sb_sep', + 'u_sb_sp', + 'u_sb_sterm', + 'u_sb_upper', + 'u_wb_aletter', + 'u_wb_extendnumlet', + 'u_wb_format', + 'u_wb_katakana', + 'u_wb_midletter', + 'u_wb_midnum', + 'u_wb_numeric', + 'u_wb_other', + 'wexitstatus', + 'wifcontinued', + 'wifexited', + 'wifsignaled', + 'wifstopped', + 'wstopsig', + 'wtermsig', + 'zip_checkcons', + 'zip_cm_bzip2', + 'zip_cm_default', + 'zip_cm_deflate64', + 'zip_cm_deflate', + 'zip_cm_implode', + 'zip_cm_pkware_implode', + 'zip_cm_reduce_1', + 'zip_cm_reduce_2', + 'zip_cm_reduce_3', + 'zip_cm_reduce_4', + 'zip_cm_shrink', + 'zip_cm_store', + 'zip_create', + 'zip_em_3des_112', + 'zip_em_3des_168', + 'zip_em_aes_128', + 'zip_em_aes_192', + 'zip_em_aes_256', + 'zip_em_des', + 'zip_em_none', + 'zip_em_rc2_old', + 'zip_em_rc2', + 'zip_em_rc4', + 'zip_em_trad_pkware', + 'zip_em_unknown', + 'zip_er_changed', + 'zip_er_close', + 'zip_er_compnotsupp', + 'zip_er_crc', + 'zip_er_deleted', + 'zip_er_eof', + 'zip_er_exists', + 'zip_er_incons', + 'zip_er_internal', + 'zip_er_inval', + 'zip_er_memory', + 'zip_er_multidisk', + 'zip_er_noent', + 'zip_er_nozip', + 'zip_er_ok', + 'zip_er_open', + 'zip_er_read', + 'zip_er_remove', + 'zip_er_rename', + 'zip_er_seek', + 'zip_er_tmpopen', + 'zip_er_write', + 'zip_er_zipclosed', + 'zip_er_zlib', + 'zip_et_none', + 'zip_et_sys', + 'zip_et_zlib', + 'zip_excl', + 'zip_fl_compressed', + 'zip_fl_nocase', + 'zip_fl_nodir', + 'zip_fl_unchanged', + 'abort_clear', + 'abort_now', + 'abort', + 'action_params', + 'action_param', + 'action_statement', + 'admin_authorization', + 'admin_currentgroups', + 'admin_currentuserid', + 'admin_currentusername', + 'admin_getpref', + 'admin_initialize', + 'admin_lassoservicepath', + 'admin_removepref', + 'admin_setpref', + 'admin_userexists', + 'all', + 'auth_admin', + 'auth_check', + 'auth_custom', + 'auth_group', + 'auth_prompt', + 'auth_user', + 'bom_utf16be', + 'bom_utf16le', + 'bom_utf32be', + 
'bom_utf32le', + 'bom_utf8', + 'bw', + 'capture_nearestloopabort', + 'capture_nearestloopcontinue', + 'capture_nearestloopcount', + 'checked', + 'cipher_decrypt_private', + 'cipher_decrypt_public', + 'cipher_decrypt', + 'cipher_digest', + 'cipher_encrypt_private', + 'cipher_encrypt_public', + 'cipher_encrypt', + 'cipher_generate_key', + 'cipher_hmac', + 'cipher_keylength', + 'cipher_list', + 'cipher_open', + 'cipher_seal', + 'cipher_sign', + 'cipher_verify', + 'client_addr', + 'client_authorization', + 'client_browser', + 'client_contentlength', + 'client_contenttype', + 'client_cookielist', + 'client_cookies', + 'client_encoding', + 'client_formmethod', + 'client_getargs', + 'client_getparams', + 'client_getparam', + 'client_headers', + 'client_integertoip', + 'client_iptointeger', + 'client_password', + 'client_postargs', + 'client_postparams', + 'client_postparam', + 'client_type', + 'client_url', + 'client_username', + 'cn', + 'column_names', + 'column_name', + 'column_type', + 'column', + 'compress', + 'content_addheader', + 'content_body', + 'content_encoding', + 'content_header', + 'content_replaceheader', + 'content_type', + 'cookie_set', + 'cookie', + 'curl_easy_cleanup', + 'curl_easy_duphandle', + 'curl_easy_getinfo', + 'curl_easy_init', + 'curl_easy_reset', + 'curl_easy_setopt', + 'curl_easy_strerror', + 'curl_getdate', + 'curl_multi_perform', + 'curl_multi_result', + 'curl_version_info', + 'curl_version', + 'database_adddefaultsqlitehost', + 'database_database', + 'database_initialize', + 'database_name', + 'database_qs', + 'database_table_database_tables', + 'database_table_datasource_databases', + 'database_table_datasource_hosts', + 'database_table_datasources', + 'database_table_table_fields', + 'database_util_cleanpath', + 'dbgp_stop_stack_name', + 'debugging_breakpoint_get', + 'debugging_breakpoint_list', + 'debugging_breakpoint_remove', + 'debugging_breakpoint_set', + 'debugging_breakpoint_update', + 'debugging_break', + 
'debugging_context_locals', + 'debugging_context_self', + 'debugging_context_vars', + 'debugging_detach', + 'debugging_enabled', + 'debugging_get_context', + 'debugging_get_stack', + 'debugging_run', + 'debugging_step_in', + 'debugging_step_out', + 'debugging_step_over', + 'debugging_stop', + 'debugging_terminate', + 'decimal_random', + 'decompress', + 'decrypt_blowfish', + 'define_atbegin', + 'dns_default', + 'dns_lookup', + 'document', + 'email_attachment_mime_type', + 'email_batch', + 'email_digestchallenge', + 'email_digestresponse', + 'email_extract', + 'email_findemails', + 'email_fix_address_list', + 'email_fix_address', + 'email_fs_error_clean', + 'email_initialize', + 'email_merge', + 'email_mxlookup', + 'email_pop_priv_extract', + 'email_pop_priv_quote', + 'email_pop_priv_substring', + 'email_result', + 'email_safeemail', + 'email_send', + 'email_status', + 'email_token', + 'email_translatebreakstocrlf', + 'encode_qheader', + 'encoding_iso88591', + 'encoding_utf8', + 'encrypt_blowfish', + 'encrypt_crammd5', + 'encrypt_hmac', + 'encrypt_md5', + 'eol', + 'eq', + 'error_code_aborted', + 'error_code_dividebyzero', + 'error_code_filenotfound', + 'error_code_invalidparameter', + 'error_code_methodnotfound', + 'error_code_networkerror', + 'error_code_noerror', + 'error_code_resnotfound', + 'error_code_runtimeassertion', + 'error_code', + 'error_msg_aborted', + 'error_msg_dividebyzero', + 'error_msg_filenotfound', + 'error_msg_invalidparameter', + 'error_msg_methodnotfound', + 'error_msg_networkerror', + 'error_msg_noerror', + 'error_msg_resnotfound', + 'error_msg_runtimeassertion', + 'error_msg', + 'error_obj', + 'error_pop', + 'error_push', + 'error_reset', + 'error_stack', + 'escape_tag', + 'evdns_resolve_ipv4', + 'evdns_resolve_ipv6', + 'evdns_resolve_reverse_ipv6', + 'evdns_resolve_reverse', + 'ew', + 'fail_ifnot', + 'fail_if', + 'fail_now', + 'failure_clear', + 'fail', + 'fastcgi_createfcgirequest', + 'fastcgi_handlecon', + 'fastcgi_handlereq', + 
'fastcgi_initialize', + 'fastcgi_initiate_request', + 'fcgi_bodychunksize', + 'fcgi_makeendrequestbody', + 'fcgi_makestdoutbody', + 'fcgi_readparam', + 'fcgi_read_timeout_seconds', + 'field_names', + 'field_name', + 'field', + 'file_copybuffersize', + 'file_defaultencoding', + 'file_forceroot', + 'file_modeline', + 'file_modechar', + 'file_stderr', + 'file_stdin', + 'file_stdout', + 'file_tempfile', + 'filemakerds_initialize', + 'filemakerds', + 'found_count', + 'ftp_deletefile', + 'ftp_getdata', + 'ftp_getfile', + 'ftp_getlisting', + 'ftp_putdata', + 'ftp_putfile', + 'ft', + 'full', + 'generateforeach', + 'gte', + 'gt', + 'handle_failure', + 'handle', + 'html_comment', + 'http_char_colon', + 'http_char_cr', + 'http_char_htab', + 'http_char_lf', + 'http_char_question', + 'http_char_space', + 'http_default_files', + 'http_read_timeout_secs', + 'http_server_apps_path', + 'http_server_request_logger', + 'if_empty', + 'if_false', + 'if_null', + 'if_true', + 'include_cache_compare', + 'include_currentpath', + 'include_filepath', + 'include_localpath', + 'include_once', + 'include_path', + 'include_raw', + 'include_url', + 'includes', + 'include', + 'inline_namedget', + 'inline_namedput', + 'inline_scopeget', + 'inline_scopepop', + 'inline_scopepush', + 'inline', + 'integer_bitor', + 'integer_random', + 'io_dir_dt_blk', + 'io_dir_dt_chr', + 'io_dir_dt_dir', + 'io_dir_dt_fifo', + 'io_dir_dt_lnk', + 'io_dir_dt_reg', + 'io_dir_dt_sock', + 'io_dir_dt_unknown', + 'io_dir_dt_wht', + 'io_file_access', + 'io_file_chdir', + 'io_file_chmod', + 'io_file_chown', + 'io_file_dirname', + 'io_file_f_dupfd', + 'io_file_f_getfd', + 'io_file_f_getfl', + 'io_file_f_getlk', + 'io_file_f_rdlck', + 'io_file_f_setfd', + 'io_file_f_setfl', + 'io_file_f_setlkw', + 'io_file_f_setlk', + 'io_file_f_test', + 'io_file_f_tlock', + 'io_file_f_ulock', + 'io_file_f_unlck', + 'io_file_f_wrlck', + 'io_file_fd_cloexec', + 'io_file_fioasync', + 'io_file_fioclex', + 'io_file_fiodtype', + 'io_file_fiogetown', + 
'io_file_fionbio', + 'io_file_fionclex', + 'io_file_fionread', + 'io_file_fiosetown', + 'io_file_getcwd', + 'io_file_lchown', + 'io_file_link', + 'io_file_lockf', + 'io_file_lstat_atime', + 'io_file_lstat_mode', + 'io_file_lstat_mtime', + 'io_file_lstat_size', + 'io_file_mkdir', + 'io_file_mkfifo', + 'io_file_mkstemp', + 'io_file_o_append', + 'io_file_o_async', + 'io_file_o_creat', + 'io_file_o_excl', + 'io_file_o_exlock', + 'io_file_o_fsync', + 'io_file_o_nofollow', + 'io_file_o_nonblock', + 'io_file_o_rdonly', + 'io_file_o_rdwr', + 'io_file_o_shlock', + 'io_file_o_sync', + 'io_file_o_trunc', + 'io_file_o_wronly', + 'io_file_pipe', + 'io_file_readlink', + 'io_file_realpath', + 'io_file_remove', + 'io_file_rename', + 'io_file_rmdir', + 'io_file_s_ifblk', + 'io_file_s_ifchr', + 'io_file_s_ifdir', + 'io_file_s_ififo', + 'io_file_s_iflnk', + 'io_file_s_ifmt', + 'io_file_s_ifreg', + 'io_file_s_ifsock', + 'io_file_s_irgrp', + 'io_file_s_iroth', + 'io_file_s_irusr', + 'io_file_s_irwxg', + 'io_file_s_irwxo', + 'io_file_s_irwxu', + 'io_file_s_isgid', + 'io_file_s_isuid', + 'io_file_s_isvtx', + 'io_file_s_iwgrp', + 'io_file_s_iwoth', + 'io_file_s_iwusr', + 'io_file_s_ixgrp', + 'io_file_s_ixoth', + 'io_file_s_ixusr', + 'io_file_seek_cur', + 'io_file_seek_end', + 'io_file_seek_set', + 'io_file_stat_atime', + 'io_file_stat_mode', + 'io_file_stat_mtime', + 'io_file_stat_size', + 'io_file_stderr', + 'io_file_stdin', + 'io_file_stdout', + 'io_file_symlink', + 'io_file_tempnam', + 'io_file_truncate', + 'io_file_umask', + 'io_file_unlink', + 'io_net_accept', + 'io_net_af_inet6', + 'io_net_af_inet', + 'io_net_af_unix', + 'io_net_bind', + 'io_net_connect', + 'io_net_getpeername', + 'io_net_getsockname', + 'io_net_ipproto_ip', + 'io_net_ipproto_udp', + 'io_net_listen', + 'io_net_msg_oob', + 'io_net_msg_peek', + 'io_net_msg_waitall', + 'io_net_recvfrom', + 'io_net_recv', + 'io_net_sendto', + 'io_net_send', + 'io_net_shut_rdwr', + 'io_net_shut_rd', + 'io_net_shut_wr', + 
'io_net_shutdown', + 'io_net_so_acceptconn', + 'io_net_so_broadcast', + 'io_net_so_debug', + 'io_net_so_dontroute', + 'io_net_so_error', + 'io_net_so_keepalive', + 'io_net_so_linger', + 'io_net_so_oobinline', + 'io_net_so_rcvbuf', + 'io_net_so_rcvlowat', + 'io_net_so_rcvtimeo', + 'io_net_so_reuseaddr', + 'io_net_so_sndbuf', + 'io_net_so_sndlowat', + 'io_net_so_sndtimeo', + 'io_net_so_timestamp', + 'io_net_so_type', + 'io_net_so_useloopback', + 'io_net_sock_dgram', + 'io_net_sock_raw', + 'io_net_sock_rdm', + 'io_net_sock_seqpacket', + 'io_net_sock_stream', + 'io_net_socket', + 'io_net_sol_socket', + 'io_net_ssl_accept', + 'io_net_ssl_begin', + 'io_net_ssl_connect', + 'io_net_ssl_end', + 'io_net_ssl_errorstring', + 'io_net_ssl_error', + 'io_net_ssl_funcerrorstring', + 'io_net_ssl_liberrorstring', + 'io_net_ssl_read', + 'io_net_ssl_reasonerrorstring', + 'io_net_ssl_setacceptstate', + 'io_net_ssl_setconnectstate', + 'io_net_ssl_setverifylocations', + 'io_net_ssl_shutdown', + 'io_net_ssl_usecertificatechainfile', + 'io_net_ssl_useprivatekeyfile', + 'io_net_ssl_write', + 'java_jvm_create', + 'java_jvm_getenv', + 'jdbc_initialize', + 'json_consume_array', + 'json_consume_object', + 'json_consume_string', + 'json_consume_token', + 'json_deserialize', + 'json_rpccall', + 'json_serialize', + 'keycolumn_name', + 'keycolumn_value', + 'keyfield_name', + 'keyfield_value', + 'lasso_currentaction', + 'lasso_errorreporting', + 'lasso_executiontimelimit', + 'lasso_methodexists', + 'lasso_tagexists', + 'lasso_uniqueid', + 'lasso_version', + 'lassoapp_current_app', + 'lassoapp_current_include', + 'lassoapp_do_with_include', + 'lassoapp_exists', + 'lassoapp_find_missing_file', + 'lassoapp_format_mod_date', + 'lassoapp_get_capabilities_name', + 'lassoapp_include_current', + 'lassoapp_include', + 'lassoapp_initialize_db', + 'lassoapp_initialize', + 'lassoapp_invoke_resource', + 'lassoapp_issourcefileextension', + 'lassoapp_link', + 'lassoapp_load_module', + 'lassoapp_mime_get', + 
'lassoapp_mime_type_appcache', + 'lassoapp_mime_type_css', + 'lassoapp_mime_type_csv', + 'lassoapp_mime_type_docx', + 'lassoapp_mime_type_doc', + 'lassoapp_mime_type_gif', + 'lassoapp_mime_type_html', + 'lassoapp_mime_type_ico', + 'lassoapp_mime_type_jpg', + 'lassoapp_mime_type_js', + 'lassoapp_mime_type_lasso', + 'lassoapp_mime_type_map', + 'lassoapp_mime_type_pdf', + 'lassoapp_mime_type_png', + 'lassoapp_mime_type_ppt', + 'lassoapp_mime_type_rss', + 'lassoapp_mime_type_svg', + 'lassoapp_mime_type_swf', + 'lassoapp_mime_type_tif', + 'lassoapp_mime_type_ttf', + 'lassoapp_mime_type_txt', + 'lassoapp_mime_type_woff', + 'lassoapp_mime_type_xaml', + 'lassoapp_mime_type_xap', + 'lassoapp_mime_type_xbap', + 'lassoapp_mime_type_xhr', + 'lassoapp_mime_type_xml', + 'lassoapp_mime_type_zip', + 'lassoapp_path_to_method_name', + 'lassoapp_settingsdb', + 'layout_name', + 'lcapi_datasourceadd', + 'lcapi_datasourcecloseconnection', + 'lcapi_datasourcedelete', + 'lcapi_datasourceduplicate', + 'lcapi_datasourceexecsql', + 'lcapi_datasourcefindall', + 'lcapi_datasourceimage', + 'lcapi_datasourceinfo', + 'lcapi_datasourceinit', + 'lcapi_datasourcematchesname', + 'lcapi_datasourcenames', + 'lcapi_datasourcenothing', + 'lcapi_datasourceopand', + 'lcapi_datasourceopany', + 'lcapi_datasourceopbw', + 'lcapi_datasourceopct', + 'lcapi_datasourceopeq', + 'lcapi_datasourceopew', + 'lcapi_datasourceopft', + 'lcapi_datasourceopgteq', + 'lcapi_datasourceopgt', + 'lcapi_datasourceopin', + 'lcapi_datasourceoplteq', + 'lcapi_datasourceoplt', + 'lcapi_datasourceopnbw', + 'lcapi_datasourceopnct', + 'lcapi_datasourceopneq', + 'lcapi_datasourceopnew', + 'lcapi_datasourceopnin', + 'lcapi_datasourceopnot', + 'lcapi_datasourceopnrx', + 'lcapi_datasourceopno', + 'lcapi_datasourceopor', + 'lcapi_datasourceoprx', + 'lcapi_datasourcepreparesql', + 'lcapi_datasourceprotectionnone', + 'lcapi_datasourceprotectionreadonly', + 'lcapi_datasourcerandom', + 'lcapi_datasourceschemanames', + 'lcapi_datasourcescripts', 
+ 'lcapi_datasourcesearch', + 'lcapi_datasourcesortascending', + 'lcapi_datasourcesortcustom', + 'lcapi_datasourcesortdescending', + 'lcapi_datasourcetablenames', + 'lcapi_datasourceterm', + 'lcapi_datasourcetickle', + 'lcapi_datasourcetypeblob', + 'lcapi_datasourcetypeboolean', + 'lcapi_datasourcetypedate', + 'lcapi_datasourcetypedecimal', + 'lcapi_datasourcetypeinteger', + 'lcapi_datasourcetypestring', + 'lcapi_datasourceunpreparesql', + 'lcapi_datasourceupdate', + 'lcapi_fourchartointeger', + 'lcapi_listdatasources', + 'lcapi_loadmodules', + 'lcapi_loadmodule', + 'lcapi_updatedatasourceslist', + 'ldap_scope_base', + 'ldap_scope_onelevel', + 'ldap_scope_subtree', + 'library_once', + 'library', + 'ljapi_initialize', + 'locale_availablelocales', + 'locale_canadafrench', + 'locale_canada', + 'locale_china', + 'locale_chinese', + 'locale_default', + 'locale_english', + 'locale_format_style_date_time', + 'locale_format_style_default', + 'locale_format_style_full', + 'locale_format_style_long', + 'locale_format_style_medium', + 'locale_format_style_none', + 'locale_format_style_short', + 'locale_format', + 'locale_france', + 'locale_french', + 'locale_germany', + 'locale_german', + 'locale_isocountries', + 'locale_isolanguages', + 'locale_italian', + 'locale_italy', + 'locale_japanese', + 'locale_japan', + 'locale_korean', + 'locale_korea', + 'locale_prc', + 'locale_setdefault', + 'locale_simplifiedchinese', + 'locale_taiwan', + 'locale_traditionalchinese', + 'locale_uk', + 'locale_us', + 'log_always', + 'log_critical', + 'log_deprecated', + 'log_destination_console', + 'log_destination_database', + 'log_destination_file', + 'log_detail', + 'log_initialize', + 'log_level_critical', + 'log_level_deprecated', + 'log_level_detail', + 'log_level_sql', + 'log_level_warning', + 'log_max_file_size', + 'log_setdestination', + 'log_sql', + 'log_trim_file_size', + 'log_warning', + 'log', + 'loop_abort', + 'loop_continue', + 'loop_count', + 'loop_key_pop', + 'loop_key_push', + 
'loop_key', + 'loop_pop', + 'loop_push', + 'loop_value_pop', + 'loop_value_push', + 'loop_value', + 'loop', + 'lte', + 'lt', + 'main_thread_only', + 'maxrecords_value', + 'max', + 'median', + 'method_name', + 'micros', + 'millis', + 'minimal', + 'min', + 'namespace_global', + 'namespace_import', + 'namespace_using', + 'nbw', + 'ncn', + 'neq', + 'new', + 'none', + 'nrx', + 'nslookup', + 'odbc_session_driver_mssql', + 'output_none', + 'output', + 'pdf_package', + 'pdf_rectangle', + 'pdf_serve', + 'pi', + 'process', + 'protect_now', + 'protect', + 'queriable_average', + 'queriable_defaultcompare', + 'queriable_do', + 'queriable_internal_combinebindings', + 'queriable_max', + 'queriable_min', + 'queriable_qsort', + 'queriable_reversecompare', + 'queriable_sum', + 'random_seed', + 'range', + 'records_array', + 'records_map', + 'records', + 'redirect_url', + 'referer_url', + 'referrer_url', + 'register_thread', + 'register', + 'response_filepath', + 'response_localpath', + 'response_path', + 'response_realm', + 'response_root', + 'resultset_count', + 'resultsets', + 'resultset', + 'rows_array', + 'rows_impl', + 'rows', + 'rx', + 'schema_name', + 'security_database', + 'security_default_realm', + 'security_initialize', + 'security_table_groups', + 'security_table_ug_map', + 'security_table_users', + 'selected', + 'series', + 'server_ip', + 'server_push', + 'session_abort', + 'session_addvar', + 'session_decorate', + 'session_deleteexpired', + 'session_end', + 'session_getdefaultdriver', + 'session_id', + 'session_initialize', + 'session_removevar', + 'session_result', + 'session_setdefaultdriver', + 'session_start', + 'shown_count', + 'shown_first', + 'shown_last', + 'site_id', + 'site_name', + 'skiprecords_value', + 'sleep', + 'split_thread', + 'sqlite_createdb', + 'sqlite_setsleepmillis', + 'sqlite_setsleeptries', + 'staticarray_join', + 'stdoutnl', + 'stdout', + 'suspend', + 'sys_sigabrt', + 'sys_sigalrm', + 'sys_sigbus', + 'sys_sigchld', + 'sys_sigcont', + 
'sys_sigfpe', + 'sys_sighup', + 'sys_sigill', + 'sys_sigint', + 'sys_sigkill', + 'sys_sigpipe', + 'sys_sigprof', + 'sys_sigquit', + 'sys_sigsegv', + 'sys_sigstop', + 'sys_sigsys', + 'sys_sigterm', + 'sys_sigtrap', + 'sys_sigtstp', + 'sys_sigttin', + 'sys_sigttou', + 'sys_sigurg', + 'sys_sigusr1', + 'sys_sigusr2', + 'sys_sigvtalrm', + 'sys_sigxcpu', + 'sys_sigxfsz', + 'sys_wcontinued', + 'sys_wnohang', + 'sys_wuntraced', + 'sys_appspath', + 'sys_chroot', + 'sys_clockspersec', + 'sys_clock', + 'sys_credits', + 'sys_databasespath', + 'sys_detach_exec', + 'sys_difftime', + 'sys_dll_ext', + 'sys_drand48', + 'sys_environ', + 'sys_eol', + 'sys_erand48', + 'sys_errno', + 'sys_exec', + 'sys_exit', + 'sys_fork', + 'sys_garbagecollect', + 'sys_getbytessincegc', + 'sys_getchar', + 'sys_getegid', + 'sys_getenv', + 'sys_geteuid', + 'sys_getgid', + 'sys_getgrnam', + 'sys_getheapfreebytes', + 'sys_getheapsize', + 'sys_getlogin', + 'sys_getpid', + 'sys_getppid', + 'sys_getpwnam', + 'sys_getpwuid', + 'sys_getstartclock', + 'sys_getthreadcount', + 'sys_getuid', + 'sys_growheapby', + 'sys_homepath', + 'sys_iswindows', + 'sys_is_full_path', + 'sys_is_windows', + 'sys_isfullpath', + 'sys_iterate', + 'sys_jrand48', + 'sys_kill_exec', + 'sys_kill', + 'sys_lcong48', + 'sys_librariespath', + 'sys_listtraits', + 'sys_listtypes', + 'sys_listunboundmethods', + 'sys_loadlibrary', + 'sys_lrand48', + 'sys_masterhomepath', + 'sys_mrand48', + 'sys_nrand48', + 'sys_pid_exec', + 'sys_pointersize', + 'sys_random', + 'sys_rand', + 'sys_seed48', + 'sys_setenv', + 'sys_setgid', + 'sys_setsid', + 'sys_setuid', + 'sys_srand48', + 'sys_srandom', + 'sys_srand', + 'sys_strerror', + 'sys_supportpath', + 'sys_test_exec', + 'sys_time', + 'sys_uname', + 'sys_unsetenv', + 'sys_usercapimodulepath', + 'sys_userstartuppath', + 'sys_wait_exec', + 'sys_waitpid', + 'sys_while', + 'table_name', + 'tag_exists', + 'tag_name', + 'thread_var_get', + 'thread_var_pop', + 'thread_var_push', + 'threadvar_find', + 
'threadvar_get', + 'threadvar_set_asrt', + 'threadvar_set', + 'timer', + 'token_value', + 'treemap', + 'ucal_ampm', + 'ucal_dayofmonth', + 'ucal_dayofweekinmonth', + 'ucal_dayofweek', + 'ucal_dayofyear', + 'ucal_daysinfirstweek', + 'ucal_dowlocal', + 'ucal_dstoffset', + 'ucal_era', + 'ucal_extendedyear', + 'ucal_firstdayofweek', + 'ucal_hourofday', + 'ucal_hour', + 'ucal_julianday', + 'ucal_lenient', + 'ucal_listtimezones', + 'ucal_millisecondsinday', + 'ucal_millisecond', + 'ucal_minute', + 'ucal_month', + 'ucal_second', + 'ucal_weekofmonth', + 'ucal_weekofyear', + 'ucal_yearwoy', + 'ucal_year', + 'ucal_zoneoffset', + 'uncompress', + 'usage', + 'uuid_compare', + 'uuid_copy', + 'uuid_generate_random', + 'uuid_generate_time', + 'uuid_generate', + 'uuid_is_null', + 'uuid_parse', + 'uuid_unparse_lower', + 'uuid_unparse_upper', + 'uuid_unparse', + 'value_listitem', + 'value_list', + 'valuelistitem', + 'var_keys', + 'var_values', + 'wap_isenabled', + 'wap_maxbuttons', + 'wap_maxcolumns', + 'wap_maxhorzpixels', + 'wap_maxrows', + 'wap_maxvertpixels', + 'web_handlefcgirequest', + 'web_node_content_representation_css', + 'web_node_content_representation_html', + 'web_node_content_representation_js', + 'web_node_content_representation_xhr', + 'web_node_forpath', + 'web_nodes_initialize', + 'web_nodes_normalizeextension', + 'web_nodes_processcontentnode', + 'web_nodes_requesthandler', + 'web_response_nodesentry', + 'web_router_database', + 'web_router_initialize', + 'xml_transform', + 'xml', + 'zip_add_dir', + 'zip_add', + 'zip_close', + 'zip_delete', + 'zip_error_get_sys_type', + 'zip_error_get', + 'zip_error_to_str', + 'zip_fclose', + 'zip_file_error_get', + 'zip_file_strerror', + 'zip_fopen_index', + 'zip_fopen', + 'zip_fread', + 'zip_get_archive_comment', + 'zip_get_file_comment', + 'zip_get_name', + 'zip_get_num_files', + 'zip_name_locate', + 'zip_open', + 'zip_rename', + 'zip_replace', + 'zip_set_archive_comment', + 'zip_set_file_comment', + 'zip_stat_index', + 
'zip_stat', + 'zip_strerror', + 'zip_unchange_all', + 'zip_unchange_archive', + 'zip_unchange', + 'zlib_version', + ), + 'Lasso 8 Tags': ( + '__char', + '__sync_timestamp__', + '_admin_addgroup', + '_admin_adduser', + '_admin_defaultconnector', + '_admin_defaultconnectornames', + '_admin_defaultdatabase', + '_admin_defaultfield', + '_admin_defaultgroup', + '_admin_defaulthost', + '_admin_defaulttable', + '_admin_defaultuser', + '_admin_deleteconnector', + '_admin_deletedatabase', + '_admin_deletefield', + '_admin_deletegroup', + '_admin_deletehost', + '_admin_deletetable', + '_admin_deleteuser', + '_admin_duplicategroup', + '_admin_internaldatabase', + '_admin_listconnectors', + '_admin_listdatabases', + '_admin_listfields', + '_admin_listgroups', + '_admin_listhosts', + '_admin_listtables', + '_admin_listusers', + '_admin_refreshconnector', + '_admin_refreshsecurity', + '_admin_servicepath', + '_admin_updateconnector', + '_admin_updatedatabase', + '_admin_updatefield', + '_admin_updategroup', + '_admin_updatehost', + '_admin_updatetable', + '_admin_updateuser', + '_chartfx_activation_string', + '_chartfx_getchallengestring', + '_chop_args', + '_chop_mimes', + '_client_addr_old', + '_client_address_old', + '_client_ip_old', + '_database_names', + '_datasource_reload', + '_date_current', + '_date_format', + '_date_msec', + '_date_parse', + '_execution_timelimit', + '_file_chmod', + '_initialize', + '_jdbc_acceptsurl', + '_jdbc_debug', + '_jdbc_deletehost', + '_jdbc_driverclasses', + '_jdbc_driverinfo', + '_jdbc_metainfo', + '_jdbc_propertyinfo', + '_jdbc_setdriver', + '_lasso_param', + '_log_helper', + '_proc_noparam', + '_proc_withparam', + '_recursion_limit', + '_request_param', + '_security_binaryexpiration', + '_security_flushcaches', + '_security_isserialized', + '_security_serialexpiration', + '_srand', + '_strict_literals', + '_substring', + '_xmlrpc_exconverter', + '_xmlrpc_inconverter', + '_xmlrpc_xmlinconverter', + 'abort', + 'action_addinfo', + 
'action_addrecord', + 'action_param', + 'action_params', + 'action_setfoundcount', + 'action_setrecordid', + 'action_settotalcount', + 'action_statement', + 'admin_allowedfileroots', + 'admin_changeuser', + 'admin_createuser', + 'admin_currentgroups', + 'admin_currentuserid', + 'admin_currentusername', + 'admin_getpref', + 'admin_groupassignuser', + 'admin_grouplistusers', + 'admin_groupremoveuser', + 'admin_lassoservicepath', + 'admin_listgroups', + 'admin_refreshlicensing', + 'admin_refreshsecurity', + 'admin_reloaddatasource', + 'admin_removepref', + 'admin_setpref', + 'admin_userexists', + 'admin_userlistgroups', + 'all', + 'and', + 'array', + 'array_iterator', + 'auth', + 'auth_admin', + 'auth_auth', + 'auth_custom', + 'auth_group', + 'auth_prompt', + 'auth_user', + 'base64', + 'bean', + 'bigint', + 'bom_utf16be', + 'bom_utf16le', + 'bom_utf32be', + 'bom_utf32le', + 'bom_utf8', + 'boolean', + 'bw', + 'bytes', + 'cache', + 'cache_delete', + 'cache_empty', + 'cache_exists', + 'cache_fetch', + 'cache_internal', + 'cache_maintenance', + 'cache_object', + 'cache_preferences', + 'cache_store', + 'case', + 'chartfx', + 'chartfx_records', + 'chartfx_serve', + 'checked', + 'choice_list', + 'choice_listitem', + 'choicelistitem', + 'cipher_decrypt', + 'cipher_digest', + 'cipher_encrypt', + 'cipher_hmac', + 'cipher_keylength', + 'cipher_list', + 'click_text', + 'client_addr', + 'client_address', + 'client_authorization', + 'client_browser', + 'client_contentlength', + 'client_contenttype', + 'client_cookielist', + 'client_cookies', + 'client_encoding', + 'client_formmethod', + 'client_getargs', + 'client_getparams', + 'client_headers', + 'client_ip', + 'client_ipfrominteger', + 'client_iptointeger', + 'client_password', + 'client_postargs', + 'client_postparams', + 'client_type', + 'client_url', + 'client_username', + 'cn', + 'column', 'column_name', - 'column_decltype', - 'column_blob', - 'column_double', - 'column_int64', - 'column_text', - 'column_type', - 
'ismultipart', - 'gotfileupload', - 'setmaxfilesize', - 'getparts', - 'trackingid', - 'currentfile', - 'addtobuffer', - 'input', - 'replacepattern', - 'findpattern', - 'ignorecase', - 'setinput', - 'setreplacepattern', - 'setfindpattern', - 'setignorecase', - 'output', - 'appendreplacement', - 'matches', - 'private_replaceall', - 'appendtail', - 'groupcount', - 'matchposition', - 'matchesstart', - 'private_replacefirst', - 'private_split', - 'matchstring', - 'replaceall', - 'replacefirst', - 'findall', - 'findcount', - 'findfirst', - 'findsymbols', - 'loadlibrary', - 'getlibrary', - 'atend', - 'f', - 'r', - 'form', - 'gen', - 'callfirst', - 'key', - 'by', - 'from', - 'init', - 'to', - 'd', - 't', + 'column_names', + 'compare_beginswith', + 'compare_contains', + 'compare_endswith', + 'compare_equalto', + 'compare_greaterthan', + 'compare_greaterthanorequals', + 'compare_greaterthanorequls', + 'compare_lessthan', + 'compare_lessthanorequals', + 'compare_notbeginswith', + 'compare_notcontains', + 'compare_notendswith', + 'compare_notequalto', + 'compare_notregexp', + 'compare_regexp', + 'compare_strictequalto', + 'compare_strictnotequalto', + 'compiler_removecacheddoc', + 'compiler_setdefaultparserflags', + 'compress', + 'content_body', + 'content_encoding', + 'content_header', + 'content_type', + 'cookie', + 'cookie_set', + 'curl_ftp_getfile', + 'curl_ftp_getlisting', + 'curl_ftp_putfile', + 'curl_include_url', + 'currency', + 'database_changecolumn', + 'database_changefield', + 'database_createcolumn', + 'database_createfield', + 'database_createtable', + 'database_fmcontainer', + 'database_hostinfo', + 'database_inline', + 'database_name', + 'database_nameitem', + 'database_names', + 'database_realname', + 'database_removecolumn', + 'database_removefield', + 'database_removetable', + 'database_repeating', + 'database_repeating_valueitem', + 'database_repeatingvalueitem', + 'database_schemanameitem', + 'database_schemanames', + 'database_tablecolumn', + 
'database_tablenameitem', + 'database_tablenames', + 'datasource_name', + 'datasource_register', + 'date', + 'date__date_current', + 'date__date_format', + 'date__date_msec', + 'date__date_parse', + 'date_add', + 'date_date', + 'date_difference', + 'date_duration', + 'date_format', + 'date_getcurrentdate', + 'date_getday', + 'date_getdayofweek', + 'date_gethour', + 'date_getlocaltimezone', + 'date_getminute', + 'date_getmonth', + 'date_getsecond', + 'date_gettime', + 'date_getyear', + 'date_gmttolocal', + 'date_localtogmt', + 'date_maximum', + 'date_minimum', + 'date_msec', + 'date_setformat', + 'date_subtract', + 'db_layoutnameitem', + 'db_layoutnames', + 'db_nameitem', + 'db_names', + 'db_tablenameitem', + 'db_tablenames', + 'dbi_column_names', + 'dbi_field_names', + 'decimal', + 'decimal_setglobaldefaultprecision', + 'decode_base64', + 'decode_bheader', + 'decode_hex', + 'decode_html', + 'decode_json', + 'decode_qheader', + 'decode_quotedprintable', + 'decode_quotedprintablebytes', + 'decode_url', + 'decode_xml', + 'decompress', + 'decrypt_blowfish', + 'decrypt_blowfish2', + 'default', + 'define_atbegin', + 'define_atend', + 'define_constant', + 'define_prototype', + 'define_tag', + 'define_tagp', + 'define_type', + 'define_typep', + 'deserialize', + 'directory_directorynameitem', + 'directory_lister', + 'directory_nameitem', + 'directorynameitem', + 'dns_default', + 'dns_lookup', + 'dns_response', + 'duration', + 'else', + 'email_batch', + 'email_compose', + 'email_digestchallenge', + 'email_digestresponse', + 'email_extract', + 'email_findemails', + 'email_immediate', + 'email_merge', + 'email_mxerror', + 'email_mxlookup', + 'email_parse', + 'email_pop', + 'email_queue', + 'email_result', + 'email_safeemail', + 'email_send', + 'email_smtp', + 'email_status', + 'email_token', + 'email_translatebreakstocrlf', + 'encode_base64', + 'encode_bheader', + 'encode_break', + 'encode_breaks', + 'encode_crc32', + 'encode_hex', + 'encode_html', + 'encode_htmltoxml', + 
'encode_json', + 'encode_qheader', + 'encode_quotedprintable', + 'encode_quotedprintablebytes', + 'encode_set', + 'encode_smart', + 'encode_sql', + 'encode_sql92', + 'encode_stricturl', + 'encode_url', + 'encode_xml', + 'encrypt_blowfish', + 'encrypt_blowfish2', + 'encrypt_crammd5', + 'encrypt_hmac', + 'encrypt_md5', + 'eq', + 'error_adderror', + 'error_code', + 'error_code_aborted', + 'error_code_assert', + 'error_code_bof', + 'error_code_connectioninvalid', + 'error_code_couldnotclosefile', + 'error_code_couldnotcreateoropenfile', + 'error_code_couldnotdeletefile', + 'error_code_couldnotdisposememory', + 'error_code_couldnotlockmemory', + 'error_code_couldnotreadfromfile', + 'error_code_couldnotunlockmemory', + 'error_code_couldnotwritetofile', + 'error_code_criterianotmet', + 'error_code_datasourceerror', + 'error_code_directoryfull', + 'error_code_diskfull', + 'error_code_dividebyzero', + 'error_code_eof', + 'error_code_failure', + 'error_code_fieldrestriction', + 'error_code_file', + 'error_code_filealreadyexists', + 'error_code_filecorrupt', + 'error_code_fileinvalid', + 'error_code_fileinvalidaccessmode', + 'error_code_fileisclosed', + 'error_code_fileisopen', + 'error_code_filelocked', + 'error_code_filenotfound', + 'error_code_fileunlocked', + 'error_code_httpfilenotfound', + 'error_code_illegalinstruction', + 'error_code_illegaluseoffrozeninstance', + 'error_code_invaliddatabase', + 'error_code_invalidfilename', + 'error_code_invalidmemoryobject', + 'error_code_invalidparameter', + 'error_code_invalidpassword', + 'error_code_invalidpathname', + 'error_code_invalidusername', + 'error_code_ioerror', + 'error_code_loopaborted', + 'error_code_memory', + 'error_code_network', + 'error_code_nilpointer', + 'error_code_noerr', + 'error_code_nopermission', + 'error_code_outofmemory', + 'error_code_outofstackspace', + 'error_code_overflow', + 'error_code_postconditionfailed', + 'error_code_preconditionfailed', + 'error_code_resnotfound', + 'error_code_resource', + 
'error_code_streamreaderror', + 'error_code_streamwriteerror', + 'error_code_syntaxerror', + 'error_code_tagnotfound', + 'error_code_unknownerror', + 'error_code_varnotfound', + 'error_code_volumedoesnotexist', + 'error_code_webactionnotsupported', + 'error_code_webadderror', + 'error_code_webdeleteerror', + 'error_code_webmodulenotfound', + 'error_code_webnosuchobject', + 'error_code_webrepeatingrelatedfield', + 'error_code_webrequiredfieldmissing', + 'error_code_webtimeout', + 'error_code_webupdateerror', + 'error_columnrestriction', + 'error_currenterror', + 'error_databaseconnectionunavailable', + 'error_databasetimeout', + 'error_deleteerror', + 'error_fieldrestriction', + 'error_filenotfound', + 'error_invaliddatabase', + 'error_invalidpassword', + 'error_invalidusername', + 'error_modulenotfound', + 'error_msg', + 'error_msg_aborted', + 'error_msg_assert', + 'error_msg_bof', + 'error_msg_connectioninvalid', + 'error_msg_couldnotclosefile', + 'error_msg_couldnotcreateoropenfile', + 'error_msg_couldnotdeletefile', + 'error_msg_couldnotdisposememory', + 'error_msg_couldnotlockmemory', + 'error_msg_couldnotreadfromfile', + 'error_msg_couldnotunlockmemory', + 'error_msg_couldnotwritetofile', + 'error_msg_criterianotmet', + 'error_msg_datasourceerror', + 'error_msg_directoryfull', + 'error_msg_diskfull', + 'error_msg_dividebyzero', + 'error_msg_eof', + 'error_msg_failure', + 'error_msg_fieldrestriction', + 'error_msg_file', + 'error_msg_filealreadyexists', + 'error_msg_filecorrupt', + 'error_msg_fileinvalid', + 'error_msg_fileinvalidaccessmode', + 'error_msg_fileisclosed', + 'error_msg_fileisopen', + 'error_msg_filelocked', + 'error_msg_filenotfound', + 'error_msg_fileunlocked', + 'error_msg_httpfilenotfound', + 'error_msg_illegalinstruction', + 'error_msg_illegaluseoffrozeninstance', + 'error_msg_invaliddatabase', + 'error_msg_invalidfilename', + 'error_msg_invalidmemoryobject', + 'error_msg_invalidparameter', + 'error_msg_invalidpassword', + 
'error_msg_invalidpathname', + 'error_msg_invalidusername', + 'error_msg_ioerror', + 'error_msg_loopaborted', + 'error_msg_memory', + 'error_msg_network', + 'error_msg_nilpointer', + 'error_msg_noerr', + 'error_msg_nopermission', + 'error_msg_outofmemory', + 'error_msg_outofstackspace', + 'error_msg_overflow', + 'error_msg_postconditionfailed', + 'error_msg_preconditionfailed', + 'error_msg_resnotfound', + 'error_msg_resource', + 'error_msg_streamreaderror', + 'error_msg_streamwriteerror', + 'error_msg_syntaxerror', + 'error_msg_tagnotfound', + 'error_msg_unknownerror', + 'error_msg_varnotfound', + 'error_msg_volumedoesnotexist', + 'error_msg_webactionnotsupported', + 'error_msg_webadderror', + 'error_msg_webdeleteerror', + 'error_msg_webmodulenotfound', + 'error_msg_webnosuchobject', + 'error_msg_webrepeatingrelatedfield', + 'error_msg_webrequiredfieldmissing', + 'error_msg_webtimeout', + 'error_msg_webupdateerror', + 'error_noerror', + 'error_nopermission', + 'error_norecordsfound', + 'error_outofmemory', + 'error_pop', + 'error_push', + 'error_reqcolumnmissing', + 'error_reqfieldmissing', + 'error_requiredcolumnmissing', + 'error_requiredfieldmissing', + 'error_reset', + 'error_seterrorcode', + 'error_seterrormessage', + 'error_updateerror', + 'euro', + 'event_schedule', + 'ew', + 'fail', + 'fail_if', + 'false', + 'field', + 'field_name', + 'field_names', + 'file', + 'file_autoresolvefullpaths', + 'file_chmod', + 'file_control', + 'file_copy', + 'file_create', + 'file_creationdate', + 'file_currenterror', + 'file_delete', + 'file_exists', + 'file_getlinecount', + 'file_getsize', + 'file_isdirectory', + 'file_listdirectory', + 'file_moddate', + 'file_modechar', + 'file_modeline', + 'file_move', + 'file_openread', + 'file_openreadwrite', + 'file_openwrite', + 'file_openwriteappend', + 'file_openwritetruncate', + 'file_probeeol', + 'file_processuploads', + 'file_read', + 'file_readline', + 'file_rename', + 'file_serve', + 'file_setsize', + 'file_stream', + 
'file_streamcopy', + 'file_uploads', + 'file_waitread', + 'file_waittimeout', + 'file_waitwrite', + 'file_write', + 'find_soap_ops', + 'form_param', + 'found_count', + 'ft', + 'ftp_getfile', + 'ftp_getlisting', + 'ftp_putfile', + 'full', + 'global', + 'global_defined', + 'global_remove', + 'global_reset', + 'globals', + 'gt', + 'gte', + 'handle', + 'handle_error', + 'header', + 'html_comment', + 'http_getfile', + 'ical_alarm', + 'ical_attribute', + 'ical_calendar', + 'ical_daylight', + 'ical_event', + 'ical_freebusy', + 'ical_item', + 'ical_journal', + 'ical_parse', + 'ical_standard', + 'ical_timezone', + 'ical_todo', + 'if', + 'if_empty', + 'if_false', + 'if_null', + 'if_true', + 'image', + 'image_url', + 'img', + 'include', + 'include_cgi', + 'include_currentpath', + 'include_once', + 'include_raw', + 'include_url', + 'inline', + 'integer', + 'iterate', + 'iterator', + 'java', + 'java_bean', + 'json_records', + 'json_rpccall', + 'keycolumn_name', + 'keycolumn_value', + 'keyfield_name', + 'keyfield_value', + 'lasso_comment', + 'lasso_currentaction', + 'lasso_datasourceis', + 'lasso_datasourceis4d', + 'lasso_datasourceisfilemaker', + 'lasso_datasourceisfilemaker7', + 'lasso_datasourceisfilemaker9', + 'lasso_datasourceisfilemakersa', + 'lasso_datasourceisjdbc', + 'lasso_datasourceislassomysql', + 'lasso_datasourceismysql', + 'lasso_datasourceisodbc', + 'lasso_datasourceisopenbase', + 'lasso_datasourceisoracle', + 'lasso_datasourceispostgresql', + 'lasso_datasourceisspotlight', + 'lasso_datasourceissqlite', + 'lasso_datasourceissqlserver', + 'lasso_datasourcemodulename', + 'lasso_datatype', + 'lasso_disableondemand', + 'lasso_errorreporting', + 'lasso_executiontimelimit', + 'lasso_parser', + 'lasso_process', + 'lasso_sessionid', + 'lasso_siteid', + 'lasso_siteisrunning', + 'lasso_sitename', + 'lasso_siterestart', + 'lasso_sitestart', + 'lasso_sitestop', + 'lasso_tagexists', + 'lasso_tagmodulename', + 'lasso_uniqueid', + 'lasso_updatecheck', + 'lasso_uptime', + 
'lasso_version', + 'lassoapp_create', + 'lassoapp_dump', + 'lassoapp_flattendir', + 'lassoapp_getappdata', + 'lassoapp_link', + 'lassoapp_list', + 'lassoapp_process', + 'lassoapp_unitize', + 'layout_name', + 'ldap', + 'ldap_scope_base', + 'ldap_scope_onelevel', + 'ldap_scope_subtree', + 'ldml', + 'ldml_ldml', + 'library', + 'library_once', + 'link', + 'link_currentaction', + 'link_currentactionparams', + 'link_currentactionurl', + 'link_currentgroup', + 'link_currentgroupparams', + 'link_currentgroupurl', + 'link_currentrecord', + 'link_currentrecordparams', + 'link_currentrecordurl', + 'link_currentsearch', + 'link_currentsearchparams', + 'link_currentsearchurl', + 'link_detail', + 'link_detailparams', + 'link_detailurl', + 'link_firstgroup', + 'link_firstgroupparams', + 'link_firstgroupurl', + 'link_firstrecord', + 'link_firstrecordparams', + 'link_firstrecordurl', + 'link_lastgroup', + 'link_lastgroupparams', + 'link_lastgroupurl', + 'link_lastrecord', + 'link_lastrecordparams', + 'link_lastrecordurl', + 'link_nextgroup', + 'link_nextgroupparams', + 'link_nextgroupurl', + 'link_nextrecord', + 'link_nextrecordparams', + 'link_nextrecordurl', + 'link_params', + 'link_prevgroup', + 'link_prevgroupparams', + 'link_prevgroupurl', + 'link_prevrecord', + 'link_prevrecordparams', + 'link_prevrecordurl', + 'link_setformat', + 'link_url', + 'list', + 'list_additem', + 'list_fromlist', + 'list_fromstring', + 'list_getitem', + 'list_itemcount', + 'list_iterator', + 'list_removeitem', + 'list_replaceitem', + 'list_reverseiterator', + 'list_tostring', + 'literal', + 'ljax_end', + 'ljax_hastarget', + 'ljax_include', + 'ljax_start', + 'ljax_target', + 'local', + 'local_defined', + 'local_remove', + 'local_reset', + 'locale_format', + 'locals', + 'log', + 'log_always', + 'log_critical', + 'log_deprecated', + 'log_destination_console', + 'log_destination_database', + 'log_destination_file', + 'log_detail', + 'log_level_critical', + 'log_level_deprecated', + 'log_level_detail', + 
'log_level_sql', + 'log_level_warning', + 'log_setdestination', + 'log_sql', + 'log_warning', + 'logicalop_value', + 'logicaloperator_value', + 'loop', + 'loop_abort', + 'loop_continue', + 'loop_count', + 'lt', + 'lte', + 'magick_image', + 'map', + 'map_iterator', + 'match_comparator', + 'match_notrange', + 'match_notregexp', + 'match_range', + 'match_regexp', + 'math_abs', + 'math_acos', + 'math_add', + 'math_asin', + 'math_atan', + 'math_atan2', + 'math_ceil', + 'math_converteuro', + 'math_cos', + 'math_div', + 'math_exp', + 'math_floor', + 'math_internal_rand', + 'math_internal_randmax', + 'math_internal_srand', + 'math_ln', + 'math_log', + 'math_log10', + 'math_max', + 'math_min', + 'math_mod', + 'math_mult', + 'math_pow', + 'math_random', + 'math_range', + 'math_rint', + 'math_roman', + 'math_round', + 'math_sin', + 'math_sqrt', + 'math_sub', + 'math_tan', + 'maxrecords_value', + 'memory_session_driver', + 'mime_type', + 'minimal', + 'misc__srand', + 'misc_randomnumber', + 'misc_roman', + 'misc_valid_creditcard', + 'mysql_session_driver', + 'named_param', + 'namespace_current', + 'namespace_delimiter', + 'namespace_exists', + 'namespace_file_fullpathexists', + 'namespace_global', + 'namespace_import', + 'namespace_load', + 'namespace_page', + 'namespace_unload', + 'namespace_using', + 'neq', + 'net', + 'net_connectinprogress', + 'net_connectok', + 'net_typessl', + 'net_typessltcp', + 'net_typessludp', + 'net_typetcp', + 'net_typeudp', + 'net_waitread', + 'net_waittimeout', + 'net_waitwrite', + 'no_default_output', + 'none', + 'noprocess', + 'not', + 'nrx', + 'nslookup', + 'null', 'object', - 'inneroncompare', - 'members', - 'writeid', - 'addmember', - 'refid', - 'index', - 'objects', - 'tabs', - 'trunk', - 'trace', - 'asxml', - 'tabstr', - 'toxmlstring', - 'document', - 'idmap', - 'readidobjects', - 'left', - 'right', - 'up', - 'red', - 'root', - 'getnode', - 'firstnode', - 'lastnode', - 'nextnode', - 'private_rebalanceforremove', - 'private_rotateleft', - 
'private_rotateright', - 'private_rebalanceforinsert', - 'eachnode', - 'foreachnode', - 'encoding', - 'resolvelinks', - 'readbytesfully', - 'dowithclose', - 'readsomebytes', - 'readbytes', - 'writestring', - 'parentdir', - 'aslazystring', - 'path', - 'openread', - 'openwrite', - 'openwriteonly', - 'openappend', - 'opentruncate', - 'writebytes', - 'exists', - 'modificationtime', - 'lastaccesstime', - 'modificationdate', - 'lastaccessdate', - 'delete', - 'moveto', - 'copyto', - 'linkto', - 'flush', - 'chmod', - 'chown', - 'isopen', - 'setmarker', - 'setmode', - 'foreachline', - 'lock', - 'unlock', - 'trylock', - 'testlock', - 'perms', - 'islink', - 'isdir', - 'realpath', - 'openwith', - 'asraw', - 'rawdiff', - 'getformat', - 'setformat', - 'subtract', - 'gmt', - 'dst', - 'era', - 'year', - 'month', - 'week', - 'weekofyear', - 'weekofmonth', - 'day', - 'dayofmonth', - 'dayofyear', - 'dayofweek', - 'dayofweekinmonth', + 'once', + 'oneoff', + 'op_logicalvalue', + 'operator_logicalvalue', + 'option', + 'or', + 'os_process', + 'output', + 'output_none', + 'pair', + 'params_up', + 'pdf_barcode', + 'pdf_color', + 'pdf_doc', + 'pdf_font', + 'pdf_image', + 'pdf_list', + 'pdf_read', + 'pdf_serve', + 'pdf_table', + 'pdf_text', + 'percent', + 'portal', + 'postcondition', + 'precondition', + 'prettyprintingnsmap', + 'prettyprintingtypemap', + 'priorityqueue', + 'private', + 'proc_convert', + 'proc_convertbody', + 'proc_convertone', + 'proc_extract', + 'proc_extractone', + 'proc_find', + 'proc_first', + 'proc_foreach', + 'proc_get', + 'proc_join', + 'proc_lasso', + 'proc_last', + 'proc_map_entry', + 'proc_null', + 'proc_regexp', + 'proc_xml', + 'proc_xslt', + 'process', + 'protect', + 'queue', + 'rand', + 'randomnumber', + 'raw', + 'recid_value', + 'record_count', + 'recordcount', + 'recordid_value', + 'records', + 'records_array', + 'records_map', + 'redirect_url', + 'reference', + 'referer', + 'referer_url', + 'referrer', + 'referrer_url', + 'regexp', + 'repeating', + 
'repeating_valueitem', + 'repeatingvalueitem', + 'repetition', + 'req_column', + 'req_field', + 'required_column', + 'required_field', + 'response_fileexists', + 'response_filepath', + 'response_localpath', + 'response_path', + 'response_realm', + 'resultset', + 'resultset_count', + 'return', + 'return_value', + 'reverseiterator', + 'roman', + 'row_count', + 'rows', + 'rows_array', + 'run_children', + 'rx', + 'schema_name', + 'scientific', + 'search_args', + 'search_arguments', + 'search_columnitem', + 'search_fielditem', + 'search_operatoritem', + 'search_opitem', + 'search_valueitem', + 'searchfielditem', + 'searchoperatoritem', + 'searchopitem', + 'searchvalueitem', + 'select', + 'selected', + 'self', + 'serialize', + 'series', + 'server_date', + 'server_day', + 'server_ip', + 'server_name', + 'server_port', + 'server_push', + 'server_siteisrunning', + 'server_sitestart', + 'server_sitestop', + 'server_time', + 'session_abort', + 'session_addoutputfilter', + 'session_addvar', + 'session_addvariable', + 'session_deleteexpired', + 'session_driver', + 'session_end', + 'session_id', + 'session_removevar', + 'session_removevariable', + 'session_result', + 'session_setdriver', + 'session_start', + 'set', + 'set_iterator', + 'set_reverseiterator', + 'shown_count', + 'shown_first', + 'shown_last', + 'site_atbegin', + 'site_id', + 'site_name', + 'site_restart', + 'skiprecords_value', + 'sleep', + 'soap_convertpartstopairs', + 'soap_definetag', + 'soap_info', + 'soap_lastrequest', + 'soap_lastresponse', + 'soap_stub', + 'sort_args', + 'sort_arguments', + 'sort_columnitem', + 'sort_fielditem', + 'sort_orderitem', + 'sortcolumnitem', + 'sortfielditem', + 'sortorderitem', + 'sqlite_createdb', + 'sqlite_session_driver', + 'sqlite_setsleepmillis', + 'sqlite_setsleeptries', + 'srand', + 'stack', + 'stock_quote', + 'string', + 'string_charfromname', + 'string_concatenate', + 'string_countfields', + 'string_endswith', + 'string_extract', + 'string_findposition', + 
'string_findregexp', + 'string_fordigit', + 'string_getfield', + 'string_getunicodeversion', + 'string_insert', + 'string_isalpha', + 'string_isalphanumeric', + 'string_isdigit', + 'string_ishexdigit', + 'string_islower', + 'string_isnumeric', + 'string_ispunctuation', + 'string_isspace', + 'string_isupper', + 'string_length', + 'string_lowercase', + 'string_remove', + 'string_removeleading', + 'string_removetrailing', + 'string_replace', + 'string_replaceregexp', + 'string_todecimal', + 'string_tointeger', + 'string_uppercase', + 'string_validcharset', + 'table_name', + 'table_realname', + 'tag', + 'tag_name', + 'tags', + 'tags_find', + 'tags_list', + 'tcp_close', + 'tcp_open', + 'tcp_send', + 'tcp_tcp_close', + 'tcp_tcp_open', + 'tcp_tcp_send', + 'thread_abort', + 'thread_atomic', + 'thread_event', + 'thread_exists', + 'thread_getcurrentid', + 'thread_getpriority', + 'thread_info', + 'thread_list', + 'thread_lock', + 'thread_pipe', + 'thread_priority_default', + 'thread_priority_high', + 'thread_priority_low', + 'thread_rwlock', + 'thread_semaphore', + 'thread_setpriority', + 'token_value', + 'total_records', + 'treemap', + 'treemap_iterator', + 'true', + 'url_rewrite', + 'valid_creditcard', + 'valid_date', + 'valid_email', + 'valid_url', + 'value_list', + 'value_listitem', + 'valuelistitem', + 'var', + 'var_defined', + 'var_remove', + 'var_reset', + 'var_set', + 'variable', + 'variable_defined', + 'variable_set', + 'variables', + 'variant_count', + 'vars', + 'wap_isenabled', + 'wap_maxbuttons', + 'wap_maxcolumns', + 'wap_maxhorzpixels', + 'wap_maxrows', + 'wap_maxvertpixels', + 'while', + 'wsdl_extract', + 'wsdl_getbinding', + 'wsdl_getbindingforoperation', + 'wsdl_getbindingoperations', + 'wsdl_getmessagenamed', + 'wsdl_getmessageparts', + 'wsdl_getmessagetriofromporttype', + 'wsdl_getopbodystyle', + 'wsdl_getopbodyuse', + 'wsdl_getoperation', + 'wsdl_getoplocation', + 'wsdl_getopmessagetypes', + 'wsdl_getopsoapaction', + 'wsdl_getportaddress', + 
'wsdl_getportsforservice', + 'wsdl_getporttype', + 'wsdl_getporttypeoperation', + 'wsdl_getservicedocumentation', + 'wsdl_getservices', + 'wsdl_gettargetnamespace', + 'wsdl_issoapoperation', + 'wsdl_listoperations', + 'wsdl_maketest', + 'xml', + 'xml_extract', + 'xml_rpc', + 'xml_rpccall', + 'xml_rw', + 'xml_serve', + 'xml_transform', + 'xml_xml', + 'xml_xmlstream', + 'xmlstream', + 'xsd_attribute', + 'xsd_blankarraybase', + 'xsd_blankbase', + 'xsd_buildtype', + 'xsd_cache', + 'xsd_checkcardinality', + 'xsd_continueall', + 'xsd_continueannotation', + 'xsd_continueany', + 'xsd_continueanyattribute', + 'xsd_continueattribute', + 'xsd_continueattributegroup', + 'xsd_continuechoice', + 'xsd_continuecomplexcontent', + 'xsd_continuecomplextype', + 'xsd_continuedocumentation', + 'xsd_continueextension', + 'xsd_continuegroup', + 'xsd_continuekey', + 'xsd_continuelist', + 'xsd_continuerestriction', + 'xsd_continuesequence', + 'xsd_continuesimplecontent', + 'xsd_continuesimpletype', + 'xsd_continueunion', + 'xsd_deserialize', + 'xsd_fullyqualifyname', + 'xsd_generate', + 'xsd_generateblankfromtype', + 'xsd_generateblanksimpletype', + 'xsd_generatetype', + 'xsd_getschematype', + 'xsd_issimpletype', + 'xsd_loadschema', + 'xsd_lookupnamespaceuri', + 'xsd_lookuptype', + 'xsd_processany', + 'xsd_processattribute', + 'xsd_processattributegroup', + 'xsd_processcomplextype', + 'xsd_processelement', + 'xsd_processgroup', + 'xsd_processimport', + 'xsd_processinclude', + 'xsd_processschema', + 'xsd_processsimpletype', + 'xsd_ref', + 'xsd_type', + ) +} +MEMBERS = { + 'Member Methods': ( 'ampm', 'am', + 'addbarcode', + 'addchapter', + 'addcheckbox', + 'addcombobox', + 'addhiddenfield', + 'addimage', + 'addjavascript', + 'addlist', + 'addpage', + 'addparagraph', + 'addpasswordfield', + 'addphrase', + 'addradiobutton', + 'addradiogroup', + 'addresetbutton', + 'addsection', + 'addselectlist', + 'addsubmitbutton', + 'addtable', + 'addtextarea', + 'addtextfield', + 'addtext', + 'arc', + 
'circle', + 'closepath', + 'curveto', + 'drawtext', + 'exportfdf', + 'extractimage', + 'fieldnames', + 'fieldposition', + 'fieldtype', + 'fieldvalue', + 'findclass', + 'getabswidth', + 'getalignment', + 'getbold', + 'getbordercolor', + 'getborderwidth', + 'getcolor', + 'getcolumncount', + 'getencoding', + 'getface', + 'getfullfontname', + 'getheaders', + 'getheader', + 'getitalic', + 'getmargins', + 'getpsfontname', + 'getpadding', + 'getpagenumber', + 'getrowcount', + 'getspacing', + 'getstyle', + 'getsupportedencodings', + 'gettext', + 'getunderline', + 'getversion', + 'importfdf', + 'insertpage', + 'istruetype', + 'javascript', + 'line', 'pm', - 'hour', - 'hourofday', - 'hourofampm', - 'minute', - 'millisecond', - 'zoneoffset', - 'dstoffset', - 'yearwoy', - 'dowlocal', - 'extendedyear', - 'julianday', - 'millisecondsinday', - 'firstdayofweek', - 'fixformat', - 'minutesbetween', - 'hoursbetween', - 'secondsbetween', - 'daysbetween', - 'businessdaysbetween', - 'pdifference', - 'getfield', - 'create', - 'setcwd', - 'foreachentry', - 'eachpath', - 'eachfilepath', - 'eachdirpath', - 'each', - 'eachfile', - 'eachdir', - 'eachpathrecursive', - 'eachfilepathrecursive', - 'eachdirpathrecursive', - 'eachentry', - 'makefullpath', - 'annotate', - 'blur', - 'command', - 'composite', - 'contrast', - 'convert', - 'crop', - 'execute', - 'enhance', - 'flipv', - 'fliph', - 'modulate', - 'rotate', - 'save', - 'scale', - 'sharpen', + 'pagecount', + 'pagerotation', + 'rect', + 'setalignment', + 'setbaseline', + 'setbold', + 'setbordercolor', + 'setborderwidth', + 'setcolor', + 'setface', + 'setfieldvalue', + 'setfont', + 'setitalic', + 'setlinewidth', + 'setpadding', + 'setpagenumber', + 'setpagerange', + 'setspacing', + 'setunderline', + 'textwidth', + 'abort', + 'abs', + 'acceptconnections', + 'acceptdeserializedelement', + 'acceptnossl', + 'acceptpost', + 'accept_charset', + 'accept', + 'accesskey', + 'acosh', + 'acos', + 'actionparams', + 'action', + 'active_tick', + 'addatend', 
+ 'addcss', + 'addcomponents', + 'addcomponent', + 'adddatabasetable', + 'adddatasourcedatabase', + 'adddatasourcehost', + 'adddatasource', + 'adddirpath', + 'adddir', + 'addendjstext', + 'addendjs', + 'adderror', + 'addfavicon', + 'addfile', + 'addgroup', + 'addheader', + 'addjstext', + 'addjs', + 'addmember', + 'addoneheaderline', + 'addpostdispatch', + 'addpredispatch', + 'addsubnode', + 'addtask', + 'addtobuffer', + 'addtrait', + 'addusertogroup', + 'adduser', + 'addwarning', + 'addzip', + 'addattachment', + 'addcolumninfo', 'addcomment', - 'comments', - 'describe', - 'file', - 'height', - 'pixel', - 'resolutionv', - 'resolutionh', - 'width', - 'setcolorspace', - 'colorspace', - 'debug', - 'histogram', - 'imgptr', - 'appendimagetolist', - 'fx', - 'applyheatcolors', - 'authenticate', - 'search', - 'searchurl', - 'readerror', - 'readline', - 'setencoding', - 'closewrite', - 'exitcode', - 'getversion', - 'findclass', - 'throw', - 'thrownew', - 'exceptionoccurred', - 'exceptiondescribe', - 'exceptionclear', - 'fatalerror', - 'newglobalref', - 'deleteglobalref', - 'deletelocalref', - 'issameobject', + 'addhtmlpart', + 'addmathfunctions', + 'addpart', + 'addrow', + 'addset', + 'addtextpart', + 'add', 'allocobject', - 'newobject', - 'getobjectclass', - 'isinstanceof', - 'getmethodid', - 'callobjectmethod', + 'annotate', + 'answer', + 'apop', + 'appmessage', + 'appprefix', + 'appendchar', + 'appendchild', + 'appenddata', + 'appendimagetolist', + 'appendreplacement', + 'appendtail', + 'append', + 'applyheatcolors', + 'appname', + 'appstatus', + 'archive', + 'argumentvalue', + 'arguments', + 'asarray', + 'asasync', + 'asbytes', + 'asdecimal', + 'asgenerator', + 'asinteger', + 'askeyedgenerator', + 'aslazystring', + 'aslist', + 'asstringhex', + 'asstringoct', + 'asxml', + 'ascopydeep', + 'ascopy', + 'asinh', + 'asin', + 'asraw', + 'asstaticarray', + 'asstring', + 'atends', + 'atend', + 'atan2', + 'atanh', + 'atan', + 'atime', + 'attributecount', + 'attributes', + 'attrs', 
+ 'authenticate', + 'authorize', + 'auth', + 'autocollectbuffer', + 'average', + 'back', + 'basename', + 'baseuri', + 'bcc', + 'beginssl', + 'begintls', + 'beginswith', + 'bestcharset', + 'bindcount', + 'bindone', + 'bindparam', + 'bind_blob', + 'bind_double', + 'bind_int', + 'bind_null', + 'bind_parameter_index', + 'bind_text', + 'bind', + 'bitand', + 'bitclear', + 'bitflip', + 'bitformat', + 'bitnot', + 'bitor', + 'bitset', + 'bitshiftleft', + 'bitshiftright', + 'bittest', + 'bitxor', + 'blur', + 'bodybytes', + 'body', + 'boundary', + 'bptoxml', + 'bptypetostr', + 'buff', + 'buildquery', + 'businessdaysbetween', + 'bytes', + 'by', + 'cachedappprefix', + 'cachedroot', + 'callboolean', + 'callfirst', + 'callint', 'callbooleanmethod', 'callbytemethod', 'callcharmethod', - 'callshortmethod', + 'calldoublemethod', + 'calledname', + 'callfloatmethod', + 'callfloat', 'callintmethod', 'calllongmethod', - 'callfloatmethod', - 'calldoublemethod', - 'callvoidmethod', - 'callnonvirtualobjectmethod', 'callnonvirtualbooleanmethod', 'callnonvirtualbytemethod', 'callnonvirtualcharmethod', - 'callnonvirtualshortmethod', + 'callnonvirtualdoublemethod', + 'callnonvirtualfloatmethod', 'callnonvirtualintmethod', 'callnonvirtuallongmethod', - 'callnonvirtualfloatmethod', - 'callnonvirtualdoublemethod', + 'callnonvirtualobjectmethod', + 'callnonvirtualshortmethod', 'callnonvirtualvoidmethod', - 'getfieldid', - 'getobjectfield', - 'getbooleanfield', - 'getbytefield', - 'getcharfield', - 'getshortfield', - 'getintfield', - 'getlongfield', - 'getfloatfield', - 'getdoublefield', - 'setobjectfield', - 'setbooleanfield', - 'setbytefield', - 'setcharfield', - 'setshortfield', - 'setintfield', - 'setlongfield', - 'setfloatfield', - 'setdoublefield', - 'getstaticmethodid', - 'callstaticobjectmethod', + 'callobjectmethod', + 'callobject', + 'callshortmethod', + 'callsite_col', + 'callsite_file', + 'callsite_line', + 'callstack', 'callstaticbooleanmethod', + 'callstaticboolean', 
'callstaticbytemethod', 'callstaticcharmethod', - 'callstaticshortmethod', + 'callstaticdoublemethod', + 'callstaticfloatmethod', 'callstaticintmethod', + 'callstaticint', 'callstaticlongmethod', - 'callstaticfloatmethod', - 'callstaticdoublemethod', + 'callstaticobjectmethod', + 'callstaticobject', + 'callstaticshortmethod', + 'callstaticstring', 'callstaticvoidmethod', - 'getstaticfieldid', - 'getstaticobjectfield', - 'getstaticbooleanfield', - 'getstaticbytefield', - 'getstaticcharfield', - 'getstaticshortfield', - 'getstaticintfield', - 'getstaticlongfield', - 'getstaticfloatfield', - 'getstaticdoublefield', - 'setstaticobjectfield', - 'setstaticbooleanfield', - 'setstaticbytefield', - 'setstaticcharfield', - 'setstaticshortfield', - 'setstaticintfield', - 'setstaticlongfield', - 'setstaticfloatfield', - 'setstaticdoublefield', - 'newstring', - 'getstringlength', - 'getstringchars', - 'getarraylength', - 'newobjectarray', - 'getobjectarrayelement', - 'setobjectarrayelement', - 'newbooleanarray', - 'newbytearray', - 'newchararray', - 'newshortarray', - 'newintarray', - 'newlongarray', - 'newfloatarray', - 'newdoublearray', - 'getbooleanarrayelements', - 'getbytearrayelements', - 'getchararrayelements', - 'getshortarrayelements', - 'getintarrayelements', - 'getlongarrayelements', - 'getfloatarrayelements', - 'getdoublearrayelements', - 'getbooleanarrayregion', - 'getbytearrayregion', - 'getchararrayregion', - 'getshortarrayregion', - 'getintarrayregion', - 'getlongarrayregion', - 'getfloatarrayregion', - 'getdoublearrayregion', - 'setbooleanarrayregion', - 'setbytearrayregion', - 'setchararrayregion', - 'setshortarrayregion', - 'setintarrayregion', - 'setlongarrayregion', - 'setfloatarrayregion', - 'setdoublearrayregion', - 'monitorenter', - 'monitorexit', - 'fromreflectedmethod', - 'fromreflectedfield', - 'toreflectedmethod', - 'toreflectedfield', - 'exceptioncheck', + 'callstring', + 'callvoidmethod', + 'callvoid', + 'cancel', + 'capabilities', + 'capa', + 
'capi', + 'cap', + 'cbrt', + 'cc', + 'ceil', + 'chardigitvalue', + 'charname', + 'charset', + 'chartype', + 'checkdebugging', + 'checkuser', + 'checked', + 'childnodes', + 'chk', + 'chmod', + 'choosecolumntype', + 'chown', + 'chunked', + 'classid', + 'class', + 'clear', + 'clonenode', + 'closeprepared', + 'closewrite', + 'close', + 'codebase', + 'codetype', + 'code', + 'colmap', + 'colorspace', + 'column_blob', + 'column_count', + 'column_decltype', + 'column_double', + 'column_int64', + 'column_name', + 'column_text', + 'column_type', + 'command', + 'comments', + 'comparecodepointorder', + 'compare', + 'componentdelimiter', + 'components', + 'composite', + 'compress', + 'condtoint', + 'configuredskeys', + 'configureds', + 'connhandler', + 'connectionhandler', + 'connection', + 'connect', + 'consume_domain', + 'consume_label', + 'consume_message', + 'consume_rdata', + 'consume_string', + 'contains', + 'contentlength', + 'contenttype', + 'content_disposition', + 'content_transfer_encoding', + 'content_type', + 'contents', + 'content', + 'continuationpacket', + 'continuationpoint', + 'continuationstack', + 'continuation', + 'continue', + 'contrast', + 'conventionaltop', + 'convert', + 'cookiesarray', + 'cookiesary', + 'cookies', + 'cookie', + 'copyto', + 'cosh', + 'cos', + 'countusersbygroup', + 'country', + 'count', + 'crc', + 'createdocumenttype', + 'createdocument', + 'createtable', + 'createattributens', + 'createattribute', + 'createcdatasection', + 'createcomment', + 'createdocumentfragment', + 'createelementns', + 'createelement', + 'createentityreference', + 'createprocessinginstruction', + 'createtextnode', + 'create', + 'criteria', + 'crop', + 'csscontent', + 'curl', + 'currentfile', + 'databasecolumnnames', + 'databasecolumns', + 'databasemap', + 'databasename', + 'datasourcecolumnnames', + 'datasourcecolumns', + 'datasourcemap', + 'data', + 'date', + 'dayofmonth', + 'dayofweekinmonth', + 'dayofweek', + 'dayofyear', + 'daysbetween', + 'days', + 'day', 
'dbtablestable', + 'db', + 'debug', + 'declare', + 'decodebase64', + 'decodehex', + 'decodehtml', + 'decodeqp', + 'decodeurl', + 'decodexml', + 'decomposeassignment', + 'decompose', + 'defaultcontentrepresentation', + 'defer', + 'deg2rad', + 'deletedata', + 'deleteglobalref', + 'deletelocalref', + 'delete', + 'dele', + 'delim', + 'depth', + 'dereferencepointer', + 'describe', + 'description', + 'detach', + 'detectcharset', + 'didinclude', + 'difference', + 'digit', + 'dir', + 'displaycountry', + 'displaylanguage', + 'displayname', + 'displayscript', + 'displayvariant', + 'div', + 'dns_response', + 'doatbegins', + 'doatends', + 'doclose', + 'domainbody', + 'dosessions', + 'dowithclose', + 'doccomment', + 'doctype', + 'documentelement', + 'documentroot', + 'document', + 'done', + 'dowlocal', + 'download', + 'do', 'dstable', 'dsdbtable', 'dshoststable', + 'dsinfo', + 'dstoffset', + 'dst', + 'dtdid', + 'dup2', + 'dup', + 'd', + 'eachbyte', + 'eachcharacter', + 'eachchild', + 'eachcomponent', + 'eachdirpathrecursive', + 'eachdirpath', + 'eachdir', + 'eachentry', + 'eachfilename', + 'eachfilepathrecursive', + 'eachfilepath', + 'eachfile', + 'eachkey', + 'eachlinebreak', + 'eachline', + 'eachmatch', + 'eachnode', + 'eachpair', + 'eachpathrecursive', + 'eachpath', + 'eachrow', + 'eachsub', + 'eachwordbreak', + 'eachword', + 'each', + 'element', + 'eligiblepaths', + 'eligiblepath', + 'encodepassword', + 'encodebase64', + 'encodehex', + 'encodehtmltoxml', + 'encodehtml', + 'encodemd5', + 'encodeqp', + 'encodesql92', + 'encodesql', + 'encodeurl', + 'encodexml', + 'encoding', + 'enctype', + 'endjs', + 'endssl', + 'endtls', + 'endswith', + 'end', + 'enhance', + 'ensurestopped', + 'entities', + 'entry', + 'env', + 'equals', + 'era', + 'erfc', + 'erf', + 'errstack', + 'errcode', + 'errmsg', + 'errors', + 'error', + 'err', + 'escape_member', + 'establisherrorstate', + 'exceptioncheck', + 'exceptionclear', + 'exceptiondescribe', + 'exceptionoccurred', + 'exchange', + 'execinits', + 
'execinstalls', + 'executelazy', + 'executenow', + 'execute', + 'exists', + 'exitcode', + 'expireminutes', + 'expiresminutes', + 'expire', + 'expm1', + 'export16bits', + 'export32bits', + 'export64bits', + 'export8bits', + 'exportas', + 'exportpointerbits', + 'exportstring', + 'exportbytes', + 'exportsigned16bits', + 'exportsigned32bits', + 'exportsigned64bits', + 'exportsigned8bits', + 'expose', + 'exp', + 'extendedyear', + 'extensiondelimiter', + 'extensions', + 'extractfastone', + 'extractfast', + 'extractone', + 'extract', + 'fabs', + 'failnoconnectionhandler', + 'fail', + 'family', + 'fatalerror', + 'fcgireq', + 'fchdir', + 'fchmod', + 'fchown', + 'fd', + 'features', + 'fetchdata', 'fieldstable', - 'sql', - 'adddatasource', - 'loaddatasourceinfo', - 'loaddatasourcehostinfo', - 'getdatasource', + 'filenames', + 'filequeue', + 'fileuploadsary', + 'fileuploads', + 'filename', + 'file', + 'filterinputcolumn', + 'finalize', + 'findinclude', + 'findlast', + 'findall', + 'findcount', + 'findfirst', + 'findindex', + 'findpattern', + 'findposition', + 'findsymbols', + 'find', + 'firstchild', + 'firstcomponent', + 'firstdayofweek', + 'firstnode', + 'first', + 'fixformat', + 'fliph', + 'flipv', + 'floor', + 'flush', + 'foldcase', + 'foo', + 'foreachaccept', + 'foreachbyte', + 'foreachcharacter', + 'foreachchild', + 'foreachentry', + 'foreachfilename', + 'foreachfile', + 'foreachkey', + 'foreachlinebreak', + 'foreachline', + 'foreachmatch', + 'foreachnode', + 'foreachpair', + 'foreachpathcomponent', + 'foreachrow', + 'foreachsub', + 'foreachwordbreak', + 'foreach', + 'forcedrowid', + 'foreachday', + 'foreachspool', + 'formatas', + 'formatcontextelements', + 'formatcontextelement', + 'formatnumber', + 'format', + 'form', + 'for', + 'frexp', + 'fromname', + 'fromport', + 'fromreflectedfield', + 'fromreflectedmethod', + 'from', + 'front', + 'fsync', + 'ftpdeletefile', + 'ftpgetlisting', + 'ftruncate', + 'fullpath', + 'fx', + 'f', + 'gamma', + 'gatewayinterface', + 
'generatechecksum', + 'gen', + 'getappsource', + 'getattr', + 'getbarheight', + 'getbarmultiplier', + 'getbarwidth', + 'getbaseline', + 'getclass', + 'getcode', + 'getcolumns', + 'getcolumn', + 'getdatabasebyalias', + 'getdatabasebyid', + 'getdatabasebyname', + 'getdatabasehost', + 'getdatabasetablebyid', + 'getdatabasetablepart', + 'getdatabasetablebyalias', + 'getdatabasetable', + 'getdatasourcedatabasebyid', + 'getdatasourcedatabase', + 'getdatasourcehost', 'getdatasourceid', 'getdatasourcename', - 'listdatasources', - 'listactivedatasources', - 'removedatasource', - 'listdatasourcehosts', - 'listhosts', - 'adddatasourcehost', - 'getdatasourcehost', - 'removedatasourcehost', - 'getdatabasehost', + 'getdatasource', + 'getdefaultstorage', + 'getfile', + 'getfont', + 'getgroupid', + 'getgroup', + 'gethtmlattrstring', + 'gethtmlattr', 'gethostdatabase', - 'listalldatabases', - 'listdatasourcedatabases', - 'listhostdatabases', - 'getdatasourcedatabase', - 'getdatasourcedatabasebyid', - 'getdatabasebyname', - 'getdatabasebyid', - 'getdatabasebyalias', - 'adddatasourcedatabase', - 'removedatasourcedatabase', - 'listalltables', - 'listdatabasetables', - 'getdatabasetable', - 'getdatabasetablebyalias', - 'getdatabasetablebyid', + 'getinclude', + 'getlcapitype', + 'getnode', + 'getprowcount', + 'getsize', + 'getsortfieldspart', + 'getstatus', 'gettablebyid', - 'adddatabasetable', - 'removedatabasetable', - 'removefield', - 'maybevalue', + 'gettextalignment', + 'gettextsize', + 'gettrigger', 'getuniquealiasname', - 'makecolumnlist', - 'makecolumnmap', - 'datasourcecolumns', - 'datasourcemap', - 'hostcolumns', - 'hostmap', - 'hostcolumns2', - 'hostmap2', - 'databasecolumns', - 'databasemap', - 'tablecolumns', - 'tablemap', - 'databasecolumnnames', - 'hostcolumnnames', - 'hostcolumnnames2', - 'datasourcecolumnnames', - 'tablecolumnnames', - 'bindcount', - 'sqlite3', - 'db', - 'tables', - 'hastable', - 'tablehascolumn', - 'eachrow', - 'bindparam', - 'foreachrow', - 
'executelazy', - 'executenow', - 'lastinsertid', - 'table', - 'bindone', - 'src', - 'stat', - 'colmap', - 'getcolumn', - 'locals', - 'getcolumns', - 'bodybytes', - 'headerbytes', - 'ready', - 'token', - 'url', - 'done', - 'header', - 'result', - 'statuscode', - 'raw', - 'version', - 'download', - 'upload', - 'ftpdeletefile', - 'ftpgetlisting', - 'perform', - 'performonce', - 's', - 'linediffers', - 'sourcefile', - 'sourceline', - 'sourcecolumn', - 'continuationpacket', - 'continuationpoint', - 'continuationstack', - 'features', - 'lastpoint', - 'net', - 'running', - 'source', - 'run', - 'pathtouri', - 'sendpacket', - 'readpacket', - 'handlefeatureset', - 'handlefeatureget', - 'handlestdin', - 'handlestdout', - 'handlestderr', - 'isfirststep', - 'handlecontinuation', - 'ensurestopped', - 'handlestackget', - 'handlecontextnames', - 'formatcontextelements', - 'formatcontextelement', - 'bptypetostr', - 'bptoxml', - 'handlebreakpointlist', + 'getuserbykey', + 'getuserid', + 'getuser', + 'getzipfilebytes', + 'getarraylength', + 'getattributenamespace', + 'getattributenodens', + 'getattributenode', + 'getattributens', + 'getattribute', + 'getbooleanarrayelements', + 'getbooleanarrayregion', + 'getbooleanfield', + 'getbytearrayelements', + 'getbytearrayregion', + 'getbytefield', + 'getchararrayelements', + 'getchararrayregion', + 'getcharfield', + 'getdoublearrayelements', + 'getdoublearrayregion', + 'getdoublefield', + 'getelementbyid', + 'getelementsbytagnamens', + 'getelementsbytagname', + 'getfieldid', + 'getfield', + 'getfloatarrayelements', + 'getfloatarrayregion', + 'getfloatfield', + 'getformat', + 'getintarrayelements', + 'getintarrayregion', + 'getintfield', + 'getisocomment', + 'getlibrary', + 'getlongarrayelements', + 'getlongarrayregion', + 'getlongfield', + 'getmethodid', + 'getmode', + 'getnameditemns', + 'getnameditem', + 'getnumericvalue', + 'getobjectarrayelement', + 'getobjectclass', + 'getobjectfield', + 'getparts', + 'getprefs', + 'getpropertyvalue', + 
'getrange', + 'getset', + 'getshortarrayelements', + 'getshortarrayregion', + 'getshortfield', + 'getstaticbooleanfield', + 'getstaticbytefield', + 'getstaticcharfield', + 'getstaticdoublefield', + 'getstaticfieldid', + 'getstaticfloatfield', + 'getstaticintfield', + 'getstaticlongfield', + 'getstaticmethodid', + 'getstaticobjectfield', + 'getstaticshortfield', + 'getstringchars', + 'getstringlength', + 'gettype', + 'get', + 'givenblock', + 'gmt', + 'gotconnection', + 'gotfileupload', + 'groupby', + 'groupcolumns', + 'groupjoin', + 'groupcount', 'handlebreakpointget', + 'handlebreakpointlist', 'handlebreakpointremove', - 'condtoint', - 'inttocond', - 'handlebreakpointupdate', 'handlebreakpointset', + 'handlebreakpointupdate', 'handlecontextget', + 'handlecontextnames', + 'handlecontinuation', + 'handledefinitionbody', + 'handledefinitionhead', + 'handledefinitionresource', + 'handledevconnection', + 'handleevalexpired', + 'handlefeatureget', + 'handlefeatureset', + 'handlelassoappcontent', + 'handlelassoappresponse', + 'handlenormalconnection', + 'handleresource', 'handlesource', - 'error', - 'setstatus', - 'getstatus', - 'stoprunning', - 'pollide', - 'polldbg', - 'runonce', - 'arguments', - 'id', - 'argumentvalue', - 'end', - 'start', - 'days', - 'foreachday', - 'padzero', - 'actionparams', - 'capi', - 'doclose', - 'dsinfo', - 'isnothing', - 'named', - 'workinginputcolumns', - 'workingkeycolumns', - 'workingreturncolumns', - 'workingsortcolumns', - 'workingkeyfield_name', - 'scanfordatasource', - 'configureds', - 'configuredskeys', - 'scrubkeywords', - 'closeprepared', - 'filterinputcolumn', - 'prev', + 'handlestackget', + 'handlestderr', + 'handlestdin', + 'handlestdout', + 'hasexpired', + 'hasfeature', + 'hashtmlattr', + 'hastable', + 'hastrailingcomponent', + 'hasattributens', + 'hasattributes', + 'hasattribute', + 'hasbinaryproperty', + 'haschildnodes', + 'hash', + 'hasmethod', + 'hasvalue', + 'headerbytes', + 'headersarray', + 'headersmap', + 'headers', + 
'header', 'head', - 'removenode', - 'listnode', - 'bind', - 'listen', - 'remoteaddress', - 'shutdownrdwr', - 'shutdownwr', - 'shutdownrd', - 'localaddress', - 'accept', - 'connect', - 'foreachaccept', - 'writeobjecttcp', - 'readobjecttcp', - 'beginssl', - 'endssl', - 'begintls', - 'endtls', - 'acceptnossl', - 'loadcerts', - 'sslerrfail', - 'fromname', - 'fromport', - 'env', - 'checked', - 'getclass', - 'jobjectisa', - 'new', - 'callvoid', - 'callint', - 'callfloat', - 'callboolean', - 'callobject', - 'callstring', - 'callstaticobject', - 'callstaticstring', - 'callstaticint', - 'callstaticboolean', - 'chk', - 'makecolor', - 'realdoc', - 'addbarcode', - 'addchapter', - 'addcheckbox', - 'addcombobox', - 'addhiddenfield', - 'addimage', - 'addlist', - 'addpage', - 'addparagraph', - 'addpasswordfield', - 'addphrase', - 'addradiobutton', - 'addradiogroup', - 'addresetbutton', - 'addsection', - 'addselectlist', - 'addsubmitbutton', - 'addtable', - 'addtextarea', - 'addtextfield', - 'addtext', - 'arc', - 'circle', - 'closepath', - 'curveto', - 'drawtext', - 'getcolor', - 'getheader', - 'getheaders', - 'getmargins', - 'getpagenumber', - 'getsize', - 'insertpage', - 'line', - 'rect', - 'setcolor', - 'setfont', - 'setlinewidth', - 'setpagenumber', - 'conventionaltop', - 'lowagiefont', - 'jcolor', + 'height', + 'histogram', + 'home', + 'hostcolumnnames2', + 'hostcolumnnames', + 'hostcolumns2', + 'hostcolumns', + 'hostmap2', + 'hostdatasource', + 'hostextra', + 'hostid', + 'hostisdynamic', + 'hostmap', + 'hostname', + 'hostpassword', + 'hostport', + 'hostschema', + 'hosttableencoding', + 'hosttonet16', + 'hosttonet32', + 'hosttonet64', + 'hostusername', + 'host', + 'hourofampm', + 'hourofday', + 'hoursbetween', + 'hour', + 'hreflang', + 'href', + 'htmlcontent', + 'htmlizestacktracelink', + 'htmlizestacktrace', + 'httpacceptencoding', + 'httpacceptlanguage', + 'httpaccept', + 'httpauthorization', + 'httpcachecontrol', + 'httpconnection', + 'httpcookie', + 'httphost', + 
'httpreferer', + 'httpreferrer', + 'httpuseragent', + 'httpequiv', + 'hypot', + 'idmap', + 'idealinmemory', + 'id', + 'ifempty', + 'ifnotempty', + 'ignorecase', + 'ilogb', + 'imgptr', + 'implementation', + 'import16bits', + 'import32bits', + 'import64bits', + 'import8bits', + 'importstring', + 'importas', + 'importbytes', + 'importnode', + 'importpointer', + 'includebytes', + 'includelibraryonce', + 'includelibrary', + 'includeonce', + 'includestack', + 'includes', + 'include', + 'indaylighttime', + 'index', + 'initrequest', + 'initialize', + 'inits', + 'init', + 'inneroncompare', + 'inputcolumns', + 'inputtype', + 'input', + 'insertback', + 'insertfirst', + 'insertfrom', + 'insertfront', + 'insertlast', + 'insertbefore', + 'insertdata', + 'insert', + 'installs', + 'install', + 'inttocond', + 'integer', + 'internalsubset', + 'interrupt', + 'intersection', + 'invokeautocollect', + 'invokeuntil', + 'invokewhile', + 'invoke', + 'in', + 'ioctl', + 'isallof', + 'isanyof', + 'isdir', + 'isempty', + 'isfirststep', + 'isfullpath', + 'ishttps', + 'isidle', + 'islink', + 'isnota', + 'isnotempty', + 'isnothing', + 'isopen', + 'issourcefile', + 'isualphabetic', + 'isulowercase', + 'isuuppercase', + 'isuwhitespace', + 'iswhitespace', + 'isxhr', + 'isalnum', + 'isalpha', + 'isa', + 'isbase', + 'isblank', + 'iscntrl', + 'isdigit', + 'isemptyelement', + 'isgraph', + 'isinstanceof', + 'islower', + 'ismultipart', + 'isnan', + 'iso3country', + 'iso3language', + 'isprint', + 'ispunct', + 'issameobject', + 'isset', + 'isspace', + 'isssl', + 'issupported', + 'istitle', + 'istype', + 'isupper', + 'isvalid', + 'isxdigit', + 'item', + 'j0', + 'j1', 'jbarcode', - 'generatechecksum', - 'getbarheight', - 'getbarmultiplier', - 'getbarwidth', - 'getbaseline', - 'getcode', - 'getfont', - 'gettextalignment', - 'gettextsize', - 'setbarheight', - 'setbarmultiplier', - 'setbarwidth', - 'setbaseline', - 'setcode', - 'setgeneratechecksum', - 'setshowchecksum', - 'settextalignment', - 'settextsize', - 
'showchecksum', - 'showcode39startstop', - 'showeanguardbars', + 'jcolor', 'jfont', - 'getencoding', - 'getface', - 'getfullfontname', - 'getpsfontname', - 'getsupportedencodings', - 'istruetype', - 'getstyle', - 'getbold', - 'getitalic', - 'getunderline', - 'setface', - 'setunderline', - 'setbold', - 'setitalic', - 'textwidth', 'jimage', - 'ontop', 'jlist', + 'jn', + 'jobjectisa', + 'join', 'jread', - 'addjavascript', - 'exportfdf', - 'extractimage', - 'fieldnames', - 'fieldposition', - 'fieldtype', - 'fieldvalue', - 'gettext', - 'importfdf', - 'javascript', - 'pagecount', - 'pagerotation', - 'pagesize', - 'setfieldvalue', - 'setpagerange', + 'jscontent', + 'jsonfornode', + 'jsonhtml', + 'jsonisleaf', + 'jsonlabel', 'jtable', - 'getabswidth', - 'getalignment', - 'getbordercolor', - 'getborderwidth', - 'getcolumncount', - 'getpadding', - 'getrowcount', - 'getspacing', - 'setalignment', - 'setbordercolor', - 'setborderwidth', - 'setpadding', - 'setspacing', 'jtext', - 'element', - 'foreachspool', - 'unspool', - 'err', - 'in', - 'out', - 'pid', - 'wait', - 'testexitcode', + 'julianday', + 'keycolumns', + 'keys', + 'keywords', + 'key', + 'kill', + 'label', + 'language', + 'lang', + 'lastaccessdate', + 'lastaccesstime', + 'lastchild', + 'lastcomponent', + 'lastinsertid', + 'lastnode', + 'lastpoint', + 'lasttouched', + 'last_insert_rowid', + 'lasterror', + 'last', + 'lazyvalue', + 'ldexp', + 'leaveopen', + 'left', + 'length', + 'lgamma', + 'linediffers', + 'linkto', + 'linktype', + 'listactivedatasources', + 'listalldatabases', + 'listalltables', + 'listdatabasetables', + 'listdatasourcedatabases', + 'listdatasourcehosts', + 'listdatasources', + 'listgroupsbyuser', + 'listgroups', + 'listhostdatabases', + 'listhosts', + 'listnode', + 'listusersbygroup', + 'listusers', + 'listen', + 'listmethods', + 'list', + 'loadcerts', + 'loaddatasourcehostinfo', + 'loaddatasourceinfo', + 'loadlibrary', + 'localaddress', + 'localname', + 'locals', + 'lock', + 'log10', + 'log1p', + 
'logb', + 'log', + 'lookupnamespace', + 'lop', + 'lowagiefont', + 'lowercase', + 'makecolor', + 'makecolumnlist', + 'makecolumnmap', + 'makecookieyumyum', + 'makefullpath', + 'makeinheritedcopy', + 'makenonrelative', + 'makeurl', + 'map', + 'marker', + 'matchtriggers', + 'matchesstart', + 'matches', + 'matchposition', + 'matchstring', + 'maxinmemory', + 'maxrows', + 'maxlength', 'maxworkers', - 'tasks', - 'workers', - 'startone', - 'addtask', - 'waitforcompletion', - 'isidle', - 'scanworkers', - 'scantasks', - 'z', - 'addfile', - 'adddir', - 'adddirpath', - 'foreachfile', - 'foreachfilename', - 'eachfilename', - 'filenames', - 'getfile', + 'max', + 'maybeslash', + 'maybevalue', + 'md5hex', + 'media', + 'members', + 'merge', 'meta', - 'criteria', - 'map', - 'valid', - 'lazyvalue', - 'dns_response', - 'qdcount', - 'qdarray', - 'answer', - 'bitformat', - 'consume_rdata', - 'consume_string', - 'consume_label', - 'consume_domain', - 'consume_message', - 'errors', - 'warnings', - 'addwarning', - 'adderror', - 'renderbytes', - 'renderstring', - 'components', - 'addcomponent', - 'addcomponents', - 'body', - 'renderdocumentbytes', - 'contenttype', + 'methodname', + 'method', + 'millisecondsinday', + 'millisecond', 'mime_boundary', 'mime_contenttype', 'mime_hdrs', - 'addtextpart', - 'addhtmlpart', - 'addattachment', - 'addpart', - 'recipients', - 'pop_capa', - 'pop_debug', - 'pop_err', - 'pop_get', - 'pop_ids', - 'pop_index', - 'pop_log', - 'pop_mode', - 'pop_net', - 'pop_res', - 'pop_server', - 'pop_timeout', - 'pop_token', - 'pop_cmd', - 'user', - 'pass', - 'apop', - 'auth', - 'quit', - 'rset', - 'list', - 'uidl', - 'retr', - 'dele', + 'mimes', + 'mime', + 'minutesbetween', + 'minute', + 'min', + 'moddatestr', + 'mode', + 'modf', + 'modificationdate', + 'modificationtime', + 'modulate', + 'monitorenter', + 'monitorexit', + 'month', + 'moveto', + 'movetoattributenamespace', + 'movetoattribute', + 'movetoelement', + 'movetofirstattribute', + 'movetonextattribute', + 'msg', + 
'mtime', + 'multiple', + 'named', + 'namespaceuri', + 'name', + 'needinitialization', + 'nettohost16', + 'nettohost32', + 'nettohost64', + 'net', + 'newbooleanarray', + 'newbytearray', + 'newchararray', + 'newdoublearray', + 'newfloatarray', + 'newglobalref', + 'newintarray', + 'newlongarray', + 'newobjectarray', + 'newobject', + 'newshortarray', + 'newstring', + 'new', + 'nextnode', + 'nextprunedelta', + 'nextprune', + 'nextsibling', + 'nextafter', + 'next', + 'novaluelists', + 'nodeforpath', + 'nodelist', + 'nodename', + 'nodetype', + 'nodevalue', 'noop', - 'capa', - 'stls', - 'authorize', - 'retrieve', - 'headers', - 'uniqueid', - 'capabilities', - 'cancel', - 'results', - 'lasterror', + 'normalize', + 'notationname', + 'notations', + 'numsets', + 'n', + 'objects', + 'objecttype', + 'object', + 'oncomparestrict', + 'onconvert', + 'ontop', + 'onclick', + 'oncompare', + 'oncreate', + 'ondblclick', + 'onkeydown', + 'onkeypress', + 'onkeyup', + 'onmousedown', + 'onmousemove', + 'onmouseout', + 'onmouseover', + 'onmouseup', + 'onreset', + 'onsubmit', + 'openappend', + 'openread', + 'opentruncate', + 'openwith', + 'openwriteonly', + 'openwrite', + 'open', + 'orderbydescending', + 'orderby', + 'outputencoding', + 'output', + 'out', + 'ownerdocument', + 'ownerelement', + 'pdifference', + 'padleading', + 'padtrailing', + 'padzero', + 'pagesize', + 'paramdescs', + 'params', + 'param', + 'parentdir', + 'parentnode', + 'parent', + 'parseas', + 'parsedocument', + 'parseoneheaderline', 'parse_body', 'parse_boundary', 'parse_charset', @@ -4370,356 +4230,499 @@ MEMBERS = { 'parse_msg', 'parse_parts', 'parse_rawhdrs', - 'rawheaders', - 'content_type', - 'content_transfer_encoding', - 'content_disposition', - 'boundary', - 'charset', - 'cc', - 'subject', - 'bcc', - 'date', + 'parsenumber', + 'parse', + 'pass', + 'pathinfo', + 'pathtouri', + 'pathtranslated', + 'path', 'pause', - 'continue', - 'touch', - 'refresh', - 'queue', - 'status', - 'queue_status', - 'active_tick', - 
'getprefs', - 'initialize', + 'payload', + 'performonce', + 'perform', + 'perms', + 'pid', + 'pixel', + 'polldbg', + 'pollide', + 'popinclude', + 'pop_capa', + 'pop_cmd', + 'pop_debug', + 'pop_err', + 'pop_get', + 'pop_ids', + 'pop_index', + 'pop_log', + 'pop_mode', + 'pop_net', + 'pop_res', + 'pop_server', + 'pop_timeout', + 'pop_token', + 'pop', + 'port', + 'position', + 'postdispatch', + 'postparamsary', + 'postparam', + 'poststring', + 'postparams', + 'pow', + 'predispatch', + 'prefix', + 'preflight', + 'prepared', + 'prepare', + 'previoussibling', + 'prev', + 'printsimplemsg', + 'private_compare', + 'private_findlast', + 'private_find', + 'private_merge', + 'private_rebalanceforinsert', + 'private_rebalanceforremove', + 'private_replaceall', + 'private_replacefirst', + 'private_rotateleft', + 'private_rotateright', + 'private_setrange', + 'private_split', + 'probemimetype', + 'provides', + 'proxying', + 'prune', + 'publicid', + 'pullhttpheader', + 'pullmimepost', + 'pulloneheaderline', + 'pullrawpostchunks', + 'pullrawpost', + 'pullrequestline', + 'pullrequest', + 'pullurlpost', + 'pushinclude', + 'push', + 'qdarray', + 'qdcount', + 'queryparamsary', + 'queryparams', + 'queryparam', + 'querystring', 'queue_maintenance', 'queue_messages', - 'content', + 'queue_status', + 'queue', + 'quit', + 'rawcontent', + 'rawheader', + 'rawinvokable', + 'rawdiff', + 'rawheaders', + 'raw', + 'readbytesfully', + 'readbytes', + 'readdestinations', + 'readerror', + 'readidobjects', + 'readline', + 'readobjecttcp', + 'readpacket', + 'readsomebytes', + 'readattributevalue', + 'readobject', + 'readstring', + 'ready', + 'read', + 'realdoc', + 'realpath', + 'receivefd', + 'recipients', + 'recover', 'rectype', - 'requestid', - 'cachedappprefix', - 'cachedroot', - 'cookiesary', - 'fcgireq', - 'fileuploadsary', - 'headersmap', - 'httpauthorization', - 'postparamsary', - 'queryparamsary', - 'documentroot', - 'appprefix', - 'httpconnection', - 'httpcookie', - 'httphost', - 
'httpuseragent', - 'httpcachecontrol', - 'httpreferer', - 'httpreferrer', - 'contentlength', - 'pathtranslated', + 'redirectto', + 'red', + 'refid', + 'refobj', + 'refresh', + 'rel', + 'remainder', + 'remoteaddress', 'remoteaddr', 'remoteport', - 'requestmethod', - 'requesturi', - 'scriptfilename', - 'scriptname', - 'scripturi', - 'scripturl', - 'serveraddr', - 'serveradmin', - 'servername', - 'serverport', - 'serverprotocol', - 'serversignature', - 'serversoftware', - 'pathinfo', - 'gatewayinterface', - 'httpaccept', - 'httpacceptencoding', - 'httpacceptlanguage', - 'ishttps', - 'cookies', - 'cookie', - 'rawheader', - 'queryparam', - 'postparam', - 'param', - 'queryparams', - 'querystring', - 'postparams', - 'poststring', - 'params', - 'fileuploads', - 'isxhr', + 'removeback', + 'removedatabasetable', + 'removedatasourcedatabase', + 'removedatasourcehost', + 'removedatasource', + 'removefield', + 'removefirst', + 'removefront', + 'removegroup', + 'removelast', + 'removenode', + 'removesubnode', + 'removetrailing', + 'removeuserfromallgroups', + 'removeuserfromgroup', + 'removeuser', + 'removeall', + 'removeattributenode', + 'removeattributens', + 'removeattribute', + 'removechild', + 'removeleading', + 'removenameditemns', + 'removenameditem', + 'remove', + 'renderbytes', + 'renderdocumentbytes', + 'renderstring', + 'replaceheader', + 'replaceall', + 'replacechild', + 'replacedata', + 'replacefirst', + 'replacepattern', + 'replace', + 'representnoderesult', + 'representnode', 'reqid', - 'statusmsg', + 'requestid', + 'requestmethod', 'requestparams', - 'stdin', - 'mimes', - 'writeheaderline', - 'writeheaderbytes', - 'writebodybytes', - 'cap', - 'n', - 'proxying', - 'stop', - 'printsimplemsg', - 'handleevalexpired', - 'handlenormalconnection', - 'handledevconnection', - 'splittoprivatedev', - 'getmode', - 'curl', - 'novaluelists', - 'makeurl', - 'choosecolumntype', - 'getdatabasetablepart', - 'getlcapitype', - 'buildquery', - 'getsortfieldspart', - 'endjs', - 
'title', - 'addjs', - 'addjstext', - 'addendjs', - 'addendjstext', - 'addcss', - 'addfavicon', - 'attrs', - 'dtdid', - 'lang', - 'xhtml', - 'style', - 'gethtmlattr', - 'hashtmlattr', - 'onmouseover', - 'onkeydown', - 'dir', - 'onclick', - 'onkeypress', - 'onmouseout', - 'onkeyup', - 'onmousemove', - 'onmouseup', - 'ondblclick', - 'onmousedown', - 'sethtmlattr', - 'class', - 'gethtmlattrstring', - 'tag', - 'code', - 'msg', - 'scripttype', - 'defer', - 'httpequiv', - 'scheme', - 'href', - 'hreflang', - 'linktype', - 'rel', + 'requesturi', + 'requires', + 'reserve', + 'reset', + 'resolutionh', + 'resolutionv', + 'resolvelinks', + 'resourcedata', + 'resourceinvokable', + 'resourcename', + 'resources', + 'respond', + 'restart', + 'restname', + 'results', + 'result', + 'resume', + 'retrieve', + 'retr', + 'returncolumns', + 'returntype', + 'reverse', 'rev', - 'media', - 'declare', - 'classid', - 'codebase', - 'objecttype', - 'codetype', - 'archive', - 'standby', - 'usemap', - 'tabindex', - 'styletype', - 'method', - 'enctype', - 'accept_charset', - 'onsubmit', - 'onreset', - 'accesskey', - 'inputtype', - 'maxlength', - 'for', - 'selected', - 'label', - 'multiple', - 'buff', - 'wroteheaders', - 'pullrequest', - 'pullrawpost', - 'shouldclose', - 'pullurlpost', - 'pullmimepost', - 'pullhttpheader', - 'pulloneheaderline', - 'parseoneheaderline', - 'addoneheaderline', + 'rewind', + 'right', + 'rint', + 'roll', + 'rootmap', + 'root', + 'rotate', + 'route', + 'rowsfound', + 'rset', + 'rules', + 'rule', + 'runonce', + 'running', + 'run', + 'r', 'safeexport8bits', - 'writeheader', - 'fail', - 'connhandler', - 'port', - 'connectionhandler', - 'acceptconnections', - 'gotconnection', - 'failnoconnectionhandler', - 'splitconnection', + 'sameas', + 'savedata', + 'save', + 'scalb', + 'scale', + 'scanfordatasource', + 'scantasks', + 'scanworkers', + 'schemaname', + 'scheme', 'scriptextensions', + 'scriptfilename', + 'scriptname', + 'scripturi', + 'scripturl', + 'scripttype', + 'script', 
+ 'scrubkeywords', + 'searchurl', + 'search', + 'secondsbetween', + 'second', + 'seek', + 'selectmany', + 'selected', + 'select', + 'self', + 'sendchunk', 'sendfile', - 'probemimetype', - 'appname', - 'inits', - 'installs', - 'rootmap', - 'install', - 'getappsource', - 'preflight', - 'splituppath', - 'handleresource', - 'handledefinitionhead', - 'handledefinitionbody', - 'handledefinitionresource', - 'execinstalls', - 'execinits', - 'payload', - 'fullpath', - 'resourcename', - 'issourcefile', - 'resourceinvokable', - 'srcpath', - 'resources', - 'eligiblepath', - 'eligiblepaths', - 'expiresminutes', - 'moddatestr', - 'zips', - 'addzip', - 'getzipfilebytes', - 'resourcedata', - 'zip', - 'zipfile', - 'zipname', - 'zipfilename', - 'rawinvokable', - 'route', - 'setdestination', - 'getprowcount', - 'encodepassword', - 'checkuser', - 'needinitialization', - 'adduser', - 'getuserid', - 'getuser', - 'getuserbykey', - 'removeuser', - 'listusers', - 'listusersbygroup', - 'countusersbygroup', - 'addgroup', - 'updategroup', - 'getgroupid', - 'getgroup', - 'removegroup', - 'listgroups', - 'listgroupsbyuser', - 'addusertogroup', - 'removeuserfromgroup', - 'removeuserfromallgroups', - 'md5hex', - 'usercolumns', - 'groupcolumns', - 'expireminutes', - 'lasttouched', - 'hasexpired', - 'idealinmemory', - 'maxinmemory', - 'nextprune', - 'nextprunedelta', + 'sendpacket', + 'sendresponse', + 'sendfd', + 'send', + 'serializationelements', + 'serialize', + 'serveraddr', + 'serveradmin', + 'servername', + 'serverport', + 'serverprotocol', + 'serversignature', + 'serversoftware', 'sessionsdump', + 'sessionsmap', + 'setattr', + 'setbarheight', + 'setbarmultiplier', + 'setbarwidth', + 'setcode', + 'setcookie', + 'setdefaultstorage', + 'setencoding', + 'setgeneratechecksum', + 'sethtmlattr', + 'setheaders', + 'setname', + 'setrange', + 'setshowchecksum', + 'setstatus', + 'settextalignment', + 'settextsize', + 'setattributenodens', + 'setattributenode', + 'setattributens', + 'setattribute', + 
'setbooleanarrayregion', + 'setbooleanfield', + 'setbytearrayregion', + 'setbytefield', + 'setchararrayregion', + 'setcharfield', + 'setcolorspace', + 'setcwd', + 'setdestination', + 'setdoublearrayregion', + 'setdoublefield', + 'setfindpattern', + 'setfloatarrayregion', + 'setfloatfield', + 'setformat', + 'setignorecase', + 'setinput', + 'setintarrayregion', + 'setintfield', + 'setlongarrayregion', + 'setlongfield', + 'setmarker', + 'setmaxfilesize', + 'setmode', + 'setnameditemns', + 'setnameditem', + 'setobjectarrayelement', + 'setobjectfield', + 'setposition', + 'setreplacepattern', + 'setshortarrayregion', + 'setshortfield', + 'setsize', + 'setstaticbooleanfield', + 'setstaticbytefield', + 'setstaticcharfield', + 'setstaticdoublefield', + 'setstaticfloatfield', + 'setstaticintfield', + 'setstaticlongfield', + 'setstaticobjectfield', + 'setstaticshortfield', + 'settimezone', + 'settrait', + 'set', + 'sharpen', + 'shouldabort', + 'shouldclose', + 'showchecksum', + 'showcode39startstop', + 'showeanguardbars', + 'shutdownrdwr', + 'shutdownrd', + 'shutdownwr', + 'sinh', + 'sin', + 'size', + 'skiprows', + 'skip', + 'sortcolumns', + 'sort', + 'sourcecolumn', + 'sourceline', + 'sourcefile', + 'source', + 'specified', + 'splitconnection', + 'splitdebuggingthread', + 'splitextension', + 'splittoprivatedev', + 'splituppath', + 'splittext', + 'splitthread', + 'split', + 'sqlite3', + 'sql', + 'sqrt', + 'srcpath', + 'src', + 'sslerrfail', + 'standby', + 'startone', 'startup', - 'validatesessionstable', - 'createtable', - 'fetchdata', - 'savedata', - 'kill', - 'expire', - 'prune', - 'entry', - 'host', - 'tb', - 'setdefaultstorage', - 'getdefaultstorage', - 'onconvert', - 'send', - 'nodelist', - 'delim', - 'subnode', + 'start', + 'statementonly', + 'statement', + 'statuscode', + 'statusmsg', + 'status', + 'stat', + 'stdin', + 'step', + 'stls', + 'stoprunning', + 'stop', + 'stripfirstcomponent', + 'striplastcomponent', + 'styletype', + 'style', 'subnodes', - 'addsubnode', - 
'removesubnode', - 'nodeforpath', - 'representnoderesult', - 'mime', - 'extensions', - 'representnode', - 'jsonfornode', - 'defaultcontentrepresentation', + 'subnode', + 'subject', + 'substringdata', + 'subtract', + 'subtraits', + 'sub', + 'sum', 'supportscontentrepresentation', - 'htmlcontent', - 'appmessage', - 'appstatus', - 'atends', - 'chunked', - 'cookiesarray', - 'didinclude', - 'errstack', - 'headersarray', - 'includestack', - 'outputencoding', - 'sessionsmap', - 'htmlizestacktrace', - 'includes', - 'respond', - 'sendresponse', - 'sendchunk', - 'makecookieyumyum', - 'getinclude', - 'include', - 'includeonce', - 'includelibrary', - 'includelibraryonce', - 'includebytes', - 'addatend', - 'setcookie', - 'addheader', - 'replaceheader', - 'setheaders', - 'rawcontent', - 'redirectto', - 'htmlizestacktracelink', - 'doatbegins', - 'handlelassoappcontent', - 'handlelassoappresponse', - 'domainbody', - 'establisherrorstate', - 'tryfinderrorfile', - 'doatends', - 'dosessions', - 'makenonrelative', - 'pushinclude', - 'popinclude', - 'findinclude', - 'checkdebugging', - 'splitdebuggingthread', - 'matchtriggers', - 'rules', - 'shouldabort', - 'gettrigger', + 'swapbytes', + 'systemid', + 's', + 'tabstr', + 'tabindex', + 'tablecolumnnames', + 'tablecolumns', + 'tablehascolumn', + 'tablemap', + 'tablename', + 'tables', + 'table', + 'tabs', + 'tagname', + 'tag', + 'take', + 'tanh', + 'tan', + 'target', + 'tasks', + 'tb', + 'tell', + 'testexitcode', + 'testlock', + 'thenbydescending', + 'thenby', + 'threadreaddesc', + 'thrownew', + 'throw', + 'timezone', + 'time', + 'titlecase', + 'title', + 'toxmlstring', + 'token', + 'tolower', + 'top', + 'toreflectedfield', + 'toreflectedmethod', + 'total_changes', + 'totitle', + 'touch', + 'toupper', + 'to', + 'trace', + 'trackingid', + 'trait', + 'transform', 'trigger', - 'rule', - 'foo', - 'jsonlabel', - 'jsonhtml', - 'jsonisleaf', - 'acceptpost', - 'csscontent', - 'jscontent' + 'trim', + 'trunk', + 'tryfinderrorfile', + 'trylock', + 
'tryreadobject', + 'typename', + 'type', + 't', + 'uidl', + 'uncompress', + 'unescape', + 'union', + 'uniqueid', + 'unlock', + 'unspool', + 'updategroup', + 'upload', + 'uppercase', + 'up', + 'url', + 'usemap', + 'usercolumns', + 'user', + 'validatesessionstable', + 'valid', + 'values', + 'value', + 'variant', + 'version', + 'waitforcompletion', + 'wait', + 'warnings', + 'weekofmonth', + 'weekofyear', + 'week', + 'where', + 'width', + 'workers', + 'workinginputcolumns', + 'workingkeycolumns', + 'workingreturncolumns', + 'workingsortcolumns', + 'workingkeyfield_name', + 'writebodybytes', + 'writebytes', + 'writeheaderbytes', + 'writeheaderline', + 'writeheader', + 'writeid', + 'writeobjecttcp', + 'writestring', + 'writeobject', + 'write', + 'wroteheaders', + 'xhtml', + 'xmllang', + 'y0', + 'y1', + 'yearwoy', + 'year', + 'yn', + 'zipfilename', + 'zipname', + 'zipfile', + 'zips', + 'zip', + 'zoneoffset', + 'z', ), 'Lasso 8 Member Tags': ( 'accept', @@ -5177,6 +5180,6 @@ MEMBERS = { 'writeunlock', 'xmllang', 'xmlschematype', - 'year' + 'year', ) } -- cgit v1.2.1 From 20a8fa44c8cb49178d5aacd2b00d4dec284cfb5d Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Sun, 23 Nov 2014 17:33:55 +0000 Subject: Tests for eXist's dialect of XQuery Update Facility --- tests/examplefiles/test-exist-update.xq | 75 +++++++++++++++++++++++++++++++++ 1 file changed, 75 insertions(+) create mode 100644 tests/examplefiles/test-exist-update.xq diff --git a/tests/examplefiles/test-exist-update.xq b/tests/examplefiles/test-exist-update.xq new file mode 100644 index 00000000..073cc22b --- /dev/null +++ b/tests/examplefiles/test-exist-update.xq @@ -0,0 +1,75 @@ +xquery version "3.0"; + +declare function local:add-log-message($message as xs:string) as empty-sequence()? 
+{ + let $logfile-collection := "/db/apps/exist101/log" + let $logfile-name := "exist101-log.xml" + let $logfile-full := concat($logfile-collection, '/', $logfile-name) + let $logfile-created := + if(doc-available($logfile-full))then + $logfile-full + else + xmldb:store($logfile-collection, $logfile-name, ) + return + update insert + {$message} + into doc($logfile-full)/* +}; + +declare function local:insert-attributes() { + let $elm as element() := doc('/db/Path/To/Some/Document.xml')/* + return ( + update insert into $elm, + update insert attribute x { 'y' } into $elm/*[last()], + update insert attribute a { 'b' } into $elm/*[last()] + ) +}; + +declare function local:insert-elem() { + let $elm as element() := doc('/db/Path/To/Some/Document.xml')/* + return + update insert into $elm +}; + +declare function local:insert-elem2() { + let $elm as element() := doc('/db/Path/To/Some/Document.xml')/* + let $new-element as element() := + return + update insert $new-element into $elm +}; + +declare function local:insert-single() { + update insert Something happened... into doc('/db/logs/mainlog.xml')/* +}; + + +declare function local:trim-insert() { + let $document := doc('/db/logs/mainlog.xml') + let $newentry := Something happened... 
+ return + update delete $document/*/LogEntry[position() ge 10], + if(exists($document/*/LogEntry[1]))then + update insert $newentry preceding $document/*/LogEntry[1] + else + update insert $newentry into $document/* +}; + + +declare function local:attempt-document-node-insert() { + + (: This is invalid: :) + let $document as document-node() := + return + update insert into $document/* +}; + +declare function local:attempt-attr-update-with-node() { + update replace doc('/db/test/test.xml')/*/@name with + aaabbb +}; + + +(# exist:batch-transaction #) { + update delete $document/*/LogEntry[position() ge 10], + update insert $newentry preceding $document/*/LogEntry[1] +} \ No newline at end of file -- cgit v1.2.1 From 8c7bc623f31f816204ca2d762fc1e55905b7e438 Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Sun, 23 Nov 2014 17:34:26 +0000 Subject: Basic support for eXist's dialect of XQuery Update Facility --- pygments/lexers/webmisc.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py index ef97a254..9f877e66 100644 --- a/pygments/lexers/webmisc.py +++ b/pygments/lexers/webmisc.py @@ -365,6 +365,9 @@ class XQueryLexer(ExtendedRegexLexer): (r'external', Keyword), (r'collation', Keyword, 'uritooperator'), + # eXist specific XQUF + (r'(into|following|preceding|with)', Keyword, 'root'), + # support for current context on rhs of Simple Map Operator (r'\.', Operator), @@ -434,7 +437,7 @@ class XQueryLexer(ExtendedRegexLexer): # Marklogic specific type? 
(r'(processing-instruction)(\s*)(\()', bygroups(Keyword, Text, Punctuation), - ('occurrenceindicator', 'kindtestforpi')), + ('occurrenceindicator', 'kindtestforpi')), (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])', bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'occurrenceindicator'), @@ -726,11 +729,16 @@ class XQueryLexer(ExtendedRegexLexer): (r'then|else', Keyword), + # eXist specific XQUF + (r'(update)(\s*)(insert|delete|replace|value|rename)', bygroups(Keyword, Text, Keyword)), + (r'(into|following|preceding|with)', Keyword), + # Marklogic specific (r'(try)(\s*)', bygroups(Keyword, Text), 'root'), (r'(catch)(\s*)(\()(\$)', bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'), + (r'(@'+qname+')', Name.Attribute), (r'(@'+ncname+')', Name.Attribute), (r'@\*:'+ncname, Name.Attribute), -- cgit v1.2.1 From 0dac37a7b91033511a988982c35e5b805e7a5914 Mon Sep 17 00:00:00 2001 From: Adam Retter Date: Sun, 23 Nov 2014 17:55:00 +0000 Subject: Bugfix to allow whitespace between the start of a pragma and it's QName --- pygments/lexers/webmisc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py index 9f877e66..3a09e767 100644 --- a/pygments/lexers/webmisc.py +++ b/pygments/lexers/webmisc.py @@ -441,7 +441,7 @@ class XQueryLexer(ExtendedRegexLexer): (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])', bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'occurrenceindicator'), - (r'\(\#', Punctuation, 'pragma'), + (r'(\(\#)(\s*)', bygroups(Punctuation, Text), 'pragma'), (r';', Punctuation, '#pop'), (r'then|else', Keyword, '#pop'), (r'(at)(\s+)(' + stringdouble + ')', @@ -708,7 +708,7 @@ class XQueryLexer(ExtendedRegexLexer): (r'(xquery)(\s+)(version)', bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'), - (r'(\(#)', Punctuation, 'pragma'), + (r'(\(#)(\s*)', bygroups(Punctuation, Text), 'pragma'), # sometimes return can occur in root state (r'return', Keyword), -- cgit v1.2.1 From 
fd3cc78b60412885bb9ef231532e9f9f525d9b0f Mon Sep 17 00:00:00 2001 From: EricFromCanada Date: Mon, 24 Nov 2014 12:15:46 -0500 Subject: add currentcapture & givenblock to list of special Lasso builtins --- pygments/lexers/javascript.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py index 178d63de..eb6297c0 100644 --- a/pygments/lexers/javascript.py +++ b/pygments/lexers/javascript.py @@ -594,7 +594,7 @@ class LassoLexer(RegexLexer): bygroups(Name.Builtin.Pseudo, Name.Other.Member)), (r'(->\\?\s*|&\s*)([a-z_][\w.]*(=(?!=))?)', bygroups(Operator, Name.Other.Member)), - (r'(self|inherited)\b', Name.Builtin.Pseudo), + (r'(self|inherited|currentcapture|givenblock)\b', Name.Builtin.Pseudo), (r'-(?!infinity)[a-z_][\w.]*', Name.Attribute), (r'::\s*[a-z_][\w.]*', Name.Label), (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|' -- cgit v1.2.1 From df288d5655c0c5fbd6ed9b4f86b99547c8798e69 Mon Sep 17 00:00:00 2001 From: EricFromCanada Date: Mon, 24 Nov 2014 13:36:46 -0500 Subject: account for when self and givenblock appear as member methods --- pygments/lexers/javascript.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py index eb6297c0..30ec0600 100644 --- a/pygments/lexers/javascript.py +++ b/pygments/lexers/javascript.py @@ -594,7 +594,8 @@ class LassoLexer(RegexLexer): bygroups(Name.Builtin.Pseudo, Name.Other.Member)), (r'(->\\?\s*|&\s*)([a-z_][\w.]*(=(?!=))?)', bygroups(Operator, Name.Other.Member)), - (r'(self|inherited|currentcapture|givenblock)\b', Name.Builtin.Pseudo), + (r'(?)(self|inherited|currentcapture|givenblock)\b', + Name.Builtin.Pseudo), (r'-(?!infinity)[a-z_][\w.]*', Name.Attribute), (r'::\s*[a-z_][\w.]*', Name.Label), (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|' -- cgit v1.2.1 From f2d003fb108fad618f116a0a01747d3d9324c360 Mon Sep 17 00:00:00 2001 From: Florian Hahn 
Date: Tue, 25 Nov 2014 20:51:37 +0100 Subject: Add lexer for Boogie --- pygments/formatters/_mapping.py | 1 - pygments/lexers/_mapping.py | 1 + pygments/lexers/esoteric.py | 51 ++++++++++++++- tests/examplefiles/test.bpl | 140 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 189 insertions(+), 4 deletions(-) create mode 100644 tests/examplefiles/test.bpl diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py index 678c069d..bc4d606e 100755 --- a/pygments/formatters/_mapping.py +++ b/pygments/formatters/_mapping.py @@ -32,7 +32,6 @@ FORMATTERS = { 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.') } - if __name__ == '__main__': # pragma: no cover import sys import os diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 05ada4f3..a0071ac9 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -49,6 +49,7 @@ LEXERS = { 'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), 'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)), 'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)), + 'BoogieLexer': ('pygments.lexers.esoteric', 'Boogie', ('boogie',), ('*.bpl',), ()), 'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), 'BroLexer': ('pygments.lexers.dsls', 'Bro', ('bro',), ('*.bro',), ()), 'BugsLexer': ('pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()), diff --git a/pygments/lexers/esoteric.py b/pygments/lexers/esoteric.py index 7a026aed..3f3e3015 100644 --- a/pygments/lexers/esoteric.py +++ b/pygments/lexers/esoteric.py @@ -9,11 +9,11 @@ :license: BSD, see LICENSE for details. 
""" -from pygments.lexer import RegexLexer, include +from pygments.lexer import RegexLexer, include, words from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Error + Number, Punctuation, Error, Whitespace -__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer'] +__all__ = ['BrainfuckLexer', 'BefungeLexer', 'BoogieLexer', 'RedcodeLexer'] class BrainfuckLexer(RegexLexer): @@ -112,3 +112,48 @@ class RedcodeLexer(RegexLexer): (r'[-+]?\d+', Number.Integer), ], } + + +class BoogieLexer(RegexLexer): + """ + For `Boogie `_ source code. + + .. versionadded:: 2.0 + """ + name = 'Boogie' + aliases = ['boogie'] + filenames = ['*.bpl'] + + tokens = { + 'root': [ + # Whitespace and Comments + (r'\n', Whitespace), + (r'\s+', Whitespace), + (r'//[/!](.*?)\n', Comment.Doc), + (r'//(.*?)\n', Comment.Single), + (r'/\*', Comment.Multiline, 'comment'), + + (words(( + 'axiom', 'break', 'call', 'ensures', 'else', 'exists', 'function', + 'forall', 'if', 'invariant', 'modifies', 'procedure', 'requires', + 'then', 'var', 'while'), + suffix=r'\b'), Keyword), + (words(('const',), suffix=r'\b'), Keyword.Reserved), + + (words(('bool', 'int', 'ref'), suffix=r'\b'), Keyword.Type), + include('numbers'), + (r"(>=|<=|:=|!=|==>|&&|\|\||[+/\-=>*<\[\]])", Operator), + (r"([{}():;,.])", Punctuation), + # Identifier + (r'[a-zA-Z_]\w*', Name), + ], + 'comment': [ + (r'[^*/]+', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[*/]', Comment.Multiline), + ], + 'numbers': [ + (r'[0-9]+', Number.Integer), + ], + } diff --git a/tests/examplefiles/test.bpl b/tests/examplefiles/test.bpl new file mode 100644 index 00000000..add25e1a --- /dev/null +++ b/tests/examplefiles/test.bpl @@ -0,0 +1,140 @@ +/* + * Test Boogie rendering +*/ + +const N: int; +axiom 0 <= N; + +procedure foo() { + break; +} +// array to sort as global array, because partition & quicksort have to +var a: [int] int; +var original: [int] 
int; +var perm: [int] int; + +// Is array a of length N sorted? +function is_sorted(a: [int] int, l: int, r: int): bool +{ + (forall j, k: int :: l <= j && j < k && k <= r ==> a[j] <= a[k]) +} + +// is range a[l:r] unchanged? +function is_unchanged(a: [int] int, b: [int] int, l: int, r: int): bool { + (forall i: int :: l <= i && i <= r ==> a[i] == b[i]) +} + +function is_permutation(a: [int] int, original: [int] int, perm: [int] int, N: int): bool +{ + (forall k: int :: 0 <= k && k < N ==> 0 <= perm[k] && perm[k] < N) && + (forall k, j: int :: 0 <= k && k < j && j < N ==> perm[k] != perm[j]) && + (forall k: int :: 0 <= k && k < N ==> a[k] == original[perm[k]]) +} + +function count(a: [int] int, x: int, N: int) returns (int) +{ if N == 0 then 0 else if a[N-1] == x then count(a, x, N - 1) + 1 else count(a, x, N-1) } + + +/* +function count(a: [int] int, x: int, N: int) returns (int) +{ if N == 0 then 0 else if a[N-1] == x then count(a, x, N - 1) + 1 else count(a, x, N-1) } + +function is_permutation(a: [int] int, b: [int] int, l: int, r: int): bool { + (forall i: int :: l <= i && i <= r ==> count(a, a[i], r+1) == count(b, a[i], r+1)) +} +*/ + +procedure partition(l: int, r: int, N: int) returns (p: int) + modifies a, perm; + requires N > 0; + requires l >= 0 && l < r && r < N; + requires ((r+1) < N) ==> (forall k: int :: (k >= l && k <= r) ==> a[k] <= a[r+1]); + requires ((l-1) >= 0) ==> (forall k: int :: (k >= l && k <= r) ==> a[k] > a[l-1]); + + /* a is a permutation of the original array original */ + requires is_permutation(a, original, perm, N); + + ensures (forall k: int :: (k >= l && k <= p ) ==> a[k] <= a[p]); + ensures (forall k: int :: (k > p && k <= r ) ==> a[k] > a[p]); + ensures p >= l && p <= r; + ensures is_unchanged(a, old(a), 0, l-1); + ensures is_unchanged(a, old(a), r+1, N); + ensures ((r+1) < N) ==> (forall k: int :: (k >= l && k <= r) ==> a[k] <= a[r+1]); + ensures ((l-1) >= 0) ==> (forall k: int :: (k >= l && k <= r) ==> a[k] > a[l-1]); + + /* a 
is a permutation of the original array original */ + ensures is_permutation(a, original, perm, N); +{ + var i: int; + var sv: int; + var pivot: int; + var tmp: int; + + i := l; + sv := l; + pivot := a[r]; + + while (i < r) + invariant i <= r && i >= l; + invariant sv <= i && sv >= l; + invariant pivot == a[r]; + invariant (forall k: int :: (k >= l && k < sv) ==> a[k] <= old(a[r])); + invariant (forall k: int :: (k >= sv && k < i) ==> a[k] > old(a[r])); + + /* a is a permutation of the original array original */ + invariant is_permutation(a, original, perm, N); + + invariant is_unchanged(a, old(a), 0, l-1); + invariant is_unchanged(a, old(a), r+1, N); + invariant ((r+1) < N) ==> (forall k: int :: (k >= l && k <= r) ==> a[k] <= a[r+1]); + invariant ((l-1) >= 0) ==> (forall k: int :: (k >= l && k <= r) ==> a[k] > a[l-1]); + { + if ( a[i] <= pivot) { + tmp := a[i]; a[i] := a[sv]; a[sv] := tmp; + tmp := perm[i]; perm[i] := perm[sv]; perm[sv] := tmp; + sv := sv +1; + } + i := i + 1; + } + + //swap + tmp := a[i]; a[i] := a[sv]; a[sv] := tmp; + tmp := perm[i]; perm[i] := perm[sv]; perm[sv] := tmp; + + p := sv; +} + + +procedure quicksort(l: int, r: int, N: int) + modifies a, perm; + + requires N > 0; + requires l >= 0 && l < r && r < N; + requires ((r+1) < N) ==> (forall k: int :: (k >= l && k <= r) ==> a[k] <= a[r+1]); + requires ((l-1) >= 0) ==> (forall k: int :: (k >= l && k <= r) ==> a[k] > a[l-1]); + + /* a is a permutation of the original array original */ + requires is_permutation(a, original, perm, N); + + ensures ((r+1) < N) ==> (forall k: int :: (k >= l && k <= r) ==> a[k] <= a[r+1]); + ensures ((l-1) >= 0) ==> (forall k: int :: (k >= l && k <= r) ==> a[k] > a[l-1]); + + ensures is_unchanged(a, old(a), 0, l-1); + ensures is_unchanged(a, old(a), r+1, N); + ensures is_sorted(a, l, r); + + /* a is a permutation of the original array original */ + ensures is_permutation(a, original, perm, N); +{ + var p: int; + + call p := partition(l, r, N); + + if ((p-1) > l) { + 
call quicksort(l, p-1, N); + } + + if ((p+1) < r) { + call quicksort(p+1, r, N); + } +} -- cgit v1.2.1 From 4ed3436e685c1ce41706d0cb78d0a4c1b6ffa0ae Mon Sep 17 00:00:00 2001 From: Jorge Bucaran Date: Fri, 28 Nov 2014 18:39:17 +0900 Subject: Add FishShellLexer class, example.fish and rebuild lexer mapping. --- pygments/formatters/_mapping.py | 1 - pygments/lexers/_mapping.py | 1 + pygments/lexers/shell.py | 75 +++++- tests/examplefiles/example.fish | 580 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 655 insertions(+), 2 deletions(-) create mode 100644 tests/examplefiles/example.fish diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py index 678c069d..bc4d606e 100755 --- a/pygments/formatters/_mapping.py +++ b/pygments/formatters/_mapping.py @@ -32,7 +32,6 @@ FORMATTERS = { 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.') } - if __name__ == '__main__': # pragma: no cover import sys import os diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 05ada4f3..c5165f4f 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -124,6 +124,7 @@ LEXERS = { 'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), 'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), 'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), + 'FishShellLexer': ('pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)), 'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)), 'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()), 'GAPLexer': ('pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', 
'*.gap'), ()), diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py index 1bbfd7a7..62213a4a 100644 --- a/pygments/lexers/shell.py +++ b/pygments/lexers/shell.py @@ -18,7 +18,7 @@ from pygments.util import shebang_matches __all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer', - 'PowerShellLexer', 'ShellSessionLexer'] + 'PowerShellLexer', 'ShellSessionLexer', 'FishShellLexer'] line_re = re.compile('.*?\n') @@ -435,3 +435,76 @@ class PowerShellLexer(RegexLexer): (r".", String.Heredoc), ] } + + +class FishShellLexer(RegexLexer): + """ + Lexer for Fish shell scripts. + + .. versionadded:: 2.1.1 + """ + + name = 'Fish' + aliases = ['fish', 'fishshell'] + filenames = ['*.fish', '*.load'] + mimetypes = ['application/x-fish'] + + tokens = { + 'root': [ + include('basic'), + include('data'), + include('interp'), + ], + 'interp': [ + (r'\$\(\(', Keyword, 'math'), + (r'\(', Keyword, 'paren'), + (r'\$#?(\w+|.)', Name.Variable), + ], + 'basic': [ + (r'\b(begin|end|if|else|while|break|for|in|return|function|block|' + r'case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|' + r'cd|count|test)(\s*)\b', + bygroups(Keyword, Text)), + (r'\b(alias|bg|bind|breakpoint|builtin|command|commandline|' + r'complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|' + r'fish_indent|fish_pager|fish_prompt|fish_right_prompt|' + r'fish_update_completions|fishd|funced|funcsave|functions|help|' + r'history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|' + r'pushd|random|read|set_color|source|status|trap|type|ulimit|' + r'umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)', + Name.Builtin), + (r'#.*\n', Comment), + (r'\\[\w\W]', String.Escape), + (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)), + (r'[\[\]()=]', Operator), + (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String), + ], + 'data': [ + (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double), + (r'"', String.Double, 'string'), + (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", 
String.Single), + (r"(?s)'.*?'", String.Single), + (r';', Punctuation), + (r'&|\||\^|<|>', Operator), + (r'\s+', Text), + (r'\d+(?= |\Z)', Number), + (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text), + ], + 'string': [ + (r'"', String.Double, '#pop'), + (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double), + include('interp'), + ], + 'paren': [ + (r'\)', Keyword, '#pop'), + include('root'), + ], + 'math': [ + (r'\)\)', Keyword, '#pop'), + (r'[-+*/%^|&]|\*\*|\|\|', Operator), + (r'\d+#\d+', Number), + (r'\d+#(?! )', Number), + (r'\d+', Number), + include('root'), + ], + } diff --git a/tests/examplefiles/example.fish b/tests/examplefiles/example.fish new file mode 100644 index 00000000..2cfd2c8b --- /dev/null +++ b/tests/examplefiles/example.fish @@ -0,0 +1,580 @@ +# ----------------------------------------------------------------------------- +# Fishshell Samples +# |- Theme / bobthefish +# |- Function / funced +# |- Configuration / config.fish +# ----------------------------------------------------------------------------- + +# name: bobthefish +# +# bobthefish is a Powerline-style, Git-aware fish theme optimized for awesome. +# +# You will probably need a Powerline-patched font for this to work: +# +# https://powerline.readthedocs.org/en/latest/fontpatching.html +# +# I recommend picking one of these: +# +# https://github.com/Lokaltog/powerline-fonts +# +# You can override some default options in your config.fish: +# +# set -g theme_display_user yes +# set -g default_user your_normal_user + +set -g __bobthefish_current_bg NONE + +# Powerline glyphs +set __bobthefish_branch_glyph \uE0A0 +set __bobthefish_ln_glyph \uE0A1 +set __bobthefish_padlock_glyph \uE0A2 +set __bobthefish_right_black_arrow_glyph \uE0B0 +set __bobthefish_right_arrow_glyph \uE0B1 +set __bobthefish_left_black_arrow_glyph \uE0B2 +set __bobthefish_left_arrow_glyph \uE0B3 + +# Additional glyphs +set __bobthefish_detached_glyph \u27A6 +set __bobthefish_nonzero_exit_glyph '! 
' +set __bobthefish_superuser_glyph '$ ' +set __bobthefish_bg_job_glyph '% ' +set __bobthefish_hg_glyph \u263F + +# Python glyphs +set __bobthefish_superscript_glyph \u00B9 \u00B2 \u00B3 +set __bobthefish_virtualenv_glyph \u25F0 +set __bobthefish_pypy_glyph \u1D56 + +# Colors +set __bobthefish_lt_green addc10 +set __bobthefish_med_green 189303 +set __bobthefish_dk_green 0c4801 + +set __bobthefish_lt_red C99 +set __bobthefish_med_red ce000f +set __bobthefish_dk_red 600 + +set __bobthefish_slate_blue 255e87 + +set __bobthefish_lt_orange f6b117 +set __bobthefish_dk_orange 3a2a03 + +set __bobthefish_dk_grey 333 +set __bobthefish_med_grey 999 +set __bobthefish_lt_grey ccc + +set __bobthefish_dk_brown 4d2600 +set __bobthefish_med_brown 803F00 +set __bobthefish_lt_brown BF5E00 + +set __bobthefish_dk_blue 1E2933 +set __bobthefish_med_blue 275379 +set __bobthefish_lt_blue 326D9E + +# =========================== +# Helper methods +# =========================== + +function __bobthefish_in_git -d 'Check whether pwd is inside a git repo' + command which git > /dev/null 2>&1; and command git rev-parse --is-inside-work-tree >/dev/null 2>&1 +end + +function __bobthefish_in_hg -d 'Check whether pwd is inside a hg repo' + command which hg > /dev/null 2>&1; and command hg stat > /dev/null 2>&1 +end + +function __bobthefish_git_branch -d 'Get the current git branch (or commitish)' + set -l ref (command git symbolic-ref HEAD 2> /dev/null) + if [ $status -gt 0 ] + set -l branch (command git show-ref --head -s --abbrev |head -n1 2> /dev/null) + set ref "$__bobthefish_detached_glyph $branch" + end + echo $ref | sed "s-refs/heads/-$__bobthefish_branch_glyph -" +end + +function __bobthefish_hg_branch -d 'Get the current hg branch' + set -l branch (hg branch ^/dev/null) + set -l book " @ "(hg book | grep \* | cut -d\ -f3) + echo "$__bobthefish_branch_glyph $branch$book" +end + +function __bobthefish_pretty_parent -d 'Print a parent directory, shortened to fit the prompt' + echo -n (dirname 
$argv[1]) | sed -e 's|/private||' -e "s|^$HOME|~|" -e 's-/\(\.\{0,1\}[^/]\)\([^/]*\)-/\1-g' -e 's|/$||' +end + +function __bobthefish_git_project_dir -d 'Print the current git project base directory' + command git rev-parse --show-toplevel 2>/dev/null +end + +function __bobthefish_hg_project_dir -d 'Print the current hg project base directory' + command hg root 2>/dev/null +end + +function __bobthefish_project_pwd -d 'Print the working directory relative to project root' + echo "$PWD" | sed -e "s*$argv[1]**g" -e 's*^/**' +end + + +# =========================== +# Segment functions +# =========================== + +function __bobthefish_start_segment -d 'Start a prompt segment' + set_color -b $argv[1] + set_color $argv[2] + if [ "$__bobthefish_current_bg" = 'NONE' ] + # If there's no background, just start one + echo -n ' ' + else + # If there's already a background... + if [ "$argv[1]" = "$__bobthefish_current_bg" ] + # and it's the same color, draw a separator + echo -n "$__bobthefish_right_arrow_glyph " + else + # otherwise, draw the end of the previous segment and the start of the next + set_color $__bobthefish_current_bg + echo -n "$__bobthefish_right_black_arrow_glyph " + set_color $argv[2] + end + end + set __bobthefish_current_bg $argv[1] +end + +function __bobthefish_path_segment -d 'Display a shortened form of a directory' + if test -w "$argv[1]" + __bobthefish_start_segment $__bobthefish_dk_grey $__bobthefish_med_grey + else + __bobthefish_start_segment $__bobthefish_dk_red $__bobthefish_lt_red + end + + set -l directory + set -l parent + + switch "$argv[1]" + case / + set directory '/' + case "$HOME" + set directory '~' + case '*' + set parent (__bobthefish_pretty_parent "$argv[1]") + set parent "$parent/" + set directory (basename "$argv[1]") + end + + test "$parent"; and echo -n -s "$parent" + set_color fff --bold + echo -n "$directory " + set_color normal +end + +function __bobthefish_finish_segments -d 'Close open prompt segments' + if [ -n 
$__bobthefish_current_bg -a $__bobthefish_current_bg != 'NONE' ] + set_color -b normal + set_color $__bobthefish_current_bg + echo -n "$__bobthefish_right_black_arrow_glyph " + set_color normal + end + set -g __bobthefish_current_bg NONE +end + + +# =========================== +# Theme components +# =========================== + +function __bobthefish_prompt_status -d 'Display symbols for a non zero exit status, root and background jobs' + set -l nonzero + set -l superuser + set -l bg_jobs + + # Last exit was nonzero + if [ $status -ne 0 ] + set nonzero $__bobthefish_nonzero_exit_glyph + end + + # if superuser (uid == 0) + set -l uid (id -u $USER) + if [ $uid -eq 0 ] + set superuser $__bobthefish_superuser_glyph + end + + # Jobs display + if [ (jobs -l | wc -l) -gt 0 ] + set bg_jobs $__bobthefish_bg_job_glyph + end + + set -l status_flags "$nonzero$superuser$bg_jobs" + + if test "$nonzero" -o "$superuser" -o "$bg_jobs" + __bobthefish_start_segment fff 000 + if [ "$nonzero" ] + set_color $__bobthefish_med_red --bold + echo -n $__bobthefish_nonzero_exit_glyph + end + + if [ "$superuser" ] + set_color $__bobthefish_med_green --bold + echo -n $__bobthefish_superuser_glyph + end + + if [ "$bg_jobs" ] + set_color $__bobthefish_slate_blue --bold + echo -n $__bobthefish_bg_job_glyph + end + + set_color normal + end +end + +function __bobthefish_prompt_user -d 'Display actual user if different from $default_user' + if [ "$theme_display_user" = 'yes' ] + if [ "$USER" != "$default_user" -o -n "$SSH_CLIENT" ] + __bobthefish_start_segment $__bobthefish_lt_grey $__bobthefish_slate_blue + echo -n -s (whoami) '@' (hostname | cut -d . 
-f 1) ' ' + end + end +end + +function __bobthefish_prompt_hg -d 'Display the actual hg state' + set -l dirty (command hg stat; or echo -n '*') + + set -l flags "$dirty" + test "$flags"; and set flags "" + + set -l flag_bg $__bobthefish_lt_green + set -l flag_fg $__bobthefish_dk_green + if test "$dirty" + set flag_bg $__bobthefish_med_red + set flag_fg fff + end + + __bobthefish_path_segment (__bobthefish_hg_project_dir) + + __bobthefish_start_segment $flag_bg $flag_fg + echo -n -s $__bobthefish_hg_glyph ' ' + + __bobthefish_start_segment $flag_bg $flag_fg + set_color $flag_fg --bold + echo -n -s (__bobthefish_hg_branch) $flags ' ' + set_color normal + + set -l project_pwd (__bobthefish_project_pwd (__bobthefish_hg_project_dir)) + if test "$project_pwd" + if test -w "$PWD" + __bobthefish_start_segment 333 999 + else + __bobthefish_start_segment $__bobthefish_med_red $__bobthefish_lt_red + end + + echo -n -s $project_pwd ' ' + end +end + +# TODO: clean up the fugly $ahead business +function __bobthefish_prompt_git -d 'Display the actual git state' + set -l dirty (command git diff --no-ext-diff --quiet --exit-code; or echo -n '*') + set -l staged (command git diff --cached --no-ext-diff --quiet --exit-code; or echo -n '~') + set -l stashed (command git rev-parse --verify refs/stash > /dev/null 2>&1; and echo -n '$') + set -l ahead (command git branch -v 2> /dev/null | grep -Eo '^\* [^ ]* *[^ ]* *\[[^]]*\]' | grep -Eo '\[[^]]*\]$' | awk 'ORS="";/ahead/ {print "+"} /behind/ {print "-"}' | sed -e 's/+-/±/') + + set -l new (command git ls-files --other --exclude-standard); + test "$new"; and set new '…' + + set -l flags "$dirty$staged$stashed$ahead$new" + test "$flags"; and set flags " $flags" + + set -l flag_bg $__bobthefish_lt_green + set -l flag_fg $__bobthefish_dk_green + if test "$dirty" -o "$staged" + set flag_bg $__bobthefish_med_red + set flag_fg fff + else + if test "$stashed" + set flag_bg $__bobthefish_lt_orange + set flag_fg $__bobthefish_dk_orange + end + 
end + + __bobthefish_path_segment (__bobthefish_git_project_dir) + + __bobthefish_start_segment $flag_bg $flag_fg + set_color $flag_fg --bold + echo -n -s (__bobthefish_git_branch) $flags ' ' + set_color normal + + set -l project_pwd (__bobthefish_project_pwd (__bobthefish_git_project_dir)) + if test "$project_pwd" + if test -w "$PWD" + __bobthefish_start_segment 333 999 + else + __bobthefish_start_segment $__bobthefish_med_red $__bobthefish_lt_red + end + + echo -n -s $project_pwd ' ' + end +end + +function __bobthefish_prompt_dir -d 'Display a shortened form of the current directory' + __bobthefish_path_segment "$PWD" +end + +function __bobthefish_in_virtualfish_virtualenv + set -q VIRTUAL_ENV +end + +function __bobthefish_virtualenv_python_version -d 'Get current python version' + switch (readlink (which python)) + case python2 + echo $__bobthefish_superscript_glyph[2] + case python3 + echo $__bobthefish_superscript_glyph[3] + case pypy + echo $__bobthefish_pypy_glyph + end +end + +function __bobthefish_virtualenv -d 'Get the current virtualenv' + echo $__bobthefish_virtualenv_glyph(__bobthefish_virtualenv_python_version) (basename "$VIRTUAL_ENV") +end + +function __bobthefish_prompt_virtualfish -d "Display activated virtual environment (only for virtualfish, virtualenv's activate.fish changes prompt by itself)" + set flag_bg $__bobthefish_lt_blue + set flag_fg $__bobthefish_dk_blue + __bobthefish_start_segment $flag_bg $flag_fg + set_color $flag_fg --bold + echo -n -s (__bobthefish_virtualenv) $flags ' ' + set_color normal +end + + +# =========================== +# Apply theme +# =========================== + +function fish_prompt -d 'bobthefish, a fish theme optimized for awesome' + __bobthefish_prompt_status + __bobthefish_prompt_user + if __bobthefish_in_virtualfish_virtualenv + __bobthefish_prompt_virtualfish + end + if __bobthefish_in_git # TODO: do this right. 
+ __bobthefish_prompt_git # if something is in both git and hg, check the length of + else if __bobthefish_in_hg # __bobthefish_git_project_dir vs __bobthefish_hg_project_dir + __bobthefish_prompt_hg # and pick the longer of the two. + else + __bobthefish_prompt_dir + end + __bobthefish_finish_segments +end + +# ----------------------------------------------------------------------------- +# funced - edit a function interactively +# +# Synopsis +# +# funced [OPTIONS] NAME +# +# Description +# +# funced provides an interface to edit the definition of the function NAME. +# ----------------------------------------------------------------------------- + +function funced --description 'Edit function definition' + set -l editor $EDITOR + set -l interactive + set -l funcname + while set -q argv[1] + switch $argv[1] + case -h --help + __fish_print_help funced + return 0 + + case -e --editor + set editor $argv[2] + set -e argv[2] + + case -i --interactive + set interactive 1 + + case -- + set funcname $funcname $argv[2] + set -e argv[2] + + case '-*' + set_color red + printf (_ "%s: Unknown option %s\n") funced $argv[1] + set_color normal + return 1 + + case '*' '.*' + set funcname $funcname $argv[1] + end + set -e argv[1] + end + + if begin; set -q funcname[2]; or not test "$funcname[1]"; end + set_color red + _ "funced: You must specify one function name +" + set_color normal + return 1 + end + + set -l init + switch $funcname + case '-*' + set init function -- $funcname\n\nend + case '*' + set init function $funcname\n\nend + end + + # Break editor up to get its first command (i.e. 
discard flags) + if test -n "$editor" + set -l editor_cmd + eval set editor_cmd $editor + if not type -f "$editor_cmd[1]" >/dev/null + _ "funced: The value for \$EDITOR '$editor' could not be used because the command '$editor_cmd[1]' could not be found + " + set editor fish + end + end + + # If no editor is specified, use fish + if test -z "$editor" + set editor fish + end + + if begin; set -q interactive[1]; or test "$editor" = fish; end + set -l IFS + if functions -q -- $funcname + # Shadow IFS here to avoid array splitting in command substitution + set init (functions -- $funcname | fish_indent --no-indent) + end + + set -l prompt 'printf "%s%s%s> " (set_color green) '$funcname' (set_color normal)' + # Unshadow IFS since the fish_title breaks otherwise + set -e IFS + if read -p $prompt -c "$init" -s cmd + # Shadow IFS _again_ to avoid array splitting in command substitution + set -l IFS + eval (echo -n $cmd | fish_indent) + end + return 0 + end + + set -q TMPDIR; or set -l TMPDIR /tmp + set -l tmpname (printf "$TMPDIR/fish_funced_%d_%d.fish" %self (random)) + while test -f $tmpname + set tmpname (printf "$TMPDIR/fish_funced_%d_%d.fish" %self (random)) + end + + if functions -q -- $funcname + functions -- $funcname > $tmpname + else + echo $init > $tmpname + end + if eval $editor $tmpname + . $tmpname + end + set -l stat $status + rm -f $tmpname >/dev/null + return $stat +end + +# ----------------------------------------------------------------------------- +# Main file for fish command completions. This file contains various +# common helper functions for the command completions. All actual +# completions are located in the completions subdirectory. 
+## ----------------------------------------------------------------------------- + +# +# Set default field separators +# + +set -g IFS \n\ \t + +# +# Set default search paths for completions and shellscript functions +# unless they already exist +# + +set -l configdir ~/.config + +if set -q XDG_CONFIG_HOME + set configdir $XDG_CONFIG_HOME +end + +# __fish_datadir, __fish_sysconfdir, __fish_help_dir, __fish_bin_dir +# are expected to have been set up by read_init from fish.cpp + +# Set up function and completion paths. Make sure that the fish +# default functions/completions are included in the respective path. + +if not set -q fish_function_path + set fish_function_path $configdir/fish/functions $__fish_sysconfdir/functions $__fish_datadir/functions +end + +if not contains $__fish_datadir/functions $fish_function_path + set fish_function_path[-1] $__fish_datadir/functions +end + +if not set -q fish_complete_path + set fish_complete_path $configdir/fish/completions $__fish_sysconfdir/completions $__fish_datadir/completions +end + +if not contains $__fish_datadir/completions $fish_complete_path + set fish_complete_path[-1] $__fish_datadir/completions +end + +# +# This is a Solaris-specific test to modify the PATH so that +# Posix-conformant tools are used by default. It is separate from the +# other PATH code because this directory needs to be prepended, not +# appended, since it contains POSIX-compliant replacements for various +# system utilities. +# + +if test -d /usr/xpg4/bin + if not contains /usr/xpg4/bin $PATH + set PATH /usr/xpg4/bin $PATH + end +end + +# +# Add a few common directories to path, if they exists. Note that pure +# console programs like makedep sometimes live in /usr/X11R6/bin, so we +# want this even for text-only terminals. 
+# + +set -l path_list /bin /usr/bin /usr/X11R6/bin /usr/local/bin $__fish_bin_dir + +# Root should also have the sbin directories in the path +switch $USER + case root + set path_list $path_list /sbin /usr/sbin /usr/local/sbin +end + +for i in $path_list + if not contains $i $PATH + if test -d $i + set PATH $PATH $i + end + end +end + +# +# Launch debugger on SIGTRAP +# +function fish_sigtrap_handler --on-signal TRAP --no-scope-shadowing --description "Signal handler for the TRAP signal. Lanches a debug prompt." + breakpoint +end + +# +# Whenever a prompt is displayed, make sure that interactive +# mode-specific initializations have been performed. +# This handler removes itself after it is first called. +# +function __fish_on_interactive --on-event fish_prompt + __fish_config_interactive + functions -e __fish_on_interactive +end -- cgit v1.2.1 From f7c74b1eb3e14fc2fcc636665e3e45373dcad03e Mon Sep 17 00:00:00 2001 From: hoosieree Date: Wed, 3 Dec 2014 03:38:49 +0000 Subject: j.py created online with Bitbucket Adding lexer for the J programming language: jsoftware.com --- pygments/lexers/j.py | 147 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 147 insertions(+) create mode 100644 pygments/lexers/j.py diff --git a/pygments/lexers/j.py b/pygments/lexers/j.py new file mode 100644 index 00000000..0c8a9f22 --- /dev/null +++ b/pygments/lexers/j.py @@ -0,0 +1,147 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.j + ~~~~~~~~~~~~~~~~~ + + Lexer for the J programming language. + +""" + +import re + +from pygments.lexer import RegexLexer, words, include +from pygments.token import Comment, Error, Keyword, Name, Number, Operator, Punctuation, String, Text + +__all__ = ['JLexer'] + +class JLexer(RegexLexer): + """ + For `J `_ source code. 
+ """ + + name = 'J' + aliases = ['j'] + filenames = ['*.ijs'] + mimetypes = ['text/x-j'] + + validName = r'\b[a-zA-Z]\w*' + + tokens = { + 'root': [ + # Shebang script + (r'#!.*$', Comment.Preproc), + + # Comments + (r'NB\..*', Comment.Single), + (r'\n+\s*Note', Comment.Multiline, 'comment'), + (r'\s*Note.*', Comment.Single), + + # Whitespace + (r'\s+', Text), + + # Strings + (r"'", String, 'singlequote'), + + # Definitions + (r'0\s+:\s*0|noun\s+define\s*$', Name.Entity, 'nounDefinition'), + (r'\b(([1-4]|13)\s+:\s*0)|((adverb|conjunction|dyad|monad|verb)\s+define)\b', Name.Function, 'explicitDefinition'), + + # Flow Control + (words(('for_', 'goto_', 'label_'), suffix=validName+'\.'), Name.Label), + (words(( + 'assert', 'break', 'case', 'catch', 'catchd', + 'catcht', 'continue', 'do', 'else', 'elseif', + 'end', 'fcase', 'for', 'if', 'return', + 'select', 'throw', 'try', 'while', 'whilst', + ), suffix='\.'), Name.Label), + + # Variable Names + (validName, Name.Variable), + + # Standard Library + (words(( + 'ARGV', 'CR', 'CRLF', 'DEL', 'Debug', + 'EAV', 'EMPTY', 'FF', 'JVERSION', 'LF', + 'LF2', 'Note', 'TAB', 'alpha17', 'alpha27', + 'apply', 'bind', 'bind', 'boxopen', 'boxopen', + 'boxxopen', 'boxxopen', 'bx', 'clear', 'clear', + 'cutLF', 'cutopen', 'cutopen', 'datatype', 'datatype', + 'def', 'def', 'dfh', 'drop', 'drop', + 'each', 'each', 'echo', 'empty', 'empty', + 'erase', 'erase', 'every', 'every', 'evtloop', + 'exit', 'expand', 'expand', 'fetch', 'fetch', + 'file2url', 'fixdotdot', 'fliprgb', 'getargs', 'getenv', + 'hfd', 'inv', 'inverse', 'inverse', 'iospath', + 'isatty', 'isutf8', 'items', 'leaf', 'leaf', + 'list', 'list', 'nameclass', 'nameclass', 'namelist', + 'namelist', 'names', 'names', 'nc', 'nc', + 'nl', 'nl', 'on', 'on', 'pick', + 'pick', 'rows', 'rows', 'script', 'script', + 'scriptd', 'scriptd', 'sign', 'sign', 'sminfo', + 'smoutput', 'smoutput', 'sort', 'split', 'stderr', + 'stdin', 'stdout', 'table', 'take', 'timespacex', + 'timex', 'tmoutput', 
'toCRLF', 'toHOST', 'toJ', + 'tolower', 'toupper', 'type', 'ucp', 'ucpcount', + 'usleep', 'utf8', 'uucp', + )), Name.Function), + + # Copula + (r'=[.:]', Operator), + + # Builtins + (r'[-=+*#$%@!~`^&";:.,<>{}\[\]\\|/]', Operator), + + # Short Keywords + (r'[abCdDeEfHiIjLMoprtT]\.', Keyword.Reserved), + (r'[aDiLpqsStux]\:', Keyword.Reserved), + (r'(_[0-9])\:', Keyword.Constant), + + # Parens + (r'\(', Punctuation, 'parentheses'), + + # Numbers + include('numbers'), + ], + + 'comment': [ + (r'[^)]', Comment.Multiline), + (r'^\)', Comment.Multiline, '#pop'), + (r'[)]', Comment.Multiline), + ], + + 'explicitDefinition': [ + (r'\b[nmuvxy]\b', Name.Decorator), + include('root'), + (r'[^)]', Name), + (r'^\)', Name.Label, '#pop'), + (r'[)]', Name), + ], + + 'numbers': [ + (r'_{3,}', Error), + (r'\b_{1,2}\b', Number), + (r'_?\d+(\.\d+)?(\s*[ejr]\s*)_?\d+(\.?=\d+)?', Number), + (r'_?\d+\.(?=\d+)', Number.Float), + (r'_?\d+x', Number.Integer.Long), + (r'_?\d+', Number.Integer), + ], + + 'nounDefinition': [ + (r'[^)]', String), + (r'^\)', Name.Label, '#pop'), + (r'[)]', String), + ], + + 'parentheses': [ + (r'\)', Punctuation, '#pop'), + #include('nounDefinition'), + include('explicitDefinition'), + include('root'), + ], + + 'singlequote': [ + (r"[^']", String), + (r"''", String), + (r"'", String, '#pop'), + ], + } \ No newline at end of file -- cgit v1.2.1 From c1ee6b18bd9ccc79dc1b00455bad4bb8f4ed221d Mon Sep 17 00:00:00 2001 From: hoosieree Date: Wed, 3 Dec 2014 03:42:10 +0000 Subject: pacman.ijs created online with Bitbucket add example file - this file comes bundled with J803 tarball --- tests/examplefiles/pacman.ijs | 1107 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 1107 insertions(+) create mode 100644 tests/examplefiles/pacman.ijs diff --git a/tests/examplefiles/pacman.ijs b/tests/examplefiles/pacman.ijs new file mode 100644 index 00000000..f067b6e2 --- /dev/null +++ b/tests/examplefiles/pacman.ijs @@ -0,0 +1,1107 @@ +cocurrent 'jpacman' +coinsert 'j' + 
+BASELIB=: 'base library' +DATAMASK=: 0 +HWNDP=: '' +ISGUI=: 0 +ONLINE=: 0 +PKGDATA=: 0 7$a: +SECTION=: ,<'All' +SYSNAME=: 'Package Manager' +TIMEOUT=: 60 +WWWREV=: REV=: _1 + +IgnoreIOS=: 0 : 0 +api/jni +data/dbman +data/ddmysql +data/odbc +demos/isigraph +demos/wd +demos/wdplot +games/minesweeper +games/nurikabe +games/pousse +games/solitaire +general/pcall +general/sfl +graphics/d3 +graphics/fvj3 +graphics/gl2 +graphics/gnuplot +graphics/graph +graphics/graphviz +graphics/jturtle +graphics/print +graphics/tgsj +graphics/treemap +graphics/viewmat +gui/monthview +gui/util +ide/qt +math/tabula +media/animate +media/gdiplus +media/image3 +media/ming +media/paint +media/wav +) + +Ignore=: 3 : 0'' +if. IFIOS do. + <;._2 IgnoreIOS +else. + <'ide/ios' +end. +) +3 : 0'' +nc=. '--no-cache' +if. IFUNIX do. + if. UNAME-:'Darwin' do. + HTTPCMD=: 'curl -o %O --stderr %L -f -s -S %U' + elseif. do. + if. 'Android'-:UNAME do. nc=. '' + else. try. nc=. nc #~ 1 e. nc E. shell 'wget --help' catch. nc=. '' end. end. + HTTPCMD=: 'wget ',nc,' -O %O -o %L -t %t %U' + end. +else. + if. fexist exe=. jpath '~tools/ftp/wget.exe' do. exe=. '"',exe,'"' else. exe=. 'wget.exe' end. + try. nc=. nc #~ 1 e. nc E. shell exe,' --help' catch. nc=. '' end. + HTTPCMD=: exe,' ',nc,' -O %O -o %L -t %t -T %T %U' + if. fexist UNZIP=: jpath '~tools/zip/unzip.exe' do. UNZIP=: '"',UNZIP,'" -o -C ' else. UNZIP=: 'unzip.exe -o -C ' end. +end. +) +setfiles=: 3 : 0 +ADDCFG=: jpath '~addons/config/' +makedir ADDCFG +ADDCFGIJS=: ADDCFG,'config.ijs' +JRELEASE=: ({.~i.&'/') 9!:14'' +JRELEASE=: 'j802' +LIBTREE=: readtree'' +if. IFIOS do. + WWW=: '/jal/',JRELEASE,'/' +else. + WWW=: 'http://www.jsoftware.com/jal/',JRELEASE,'/' +end. +LIBVER=: jpath '~system/config/version.txt' +) +destroy=: codestroy +CFGFILES=: <;._2 (0 : 0) +addons.txt +library.txt +release.txt +revision.txt +zips.txt +) +LIBDESC=: 0 : 0 +This is the base library of scripts and labs included in the J system. 
+ +Reinstalling or upgrading this library will overwrite files in the system subdirectory. Restart J afterwards. + +Files outside the system subdirectory, such as profile.ijs, are not changed. +) +cutjal=: ([: (* 4 > +/\) ' ' = ]) <;._1 ] +cutjsp=: ([: (* 5 > +/\) ' ' = ]) <;._1 ] +dquote=: '"'&, @ (,&'"') +fname=: #~ ([: *./\. ~:&'/') +hostcmd=: [: 2!:0 '(' , ] , ' || true)'"_ +ischar=: 2 = 3!:0 +rnd=: [ * [: <. 0.5 + %~ +sep2under=: '/' & (I.@('_' = ])}) +termLF=: , (0 < #) # LF -. {: +todel=: ; @: (DEL&, @ (,&(DEL,' ')) each) +tolist=: }. @ ; @: (LF&,@,@":each) +isjpkgout=: ((4 = {:) *. 2 = #)@$ *. 1 = L. +getintro=: ('...' ,~ -&3@[ {. ])^:(<#) +info=: smoutput +getnames=: 3 : 0 +select. L.y +case. 0 do. + if. +/ BASELIB E. y do. + y=. ({:"1 y +y=. (45&getintro &.> idx{y) idx}y +) +deltree=: 3 : 0 +try. + res=. 0< ferase {."1 dirtree y + *./ res,0 #y do. i.0 5 return. end. +m=. _2 |. (LF,')',LF) E. y +r=. _2 }. each m <;._2 y +x=. r i.&> LF +d=. (x+1) }.each r +r=. x {.each r +r=. 3 {."1 cutjal &> ' ' ,each r +x=. d i.&> LF +c=. x {.each d +d=. (x+1) }.each d +r,.c,.d +) +fixjal2=: 3 : 0 +if. 2 > #y do. i.0 2 return. end. +cutjal &> ' ' ,each <;._2 y +) +fixjsp=: 3 : 0 +if. 2 > #y do. i.0 5 return. end. +m=. _2 |. (LF,')',LF) E. y +r=. _2 }. each m <;._2 y +x=. r i.&> LF +d=. (x+1) }.each r +r=. x {.each r +r=. ' ' ,each r +(cutjsp &> r),.d +) +fixlib=: 3 : 0 +msk=. ( #y do. + i.0 6 return. +end. +fls=. <;._2 y +ndx=. fls i.&> ' ' +siz=. <&> 0 ". (ndx+1) }.&> fls +fls=. ndx {.each fls +zps=. <;._2 &> fls ,each '_' +pfm=. 3 {"1 zps +uname=. tolower UNAME +msk=. (uname -: ({.~ i.&'.')) &> pfm +if. 1 ~: +/msk do. msk=. 1,~ }:0*.msk end. +msk # zps,.fls,.siz +) +fixrev=: 3 : 0 +{. _1 ". :: _1: y -. CRLF +) +fixupd=: 3 : 0 +_1 ". :: _1: y -. CRLF +) +fixver=: 3 : 0 +if. ischar y do. + y=. y -. CRLF + y=. 0 ". ' ' (I. y='.') } y +end. +3 {. y +) +fixvers=: 3 : 0 +s=. $y +y=. ,y +3 {."1 [ 0 ". s $ ' ' (I. y e. './') } y +) +fmtjal=: 3 : 0 +if. 0 = #y do. '' return. 
end. +r=. (4 {."1 y) ,each "1 ' ',LF2 +r=. <@; "1 r +; r ,each ({:"1 y) ,each <')',LF +) +fmtjal2=: 3 : 0 +if. 0 = #y do. '' return. end. +; (2 {."1 y) ,each "1 ' ',LF +) +fmtdep=: 3 : 0 +}. ; ',' ,each a: -.~ <;._2 y +) +fmtjsp=: 3 : 0 +if. 0 = #y do. '' return. end. +r=. (4 {."1 y) ,each "1 ' ',LF +r=. <@; "1 r +; r ,each ({:"1 y) ,each <')',LF +) +fmtlib=: 3 : 0 +, 'q<.>,q<.>r<0>3.0,r<0>3.0' 8!:2 y +) +fmtver=: 3 : 0 +if. 0=#y do. '' return. end. +if. ischar y do. y return. end. +}. ; '.' ,each ": each y +) +fmtverlib=: 3 : 0 +fmtver y +) +fixzips=: 3 : 0 +if. 2 > #y do. i.0 5 return. end. +fls=. <;._2 y +ndx=. fls i.&> ' ' +siz=. 0 ". (ndx+1) }.&> fls +fls=. ndx {.each fls +zps=. <;._2 &> fls ,each '_' +zps=. zps,.fls,.<&>siz +pfm=. 3 {"1 zps +and=. (1 e. 'android'&E.) &> pfm +lnx=. (1 e. 'linux'&E.) &> pfm +mac=. (1 e. 'darwin'&E.) &> pfm +win=. mac < (1 e. 'win'&E.) &> pfm + +select. UNAME +case. 'Win' do. + zps=. win # zps +case. 'Linux' do. + zps=. lnx # zps +case. 'Android' do. + zps=. and # zps +case. 'Darwin' do. + zps=. mac # zps + zps=. zps /: 3 {"1 zps + zps=. (~: 3 {."1 zps) # zps +end. + +bit=. IF64 pick '64';'32' +pfm=. 3 {"1 zps +exc=. (1 e. bit&E.) &> pfm +zps=. zps \: exc +zps=. (~: 3 {."1 zps) # zps +fnm=. 0 {"1 zps +lnm=. 1 {"1 zps +ver=. 2 {"1 zps +pfm=. 3 {"1 zps +fls=. 4 {"1 zps +siz=. 5 {"1 zps +nms=. fnm ,each '/' ,each lnm +pfm=. (pfm i.&> '.') {.each pfm +ndx=. \: # &> pfm +sort ndx { nms,.pfm,.ver,.fls,.siz +) +fwritenew=: 4 : 0 +if. x -: fread y do. + 0 +else. + x fwrite y +end. +) +platformparent=: 3 : 0 +((< _2 {. y) e. '32';'64') # _2 }. y +) +makedir=: 1!:5 :: 0: @ < +plural=: 4 : 0 +y,(1=x)#'s' +) +sizefmt=: 3 : 0 +select. +/ y >: 1e3 1e4 1e6 1e7 1e9 +case. 0 do. + (": y), ' byte',(y~:1)#'s' +case. 1 do. + (": 0.1 rnd y%1e3),' KB' +case. 2 do. + (": 1 rnd y%1e3),' KB' +case. 3 do. + (": 0.1 rnd y%1e6),' MB' +case. 4 do. + (": 1 rnd y%1e6),' MB' +case. do. + (": 0.1 rnd y%1e9),' GB' +end. +) +shellcmd=: 3 : 0 +if. IFUNIX do. 
+ hostcmd y +else. + spawn_jtask_ y +end. +) +subdir=: 3 : 0 +if. 0=#y do. '' return. end. +a=. 1!:0 y,'*' +if. 0=#a do. '' return. end. +a=. a #~ '-d' -:"1 [ 1 4 {"1 > 4 {"1 a +( '/mnt/sdcard'-:2!:5'EXTERNAL_STORAGE' do. notarcmd=. 1 end. + end. + if. notarcmd do. + require 'tar' + 'file dir'=. y + if. (i.0 0) -: tar 'x';file;dir do. e=. '' end. + else. + e=. shellcmd 'tar ',((IFIOS+:UNAME-:'Android')#(('Darwin'-:UNAME){::'--no-same-owner --no-same-permissions';'-o -p')),' -xzf ',file,' -C ',dir + end. + if. (0~:FHS) *. ('root'-:2!:5'USER') +. (<2!:5'HOME') e. 0;'/var/root';'/root';'';,'/' do. + shellcmd ::0: 'find ',dir,' -type d -exec chmod a+rx {} \+' + shellcmd ::0: 'find ',dir,' -type f -exec chmod a+r {} \+' + end. +else. + dir=. (_2&}. , '/' -.~ _2&{.) dir + e=. shellcmd UNZIP,' ',file,' -d ',dir +end. +e +) +zipext=: 3 : 0 +y, IFUNIX pick '.zip';'.tar.gz' +) +CHECKADDONSDIR=: 0 : 0 +The addons directory does not exist and cannot be created. + +It is set to: XX. + +You can either create the directory manually, or set a new addons directory in your profile script. +) +CHECKASK=: 0 : 0 +Read catalog from the server using Internet connection now? + +Otherwise the local catalog is used offline. +) +CHECKONLINE=: 0 : 0 +An active Internet connection is needed to install packages. + +Continue only if you have an active Internet connection. + +OK to continue? +) +CHECKREADSVR=: 0 : 0 +An active Internet connection is needed to read the server repository catalog. + +Continue only if you have an active Internet connection. + +OK to continue? +) +CHECKSTARTUP=: 0 : 0 +Setup repository using Internet connection now? + +Select No if not connected, to complete setup later. After Setup is done, repository can be used offline with more options in Tools menu and Preferences dialog. +) +checkaccess=: 3 : 0 +if. testaccess'' do. 1 return. end. +msg=. 'Unable to run Package Manager, as you do not have access to the installation folder.' +if. IFWIN do. + msg=. 
msg,LF2,'To run as Administrator, right-click the J icon, select Run as... and ' + msg=. msg,'then select Adminstrator.' +end. +info msg +0 +) +checkaddonsdir=: 3 : 0 +d=. jpath '~addons' +if. # 1!:0 d do. 1 return. end. +if. 1!:5 :: 0: : 0 do. + ONLINE=: 0 + log 'Using local copy of catalog. See Preferences to change the setting.' + 1 return. + end. + if. 0 = getonline 'Read Catalog from Server';CHECKREADSVR do. 0 return. end. +case. 1 do. + ONLINE=: 1 +case. 2 do. + if. REV >: 0 do. + if. 0 = getonline 'Read Catalog from Server';CHECKASK do. + log 'Using local copy of catalog. See Preferences to change the setting.' + 1 return. + end. + else. + if. 0 = getonline 'Setup Repository';CHECKSTARTUP do. 0 return. end. + end. +end. +log 'Updating server catalog...' +if. 0 = getserver'' do. + ONLINE=: 0 + log 'Working offline using local copy of catalog.' +else. + log 'Done.' +end. +1 +) +checkstatus=: 3 : 0 +if. 0 e. #LIBS do. '' return. end. +msk=. masklib PKGDATA +ups=. pkgups'' +libupm=. 1 e. msk *. ups +msk=. -. msk +addnim=. +/msk *. pkgnew'' +addupm=. +/msk *. pkgups'' +tot=. +/addnim,addupm,libupm +if. 0 = tot do. + 'All available packages are installed and up to date.' return. +end. +select. 0 < addnim,addupm +case. 0 0 do. + msg=. 'Addons are up to date.' +case. 0 1 do. + msg=. 'All addons are installed, ',(":addupm), ' can be upgraded.' +case. 1 0 do. + if. addnim = <:#PKGDATA do. + msg=. 'No addons are installed.' + else. + j=. ' addon',('s'#~1: fsize p do. + if. _1-:msg=. freads q do. + if. 0=#msg=. e do. msg=. 'Unexpected error' end. end. + log 'Connection failed: ',msg + info 'Connection failed:',LF2,msg + r=. 1;msg + ferase p;q +else. + r=. 0;p + ferase q +end. +r +) +httpgetr=: 3 : 0 +res=. httpget y +if. 0 = 0 pick res do. + f=. 1 pick res + txt=. freads f + ferase f + 0;txt +end. +) +install=: 3 : 0 +dat=. getdepend y +'num siz'=. pmview_applycounts dat +many=. 1 < num +msg=. 'Installing ',(":num),' package',many#'s' +msg=. 
msg,' of ',(many#'total '),'size ',sizefmt siz +log msg +installdo 1 {"1 dat +log 'Done.' +readlocal'' +pacman_init 0 +) +install_console=: 3 : 0 + if. -. init_console 'server' do. '' return. end. + pkgs=. getnames y + if. pkgs -: ,<'all' do. pkgs=. 1 {"1 PKGDATA end. + pkgs=. pkgs (e. # [) ~. (<'base library'), ((pkgnew +. pkgups) # 1&{"1@]) PKGDATA + pkgs=. pkgs -. Ignore + pkgs=. getdepend_console pkgs + if. 0 = num=. #pkgs do. '' return. end. + many=. 1 < num + msg=. 'Installing ',(":num),' package',many#'s' + log msg + installdo pkgs + log 'Done.' + readlocal'' + pacman_init '' + checkstatus'' +) +upgrade_console=: 3 : 0 + if. -. init_console 'read' do. '' return. end. + pkgs=. getnames y + if. (0=#pkgs) +. pkgs -: ,<'all' do. pkgs=. 1{"1 PKGDATA end. + pkgs=. pkgs (e. # [) (pkgups # 1&{"1@])PKGDATA + install_console pkgs +) +installdo=: 3 : 0 +msk=. -. y e. :fsize jpath'~addons/',y,'/manifest.ijs' do. + log 'Extraction failed: ',msg + info 'Extraction failed:',LF2,msg + return. +end. +install_addins y +install_config y +) +install_addins=: 3 :0 +fl=. ADDCFG,'addins.txt' +ins=. fixjal2 freads fl +ins=. ins #~ ( txt +msk=. fexist &> ( msk # 1 {"1 PKGDATA) ,. res + res=. (2#LF) joinstring (70&foldtext)&.> res + end. + case. 'showinstalled' do. + dat=. (isjpkgout y) {:: (1 2 3 4 {"1 PKGDATA); (pkgs) ,&.> <'/',x,(x-:'history'){::'.ijs';'.txt' + res=. res #~ msk=. (<_1) ~: res=. fread@jpath &.> fn + if. #res do. + res=. ,((<'== '), &.> msk#pkgs) ,. res + res=. (2#LF) joinstring res + end. +) +remove_console=: 3 : 0 + if. -. init_console 'edit' do. '' return. end. + pkgs=. getnames y + if. pkgs -: ,<'all' do. pkgs=. 1 {"1 PKGDATA end. + pkgs=. pkgs (e. # [) (-.@pkgnew # 1&{"1@]) PKGDATA + pkgs=. pkgs -. . 
fixver freads LIBVER +) +readlocal=: 3 : 0 +readlin'' +ADDONS=: fixjal freads ADDCFG,'addons.txt' +ADDINS=: fixjal2 freads ADDCFG,'addins.txt' +REV=: fixrev freads ADDCFG,'revision.txt' +LASTUPD=: fixupd freads ADDCFG,'lastupdate.txt' +LIBS=: fixlibs freads ADDCFG,'library.txt' +LIB=: fixlib LIBS +ZIPS=: fixzips freads ADDCFG,'zips.txt' +EMPTY +) +readtree=: 3 : 0 +f=. ADDCFG,'tree.txt' +tree=. LF -.~ freads f +if. -. (d),'manifest.ijs' + if. mft -: _1 do. continue. end. + VERSION=: '' + 0!:100 mft + ver=. fmtver fixver VERSION + n=. }: (#p) }. >d + n=. '/' (I.n='\') } n + r=. r,n,' ',ver,LF + s=. s,d +end. +r fwritenew f +s=. (#p) }.each }: each s +install_labs each s +write_config'' +) +refreshjal=: 3 : 0 +'rc p'=. httpget WWW,zipext 'jal' +if. rc do. 0 return. end. +unzip p;ADDCFG +ferase p +if. *./ CFGFILES e. {."1 [ 1!:0 ADDCFG,'*' do. 1 return. end. +msg=. 'Could not install the local repository catalog.' +log msg +info msg +0 +) +updatejal=: 3 : 0 + log 'Updating server catalog...' + if. -. init_console 'server' do. '' return. end. + refreshaddins'' + readlocal'' + pacman_init'' + res=. checklastupdate'' + res,LF,checkstatus'' +) +RELIBMSG=: 0 : 0 +You are now using the XX base library, and can switch to the YY base library. + +This will download the YY version of the base library and overwrite existing files. Addons are not affected. + +OK to switch to the YY library? +) +prelib=: 3 : 0 +old=. LIBTREE +new=. (('stable';'current') i. (2-s) {"1 dat +srv=. fixvers > (3-s) {"1 dat +{."1 /:"2 srv ,:"1 loc +) +pkgnew=: 3 : 0 +dat=. (s=.isjpkgout y){:: PKGDATA; (2-s) {"1 dat +) +pkgups=: pkgnew < pkglater +pkgsearch=: 3 : 0 + +./"1 +./ y E."1&>"(0 _) 1{"1 PKGDATA +) +pkgshow=: 3 : 0 + y e.~ 1{"1 PKGDATA +) +setshowall=: 3 : 0 +PKGDATA=: ( '/') {.each nms +SECTION=: 'All';nms +DATAMASK=: (#PKGDATA) $ 1 +EMPTY +) +init_console=: 3 : 0 + if. 0=#y do. y=. 'read' end. + select. y + fcase. 'edit';'server' do. + if. -. checkaccess'' do. 0 return. end. + case. 'read' do. 
+ if. -. checkaddonsdir'' do. 0 return. end. + setfiles'' + readlocal'' + pacman_init '' + res=. 1 + case. do. res=. 0 + end. + if. y -: 'server' do. res=. getserver'' end. + res +) +jpkg=: 4 : 0 + select. x + case. 'history';'manifest' do. + x showfiles_console y + case. 'install' do. + install_console y + case. 'reinstall' do. + remove_console y + install_console y + case. 'remove' do. + remove_console y + case. ;:'show search showinstalled shownotinstalled showupgrade status' do. + x show_console y + case. 'update' do. + updatejal '' + case. 'upgrade' do. + upgrade_console y + case. do. + msg=. 'Valid options are:',LF + msg=. msg,' history, install, manifest, remove, reinstall, show, search,',LF + msg=. msg,' showinstalled, shownotinstalled, showupgrade, status,',LF + msg,' update, upgrade' + end. +) +do_install=: 3 : 0 +if. -. checkaccess_jpacman_ '' do. return. end. +'update' jpkg '' +select. y +case. 'qtide';'angle' do. + 'install' jpkg 'base library ide/qt' + getqtbin (y-:'angle'){::0;'angle' + msg=. (+/ 2 1 * IFWIN,'Darwin'-:UNAME) pick 'jqt.sh';'the jqt icon';'jqt.cmd' + smoutput 'exit and restart J using ',msg +case. 'all' do. + 'install' jpkg 'all' + getqtbin 0 +end. +) +do_getqtbin=: 3 : 0 +smoutput 'Installing JQt binaries...' +if. 'Linux'-:UNAME do. + if. IFRASPI do. + z=. 'jqt-raspi-32.tar.gz' + else. + z=. 'jqt-',((y-:'slim') pick 'linux';'slim'),'-',(IF64 pick 'x86';'x64'),'.tar.gz' + end. + z1=. 'libjqt.so' +elseif. IFWIN do. + z=. 'jqt-win',((y-:'slim')#'slim'),'-',(IF64 pick 'x86';'x64'),'.zip' + z1=. 'jqt.dll' +elseif. do. + z=. 'jqt-mac',((y-:'slim')#'slim'),'-',(IF64 pick 'x86';'x64'),'.zip' + z1=. 'libjqt.dylib' +end. +'rc p'=. httpget_jpacman_ 'http://www.jsoftware.com/download/j802/qtide/',z +if. rc do. + smoutput 'unable to download: ',z return. +end. +d=. jpath '~bin' +if. IFWIN do. + unzip_jpacman_ p;d +else. + if. 'Linux'-:UNAME do. + if. (0~:FHS) do. + if. IFRASPI do. + d1=. '/usr/lib/arm-linux-gnueabihf/.' + elseif. IF64 do. + d1=. 
'/usr/lib/x86_64-linux-gnu/.' + elseif. do. + d1=. '/usr/lib/i386-linux-gnu/.' + end. + hostcmd_jpacman_ 'cd /usr/bin && tar --no-same-owner --no-same-permissions -xzf ',(dquote p), ' && chmod 755 jqt && chmod 644 libjqt.so && mv libjqt.so ',d1 + else. + hostcmd_jpacman_ 'cd ',(dquote d),' && tar xzf ',(dquote p) + end. + else. + hostcmd_jpacman_ 'unzip -o ',(dquote p),' -d ',dquote d + end. +end. +ferase p +if. #1!:0 ((0~:FHS)*.'Linux'-:UNAME){::(jpath '~bin/',z1);'/usr/bin/jqt' do. + m=. 'Finished install of JQt binaries.' +else. + m=. 'Unable to install JQt binaries.',LF + m=. m,'check that you have write permission for: ',LF,((0~:FHS)*.'Linux'-:UNAME){::(jpath '~bin');'/usr/bin' +end. +smoutput m +if. 'Linux'-:UNAME do. return. end. + +tgt=. jpath IFWIN{::'~install/Qt';'~bin/Qt5Core.dll' +y=. (*#y){::0;y +smoutput 'Installing Qt library...' +if. IFWIN do. + z=. 'qt53-',((y-:'angle') pick 'win';'angle'),'-',((y-:'slim')#'slim-'),(IF64 pick 'x86';'x64'),'.zip' +else. + z=. 'qt53-mac-',((y-:'slim')#'slim-'),(IF64 pick 'x86';'x64'),'.zip' +end. +'rc p'=. httpget_jpacman_ 'http://www.jsoftware.com/download/j802/qtlib/',z +if. rc do. + smoutput 'unable to download: ',z return. +end. +d=. jpath IFWIN{::'~install';'~bin' +if. IFWIN do. + unzip_jpacman_ p;d +else. + hostcmd_jpacman_ 'unzip -o ',(dquote p),' -d ',dquote d +end. +ferase p +if. #1!:0 tgt do. + m=. 'Finished install of Qt binaries.' +else. + m=. 'Unable to install Qt binaries.',LF + m=. m,'check that you have write permission for: ',LF,IFWIN{::tgt;jpath'~bin' +end. +smoutput m + +) +jpkg_z_=: 3 : 0 + 'help' jpkg y + : + a=. conew 'jpacman' + res=. x jpkg__a y + destroy__a'' + res +) +jpkgv_z_=: (<@:>"1@|:^:(0 ~: #))@jpkg \ No newline at end of file -- cgit v1.2.1 -- cgit v1.2.1 From 762540340e7cc1e3c6802aeddfc98b71d3bc9f9c Mon Sep 17 00:00:00 2001 From: hoosieree Date: Wed, 3 Dec 2014 15:42:08 +0000 Subject: Remove duplicates and "error" code for too many underscores. 
--- pygments/lexers/j.py | 36 +++++++++++++++--------------------- 1 file changed, 15 insertions(+), 21 deletions(-) diff --git a/pygments/lexers/j.py b/pygments/lexers/j.py index 0c8a9f22..5eb85332 100644 --- a/pygments/lexers/j.py +++ b/pygments/lexers/j.py @@ -63,26 +63,21 @@ class JLexer(RegexLexer): 'ARGV', 'CR', 'CRLF', 'DEL', 'Debug', 'EAV', 'EMPTY', 'FF', 'JVERSION', 'LF', 'LF2', 'Note', 'TAB', 'alpha17', 'alpha27', - 'apply', 'bind', 'bind', 'boxopen', 'boxopen', - 'boxxopen', 'boxxopen', 'bx', 'clear', 'clear', - 'cutLF', 'cutopen', 'cutopen', 'datatype', 'datatype', - 'def', 'def', 'dfh', 'drop', 'drop', - 'each', 'each', 'echo', 'empty', 'empty', - 'erase', 'erase', 'every', 'every', 'evtloop', - 'exit', 'expand', 'expand', 'fetch', 'fetch', - 'file2url', 'fixdotdot', 'fliprgb', 'getargs', 'getenv', - 'hfd', 'inv', 'inverse', 'inverse', 'iospath', - 'isatty', 'isutf8', 'items', 'leaf', 'leaf', - 'list', 'list', 'nameclass', 'nameclass', 'namelist', - 'namelist', 'names', 'names', 'nc', 'nc', - 'nl', 'nl', 'on', 'on', 'pick', - 'pick', 'rows', 'rows', 'script', 'script', - 'scriptd', 'scriptd', 'sign', 'sign', 'sminfo', - 'smoutput', 'smoutput', 'sort', 'split', 'stderr', - 'stdin', 'stdout', 'table', 'take', 'timespacex', - 'timex', 'tmoutput', 'toCRLF', 'toHOST', 'toJ', - 'tolower', 'toupper', 'type', 'ucp', 'ucpcount', - 'usleep', 'utf8', 'uucp', + 'apply', 'bind', 'boxopen', 'boxxopen', 'bx', + 'clear', 'cutLF', 'cutopen', 'datatype', 'def', + 'dfh', 'drop', 'each', 'echo', 'empty', + 'erase', 'every', 'evtloop', 'exit', 'expand', + 'fetch', 'file2url', 'fixdotdot', 'fliprgb', 'getargs', + 'getenv', 'hfd', 'inv', 'inverse', 'iospath', + 'isatty', 'isutf8', 'items', 'leaf', 'list', + 'nameclass', 'namelist', 'namelist', 'names', 'nc', + 'nl', 'on', 'pick', 'pick', 'rows', + 'script', 'scriptd', 'sign', 'sminfo', 'smoutput', + 'sort', 'split', 'stderr', 'stdin', 'stdout', + 'table', 'take', 'timespacex', 'timex', 'tmoutput', + 'toCRLF', 'toHOST', 
'toJ', 'tolower', 'toupper', + 'type', 'ucp', 'ucpcount', 'usleep', 'utf8', + 'uucp', )), Name.Function), # Copula @@ -118,7 +113,6 @@ class JLexer(RegexLexer): ], 'numbers': [ - (r'_{3,}', Error), (r'\b_{1,2}\b', Number), (r'_?\d+(\.\d+)?(\s*[ejr]\s*)_?\d+(\.?=\d+)?', Number), (r'_?\d+\.(?=\d+)', Number.Float), -- cgit v1.2.1 From d303d58a5c1f90c707bd5ccaf31a30ef6f9221bb Mon Sep 17 00:00:00 2001 From: "Dan Michael O. Hegg?" Date: Sun, 7 Dec 2014 16:12:27 +0100 Subject: Add TurtleLexer --- pygments/lexers/_mapping.py | 1 + pygments/lexers/rdf.py | 98 +++++++++++++++++++++++++++++++++++++++++- tests/examplefiles/example.ttl | 43 ++++++++++++++++++ 3 files changed, 140 insertions(+), 2 deletions(-) create mode 100644 tests/examplefiles/example.ttl diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 05ada4f3..b30eef97 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -344,6 +344,7 @@ LEXERS = { 'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), 'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), 'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), + 'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')), 'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)), 'TwigLexer': ('pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)), 'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)), diff --git a/pygments/lexers/rdf.py b/pygments/lexers/rdf.py index 292b1ae2..05393a6e 100644 --- a/pygments/lexers/rdf.py +++ b/pygments/lexers/rdf.py @@ -12,10 +12,10 @@ import re from pygments.lexer import RegexLexer, bygroups, default -from pygments.token import Keyword, Punctuation, String, Number, 
Operator, \ +from pygments.token import Keyword, Punctuation, String, Number, Operator, Generic, \ Whitespace, Name, Literal, Comment, Text -__all__ = ['SparqlLexer'] +__all__ = ['SparqlLexer', 'TurtleLexer'] class SparqlLexer(RegexLexer): @@ -97,3 +97,97 @@ class SparqlLexer(RegexLexer): default('#pop:2'), ], } + + +class TurtleLexer(RegexLexer): + """ + Lexer for `Turtle `_ data language. + + .. versionadded:: 2.0 + """ + name = 'Turtle' + aliases = ['turtle'] + filenames = ['*.ttl'] + mimetypes = ['text/turtle', 'application/x-turtle'] + + flags = re.IGNORECASE + + patterns = { + 'PNAME_NS': r'((?:[a-zA-Z][\w-]*)?\:)', # Simplified character range + 'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)' + } + + # PNAME_NS PN_LOCAL (with simplified character range) + patterns['PrefixedName'] = r'%(PNAME_NS)s([a-z][\w-]*)' % patterns + + tokens = { + 'root': [ + (r'\s+', Whitespace), + + # Base / prefix + (r'(@base|BASE)(\s+)%(IRIREF)s(\s*)(\.?)' % patterns, + bygroups(Keyword, Whitespace, Name.Variable, Whitespace, + Punctuation)), + (r'(@prefix|PREFIX)(\s+)%(PNAME_NS)s(\s+)%(IRIREF)s(\s*)(\.?)' % patterns, + bygroups(Keyword, Whitespace, Name.Namespace, Whitespace, + Name.Variable, Whitespace, Punctuation)), + + # The shorthand predicate 'a' + (r'(?<=\s)a(?=\s)', Keyword.Type), + + # IRIREF + (r'%(IRIREF)s' % patterns, Name.Variable), + + # PrefixedName + (r'%(PrefixedName)s' % patterns, + bygroups(Name.Namespace, Name.Tag)), + + # Comment + (r'#[^\n]+', Comment), + + (r'\b(true|false)\b', Literal), + (r'[+\-]?\d*\.\d+', Number.Float), + (r'[+\-]?\d*(:?\.\d+)?E[+\-]?\d+', Number.Float), + (r'[+\-]?\d+', Number.Integer), + (r'[\[\](){}.;,:^]', Punctuation), + + (r'"""', String, 'triple-double-quoted-string'), + (r'"', String, 'single-double-quoted-string'), + (r"'''", String, 'triple-single-quoted-string'), + (r"'", String, 'single-single-quoted-string'), + ], + 'triple-double-quoted-string': [ + (r'"""', String, 'end-of-string'), + (r'[^\\]+', String), + (r'\\', String, 
'string-escape'), + ], + 'single-double-quoted-string': [ + (r'"', String, 'end-of-string'), + (r'[^"\\\n]+', String), + (r'\\', String, 'string-escape'), + ], + 'triple-single-quoted-string': [ + (r"'''", String, 'end-of-string'), + (r'[^\\]+', String), + (r'\\', String, 'string-escape'), + ], + 'single-single-quoted-string': [ + (r"'", String, 'end-of-string'), + (r"[^'\\\n]+", String), + (r'\\', String, 'string-escape'), + ], + 'string-escape': [ + (r'.', String, '#pop'), + ], + 'end-of-string': [ + + (r'(@)([a-zA-Z]+(:?-[a-zA-Z0-9]+)*)', + bygroups(Operator, Generic.Emph), '#pop:2'), + + (r'(\^\^)%(IRIREF)s' % patterns, bygroups(Operator, Generic.Emph), '#pop:2'), + (r'(\^\^)%(PrefixedName)s' % patterns, bygroups(Operator, Generic.Emph, Generic.Emph), '#pop:2'), + + default('#pop:2'), + + ], + } diff --git a/tests/examplefiles/example.ttl b/tests/examplefiles/example.ttl new file mode 100644 index 00000000..e524d86c --- /dev/null +++ b/tests/examplefiles/example.ttl @@ -0,0 +1,43 @@ +@base . +@prefix dcterms: . @prefix xs: . +@prefix mads: . +@prefix skos: . +@PREFIX dc: # SPARQL-like syntax is OK +@prefix : . # empty prefix is OK + + . + +<#doc1> a <#document> + dc:creator "Smith", "Jones"; + :knows + dcterms:hasPart [ # A comment + dc:title "Some title", "Some other title"; + dc:creator "برشت، برتولد"@ar; + dc:date "2009"^^xs:date + ]; + dc:title "A sample title", 23.0; + dcterms:isPartOf [ + dc:title "another", "title" + ] ; + :exists true . + + a mads:Topic, + skos:Concept ; + dcterms:created "2014-08-25"^^xsd:date ; + dcterms:modified "2014-11-12"^^xsd:date ; + dcterms:identifier "REAL006839" ; + skos:prefLabel "Flerbørstemarker"@nb, + "Polychaeta"@la ; + skos:altLabel "Flerbørsteormer"@nb, + "Mangebørstemark"@nb, + "Mangebørsteormer"@nb, + "Havbørsteormer"@nb, + "Havbørstemarker"@nb, + "Polycheter"@nb. + skos:inScheme ; + skos:narrower , + , + ; + skos:exactMatch , + , + . 
-- cgit v1.2.1 From c3a05e1fccbca85ff91f879c4419fc7afadf132d Mon Sep 17 00:00:00 2001 From: hoosieree Date: Mon, 8 Dec 2014 19:02:09 +0000 Subject: add lexer for Elm language --- pygments/lexers/elm.py | 112 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 112 insertions(+) create mode 100644 pygments/lexers/elm.py diff --git a/pygments/lexers/elm.py b/pygments/lexers/elm.py new file mode 100644 index 00000000..1312fb1e --- /dev/null +++ b/pygments/lexers/elm.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.elm + ~~~~~~~~~~~~~~~~~~~ + + Lexer for the Elm programming language. + +""" + +import re + +from pygments.lexer import bygroups, RegexLexer, words, include, using +from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, String, Text, Error + +__all__ = ['ElmLexer'] + +class ElmLexer(RegexLexer): + """ + For `Elm `_ source code. + """ + + name = 'Elm' + aliases = ['elm'] + filenames = ['*.elm'] + mimetypes = ['text/x-elm'] + + validName = r'[a-z_][a-zA-Z_\']*' + + specialName = r'^main ' + + builtinOps = ( + '~', '||', '|>', '|', '`', '^', '\\', '\'', '>>', '>=', '>', '==', + '=', '<~', '<|', '<=', '<<', '<-', '<', '::', ':', '/=', '//', '/', + '..', '.', '->', '-', '++', '+', '*', '&&', '%', + ) + + reservedWords = words(( + 'if', 'then', 'else', 'case', 'of', 'let', 'in', 'type', 'module', 'where', + 'import', 'as', 'hiding', 'open', 'export', 'foreign', 'deriving', 'port', + ), suffix=r'\b') + + tokens = { + 'root': [ + + # Comments + (r'{-', Comment.Multiline, 'comment'), + (r'--.*', Comment.Single), + + # Whitespace + (r'\s+', Text), + + # Strings + (r'"', String, 'doublequote'), + + # Modules + (r'^\s*module\s*', Keyword.Namespace, 'imports'), + + # Imports + (r'^\s*import\s*', Keyword.Namespace, 'imports'), + + # Keywords + (reservedWords, Keyword.Reserved), + + # Types + (r'[A-Z]\w*', Keyword.Type), + + # Main + (specialName, Keyword.Reserved), + + # Prefix Operators + (words((builtinOps), 
prefix=r'\(', suffix=r'\)'), Name.Function), + + # Infix Operators + (words((builtinOps)), Name.Function), + + # Numbers + include('numbers'), + + # Variable Names + (validName, Name.Variable), + + # Parens + (r'[,\(\)\[\]{}]', Punctuation), + + ], + + 'comment': [ + (r'-(?!})', Comment.Multiline), + (r'{-', Comment.Multiline, 'comment'), + (r'^@docs .*\n', Comment.Preproc), + (r'^# .*\n', Comment.Preproc), + (r'^ {4}.*\n', String.Doc), + (r'[^-}]', Comment.Multiline), + (r'-}', Comment.Multiline, '#pop'), + ], + + 'imports': [ + (r'\w+(\.\w+)*', Name.Class, '#pop'), + ], + + 'numbers': [ + (r'_?\d+\.(?=\d+)', Number.Float), + (r'_?\d+', Number.Integer), + ], + + 'doublequote': [ + (r'\\u[0-9a-fA-F]\{4}', String.Escape), + (r'\\[nrfvb\\\"]', String.Escape), + (r'[^"]', String), + (r'"', String, '#pop'), + ], + } -- cgit v1.2.1 From e6b3b68ba1633f1a523673519ddd236fd13f8675 Mon Sep 17 00:00:00 2001 From: hoosieree Date: Mon, 8 Dec 2014 19:04:37 +0000 Subject: add test file for Elm lexer --- tests/examplefiles/Basics.elm | 464 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 464 insertions(+) create mode 100644 tests/examplefiles/Basics.elm diff --git a/tests/examplefiles/Basics.elm b/tests/examplefiles/Basics.elm new file mode 100644 index 00000000..777534e8 --- /dev/null +++ b/tests/examplefiles/Basics.elm @@ -0,0 +1,464 @@ +module Basics where + +{-| Tons of useful functions that get imported by default. + +The following libraries also get imported by default: List, Signal, +Text, Maybe, Time, Graphics.Element, Color, Graphics.Collage. + +# Equality +@docs (==), (/=) + +# Comparison + +{- Comments may be nested! + {- Comments may be nested! -} + -} + +These functions only work on `comparable` types. This includes numbers, +characters, strings, lists of comparable things, and tuples of comparable +things. Note that tuples with 7 or more elements are not comparable; why +are your tuples so big? 
+ +@docs (<), (>), (<=), (>=), max, min, Order, compare + +# Booleans +@docs not, (&&), (||), xor, otherwise + +# Mathematics +@docs (+), (-), (*), (/), (^), (//), rem, (%), negate, abs, sqrt, clamp, logBase, e + +# Trigonometry +@docs pi, cos, sin, tan, acos, asin, atan, atan2 + +# Number Conversions +@docs round, floor, ceiling, truncate, toFloat + +# Angle Conversions +All angle conversions result in “standard Elm angles” +which happen to be radians. + +@docs degrees, radians, turns + +# Polar Coordinates +@docs toPolar, fromPolar + +# Floating Point Checks +@docs isNaN, isInfinite + +# Strings and Lists +@docs toString, (++) + +# Tuples +@docs fst, snd + +# Higher-Order Helpers +@docs identity, always, (<|), (|>), (<<), (>>), flip, curry, uncurry + +-} + +import Native.Basics +import Native.Ports +import Native.Show +import Native.Utils +import Native.Runtime + + +{-| Convert radians to standard Elm angles (radians). -} +radians : Float -> Float +radians t = t + +{-| Convert degrees to standard Elm angles (radians). -} +degrees : Float -> Float +degrees = Native.Basics.degrees + +{-| Convert turns to standard Elm angles (radians). +One turn is equal to 360°. +-} +turns : Float -> Float +turns = Native.Basics.turns + +{-| Convert polar coordinates (r,θ) to cartesian coordinates (x,y). -} +fromPolar : (Float,Float) -> (Float,Float) +fromPolar = Native.Basics.fromPolar + +{-| Convert cartesian coordinates (x,y) to polar coordinates (r,θ). -} +toPolar : (Float,Float) -> (Float,Float) +toPolar = Native.Basics.toPolar + +(+) : number -> number -> number +(+) = Native.Basics.add + +(-) : number -> number -> number +(-) = Native.Basics.sub + +(*) : number -> number -> number +(*) = Native.Basics.mul + +{-| Floating point division. -} +(/) : Float -> Float -> Float +(/) = Native.Basics.floatDiv + +infixl 6 + +infixl 6 - +infixl 7 * +infixl 7 / +infixl 8 ^ + +infixl 7 // +infixl 7 % +infixl 7 `rem` + +{-| Integer division. The remainder is discarded. 
-} +(//) : Int -> Int -> Int +(//) = Native.Basics.div + +{-| Find the remainder after dividing one number by another. + + 7 `rem` 2 == 1 + -1 `rem` 4 == -1 +-} +rem : Int -> Int -> Int +rem = Native.Basics.rem + +{-| Perform [modular arithmetic](http://en.wikipedia.org/wiki/Modular_arithmetic). + + 7 % 2 == 1 + -1 % 4 == 3 +-} +(%) : Int -> Int -> Int +(%) = Native.Basics.mod + +{-| Exponentiation + + 3^2 == 9` +-} +(^) : number -> number -> number +(^) = Native.Basics.exp + +cos : Float -> Float +cos = Native.Basics.cos + +sin : Float -> Float +sin = Native.Basics.sin + +tan : Float -> Float +tan = Native.Basics.tan + +acos : Float -> Float +acos = Native.Basics.acos + +asin : Float -> Float +asin = Native.Basics.asin + +{-| You probably do not want to use this. It takes `(y/x)` as the +argument, so there is no way to know whether the negative signs comes from +the `y` or `x`. Thus, the resulting angle is always between π/2 and -π/2 +(in quadrants I and IV). You probably want to use `atan2` instead. +-} +atan : Float -> Float +atan = Native.Basics.atan + +{-| This helps you find the angle of a cartesian coordinate. +You will almost certainly want to use this instead of `atan`. +So `atan2 y x` computes *atan(y/x)* but also keeps track of which +quadrant the angle should really be in. The result will be between +π and -π, giving you the full range of angles. +-} +atan2 : Float -> Float -> Float +atan2 = Native.Basics.atan2 + +{-| Take the square root of a number. -} +sqrt : Float -> Float +sqrt = Native.Basics.sqrt + +{-| Negate a number. + + negate 42 == -42 + negate -42 == 42 + negate 0 == 0 +-} +negate : number -> number +negate = Native.Basics.negate + +{-| Take the absolute value of a number. -} +abs : number -> number +abs = Native.Basics.abs + +{-| Calculate the logarithm of a number with a given base. 
+ + logBase 10 100 == 2 + logBase 2 256 == 8 +-} +logBase : Float -> Float -> Float +logBase = Native.Basics.logBase + +{-| Clamps a number within a given range. With the expression +`clamp 100 200 x` the results are as follows: + + 100 if x < 100 + x if 100 <= x < 200 + 200 if 200 <= x +-} +clamp : number -> number -> number -> number +clamp = Native.Basics.clamp + +{-| An approximation of pi. -} +pi : Float +pi = Native.Basics.pi + +{-| An approximation of e. -} +e : Float +e = Native.Basics.e + +(==) : a -> a -> Bool +(==) = Native.Basics.eq + +(/=) : a -> a -> Bool +(/=) = Native.Basics.neq + +(<) : comparable -> comparable -> Bool +(<) = Native.Basics.lt + +(>) : comparable -> comparable -> Bool +(>) = Native.Basics.gt + +(<=) : comparable -> comparable -> Bool +(<=) = Native.Basics.le + +(>=) : comparable -> comparable -> Bool +(>=) = Native.Basics.ge + +infix 4 == +infix 4 /= +infix 4 < +infix 4 > +infix 4 <= +infix 4 >= + +{-| Compare any two comparable values. Comparable values include `String`, `Char`, +`Int`, `Float`, `Time`, or a list or tuple containing comparable values. +These are also the only values that work as `Dict` keys or `Set` members. +-} +compare : comparable -> comparable -> Order +compare = Native.Basics.compare + +{-| Represents the relative ordering of two things. +The relations are less than, equal to, and greater than. +-} +type Order = LT | EQ | GT + +{-| Find the smaller of two comparables. -} +min : comparable -> comparable -> comparable +min = Native.Basics.min + +{-| Find the larger of two comparables. -} +max : comparable -> comparable -> comparable +max = Native.Basics.max + +{-| The logical AND operator. `True` if both inputs are `True`. +This operator short-circuits to `False` if the first argument is `False`. +-} +(&&) : Bool -> Bool -> Bool +(&&) = Native.Basics.and + +{-| The logical OR operator. `True` if one or both inputs are `True`. +This operator short-circuits to `True` if the first argument is True. 
+-} +(||) : Bool -> Bool -> Bool +(||) = Native.Basics.or + +infixr 3 && +infixr 2 || + +{-| The exclusive-or operator. `True` if exactly one input is `True`. -} +xor : Bool -> Bool -> Bool +xor = Native.Basics.xor + +{-| Negate a boolean value. + + not True == False + not False == True +-} +not : Bool -> Bool +not = Native.Basics.not + +{-| Equal to `True`. Useful as the last case of a multi-way-if. -} +otherwise : Bool +otherwise = True + + +-- Conversions + +{-| Round a number to the nearest integer. -} +round : Float -> Int +round = Native.Basics.round + +{-| Truncate a number, rounding towards zero. -} +truncate : Float -> Int +truncate = Native.Basics.truncate + +{-| Floor function, rounding down. -} +floor : Float -> Int +floor = Native.Basics.floor + +{-| Ceiling function, rounding up. -} +ceiling : Float -> Int +ceiling = Native.Basics.ceiling + +{-| Convert an integer into a float. -} +toFloat : Int -> Float +toFloat = Native.Basics.toFloat + +{- | Determine whether a float is an undefined or unrepresentable number. +NaN stands for *not a number* and it is [a standardized part of floating point +numbers](http://en.wikipedia.org/wiki/NaN). + + isNaN (0/0) == True + isNaN (sqrt -1) == True + isNaN (1/0) == False -- infinity is a number + isNaN 1 == False +-} +isNaN : Float -> Bool +isNaN = Native.Basics.isNaN + +{- | Determine whether a float is positive or negative infinity. + + isInfinite (0/0) == False + isInfinite (sqrt -1) == False + isInfinite (1/0) == True + isInfinite 1 == False + +Notice that NaN is not infinite! For float `n` to be finite implies that +`not (isInfinite n || isNaN n)` evaluates to `True`. +-} +isInfinite : Float -> Bool +isInfinite = Native.Basics.isInfinite + + +{-| Turn any kind of value into a string. + + toString 42 == "42" + toString [1,2] == "[1,2]" +-} +toString : a -> String +toString = Native.Show.toString + + +{-| Put two appendable things together. This includes strings, lists, and text. 
+ + "hello" ++ "world" == "helloworld" + [1,1,2] ++ [3,5,8] == [1,1,2,3,5,8] +-} +(++) : appendable -> appendable -> appendable +(++) = Native.Utils.append + +infixr 5 ++ + + +-- Function Helpers + +{-| Function composition, passing results along in the suggested direction. For +example, the following code checks if the square root of a number is odd: + + not << isEven << sqrt + +You can think of this operator as equivalent to the following: + + (g << f) == (\x -> g (f x)) + +So our example expands out to something like this: + + \n -> not (isEven (sqrt n)) +-} +(<<) : (b -> c) -> (a -> b) -> (a -> c) +(<<) g f x = g (f x) + +{-| Function composition, passing results along in the suggested direction. For +example, the following code checks if the square root of a number is odd: + + sqrt >> isEven >> not + +This direction of function composition seems less pleasant than `(<<)` which +reads nicely in expressions like: `filter (not << isRegistered) students` +-} +(>>) : (a -> b) -> (b -> c) -> (a -> c) +(>>) f g x = g (f x) + +{-| Forward function application `x |> f == f x`. This function is useful +for avoiding parenthesis and writing code in a more natural way. +Consider the following code to create a pentagon: + + scale 2 (move (10,10) (filled blue (ngon 5 30))) + +This can also be written as: + + ngon 5 30 + |> filled blue + |> move (10,10) + |> scale 2 +-} +(|>) : a -> (a -> b) -> b +x |> f = f x + +{-| Backward function application `f <| x == f x`. This function is useful for +avoiding parenthesis. Consider the following code to create a text element: + + text (monospace (toText "code")) + +This can also be written as: + + text << monospace <| toText "code" +-} +(<|) : (a -> b) -> a -> b +f <| x = f x + +infixr 9 << +infixl 9 >> +infixr 0 <| +infixl 0 |> + +{-| Given a value, returns exactly the same value. This is called +[the identity function](http://en.wikipedia.org/wiki/Identity_function). 
+-} +identity : a -> a +identity x = x + +{-| Create a [constant function](http://en.wikipedia.org/wiki/Constant_function), +a function that *always* returns the same value regardless of what input you give. +It is defined as: + + always a b = a + +It totally ignores the second argument, so `always 42` is a function that always +returns 42. When you are dealing with higher-order functions, this comes in +handy more often than you might expect. For example, creating a zeroed out list +of length ten would be: + + map (always 0) [0..9] +-} +always : a -> b -> a +always a _ = a + +{-| Given a 2-tuple, returns the first value. -} +fst : (a,b) -> a +fst (a,_) = a + +{-| Given a 2-tuple, returns the second value. -} +snd : (a,b) -> b +snd (_,b) = b + +{-| Flip the order of the first two arguments to a function. -} +flip : (a -> b -> c) -> (b -> a -> c) +flip f b a = f a b + +{-| Change how arguments are passed to a function. +This splits paired arguments into two separate arguments. +-} +curry : ((a,b) -> c) -> a -> b -> c +curry f a b = f (a,b) + +{-| Change how arguments are passed to a function. +This combines two arguments into a single pair. +-} +uncurry : (a -> b -> c) -> (a,b) -> c +uncurry f (a,b) = f a b \ No newline at end of file -- cgit v1.2.1 From bcb9feaa3c40fff88b26138c237ec05284fbd0cc Mon Sep 17 00:00:00 2001 From: hoosieree Date: Mon, 8 Dec 2014 19:43:03 +0000 Subject: elm.py deleted online with Bitbucket --- pygments/lexers/elm.py | 112 ------------------------------------------------- 1 file changed, 112 deletions(-) delete mode 100644 pygments/lexers/elm.py diff --git a/pygments/lexers/elm.py b/pygments/lexers/elm.py deleted file mode 100644 index 1312fb1e..00000000 --- a/pygments/lexers/elm.py +++ /dev/null @@ -1,112 +0,0 @@ -# -*- coding: utf-8 -*- -""" - pygments.lexers.elm - ~~~~~~~~~~~~~~~~~~~ - - Lexer for the Elm programming language. 
- -""" - -import re - -from pygments.lexer import bygroups, RegexLexer, words, include, using -from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, String, Text, Error - -__all__ = ['ElmLexer'] - -class ElmLexer(RegexLexer): - """ - For `Elm `_ source code. - """ - - name = 'Elm' - aliases = ['elm'] - filenames = ['*.elm'] - mimetypes = ['text/x-elm'] - - validName = r'[a-z_][a-zA-Z_\']*' - - specialName = r'^main ' - - builtinOps = ( - '~', '||', '|>', '|', '`', '^', '\\', '\'', '>>', '>=', '>', '==', - '=', '<~', '<|', '<=', '<<', '<-', '<', '::', ':', '/=', '//', '/', - '..', '.', '->', '-', '++', '+', '*', '&&', '%', - ) - - reservedWords = words(( - 'if', 'then', 'else', 'case', 'of', 'let', 'in', 'type', 'module', 'where', - 'import', 'as', 'hiding', 'open', 'export', 'foreign', 'deriving', 'port', - ), suffix=r'\b') - - tokens = { - 'root': [ - - # Comments - (r'{-', Comment.Multiline, 'comment'), - (r'--.*', Comment.Single), - - # Whitespace - (r'\s+', Text), - - # Strings - (r'"', String, 'doublequote'), - - # Modules - (r'^\s*module\s*', Keyword.Namespace, 'imports'), - - # Imports - (r'^\s*import\s*', Keyword.Namespace, 'imports'), - - # Keywords - (reservedWords, Keyword.Reserved), - - # Types - (r'[A-Z]\w*', Keyword.Type), - - # Main - (specialName, Keyword.Reserved), - - # Prefix Operators - (words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function), - - # Infix Operators - (words((builtinOps)), Name.Function), - - # Numbers - include('numbers'), - - # Variable Names - (validName, Name.Variable), - - # Parens - (r'[,\(\)\[\]{}]', Punctuation), - - ], - - 'comment': [ - (r'-(?!})', Comment.Multiline), - (r'{-', Comment.Multiline, 'comment'), - (r'^@docs .*\n', Comment.Preproc), - (r'^# .*\n', Comment.Preproc), - (r'^ {4}.*\n', String.Doc), - (r'[^-}]', Comment.Multiline), - (r'-}', Comment.Multiline, '#pop'), - ], - - 'imports': [ - (r'\w+(\.\w+)*', Name.Class, '#pop'), - ], - - 'numbers': [ - (r'_?\d+\.(?=\d+)', 
Number.Float), - (r'_?\d+', Number.Integer), - ], - - 'doublequote': [ - (r'\\u[0-9a-fA-F]\{4}', String.Escape), - (r'\\[nrfvb\\\"]', String.Escape), - (r'[^"]', String), - (r'"', String, '#pop'), - ], - } -- cgit v1.2.1 From a33060645454ef55a614e3095894ce316fb5124d Mon Sep 17 00:00:00 2001 From: hoosieree Date: Mon, 8 Dec 2014 19:43:21 +0000 Subject: Basics.elm deleted online with Bitbucket --- tests/examplefiles/Basics.elm | 464 ------------------------------------------ 1 file changed, 464 deletions(-) delete mode 100644 tests/examplefiles/Basics.elm diff --git a/tests/examplefiles/Basics.elm b/tests/examplefiles/Basics.elm deleted file mode 100644 index 777534e8..00000000 --- a/tests/examplefiles/Basics.elm +++ /dev/null @@ -1,464 +0,0 @@ -module Basics where - -{-| Tons of useful functions that get imported by default. - -The following libraries also get imported by default: List, Signal, -Text, Maybe, Time, Graphics.Element, Color, Graphics.Collage. - -# Equality -@docs (==), (/=) - -# Comparison - -{- Comments may be nested! - {- Comments may be nested! -} - -} - -These functions only work on `comparable` types. This includes numbers, -characters, strings, lists of comparable things, and tuples of comparable -things. Note that tuples with 7 or more elements are not comparable; why -are your tuples so big? - -@docs (<), (>), (<=), (>=), max, min, Order, compare - -# Booleans -@docs not, (&&), (||), xor, otherwise - -# Mathematics -@docs (+), (-), (*), (/), (^), (//), rem, (%), negate, abs, sqrt, clamp, logBase, e - -# Trigonometry -@docs pi, cos, sin, tan, acos, asin, atan, atan2 - -# Number Conversions -@docs round, floor, ceiling, truncate, toFloat - -# Angle Conversions -All angle conversions result in “standard Elm angles” -which happen to be radians. 
- -@docs degrees, radians, turns - -# Polar Coordinates -@docs toPolar, fromPolar - -# Floating Point Checks -@docs isNaN, isInfinite - -# Strings and Lists -@docs toString, (++) - -# Tuples -@docs fst, snd - -# Higher-Order Helpers -@docs identity, always, (<|), (|>), (<<), (>>), flip, curry, uncurry - --} - -import Native.Basics -import Native.Ports -import Native.Show -import Native.Utils -import Native.Runtime - - -{-| Convert radians to standard Elm angles (radians). -} -radians : Float -> Float -radians t = t - -{-| Convert degrees to standard Elm angles (radians). -} -degrees : Float -> Float -degrees = Native.Basics.degrees - -{-| Convert turns to standard Elm angles (radians). -One turn is equal to 360°. --} -turns : Float -> Float -turns = Native.Basics.turns - -{-| Convert polar coordinates (r,θ) to cartesian coordinates (x,y). -} -fromPolar : (Float,Float) -> (Float,Float) -fromPolar = Native.Basics.fromPolar - -{-| Convert cartesian coordinates (x,y) to polar coordinates (r,θ). -} -toPolar : (Float,Float) -> (Float,Float) -toPolar = Native.Basics.toPolar - -(+) : number -> number -> number -(+) = Native.Basics.add - -(-) : number -> number -> number -(-) = Native.Basics.sub - -(*) : number -> number -> number -(*) = Native.Basics.mul - -{-| Floating point division. -} -(/) : Float -> Float -> Float -(/) = Native.Basics.floatDiv - -infixl 6 + -infixl 6 - -infixl 7 * -infixl 7 / -infixl 8 ^ - -infixl 7 // -infixl 7 % -infixl 7 `rem` - -{-| Integer division. The remainder is discarded. -} -(//) : Int -> Int -> Int -(//) = Native.Basics.div - -{-| Find the remainder after dividing one number by another. - - 7 `rem` 2 == 1 - -1 `rem` 4 == -1 --} -rem : Int -> Int -> Int -rem = Native.Basics.rem - -{-| Perform [modular arithmetic](http://en.wikipedia.org/wiki/Modular_arithmetic). 
- - 7 % 2 == 1 - -1 % 4 == 3 --} -(%) : Int -> Int -> Int -(%) = Native.Basics.mod - -{-| Exponentiation - - 3^2 == 9` --} -(^) : number -> number -> number -(^) = Native.Basics.exp - -cos : Float -> Float -cos = Native.Basics.cos - -sin : Float -> Float -sin = Native.Basics.sin - -tan : Float -> Float -tan = Native.Basics.tan - -acos : Float -> Float -acos = Native.Basics.acos - -asin : Float -> Float -asin = Native.Basics.asin - -{-| You probably do not want to use this. It takes `(y/x)` as the -argument, so there is no way to know whether the negative signs comes from -the `y` or `x`. Thus, the resulting angle is always between π/2 and -π/2 -(in quadrants I and IV). You probably want to use `atan2` instead. --} -atan : Float -> Float -atan = Native.Basics.atan - -{-| This helps you find the angle of a cartesian coordinate. -You will almost certainly want to use this instead of `atan`. -So `atan2 y x` computes *atan(y/x)* but also keeps track of which -quadrant the angle should really be in. The result will be between -π and -π, giving you the full range of angles. --} -atan2 : Float -> Float -> Float -atan2 = Native.Basics.atan2 - -{-| Take the square root of a number. -} -sqrt : Float -> Float -sqrt = Native.Basics.sqrt - -{-| Negate a number. - - negate 42 == -42 - negate -42 == 42 - negate 0 == 0 --} -negate : number -> number -negate = Native.Basics.negate - -{-| Take the absolute value of a number. -} -abs : number -> number -abs = Native.Basics.abs - -{-| Calculate the logarithm of a number with a given base. - - logBase 10 100 == 2 - logBase 2 256 == 8 --} -logBase : Float -> Float -> Float -logBase = Native.Basics.logBase - -{-| Clamps a number within a given range. With the expression -`clamp 100 200 x` the results are as follows: - - 100 if x < 100 - x if 100 <= x < 200 - 200 if 200 <= x --} -clamp : number -> number -> number -> number -clamp = Native.Basics.clamp - -{-| An approximation of pi. 
-} -pi : Float -pi = Native.Basics.pi - -{-| An approximation of e. -} -e : Float -e = Native.Basics.e - -(==) : a -> a -> Bool -(==) = Native.Basics.eq - -(/=) : a -> a -> Bool -(/=) = Native.Basics.neq - -(<) : comparable -> comparable -> Bool -(<) = Native.Basics.lt - -(>) : comparable -> comparable -> Bool -(>) = Native.Basics.gt - -(<=) : comparable -> comparable -> Bool -(<=) = Native.Basics.le - -(>=) : comparable -> comparable -> Bool -(>=) = Native.Basics.ge - -infix 4 == -infix 4 /= -infix 4 < -infix 4 > -infix 4 <= -infix 4 >= - -{-| Compare any two comparable values. Comparable values include `String`, `Char`, -`Int`, `Float`, `Time`, or a list or tuple containing comparable values. -These are also the only values that work as `Dict` keys or `Set` members. --} -compare : comparable -> comparable -> Order -compare = Native.Basics.compare - -{-| Represents the relative ordering of two things. -The relations are less than, equal to, and greater than. --} -type Order = LT | EQ | GT - -{-| Find the smaller of two comparables. -} -min : comparable -> comparable -> comparable -min = Native.Basics.min - -{-| Find the larger of two comparables. -} -max : comparable -> comparable -> comparable -max = Native.Basics.max - -{-| The logical AND operator. `True` if both inputs are `True`. -This operator short-circuits to `False` if the first argument is `False`. --} -(&&) : Bool -> Bool -> Bool -(&&) = Native.Basics.and - -{-| The logical OR operator. `True` if one or both inputs are `True`. -This operator short-circuits to `True` if the first argument is True. --} -(||) : Bool -> Bool -> Bool -(||) = Native.Basics.or - -infixr 3 && -infixr 2 || - -{-| The exclusive-or operator. `True` if exactly one input is `True`. -} -xor : Bool -> Bool -> Bool -xor = Native.Basics.xor - -{-| Negate a boolean value. - - not True == False - not False == True --} -not : Bool -> Bool -not = Native.Basics.not - -{-| Equal to `True`. Useful as the last case of a multi-way-if. 
-} -otherwise : Bool -otherwise = True - - --- Conversions - -{-| Round a number to the nearest integer. -} -round : Float -> Int -round = Native.Basics.round - -{-| Truncate a number, rounding towards zero. -} -truncate : Float -> Int -truncate = Native.Basics.truncate - -{-| Floor function, rounding down. -} -floor : Float -> Int -floor = Native.Basics.floor - -{-| Ceiling function, rounding up. -} -ceiling : Float -> Int -ceiling = Native.Basics.ceiling - -{-| Convert an integer into a float. -} -toFloat : Int -> Float -toFloat = Native.Basics.toFloat - -{- | Determine whether a float is an undefined or unrepresentable number. -NaN stands for *not a number* and it is [a standardized part of floating point -numbers](http://en.wikipedia.org/wiki/NaN). - - isNaN (0/0) == True - isNaN (sqrt -1) == True - isNaN (1/0) == False -- infinity is a number - isNaN 1 == False --} -isNaN : Float -> Bool -isNaN = Native.Basics.isNaN - -{- | Determine whether a float is positive or negative infinity. - - isInfinite (0/0) == False - isInfinite (sqrt -1) == False - isInfinite (1/0) == True - isInfinite 1 == False - -Notice that NaN is not infinite! For float `n` to be finite implies that -`not (isInfinite n || isNaN n)` evaluates to `True`. --} -isInfinite : Float -> Bool -isInfinite = Native.Basics.isInfinite - - -{-| Turn any kind of value into a string. - - toString 42 == "42" - toString [1,2] == "[1,2]" --} -toString : a -> String -toString = Native.Show.toString - - -{-| Put two appendable things together. This includes strings, lists, and text. - - "hello" ++ "world" == "helloworld" - [1,1,2] ++ [3,5,8] == [1,1,2,3,5,8] --} -(++) : appendable -> appendable -> appendable -(++) = Native.Utils.append - -infixr 5 ++ - - --- Function Helpers - -{-| Function composition, passing results along in the suggested direction. 
For -example, the following code checks if the square root of a number is odd: - - not << isEven << sqrt - -You can think of this operator as equivalent to the following: - - (g << f) == (\x -> g (f x)) - -So our example expands out to something like this: - - \n -> not (isEven (sqrt n)) --} -(<<) : (b -> c) -> (a -> b) -> (a -> c) -(<<) g f x = g (f x) - -{-| Function composition, passing results along in the suggested direction. For -example, the following code checks if the square root of a number is odd: - - sqrt >> isEven >> not - -This direction of function composition seems less pleasant than `(<<)` which -reads nicely in expressions like: `filter (not << isRegistered) students` --} -(>>) : (a -> b) -> (b -> c) -> (a -> c) -(>>) f g x = g (f x) - -{-| Forward function application `x |> f == f x`. This function is useful -for avoiding parenthesis and writing code in a more natural way. -Consider the following code to create a pentagon: - - scale 2 (move (10,10) (filled blue (ngon 5 30))) - -This can also be written as: - - ngon 5 30 - |> filled blue - |> move (10,10) - |> scale 2 --} -(|>) : a -> (a -> b) -> b -x |> f = f x - -{-| Backward function application `f <| x == f x`. This function is useful for -avoiding parenthesis. Consider the following code to create a text element: - - text (monospace (toText "code")) - -This can also be written as: - - text << monospace <| toText "code" --} -(<|) : (a -> b) -> a -> b -f <| x = f x - -infixr 9 << -infixl 9 >> -infixr 0 <| -infixl 0 |> - -{-| Given a value, returns exactly the same value. This is called -[the identity function](http://en.wikipedia.org/wiki/Identity_function). --} -identity : a -> a -identity x = x - -{-| Create a [constant function](http://en.wikipedia.org/wiki/Constant_function), -a function that *always* returns the same value regardless of what input you give. -It is defined as: - - always a b = a - -It totally ignores the second argument, so `always 42` is a function that always -returns 42. 
When you are dealing with higher-order functions, this comes in -handy more often than you might expect. For example, creating a zeroed out list -of length ten would be: - - map (always 0) [0..9] --} -always : a -> b -> a -always a _ = a - -{-| Given a 2-tuple, returns the first value. -} -fst : (a,b) -> a -fst (a,_) = a - -{-| Given a 2-tuple, returns the second value. -} -snd : (a,b) -> b -snd (_,b) = b - -{-| Flip the order of the first two arguments to a function. -} -flip : (a -> b -> c) -> (b -> a -> c) -flip f b a = f a b - -{-| Change how arguments are passed to a function. -This splits paired arguments into two separate arguments. --} -curry : ((a,b) -> c) -> a -> b -> c -curry f a b = f (a,b) - -{-| Change how arguments are passed to a function. -This combines two arguments into a single pair. --} -uncurry : (a -> b -> c) -> (a,b) -> c -uncurry f (a,b) = f a b \ No newline at end of file -- cgit v1.2.1 From 73d4107c2d40fa616671fd32821ca450072e702b Mon Sep 17 00:00:00 2001 From: Matt Layman Date: Sun, 14 Dec 2014 23:31:59 -0500 Subject: Add a lexer for the Test Anything Protocol (TAP). --- AUTHORS | 1 + pygments/formatters/_mapping.py | 1 - pygments/lexers/_mapping.py | 1 + pygments/lexers/tap.py | 91 +++++++++++++++++++++++++++++++++++++++++ tests/examplefiles/example.tap | 37 +++++++++++++++++ 5 files changed, 130 insertions(+), 1 deletion(-) create mode 100644 pygments/lexers/tap.py create mode 100644 tests/examplefiles/example.tap diff --git a/AUTHORS b/AUTHORS index 06119231..1f95aecf 100644 --- a/AUTHORS +++ b/AUTHORS @@ -91,6 +91,7 @@ Other contributors, listed alphabetically, are: * Gerd Kurzbach -- Modelica lexer * Jon Larimer, Google Inc. 
-- Smali lexer * Olov Lassus -- Dart lexer +* Matt Layman -- TAP lexer * Sylvestre Ledru -- Scilab lexer * Mark Lee -- Vala lexer * Ben Mabey -- Gherkin lexer diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py index 678c069d..bc4d606e 100755 --- a/pygments/formatters/_mapping.py +++ b/pygments/formatters/_mapping.py @@ -32,7 +32,6 @@ FORMATTERS = { 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.') } - if __name__ == '__main__': # pragma: no cover import sys import os diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 05ada4f3..3134b81f 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -336,6 +336,7 @@ LEXERS = { 'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)), 'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), 'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), + 'TAPLexer': ('pygments.lexers.tap', 'TAP', ('tap',), ('*.tap',), ()), 'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()), 'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')), 'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)), diff --git a/pygments/lexers/tap.py b/pygments/lexers/tap.py new file mode 100644 index 00000000..7d965c80 --- /dev/null +++ b/pygments/lexers/tap.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.tap + ~~~~~~~~~~~~~~~~~~~ + + Lexer for the Test Anything Protocol (TAP). + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +from pygments.lexer import bygroups, RegexLexer +from pygments.token import Comment, Generic, Keyword, Name, Number, Text + +__all__ = ['TAPLexer'] + + +class TAPLexer(RegexLexer): + """ + For Test Anything Protocol (TAP) output. + + .. versionadded:: 2.1 + """ + name = 'TAP' + aliases = ['tap'] + filenames = ['*.tap'] + + tokens = { + 'root': [ + # A TAP version may be specified. + (r'^TAP version \d+\n', Name.Namespace), + + # Specify a plan with a plan line. + (r'^1..\d+', Keyword.Declaration, 'plan'), + + # A test failure + (r'^(not ok)([^\S\n]*)(\d*)', + bygroups(Generic.Error, Text, Number.Integer), 'test'), + + # A test success + (r'^(ok)([^\S\n]*)(\d*)', + bygroups(Keyword.Reserved, Text, Number.Integer), 'test'), + + # Diagnostics start with a hash. + (r'^#.*\n', Comment), + + # TAP's version of an abort statement. + (r'^Bail out!.*\n', Generic.Error), + + # TAP ignores any unrecognized lines. + (r'^.*\n', Text), + ], + 'plan': [ + # Consume whitespace (but not newline). + (r'[^\S\n]+', Text), + + # A plan may have a directive with it. + (r'#', Comment, 'directive'), + + # Or it could just end. + (r'\n', Comment, '#pop'), + + # Anything else is wrong. + (r'.*\n', Generic.Error, '#pop'), + ], + 'test': [ + # Consume whitespace (but not newline). + (r'[^\S\n]+', Text), + + # A test may have a directive with it. + (r'#', Comment, 'directive'), + + (r'\S+', Text), + + (r'\n', Text, '#pop'), + ], + 'directive': [ + # Consume whitespace (but not newline). + (r'[^\S\n]+', Comment), + + # Extract todo items. + (r'(?i)\bTODO\b', Comment.Preproc), + + # Extract skip items. + (r'(?i)\bSKIP\S*', Comment.Preproc), + + (r'\S+', Comment), + + (r'\n', Comment, '#pop:2'), + ], + } diff --git a/tests/examplefiles/example.tap b/tests/examplefiles/example.tap new file mode 100644 index 00000000..a70a239d --- /dev/null +++ b/tests/examplefiles/example.tap @@ -0,0 +1,37 @@ +TAP version 13 +1..42 +1..13 A plan only supports directives so this text is wrong. 
+ok 1 A normal test line includes a number. +ok But a test line may also omit a number. + +A random line that does not look like a test or diagnostic should be ignored. + No matter how it is spaced out. + +Or if it is a totally blank line. + +not ok 3 This is a failing test line. + +# Diagnostics are any lines... +# ... beginning with a hash character. + +not ok 4 There are a couple of directives. # TODO is one of those directives. +not ok 5 # TODO: is invalid because the directive must be followed by a space. +ok 6 - Another directive line # toDO is not case sensitive. + +ok 7 A line that is a # SKIP +ok 8 Tests can be # skipped as long as the directive has the "skip" stem. +ok 9 The TODO directive must be followed by a space, but # skip: is valid. +1..0 # Skipped directives can show on a plan line too. + +Bail out! is a special phrase emitted when a TAP file aborted. + +not ok 10 Having TAP version 13 in the middle of a line is not a TAP version. +not ok 11 Having Bail out! in the middle of a line is not a bail out. + +ok 12 Here is an empty directive. # + +# The most basic valid test lines. +ok +not ok + +ok 15 Only the test number should look different. Not another 42, for example. 
-- cgit v1.2.1 From 662155ee212d1e3508defeb1ee199ad8cf172e24 Mon Sep 17 00:00:00 2001 From: David Corbett Date: Tue, 23 Dec 2014 16:46:26 -0500 Subject: Improve BatchLexer --- pygments/lexers/shell.py | 326 ++++++++++++++++++++++++++++++++++----- tests/examplefiles/batchfile.bat | 49 ------ tests/examplefiles/example.bat | 205 ++++++++++++++++++++++++ 3 files changed, 494 insertions(+), 86 deletions(-) delete mode 100644 tests/examplefiles/batchfile.bat create mode 100644 tests/examplefiles/example.bat diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py index 1bbfd7a7..0a89aeaa 100644 --- a/pygments/lexers/shell.py +++ b/pygments/lexers/shell.py @@ -11,7 +11,8 @@ import re -from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, include +from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, \ + include, default, this, using, words from pygments.token import Punctuation, \ Text, Comment, Operator, Keyword, Name, String, Number, Generic from pygments.util import shebang_matches @@ -229,46 +230,297 @@ class BatchLexer(RegexLexer): flags = re.MULTILINE | re.IGNORECASE + _nl = r'\n\x1a' + _punct = r'&<>|' + _ws = r'\t\v\f\r ,;=\xa0' + _space = r'(?:(?:(?:\^[%s])?[%s])+)' % (_nl, _ws) + _keyword_terminator = (r'(?=(?:\^[%s]?)?[%s+./:[\\\]]|[%s%s(])' % + (_nl, _ws, _nl, _punct)) + _token_terminator = r'(?=\^?[%s]|[%s%s])' % (_ws, _punct, _nl) + _start_label = r'((?:(?<=^[^:])|^[^:]?)[%s]*)(:)' % _ws + _label = r'(?:(?:[^%s%s%s+:^]|\^[%s]?[\w\W])*)' % (_nl, _punct, _ws, _nl) + _label_compound = (r'(?:(?:[^%s%s%s+:^)]|\^[%s]?[^)])*)' % + (_nl, _punct, _ws, _nl)) + _number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator + _opword = r'(?:equ|geq|gtr|leq|lss|neq)' + _string = r'(?:"[^%s"]*"?)' % _nl + _variable = (r'(?:(?:%%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|' + r'[^%%:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%%%s^]|' + r'\^[^%%%s])[^=%s]*=(?:[^%%%s^]|\^[^%%%s])*)?)?%%))|' + 
r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:' + r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' % + (_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl)) + _core_token = r'(?:(?:(?:\^[%s]?)?[^%s%s%s])+)' % (_nl, _nl, _punct, _ws) + _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^%s%s%s)])+)' % (_nl, _nl, + _punct, _ws) + _token = r'(?:[%s]+|%s)' % (_punct, _core_token) + _token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound) + _stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' % + (_punct, _string, _variable, _core_token)) + + def _make_begin_state(compound, _core_token=_core_token, + _core_token_compound=_core_token_compound, + _keyword_terminator=_keyword_terminator, + _nl=_nl, _punct=_punct, _string=_string, + _space=_space, _start_label=_start_label, + _stoken=_stoken, _token_terminator=_token_terminator, + _variable=_variable, _ws=_ws): + rest = '(?:%s|%s|[^"%%%s%s%s])*' % (_string, _variable, _nl, _punct, + ')' if compound else '') + rest_of_line = r'(?:(?:[^%s^]|\^[%s]?[\w\W])*)' % (_nl, _nl) + rest_of_line_compound = r'(?:(?:[^%s^)]|\^[%s]?[^)])*)' % (_nl, _nl) + set_space = r'((?:(?:\^[%s]?)?[^\S\n])*)' % _nl + suffix = '' + if compound: + _keyword_terminator = r'(?:(?=\))|%s)' % _keyword_terminator + _token_terminator = r'(?:(?=\))|%s)' % _token_terminator + suffix = '/compound' + return [ + ((r'\)', Punctuation, '#pop') if compound else + (r'\)((?=\()|%s)%s' % (_token_terminator, rest_of_line), + Comment.Single)), + (r'(?=%s)' % _start_label, Text, 'follow%s' % suffix), + (_space, using(this, state='text')), + include('redirect%s' % suffix), + (r'[%s]+' % _nl, Text), + (r'\(', Punctuation, 'root/compound'), + (r'@+', Punctuation), + (r'((?:for|if|rem)(?:(?=(?:\^[%s]?)?/)|(?:(?!\^)|' + r'(?<=m))(?:(?=\()|%s)))(%s?%s?(?:\^[%s]?)?/(?:\^[%s]?)?\?)' % + (_nl, _token_terminator, _space, + _core_token_compound if compound else _core_token, _nl, _nl), + bygroups(Keyword, using(this, state='text')), + 'follow%s' % suffix), + 
(r'(goto%s)(%s(?:\^[%s]?)?/(?:\^[%s]?)?\?%s)' % + (_keyword_terminator, rest, _nl, _nl, rest), + bygroups(Keyword, using(this, state='text')), + 'follow%s' % suffix), + (words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy', + 'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase', + 'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move', + 'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren', + 'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time', + 'title', 'type', 'ver', 'verify', 'vol'), + suffix=_keyword_terminator), Keyword, 'follow%s' % suffix), + (r'(call)(%s?)(:)' % _space, + bygroups(Keyword, using(this, state='text'), Punctuation), + 'call%s' % suffix), + (r'call%s' % _keyword_terminator, Keyword), + (r'(for%s(?!\^))(%s)(/f%s)' % + (_token_terminator, _space, _token_terminator), + bygroups(Keyword, using(this, state='text'), Keyword), + ('for/f', 'for')), + (r'(for%s(?!\^))(%s)(/l%s)' % + (_token_terminator, _space, _token_terminator), + bygroups(Keyword, using(this, state='text'), Keyword), + ('for/l', 'for')), + (r'for%s(?!\^)' % _token_terminator, Keyword, ('for2', 'for')), + (r'(goto%s)(%s?)(:?)' % (_keyword_terminator, _space), + bygroups(Keyword, using(this, state='text'), Punctuation), + 'label%s' % suffix), + (r'(if(?:(?=\()|%s)(?!\^))(%s?)((?:/i%s)?)(%s?)((?:not%s)?)(%s?)' % + (_token_terminator, _space, _token_terminator, _space, + _token_terminator, _space), + bygroups(Keyword, using(this, state='text'), Keyword, + using(this, state='text'), Keyword, + using(this, state='text')), ('(?', 'if')), + (r'rem(((?=\()|%s)%s?%s?.*|%s%s)' % + (_token_terminator, _space, _stoken, _keyword_terminator, + rest_of_line_compound if compound else rest_of_line), + Comment.Single, 'follow%s' % suffix), + (r'(set%s)%s(/a)' % (_keyword_terminator, set_space), + bygroups(Keyword, using(this, state='text'), Keyword), + 'arithmetic%s' % suffix), + (r'(set%s)%s((?:/p)?)%s((?:(?:(?:\^[%s]?)?[^"%s%s^=%s]|' + r'\^[%s]?[^"=])+)?)((?:(?:\^[%s]?)?=)?)' 
% + (_keyword_terminator, set_space, set_space, _nl, _nl, _punct, + ')' if compound else '', _nl, _nl), + bygroups(Keyword, using(this, state='text'), Keyword, + using(this, state='text'), using(this, state='variable'), + Punctuation), + 'follow%s' % suffix), + default('follow%s' % suffix) + ] + + def _make_follow_state(compound, _label=_label, + _label_compound=_label_compound, _nl=_nl, + _space=_space, _start_label=_start_label, + _token=_token, _token_compound=_token_compound, + _ws=_ws): + suffix = '/compound' if compound else '' + state = [] + if compound: + state.append((r'(?=\))', Text, '#pop')) + state += [ + (r'%s([%s]*)(%s)(.*)' % + (_start_label, _ws, _label_compound if compound else _label), + bygroups(Text, Punctuation, Text, Name.Label, Comment.Single)), + include('redirect%s' % suffix), + (r'(?=[%s])' % _nl, Text, '#pop'), + (r'\|\|?|&&?', Punctuation, '#pop'), + include('text') + ] + return state + + def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct, + _string=_string, _variable=_variable, _ws=_ws): + op = r'=+\-*/!~' + state = [] + if compound: + state.append((r'(?=\))', Text, '#pop')) + state += [ + (r'0[0-7]+', Number.Oct), + (r'0x[\da-f]+', Number.Hex), + (r'\d+', Number.Integer), + (r'[(),]+', Punctuation), + (r'([%s]|%%|\^\^)+' % op, Operator), + (r'(%s|%s|(\^[%s]?)?[^()%s%%^"%s%s%s]|\^[%s%s]?%s)+' % + (_string, _variable, _nl, op, _nl, _punct, _ws, _nl, _ws, + r'[^)]' if compound else r'[\w\W]'), + using(this, state='variable')), + (r'(?=[\x00|&])', Text, '#pop'), + include('follow') + ] + return state + + def _make_call_state(compound, _label=_label, + _label_compound=_label_compound): + state = [] + if compound: + state.append((r'(?=\))', Text, '#pop')) + state.append((r'(:?)(%s)' % (_label_compound if compound else _label), + bygroups(Punctuation, Name.Label), '#pop')) + return state + + def _make_label_state(compound, _label=_label, + _label_compound=_label_compound, _nl=_nl, + _punct=_punct, _string=_string, 
_variable=_variable): + state = [] + if compound: + state.append((r'(?=\))', Text, '#pop')) + state.append((r'(%s?)((?:%s|%s|\^[%s]?%s|[^"%%^%s%s%s])*)' % + (_label_compound if compound else _label, _string, + _variable, _nl, r'[^)]' if compound else r'[\w\W]', _nl, + _punct, r')' if compound else ''), + bygroups(Name.Label, Comment.Single), '#pop')) + return state + + def _make_redirect_state(compound, + _core_token_compound=_core_token_compound, + _nl=_nl, _punct=_punct, _stoken=_stoken, + _string=_string, _space=_space, + _variable=_variable, _ws=_ws): + stoken_compound = (r'(?:[%s]+|(?:%s|%s|%s)+)' % + (_punct, _string, _variable, _core_token_compound)) + return [ + (r'((?:(?<=[%s%s])\d)?)(>>?&|<&)([%s%s]*)(\d)' % + (_nl, _ws, _nl, _ws), + bygroups(Number.Integer, Punctuation, Text, Number.Integer)), + (r'((?:(?<=[%s%s])(?>?|<)(%s?%s)' % + (_nl, _ws, _nl, _space, stoken_compound if compound else _stoken), + bygroups(Number.Integer, Punctuation, using(this, state='text'))) + ] + tokens = { - 'root': [ - # Lines can start with @ to prevent echo - (r'^\s*@', Punctuation), - (r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)), - (r'".*?"', String.Double), - (r"'.*?'", String.Single), - # If made more specific, make sure you still allow expansions - # like %~$VAR:zlt - (r'%%?[~$:\w]+%?', Name.Variable), - (r'::.*', Comment), # Technically :: only works at BOL - (r'\b(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)), - (r'\b(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)), - (r'\b(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)), - (r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|' - r'setlocal|shift|errorlevel|exist|defined|cmdextversion|' - r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword), - (r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator), - include('basic'), - (r'.', Text), + 'root': _make_begin_state(False), + 'follow': _make_follow_state(False), + 'arithmetic': _make_arithmetic_state(False), + 'call': _make_call_state(False), + 
'label': _make_label_state(False), + 'redirect': _make_redirect_state(False), + 'root/compound': _make_begin_state(True), + 'follow/compound': _make_follow_state(True), + 'arithmetic/compound': _make_arithmetic_state(True), + 'call/compound': _make_call_state(True), + 'label/compound': _make_label_state(True), + 'redirect/compound': _make_redirect_state(True), + 'variable-or-escape': [ + (_variable, Name.Variable), + (r'%%%%|\^[%s]?(\^!|[\w\W])' % _nl, String.Escape) ], - 'echo': [ - # Escapes only valid within echo args? - (r'\^\^|\^<|\^>|\^\|', String.Escape), - (r'\n', Text, '#pop'), - include('basic'), - (r'[^\'"^]+', Text), + 'string': [ + (r'"', String.Double, '#pop'), + (_variable, Name.Variable), + (r'\^!|%%', String.Escape), + (r'[^"%%^%s]+|[%%^]' % _nl, String.Double), + default('#pop') ], - 'basic': [ - (r'".*?"', String.Double), - (r"'.*?'", String.Single), - (r'`.*?`', String.Backtick), - (r'-?\d+', Number), - (r',', Punctuation), - (r'=', Operator), - (r'/\S+', Name), - (r':\w+', Name.Label), - (r'\w:\w+', Text), - (r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)), + 'sqstring': [ + include('variable-or-escape'), + (r'[^%]+|%', String.Single) ], + 'bqstring': [ + include('variable-or-escape'), + (r'[^%]+|%', String.Backtick) + ], + 'text': [ + (r'"', String.Double, 'string'), + include('variable-or-escape'), + (r'[^"%%^%s%s%s\d)]+|.' % (_nl, _punct, _ws), Text) + ], + 'variable': [ + (r'"', String.Double, 'string'), + include('variable-or-escape'), + (r'[^"%%^%s]+|.' 
% _nl, Name.Variable) + ], + 'for': [ + (r'(%s)(in)(%s)(\()' % (_space, _space), + bygroups(using(this, state='text'), Keyword, + using(this, state='text'), Punctuation), '#pop'), + include('follow') + ], + 'for2': [ + (r'\)', Punctuation), + (r'(%s)(do%s)' % (_space, _token_terminator), + bygroups(using(this, state='text'), Keyword), '#pop'), + (r'[%s]+' % _nl, Text), + include('follow') + ], + 'for/f': [ + (r'(")((?:%s|[^"])*?")([%s%s]*)(\))' % (_variable, _nl, _ws), + bygroups(String.Double, using(this, state='string'), Text, + Punctuation)), + (r'"', String.Double, ('#pop', 'for2', 'string')), + (r"('(?:%s|[\w\W])*?')([%s%s]*)(\))" % (_variable, _nl, _ws), + bygroups(using(this, state='sqstring'), Text, Punctuation)), + (r'(`(?:%s|[\w\W])*?`)([%s%s]*)(\))' % (_variable, _nl, _ws), + bygroups(using(this, state='bqstring'), Text, Punctuation)), + include('for2') + ], + 'for/l': [ + (r'-?\d+', Number.Integer), + include('for2') + ], + 'if': [ + (r'((?:cmdextversion|errorlevel)%s)(%s)(\d+)' % + (_token_terminator, _space), + bygroups(Keyword, using(this, state='text'), + Number.Integer), '#pop'), + (r'(defined%s)(%s)(%s)' % (_token_terminator, _space, _stoken), + bygroups(Keyword, using(this, state='text'), + using(this, state='variable')), '#pop'), + (r'(exist%s)(%s%s)' % (_token_terminator, _space, _stoken), + bygroups(Keyword, using(this, state='text')), '#pop'), + (r'(%s%s?)(==)(%s?%s)' % (_stoken, _space, _space, _stoken), + bygroups(using(this, state='text'), Operator, + using(this, state='text')), '#pop'), + (r'(%s%s)(%s)(%s%s)' % (_number, _space, _opword, _space, _number), + bygroups(using(this, state='arithmetic'), Operator.Word, + using(this, state='arithmetic')), '#pop'), + (r'(%s%s)(%s)(%s%s)' % (_stoken, _space, _opword, _space, _stoken), + bygroups(using(this, state='text'), Operator.Word, + using(this, state='text')), '#pop') + ], + '(?': [ + (_space, using(this, state='text')), + (r'\(', Punctuation, ('#pop', 'else?', 'root/compound')), + 
default('#pop') + ], + 'else?': [ + (_space, using(this, state='text')), + (r'else%s' % _token_terminator, Keyword, '#pop'), + default('#pop') + ] } diff --git a/tests/examplefiles/batchfile.bat b/tests/examplefiles/batchfile.bat deleted file mode 100644 index 5cdc625c..00000000 --- a/tests/examplefiles/batchfile.bat +++ /dev/null @@ -1,49 +0,0 @@ -rem this is a demo file. -@rem -@echo off - -call c:\temp.bat somearg -call :lab somearg -rem This next one is wrong in the vim lexer! -call c:temp.bat - -echo "Hi!" -echo hi -echo on -echo off -echo. -@echo off -if exist *.log echo The log file has arrived. -rem These are all escapes, also done incorrectly by the vim lexer -echo ^^ ^> ^< ^| - -x=beginning -setlocal -x = new text -endlocal - -echo testrem x -echo test rem x - -for %%var in (*.jpg) do echo %%var -for /D %%var in (a b c) do echo %%var -for /R C:\temp %%var in (*.jpg) do iexplore.exe %%var -rem Vim has this one wrong too. -for /L %%var in (10,-1,1) do echo %%var -for /F %%var in ("hi!") do echo %%var -for /F "eol=c,skip=1,usebackq" %%var in (`command`) do echo %%var %~l %~fl %~dl %~pl %~nl %~xl %~sl %~al %~tl %~zl %~$PATH:l %~dpl %~dp$PATH:l %~ftzal - -echo some file ?! > somefile.txt - -set PATH=%PATH%;c:\windows - -goto answer%errorlevel% - :answer0 - echo Hi it's zero - :answer1 - echo New - -if exist a del a -else echo A is missing! - - diff --git a/tests/examplefiles/example.bat b/tests/examplefiles/example.bat new file mode 100644 index 00000000..bf27673c --- /dev/null +++ b/tests/examplefiles/example.bat @@ -0,0 +1,205 @@ +@ @@ echo off +::This is an example of the Windows batch language. 
+ +setlocal EnableDelayedExpansion +(cls) +set/a^ +_te^ +sts^ +=0,^ +_"pa"^ +ssed=0^ +0 +set,/a title= Batch test +title=%,/a title% +echo^ %~nx0,^ the>,con comprehensive testing suite +ver +echo( + +if cmdextversion 2 goto =) +goto :fail + + :) +echo Starting tests at: +date/t & time/t +echo( + +if '%*'=='--help' ( + echo Usage: %~nx0 [--help] + echo --help: Display this help message and quit. + shift + goto :exit comment) else rem + +(call :comments) +call ::io+x +call:control:x +call::internal x + +:exit +if /i !_tests!==!_passed! ( + color 02 +) else if !*==* ( + color c + if not defined _exit^ +Code set _exit^ +Code=1 +) +set _percentage=NaN +if defined _tests ( + if !_tests! neq 0 (set/a_percentage=100*_passed/_tests) +) +echo( +if !_percentage!==NaN ( echo(There were no tests^^! & color e +) else ( echo Tests passed: %_passed%/%_tests% (%_percentage%%%^) ) +pause +color +title +endlocal +exit /b %_exitCode% + +x:fail +rem This should never happen. +echo Internal error 1>& 269105>>&2 +set /a _exitCode=0x69+(0105*1000) +break +goto :exit + +:comments +(rem )/? +) +rem "comment^ +(rem.) & set /a _tests+=1 +(rem) & goto :fail +(rem. ) & (rem. comment ) & echo Test %_tests%: Comments +rem ) +) +)|comment +)( +:: comment +goto :comments^^1:comment +:comments^^1 comment +if(1==1) goto :comments^ +^1 +rem^ /? +rem ^ +^ +goto :comments^ +2+comment +goto :fail +:comments2 +rem >^ +if 1==1 (goto :comments3) +:comments3) +goto :fail +:comments3 +rem comment^ +goto:fail +rem.comment comment^ +goto fail +rem "comment comment"^ +goto fail +rem comment comment^ +set /a _passed+=1 +GOTO :EOF +goto :fail + +:IO +SET /A _tests+=1 & Echo Test !_tests:*!==^!: I/O +verify on +pushd . +if exist temp echo temp already exists. & goto :eof +md temp +cd temp +mkdir 2>nul temp +chdir temp +>cd echo Checking drive... 
+>>cd echo must be C or else this won't work +for /f "tokens=* usebackq" %%G in ("cd +) do (test0^ +.bat echo rem Machine-generated; do not edit +call echo set /a _passed+=1 >>test0.bat +type test0.bat >"test 1.bat +ren "test 1.bat" test2.bat +rename test2.bat test.bat +caLL ^ +C:test +del test.bat 2>nul +2>NUL erase test0.bat +popd +rd temp\temp +rmdir temp +VERIFY OFF +goto:eof + +:control +set /a _tests+=1 +echo Test %_tests%: Control statements +set "_iterations=0">nul +for %%G in (,+,,-, +) do @( + for /l %%H in (,-1;;-1 -3,) do ( + for /f tokens^=1-2^,5 %%I in ("2 %%H _ _ 10") do ( + for /f "tokens=1 usebackq" %%L in ( `echo %%G%%J ``` ` +` ` ) do ( for /f "tokens=2" %%M in ('echo ' %%L0 ' +' ' ) do ( set /a _iterations+=(%%M%%M^) + ) + ) + ) + ) +) +if exist %~nx0 if not exist %~nx0 goto :fail +if exist %~nx0 ( + if not exist %~nx0 goto :fail +) else ( + if exist %~nx0 goto :fail +) +if /i %_iterations% gtr -2 ( + if /i %_iterations% geq -1 ( + if /i %_iterations% lss 1 ( + if /i %_iterations% leq 0 ( + if /i %_iterations% equ 0 ( + if 1 equ 01 ( + if 1 neq "01" ( + if "1" neq 01 ( + set /a _passed+=1)))))))) +) comment +goto :eof + +:internal +set /a _tests+=1 +echo Test %_tests%: Internal commands +keys on +mklink 2>nul +>nul path %path% +>nul dpath %dpath% +if not defined prompt prompt $P$G +prompt !prompt:~!rem/ $H? +echo on +rem/? +@echo off +rem(/?>nul +rem )/? >nul +(rem (/?) >nul +rem /?>nul +rem^/?>nul +if/?>nul || if^/^?>nul || if /?>nul || if x/? >nul +for/?>nul && for^/^?>nul && for /?>nul && for x/? >nul && for /?x >nul +goto/?>nul && goto^/? && goto^ /? && goto /^ +? && goto /?>nul && goto:/? >nul && goto ) /? ) >nul && (goto /? )>nul +=set+;/p extension'),=.bat +for /f "tokens=2 delims==" %%G in ( 'assoc %+;/p extension'),%' + ) do ( + assoc 2>nul %+;/p extension'),:*.=.%=%%G + ftype 1>nul %%G +) &>nul ver +if errorlevel 0 if not errorlevel 1 set /a _passed+=1 +goto :eof +:/? 
+goto :fail -- cgit v1.2.1 From 5a6b44cae1edbea60db15217de4b745bd38c54bc Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 27 Dec 2014 18:38:08 +0000 Subject: Added a new lexer for Terraform files. Should hopefully syntax highlight terraform *.tf files. Syntax is similar to json but not identical and it has it's own keywords and lookup functions. More details here: https://www.terraform.io/docs/configuration/index.html --- pygments/lexers/terraform.py | 82 +++++++++++++++++++++ tests/examplefiles/example.tf | 162 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 244 insertions(+) create mode 100644 pygments/lexers/terraform.py create mode 100644 tests/examplefiles/example.tf diff --git a/pygments/lexers/terraform.py b/pygments/lexers/terraform.py new file mode 100644 index 00000000..1600d154 --- /dev/null +++ b/pygments/lexers/terraform.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.terraform + ~~~~~~~~~~~~~~~~~~~~~ + + Lexer for Terraform tf files + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +import re + +from pygments.lexer import Lexer, RegexLexer, bygroups, include +from pygments.token import Text, Comment, Operator, Keyword, Name, String, Punctuation, Number +__all__ = ['TerraformLexer'] + +class TerraformLexer(RegexLexer): + """ + Lexer for terraformi .tf files: https://www.terraform.io/ + + """ + name = 'Terraform' + aliases = ['terraform', 'tf'] + filenames = ['*.tf'] + mimetypes = ['application/x-tf', 'application/x-terraform'] + tokens = { + 'root': [ + include ('string'), + include ('punctuation'), + include ('curly'), + include('basic'), + include('whitespace'), + (r'[0-9]+', Number), + ], + 'basic': [ + (r'\b(true|false)\b', Keyword.Type), + (r'\s*/\*', Comment.Multiline, 'comment'), + (r'\s*#.*\n', Comment.Single), + (r'(.*?)(\s*)(=)', + bygroups(Name.Attribute, Text, Operator)), + (r'\bvariable|resource|provider|provisioner|module\b',Keyword.Reserved,'function'), + (r'\bingress|egress|listener|default|connection\b',Keyword.Declaration), + ('\$\{',String.Interpol,'var_builtin'), + ], + 'function':[ + (r'(\s+)(".*")(\s+)', bygroups(Text,String,Text)), + include ('punctuation'), + include ('curly'), + ], + 'var_builtin':[ + (r'\$\{', String.Interpol, '#push'), + (r'\bconcat|file|join|lookup|element\b',Name.Builtin), + include ('string'), + include ('punctuation'), + (r'\s+', Text), + (r'\}',String.Interpol, '#pop'), + ], + 'string':[ + (r'(".*")',bygroups(String.Double)), + ], + 'punctuation':[ + (r'[\[\]\(\),.]',Punctuation), + ], + # Keep this seperate from punctuation - we sometimes want to use different + # Tokens for { } + 'curly':[ + (r'\{',Text.Punctuation), + (r'\}',Text.Punctuation), + ], + 'comment': [ + (r'[^*/]', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[*/]', Comment.Multiline) + ], + 'whitespace': [ + (r'\n', Text), + (r'\s+', Text), + (r'\\\n', Text), + ], + } diff --git a/tests/examplefiles/example.tf b/tests/examplefiles/example.tf new file mode 100644 
index 00000000..d3f02779 --- /dev/null +++ b/tests/examplefiles/example.tf @@ -0,0 +1,162 @@ +variable "key_name" { + description = "Name of the SSH keypair to use in AWS." +} + +variable "key_path" { + description = "Path to the private portion of the SSH key specified." +} + +variable "aws_region" { + description = "AWS region to launch servers." + default = "us-west-2" + somevar = true +} + +# Ubuntu Precise 12.04 LTS (x64) +variable "aws_amis" { + default = { + eu-west-1 = "ami-b1cf19c6" + us-east-1 = "ami-de7ab6b6" + us-west-1 = "ami-3f75767a" + us-west-2 = "ami-21f78e11" + } +} + + + + + + +provider "aws" { + access_key = "${myvar}" + secret_key = "your aws secret key" + region = "us-east-1" +} +/* multiline + + comment + +*/ + + +# Single line comment +resource "aws_instance" "example" { + ami = "ami-408c7f28" + instance_type = "t1.micro" + key_name = "your-aws-key-name" +} + +# Create our Heroku application. Heroku will +# automatically assign a name. +resource "heroku_app" "web" {} + +# Create our DNSimple record to point to the +# heroku application. 
+resource "dnsimple_record" "web" { + domain = "${var.dnsimple_domain}" + + + # heroku_hostname is a computed attribute on the heroku + # application we can use to determine the hostname + value = "${heroku_app.web.heroku_hostname}" + + type = "CNAME" + ttl = 3600 +} + +# The Heroku domain, which will be created and added +# to the heroku application after we have assigned the domain +# in DNSimple +resource "heroku_domain" "foobar" { + app = "${heroku_app.web.name}" + hostname = "${dnsimple_record.web.hostname}" +} + + +# Specify the provider and access details +provider "aws" { + region = "${var.aws_region}" + value = ${file("path.txt")} +} + +# Our default security group to access +# the instances over SSH and HTTP +resource "aws_security_group" "default" { + name = "terraform_example" + description = "Used in the terraform" + + # SSH access from anywhere + ingress { + from_port = 22 + to_port = 22 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0"] + } + + # HTTP access from anywhere + ingress { + from_port = 80 + to_port = 80 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0"] + } +} + + +resource "aws_elb" "web" { + name = "terraform-example-elb" + + # The same availability zone as our instance + availability_zones = ["${aws_instance.web.availability_zone}"] + + listener { + instance_port = 80 + instance_protocol = "http" + lb_port = 80 + lb_protocol = "http" + } + + # The instance is registered automatically + instances = ["${aws_instance.web.id}"] +} + + +resource "aws_instance" "web" { + # The connection block tells our provisioner how to + # communicate with the resource (instance) + connection { + # The default username for our AMI + user = "ubuntu" + + # The path to your keyfile + key_file = "${var.key_path}" + } + + instance_type = "m1.small" + + # Lookup the correct AMI based on the region + # we specified + ami = "${lookup(var.aws_amis, var.aws_region)}" + + # The name of our SSH keypair you've created and downloaded + # from the AWS console. 
+ # + # https://console.aws.amazon.com/ec2/v2/home?region=us-west-2#KeyPairs: + # + key_name = "${var.key_name}" + + # Our Security group to allow HTTP and SSH access + security_groups = ["${aws_security_group.default.name}"] + + # We run a remote provisioner on the instance after creating it. + # In this case, we just install nginx and start it. By default, + # this should be on port 80 + provisioner "remote-exec" { + inline = [ + "sudo apt-get -y update", + "sudo apt-get -y install nginx", + "sudo service nginx start" + ] + } +} + -- cgit v1.2.1 From c5f9bab22bc0487090381b7baec70aacb2307432 Mon Sep 17 00:00:00 2001 From: Kashif Rasul Date: Mon, 29 Dec 2014 21:35:18 +0100 Subject: updated cocoa builtins for iOS 8.1 --- pygments/lexers/_cocoa_builtins.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/pygments/lexers/_cocoa_builtins.py b/pygments/lexers/_cocoa_builtins.py index 84f3eeea..48918ce6 100644 --- a/pygments/lexers/_cocoa_builtins.py +++ b/pygments/lexers/_cocoa_builtins.py @@ -14,16 +14,15 @@ from __future__ import print_function -COCOA_INTERFACES = set(['UITableViewCell', 'HKCorrelationQuery', 'NSURLSessionDataTask', 'PHFetchOptions', 'NSLinguisticTagger', 'NSStream', 'AVAudioUnitDelay', 'GCMotion', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'AVAudioEnvironmentDistanceAttenuationParameters', 'HKStatisticsCollection', 'SCNPlane', 'CBPeer', 'JSContext', 'SCNTransaction', 'SCNTorus', 'AVAudioUnitEffect', 'UICollectionReusableView', 'MTLSamplerDescriptor', 'AVAssetReaderSampleReferenceOutput', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'SKRange', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UIVisualEffectView', 'CIWarpKernel', 'PKObject', 'MKRoute', 'MPVolumeView', 'UIPrintInfo', 'SCNText', 'ADClient', 'UIKeyCommand', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'WKScriptMessage', 'AVMIDIPlayer', 'PHCollectionListChangeRequest', 'UICollectionViewLayout', 'NSMutableCharacterSet', 
'SKPaymentTransaction', 'NEOnDemandRuleConnect', 'NSShadow', 'SCNView', 'NSURLSessionConfiguration', 'MTLVertexAttributeDescriptor', 'CBCharacteristic', 'HKQuantityType', 'CKLocationSortDescriptor', 'NEVPNIKEv2SecurityAssociationParameters', 'CMStepCounter', 'NSNetService', 'AVAssetWriterInputMetadataAdaptor', 'UICollectionView', 'UIViewPrintFormatter', 'SCNLevelOfDetail', 'CAShapeLayer', 'MCPeerID', 'MPRatingCommand', 'WKNavigation', 'NSDictionary', 'NSFileVersion', 'CMGyroData', 'AVAudioUnitDistortion', 'CKFetchRecordsOperation', 'SKPhysicsJointSpring', 'SCNHitTestResult', 'AVAudioTime', 'CIFilter', 'UIView', 'SCNConstraint', 'CAPropertyAnimation', 'MKMapItem', 'MPRemoteCommandCenter', 'UICollectionViewFlowLayoutInvalidationContext', 'UIInputViewController', 'PKPass', 'SCNPhysicsBehavior', 'MTLRenderPassColorAttachmentDescriptor', 'MKPolygonRenderer', 'CKNotification', 'JSValue', 'PHCollectionList', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'MPFeedbackCommand', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'NSLengthFormatter', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'SCNPyramid', 'PHAssetCollection', 'MKMapView', 'HMHomeManager', 'CATransition', 'MTLCompileOptions', 'UIVibrancyEffect', 'CLCircularRegion', 'MKTileOverlay', 'SCNShape', 'ACAccountCredential', 'SKPhysicsJointLimit', 'MKMapSnapshotter', 'AVMediaSelectionGroup', 'NSIndexSet', 'CBPeripheralManager', 'CKRecordZone', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'AVAudioOutputNode', 'MTLVertexAttributeDescriptorArray', 'MKETAResponse', 'SKTransition', 'SSReadingList', 'HKSourceQuery', 'UITableViewRowAction', 'UITableView', 'SCNParticlePropertyController', 'AVCaptureStillImageOutput', 'GCController', 'AVAudioPlayerNode', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NEOnDemandRuleEvaluateConnection', 'SKEffectNode', 'HKQuantity', 'GCControllerElement', 'AVPlayerItemAccessLogEvent', 'SCNBox', 
'NSExtensionContext', 'MKOverlayRenderer', 'SCNPhysicsVehicle', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'CKQuery', 'AVAudioMixerNode', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'NSBatchUpdateRequest', 'PHContentEditingOutput', 'PHObjectChangeDetails', 'MPMoviePlayerController', 'AVAudioFormat', 'HMTrigger', 'MTLRenderPassDepthAttachmentDescriptor', 'SCNRenderer', 'GKScore', 'UISplitViewController', 'HKSource', 'NSURLConnection', 'ABUnknownPersonViewController', 'SCNTechnique', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 'NSKeyedArchiver', 'GKLeaderboardSet', 'NSSimpleCString', 'AVAudioPCMBuffer', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'SKProductsRequest', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SCNFloor', 'NSSortDescriptor', 'CKFetchNotificationChangesOperation', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVAudioUnitGenerator', 'WKBackForwardList', 'SKMutableTexture', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'MPRatingCommandEvent', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'MPRemoteCommand', 'AVCaptureSession', 'UIStepper', 'UIRefreshControl', 'NEEvaluateConnectionRule', 'CKModifyRecordsOperation', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UILexiconEntry', 'CMMotionActivity', 'SKAction', 'SKShader', 'AVPlayerItemOutput', 'MTLRenderPassAttachmentDescriptor', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'CAInterAppAudioTransportView', 'SKMutablePayment', 'UIImage', 'PHCachingImageManager', 'SCNTransformConstraint', 'UIColor', 'SCNGeometrySource', 'AVCaptureAutoExposureBracketedStillImageSettings', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'AVAssetReaderOutputMetadataAdaptor', 'NSEntityMigrationPolicy', 
'NSLocale', 'NSURLSession', 'SCNCamera', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'HMServiceGroup', 'UIPasteboard', 'PHContentEditingInput', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSEnergyFormatter', 'SLRequest', 'HMCharacteristic', 'AVPlayerLayer', 'MTLRenderPassDescriptor', 'SKPayment', 'NSPointerArray', 'AVAudioMix', 'SCNLight', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'HKCategorySample', 'AVAudioEnvironmentReverbParameters', 'SCNMorpher', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 'NSFetchRequestExpression', 'HMActionSet', 'CTSubscriber', 'PHAssetChangeRequest', 'NSPersistentStoreRequest', 'UITabBarController', 'HKQuantitySample', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UIPresentationController', 'UINavigationItem', 'PHFetchResultChangeDetails', 'PHImageManager', 'AVCaptureManualExposureBracketedStillImageSettings', 'UIStoryboardPopoverSegue', 'SCNLookAtConstraint', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'NEOnDemandRuleDisconnect', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSURLResponse', 'SKPaymentQueue', 'NSAssertionHandler', 'MKReverseGeocoder', 'GCControllerAxisInput', 'NSArray', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVMutableVideoCompositionInstruction', 'AVAssetReaderOutput', 'EAGLContext', 'WKFrameInfo', 'CMPedometer', 'MyClass', 'CKModifyBadgeOperation', 'AVCaptureAudioFileOutput', 'SKEmitterNode', 'NSMachPort', 'AVVideoCompositionCoreAnimationTool', 'PHCollection', 'SCNPhysicsWorld', 
'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 'CLFloor', 'AVAsynchronousVideoCompositionRequest', 'SCNGeometry', 'SCNIKConstraint', 'CIKernel', 'CAGradientLayer', 'HKCharacteristicType', 'NSFormatter', 'SCNAction', 'CATransaction', 'CBUUID', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'AVAudioUnit', 'MCBrowserViewController', 'NSRelationshipDescription', 'HKSample', 'WKWebView', 'NSMutableAttributedString', 'NSPersistentStoreAsynchronousResult', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'HKCorrelation', 'CATextLayer', 'NSNotificationQueue', 'UINib', 'GLKTextureLoader', 'HKObjectType', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'AVPlayerViewController', 'CAScrollLayer', 'GKSavedGame', 'NSTextCheckingResult', 'PHObjectPlaceholder', 'SKConstraint', 'EKEventEditViewController', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'SCNNode', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'NEOnDemandRule', 'MTLRenderPassStencilAttachmentDescriptor', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'NEOnDemandRuleIgnore', 'SKRegion', 'AVAssetResourceLoader', 'EAWiFiUnconfiguredAccessoryBrowser', 'NSUserActivity', 'CTCall', 'UIPrinterPickerController', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'CKRecordZoneID', 'HKAnchoredObjectQuery', 'CKFetchRecordZonesOperation', 'UIStoryboardSegue', 'ACAccountType', 'GKSession', 'SKVideoNode', 'PHChange', 'SKReceiptRefreshRequest', 'GCExtendedGamepadSnapshot', 'MPSeekCommandEvent', 'GCExtendedGamepad', 'CAValueFunction', 'SCNCylinder', 'NSNotification', 'NSBatchUpdateResult', 'PKPushCredentials', 'SCNPhysicsSliderJoint', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'UILexicon', 'MTLArrayType', 
'AVAudioUnitReverb', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIPrinter', 'NSOrderedSet', 'CBAttribute', 'PKPushPayload', 'NSIncrementalStoreNode', 'EKEventStore', 'MPRemoteCommandEvent', 'UISlider', 'UIBlurEffect', 'CKAsset', 'AVCaptureInput', 'AVAudioEngine', 'MTLVertexDescriptor', 'SKPhysicsBody', 'NSOperation', 'UIImageAsset', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CABTMIDILocalPeripheralViewController', 'NEVPNManager', 'HKQuery', 'SCNPhysicsContact', 'CBMutableService', 'AVSampleBufferDisplayLayer', 'SCNSceneSource', 'SKLightNode', 'CKDiscoveredUserInfo', 'NSMutableArray', 'MTLDepthStencilDescriptor', 'MTLArgument', 'NSMassFormatter', 'CIRectangleFeature', 'PKPushRegistry', 'NEVPNConnection', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'UICollectionViewLayoutAttributes', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'CIBarcodeFeature', 'MPChangePlaybackRateCommandEvent', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'AVAudioUnitSampler', 'NSItemProvider', 'SKUniform', 'MPMediaPickerController', 'CKOperation', 'MTLRenderPipelineDescriptor', 'EAWiFiUnconfiguredAccessory', 'NSFileCoordinator', 'SKRequest', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'SCNCone', 'MPMediaItem', 'SCNMaterial', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NEVPNProtocol', 'NSCompoundPredicate', 'HKHealthStore', 'MKMultiPoint', 'HKSampleType', 'UIPrintFormatter', 'AVAudioUnitEQFilterParameters', 'SKView', 'NSConstantString', 'UIPopoverController', 'CKDatabase', 'AVMetadataFaceObject', 'UIAccelerometer', 'EKEventViewController', 'CMAltitudeData', 'MTLStencilDescriptor', 'UISwipeGestureRecognizer', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'NSAsynchronousFetchRequest', 
'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'CKNotificationID', 'AVAudioSession', 'HKUnit', 'NSNull', 'NSPersistentStoreResult', 'MKCircleView', 'AVAudioChannelLayout', 'NEVPNProtocolIKEv2', 'WKProcessPool', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'NSHTTPCookie', 'AVMutableVideoComposition', 'PHFetchResult', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'UIFontDescriptor', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'AVAudioUnitTimePitch', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'UIDocumentPickerViewController', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'AVAudioFile', 'UIActivity', 'NSPipe', 'MKShape', 'NSMergeConflict', 'CIImage', 'HKObject', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 'GCControllerButtonInput', 'CKMarkNotificationsReadOperation', 'CKSubscription', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'HMAccessoryBrowser', 'GKTurnBasedEventHandler', 'UIWebView', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'WKNavigationResponse', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'HKStatistics', 'UILocalNotification', 'HKBiologicalSexObject', 'AVURLAsset', 'CBPeripheral', 'NSDateComponentsFormatter', 'SKSpriteNode', 'UIAccessibilityElement', 'AVAssetWriterInputGroup', 'HMZone', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'PHPhotoLibrary', 'CBService', 'UIDocumentMenuViewController', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'ASIdentifierManager', 
'AVAssetResourceLoadingRequest', 'SLComposeServiceViewController', 'UIPinchGestureRecognizer', 'PHObject', 'NSExtensionItem', 'HKSampleQuery', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MKRouteStep', 'SCNCapsule', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'UITraitCollection', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'MTLRenderPipelineReflection', 'CLHeading', 'CLVisit', 'MKDirectionsResponse', 'HMAccessory', 'MTLStructType', 'UITextView', 'CMMagnetometerData', 'UICollisionBehavior', 'UIProgressView', 'CKServerChangeToken', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'NSPropertyMapping', 'ALAssetsFilter', 'SK3DNode', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'SCNSphere', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSFileProviderExtension', 'NSDecimalNumberHandler', 'NSAtomicStoreCacheNode', 'NSAtomicStore', 'EKAlarm', 'CKNotificationInfo', 'AVAudioUnitEQ', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 'MTLVertexAttribute', 'NSExpressionDescription', 'HKStatisticsCollectionQuery', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 'UIAlertAction', 'NSPropertyListSerialization', 'GKPeerPickerController', 'UIUserNotificationSettings', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'MTLRenderPassColorAttachmentDescriptorArray', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'UIViewController', 'MFMessageComposeViewController', 'AVAudioInputNode', 'NSDataDetector', 'CABTMIDICentralViewController', 'AVAudioUnitMIDIInstrument', 'AVCaptureVideoPreviewLayer', 'AVAssetWriterInputPassDescription', 'MPChangePlaybackRateCommand', 'NSURLComponents', 'CAMetalLayer', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'CKDiscoverUserInfosOperation', 
'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'SCNGeometryElement', 'NCWidgetController', 'CAEmitterLayer', 'MKUserLocation', 'UIImagePickerController', 'CIFeature', 'AVCaptureDeviceInput', 'ALAsset', 'NSURLSessionDownloadTask', 'SCNPhysicsHingeJoint', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'SCNMaterialProperty', 'UIFont', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'WKWebViewConfiguration', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'AVComposition', 'CKDiscoverAllContactsOperation', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'UIMutableUserNotificationCategory', 'NSLock', 'NEVPNProtocolIPSec', 'ADBannerView', 'UIDocumentPickerExtensionViewController', 'UIActivityIndicatorView', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIAccessibilityCustomAction', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'HKBloodTypeObject', 'MTLVertexBufferLayoutDescriptorArray', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'UIScrollView', 'WKNavigationAction', 'AVPlayerItemMetadataOutput', 'EKRecurrenceDayOfWeek', 'NSNumberFormatter', 'MTLComputePipelineReflection', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'SCNSkinner', 'AVCaptureMetadataOutput', 'SCNAnimationEvent', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'WKBackForwardListItem', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKCalendar', 'NSMutableURLRequest', 'UIVideoEditorController', 'HMTimerTrigger', 'AVAudioUnitVarispeed', 'UIDynamicAnimator', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 'UIPushBehavior', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 
'NSDateComponents', 'WKUserScript', 'EKEvent', 'NSDateFormatter', 'NSAsynchronousFetchResult', 'AVAssetWriterInputPixelBufferAdaptor', 'UIVisualEffect', 'UICollectionViewCell', 'UITextField', 'CLPlacemark', 'MPPlayableContentManager', 'AVCaptureOutput', 'HMCharacteristicWriteAction', 'CKModifySubscriptionsOperation', 'NSPropertyDescription', 'GCGamepad', 'UIMarkupTextPrintFormatter', 'SCNTube', 'NSPersistentStoreCoordinator', 'AVAudioEnvironmentNode', 'GKMatchmaker', 'CIContext', 'NSThread', 'SLComposeSheetConfigurationItem', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 'GKGameCenterViewController', 'UIPrintPaper', 'SCNPhysicsBallSocketJoint', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'AVAudioIONode', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CKRecordZoneNotification', 'SCNScene', 'MPMovieAccessLogEvent', 'CKFetchSubscriptionsOperation', 'CAEmitterCell', 'AVAudioUnitTimeEffect', 'HMCharacteristicMetadata', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'UIUserNotificationCategory', 'SCNPhysicsVehicleWheel', 'HKCategoryType', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'MTLRenderPipelineColorAttachmentDescriptor', 'SCNPhysicsShape', 'UISearchController', 'SCNPhysicsBody', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'SKNode', 'UIMutableUserNotificationAction', 'SCNProgram', 'AVSpeechSynthesisVoice', 'CMAltimeter', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SCNPhysicsField', 'CKReference', 'LAContext', 'CKRecordID', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'AVAudioBuffer', 'CIColorKernel', 'GCControllerDirectionPad', 'NSFileManager', 
'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'CKQueryNotification', 'PHAdjustmentData', 'EASession', 'AVAssetResourceRenewalRequest', 'UIInputView', 'NSFileWrapper', 'UIResponder', 'NSPointerFunctions', 'NSHTTPCookieStorage', 'AVMediaSelectionOption', 'NSRunLoop', 'NSFileAccessIntent', 'CAAnimationGroup', 'MKCircle', 'UIAlertController', 'NSMigrationManager', 'NSDateIntervalFormatter', 'UICollectionViewUpdateItem', 'CKDatabaseOperation', 'PHImageRequestOptions', 'SKReachConstraints', 'CKRecord', 'CAInterAppAudioSwitcherView', 'WKWindowFeatures', 'GKInvite', 'NSMutableData', 'PHAssetCollectionChangeRequest', 'NSMutableParagraphStyle', 'UIDynamicBehavior', 'GLKEffectProperty', 'CKFetchRecordChangesOperation', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'MPContentItem', 'HMAction', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'CKContainer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'AVCaptureMovieFileOutput', 'HMRoom', 'GKChallenge', 'UITextRange', 'NSURLProtectionSpace', 'ACAccountStore', 'MPSkipIntervalCommand', 'NSComparisonPredicate', 'HMHome', 'PHVideoRequestOptions', 'NSOutputStream', 'MPSkipIntervalCommandEvent', 'PKAddPassesViewController', 'UITextSelectionRect', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'UICollectionViewController', 'AVAudioNode', 'MCNearbyServiceAdvertiser', 'NSObject', 'PHAsset', 'GKLeaderboardViewController', 'CKQueryCursor', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'CMPedometerData', 'HMService', 'SKFieldNode', 'GKAchievement', 'WKUserContentController', 'AVAssetTrack', 'TWRequest', 'SKLabelNode', 'AVCaptureBracketedStillImageSettings', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'MTLTextureDescriptor', 'MTLVertexBufferLayoutDescriptor', 'MPFeedbackCommandEvent', 'UIUserNotificationAction', 'HKStatisticsQuery', 'SCNParticleSystem', 'NSIndexPath', 
'AVVideoCompositionRenderContext', 'CADisplayLink', 'HKObserverQuery', 'UIPopoverPresentationController', 'CKQueryOperation', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'NSURLQueryItem', 'MTLStructMember', 'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'GKTurnBasedParticipant', 'NSURLProtocol', 'NSUserDefaults', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'CKModifyRecordZonesOperation', 'WKPreferences', 'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'PHContentEditingInputRequestOptions', 'GKMatch', 'CIColor', 'UIDictationPhrase']) -COCOA_PROTOCOLS = set(['SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'MTLComputeCommandEncoder', 'SCNSceneExportDelegate', 'UISearchResultsUpdating', 'MFMailComposeViewControllerDelegate', 'MTLBlitCommandEncoder', 'NSDecimalNumberBehaviors', 'PHContentEditingController', 'NSMutableCopying', 'UIActionSheetDelegate', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UIAdaptivePresentationControllerDelegate', 'GKAchievementViewControllerDelegate', 'UIViewControllerTransitionCoordinator', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'UITableViewDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'AVAudio3DMixing', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'HMAccessoryBrowserDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 
'CKRecordValue', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'SKSceneDelegate', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'MyClassJavaScriptMethods', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'SCNBoundingVolume', 'NSPortDelegate', 'UIContentContainer', 'SCNNodeRendererDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'HMAccessoryDelegate', 'UIPageViewControllerDataSource', 'SCNSceneRendererDelegate', 'SCNPhysicsContactDelegate', 'MKMapViewDelegate', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'MTLRenderCommandEncoder', 'UIToolbarDelegate', 'WKUIDelegate', 'SCNActionable', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'PKPushRegistryDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSLayoutManagerDelegate', 'MTLLibrary', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'MTLResource', 'NSDiscardableContent', 'UITextFieldDelegate', 'MTLBuffer', 'MTLSamplerState', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UISplitViewControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UITraitEnvironment', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'MPPlayableContentDataSource', 'MTLComputePipelineState', 'NSURLSessionDelegate', 'MTLCommandBuffer', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MTLDepthStencilState', 'GKTurnBasedMatchmakerViewControllerDelegate', 'MPPlayableContentDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 
'UIStateRestoring', 'UITextDocumentProxy', 'MTLDrawable', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'AVAudioStereoMixing', 'UIViewControllerContextTransitioning', 'UITextInput', 'CBPeripheralManagerDelegate', 'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'SCNProgramDelegate', 'AVVideoCompositing', 'SCNAnimatable', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'NSExtensionRequestHandling', 'UITextSelecting', 'UIPrinterPickerControllerDelegate', 'NCWidgetProviding', 'MTLCommandEncoder', 'NSURLProtocolClient', 'MFMessageComposeViewControllerDelegate', 'UIVideoEditorControllerDelegate', 'WKNavigationDelegate', 'GKSavedGameListener', 'UITableViewDataSource', 'MTLFunction', 'EKCalendarChooserDelegate', 'NSUserActivityDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'HMHomeDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIDocumentPickerDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'SCNSceneRenderer', 'MTLTexture', 'GLKViewDelegate', 'EAAccessoryDelegate', 'WKScriptMessageHandler', 'PHPhotoLibraryChangeObserver', 'NSKeyedUnarchiverDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'NSMachPortDelegate', 'SCNShadable', 'UIPopoverBackgroundViewMethods', 'UIDocumentMenuDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'AVAudioMixing', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'MTLParallelRenderCommandEncoder', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 
'UIPopoverPresentationControllerDelegate', 'UIDynamicAnimatorDelegate', 'NSTextAttachmentContainer', 'MKAnnotation', 'UIAccessibilityIdentification', 'UICoordinateSpace', 'ABNewPersonViewControllerDelegate', 'MTLDevice', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'HMHomeManagerDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'UISearchControllerDelegate', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'UITextInputTraits', 'MTLRenderPipelineState', 'GKVoiceChatClient', 'UIKeyInput', 'UICollectionViewDataSource', 'SCNTechniqueSupport', 'NSLocking', 'AVCaptureFileOutputDelegate', 'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'MTLCommandQueue', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'CAMetalDrawable', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'NSURLSessionDataDelegate']) -COCOA_PRIMITIVES = set(['ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', '_GLKMatrix3', '_GLKMatrix2', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'OpaqueCMBlockBuffer', 'CGTextDrawingMode', 'EKErrorCode', 'GCAcceleration', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'AudioFile_SMPTE_Time', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', '__CFBinaryHeap', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 
'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', 'SFNTLookupSegment', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__SecCertificate', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', 'AnchorPoint', 'JustTable', '__CFNetService', 'CF_BRIDGED_TYPE', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'KerxSubtableHeader', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', 'OpaqueJSClass', 'CGGradient', 'OpaqueMIDISetup', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', 'MIDIThruConnectionParams', 'BslnFormat0Part', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'AnkrTable', '__SCNetworkProtocol', 'gss_buffer_desc_struct', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'SCNMatrix4', 'KerxControlPointEntry', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'KernStateEntry', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', 
'__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'AVAudio3DAngularOrientation', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'KernVersion0Header', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', 'MorxChain', '__CTFontCollection', 'STEntryOne', 'STXEntryTwo', 'ExtendedNoteOnEvent', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'GCQuaternion', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'CGImagePropertyOrientation', 'CGPDFScanner', 'OpaqueMusicEventIterator', 'sfntDescriptorHeader', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'BslnFormat1Part', 'sfntFontDescriptor', 'KernSimpleArrayHeader', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGInterpolationQuality', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 
'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', '__CTRubyAnnotation', 'SCNVector4', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__GLsync', '__CFStringTokenizer', 'JustWidthDeltaEntry', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', '__CVMetalTextureCache', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'AVAudio3DPoint', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 
'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', 'CGDataConsumerCallbacks', 'ALMXHeader', 'CGLineCap', 'MIDIControlTransform', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'AVAudio3DVectorOrientation', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'OpaqueVTCompressionSession', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'VTDecompressionOutputCallbackRecord', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', 
'__CFDateFormatter', 'LtagStringRange', 'OpaqueVTDecompressionSession', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'PropTable', 'KernOrderedListEntry', 'CF_BRIDGED_MUTABLE_TYPE', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'SCNVector3', 'OpaqueAudioConverter', 'MIDIRawData', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', '__SecAccessControl', 'JustPCAction', 'OpaqueVTFrameSilo', 'OpaqueVTMultiPassStorage', 'CGPathElementType', 'AudioFormatListItem', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'sfntCMapHeader', '__CFURLEnumerator', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader']) - +COCOA_INTERFACES = set(['UITableViewCell', 'HKCorrelationQuery', 'NSURLSessionDataTask', 'PHFetchOptions', 'NSLinguisticTagger', 'NSStream', 'AVAudioUnitDelay', 'GCMotion', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'AVAudioEnvironmentDistanceAttenuationParameters', 'HKStatisticsCollection', 'SCNPlane', 'CBPeer', 'JSContext', 'SCNTransaction', 'SCNTorus', 'AVAudioUnitEffect', 'UICollectionReusableView', 'MTLSamplerDescriptor', 'AVAssetReaderSampleReferenceOutput', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'SKRange', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UIVisualEffectView', 'CIWarpKernel', 'PKObject', 'MKRoute', 'MPVolumeView', 'UIPrintInfo', 'SCNText', 'ADClient', 'PKPayment', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'WKScriptMessage', 'AVMIDIPlayer', 'PHCollectionListChangeRequest', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'SKPaymentTransaction', 'NEOnDemandRuleConnect', 'NSShadow', 'SCNView', 'NSURLSessionConfiguration', 'MTLVertexAttributeDescriptor', 'CBCharacteristic', 'HKQuantityType', 
'CKLocationSortDescriptor', 'NEVPNIKEv2SecurityAssociationParameters', 'CMStepCounter', 'NSNetService', 'AVAssetWriterInputMetadataAdaptor', 'UICollectionView', 'UIViewPrintFormatter', 'SCNLevelOfDetail', 'CAShapeLayer', 'MCPeerID', 'MPRatingCommand', 'WKNavigation', 'NSDictionary', 'NSFileVersion', 'CMGyroData', 'AVAudioUnitDistortion', 'CKFetchRecordsOperation', 'SKPhysicsJointSpring', 'SCNHitTestResult', 'AVAudioTime', 'CIFilter', 'UIView', 'SCNConstraint', 'CAPropertyAnimation', 'MKMapItem', 'MPRemoteCommandCenter', 'PKPaymentSummaryItem', 'UICollectionViewFlowLayoutInvalidationContext', 'UIInputViewController', 'PKPass', 'SCNPhysicsBehavior', 'MTLRenderPassColorAttachmentDescriptor', 'MKPolygonRenderer', 'CKNotification', 'JSValue', 'PHCollectionList', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'MPFeedbackCommand', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'NSLengthFormatter', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'SCNPyramid', 'PHAssetCollection', 'MKMapView', 'HMHomeManager', 'CATransition', 'MTLCompileOptions', 'UIVibrancyEffect', 'CLCircularRegion', 'MKTileOverlay', 'SCNShape', 'ACAccountCredential', 'SKPhysicsJointLimit', 'MKMapSnapshotter', 'AVMediaSelectionGroup', 'NSIndexSet', 'CBPeripheralManager', 'CKRecordZone', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'AVAudioOutputNode', 'MTLVertexAttributeDescriptorArray', 'MKETAResponse', 'SKTransition', 'SSReadingList', 'HKSourceQuery', 'UITableViewRowAction', 'UITableView', 'SCNParticlePropertyController', 'AVCaptureStillImageOutput', 'GCController', 'AVAudioPlayerNode', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NEOnDemandRuleEvaluateConnection', 'SKEffectNode', 'HKQuantity', 'GCControllerElement', 'AVPlayerItemAccessLogEvent', 'SCNBox', 'NSExtensionContext', 'MKOverlayRenderer', 'SCNPhysicsVehicle', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'CKQuery', 'AVAudioMixerNode', 
'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'NSBatchUpdateRequest', 'PHContentEditingOutput', 'PHObjectChangeDetails', 'HKWorkoutType', 'MPMoviePlayerController', 'AVAudioFormat', 'HMTrigger', 'MTLRenderPassDepthAttachmentDescriptor', 'SCNRenderer', 'GKScore', 'UISplitViewController', 'HKSource', 'NSURLConnection', 'ABUnknownPersonViewController', 'SCNTechnique', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 'NSKeyedArchiver', 'GKLeaderboardSet', 'NSSimpleCString', 'AVAudioPCMBuffer', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'SKProductsRequest', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SCNFloor', 'NSSortDescriptor', 'CKFetchNotificationChangesOperation', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVAudioUnitGenerator', 'WKBackForwardList', 'SKMutableTexture', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'MPRatingCommandEvent', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'MPRemoteCommand', 'AVCaptureSession', 'UIStepper', 'UIRefreshControl', 'NEEvaluateConnectionRule', 'CKModifyRecordsOperation', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'PKShippingMethod', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UILexiconEntry', 'CMMotionActivity', 'SKAction', 'SKShader', 'AVPlayerItemOutput', 'MTLRenderPassAttachmentDescriptor', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'CAInterAppAudioTransportView', 'SKMutablePayment', 'UIImage', 'PHCachingImageManager', 'SCNTransformConstraint', 'HKCorrelationType', 'UIColor', 'SCNGeometrySource', 'AVCaptureAutoExposureBracketedStillImageSettings', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'UICollectionViewLayoutAttributes', 'AVAssetReaderOutputMetadataAdaptor', 'NSEntityMigrationPolicy', 'HMUser', 'NSLocale', 'NSURLSession', 'SCNCamera', 
'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'HMServiceGroup', 'UIPasteboard', 'PHContentEditingInput', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSEnergyFormatter', 'SLRequest', 'HMCharacteristic', 'AVPlayerLayer', 'MTLRenderPassDescriptor', 'SKPayment', 'NSPointerArray', 'AVAudioMix', 'SCNLight', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'HKCategorySample', 'AVAudioEnvironmentReverbParameters', 'SCNMorpher', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 'NSFetchRequestExpression', 'HMActionSet', 'CTSubscriber', 'PHAssetChangeRequest', 'NSPersistentStoreRequest', 'UITabBarController', 'HKQuantitySample', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UIPresentationController', 'UINavigationItem', 'PHFetchResultChangeDetails', 'PHImageManager', 'AVCaptureManualExposureBracketedStillImageSettings', 'UIStoryboardPopoverSegue', 'SCNLookAtConstraint', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'NEOnDemandRuleDisconnect', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSHTTPCookie', 'NSURLResponse', 'SKPaymentQueue', 'NSAssertionHandler', 'MKReverseGeocoder', 'GCControllerAxisInput', 'NSArray', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVMutableVideoCompositionInstruction', 'AVAssetReaderOutput', 'EAGLContext', 'WKFrameInfo', 'CMPedometer', 'MyClass', 'CKModifyBadgeOperation', 'AVCaptureAudioFileOutput', 'SKEmitterNode', 'NSMachPort', 'AVVideoCompositionCoreAnimationTool', 'PHCollection', 'SCNPhysicsWorld', 'NSURLRequest', 
'CMAccelerometerData', 'NSNetServiceBrowser', 'CLFloor', 'AVAsynchronousVideoCompositionRequest', 'SCNGeometry', 'SCNIKConstraint', 'CIKernel', 'CAGradientLayer', 'HKCharacteristicType', 'NSFormatter', 'SCNAction', 'CATransaction', 'CBUUID', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'AVAudioUnit', 'MCBrowserViewController', 'UIFontDescriptor', 'NSRelationshipDescription', 'HKSample', 'WKWebView', 'NSMutableAttributedString', 'NSPersistentStoreAsynchronousResult', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'HKCorrelation', 'CATextLayer', 'NSNotificationQueue', 'UINib', 'GLKTextureLoader', 'HKObjectType', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'AVPlayerViewController', 'CAScrollLayer', 'GKSavedGame', 'NSTextCheckingResult', 'PHObjectPlaceholder', 'SKConstraint', 'EKEventEditViewController', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'SCNNode', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'NEOnDemandRule', 'MTLRenderPassStencilAttachmentDescriptor', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'NEOnDemandRuleIgnore', 'SKRegion', 'AVAssetResourceLoader', 'EAWiFiUnconfiguredAccessoryBrowser', 'NSUserActivity', 'CTCall', 'UIPrinterPickerController', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'CKRecordZoneID', 'HKAnchoredObjectQuery', 'CKFetchRecordZonesOperation', 'UIStoryboardSegue', 'ACAccountType', 'GKSession', 'SKVideoNode', 'PHChange', 'SKReceiptRefreshRequest', 'GCExtendedGamepadSnapshot', 'MPSeekCommandEvent', 'GCExtendedGamepad', 'CAValueFunction', 'SCNCylinder', 'NSNotification', 'NSBatchUpdateResult', 'PKPushCredentials', 'SCNPhysicsSliderJoint', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'UILexicon', 
'MTLArrayType', 'AVAudioUnitReverb', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIPrinter', 'NSOrderedSet', 'CBAttribute', 'PKPushPayload', 'NSIncrementalStoreNode', 'EKEventStore', 'MPRemoteCommandEvent', 'UISlider', 'UIBlurEffect', 'CKAsset', 'AVCaptureInput', 'AVAudioEngine', 'MTLVertexDescriptor', 'SKPhysicsBody', 'NSOperation', 'PKPaymentPass', 'UIImageAsset', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CABTMIDILocalPeripheralViewController', 'NEVPNManager', 'HKQuery', 'SCNPhysicsContact', 'CBMutableService', 'AVSampleBufferDisplayLayer', 'SCNSceneSource', 'SKLightNode', 'CKDiscoveredUserInfo', 'NSMutableArray', 'MTLDepthStencilDescriptor', 'MTLArgument', 'NSMassFormatter', 'CIRectangleFeature', 'PKPushRegistry', 'NEVPNConnection', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'HKWorkout', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'MPChangePlaybackRateCommandEvent', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'AVAudioUnitSampler', 'NSItemProvider', 'SKUniform', 'MPMediaPickerController', 'CKOperation', 'MTLRenderPipelineDescriptor', 'EAWiFiUnconfiguredAccessory', 'NSFileCoordinator', 'SKRequest', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'SCNCone', 'MPMediaItem', 'SCNMaterial', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NEVPNProtocol', 'NSCompoundPredicate', 'HKHealthStore', 'MKMultiPoint', 'HKSampleType', 'UIPrintFormatter', 'AVAudioUnitEQFilterParameters', 'SKView', 'NSConstantString', 'UIPopoverController', 'CKDatabase', 'AVMetadataFaceObject', 'UIAccelerometer', 'EKEventViewController', 'CMAltitudeData', 'MTLStencilDescriptor', 'UISwipeGestureRecognizer', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'NSAsynchronousFetchRequest', 
'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'CKNotificationID', 'AVAudioSession', 'HKUnit', 'NSNull', 'NSPersistentStoreResult', 'MKCircleView', 'AVAudioChannelLayout', 'NEVPNProtocolIKEv2', 'WKProcessPool', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'CIQRCodeFeature', 'AVMutableVideoComposition', 'PHFetchResult', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'PKPaymentRequest', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'AVAudioUnitTimePitch', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'UIDocumentPickerViewController', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'AVAudioFile', 'UIActivity', 'NSPipe', 'MKShape', 'NSMergeConflict', 'CIImage', 'HKObject', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 'GCControllerButtonInput', 'CKMarkNotificationsReadOperation', 'CKSubscription', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'HMAccessoryBrowser', 'GKTurnBasedEventHandler', 'UIWebView', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'WKNavigationResponse', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'HKStatistics', 'UILocalNotification', 'HKBiologicalSexObject', 'AVURLAsset', 'CBPeripheral', 'NSDateComponentsFormatter', 'SKSpriteNode', 'UIAccessibilityElement', 'AVAssetWriterInputGroup', 'HMZone', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'PHPhotoLibrary', 'CBService', 'UIDocumentMenuViewController', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'ASIdentifierManager', 
'AVAssetResourceLoadingRequest', 'SLComposeServiceViewController', 'UIPinchGestureRecognizer', 'PHObject', 'NSExtensionItem', 'HKSampleQuery', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MKRouteStep', 'SCNCapsule', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'UITraitCollection', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'MTLRenderPipelineReflection', 'CLHeading', 'CLVisit', 'MKDirectionsResponse', 'HMAccessory', 'MTLStructType', 'UITextView', 'CMMagnetometerData', 'UICollisionBehavior', 'UIProgressView', 'CKServerChangeToken', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'NSPropertyMapping', 'ALAssetsFilter', 'SK3DNode', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'SCNSphere', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSFileProviderExtension', 'NSDecimalNumberHandler', 'NSAtomicStoreCacheNode', 'NSAtomicStore', 'EKAlarm', 'CKNotificationInfo', 'AVAudioUnitEQ', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 'MTLVertexAttribute', 'NSExpressionDescription', 'HKStatisticsCollectionQuery', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 'UIAlertAction', 'NSPropertyListSerialization', 'GKPeerPickerController', 'UIUserNotificationSettings', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'MTLRenderPassColorAttachmentDescriptorArray', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'UIViewController', 'PKPaymentToken', 'MFMessageComposeViewController', 'AVAudioInputNode', 'NSDataDetector', 'CABTMIDICentralViewController', 'AVAudioUnitMIDIInstrument', 'AVCaptureVideoPreviewLayer', 'AVAssetWriterInputPassDescription', 'MPChangePlaybackRateCommand', 'NSURLComponents', 'CAMetalLayer', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 
'CKDiscoverUserInfosOperation', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'SCNGeometryElement', 'NCWidgetController', 'CAEmitterLayer', 'MKUserLocation', 'UIImagePickerController', 'CIFeature', 'AVCaptureDeviceInput', 'ALAsset', 'NSURLSessionDownloadTask', 'SCNPhysicsHingeJoint', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'SCNMaterialProperty', 'UIFont', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'WKWebViewConfiguration', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'AVComposition', 'CKDiscoverAllContactsOperation', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'UIMutableUserNotificationCategory', 'NSLock', 'NEVPNProtocolIPSec', 'ADBannerView', 'UIDocumentPickerExtensionViewController', 'UIActivityIndicatorView', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIAccessibilityCustomAction', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'HKBloodTypeObject', 'MTLVertexBufferLayoutDescriptorArray', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'PKPaymentAuthorizationViewController', 'UIScrollView', 'WKNavigationAction', 'AVPlayerItemMetadataOutput', 'EKRecurrenceDayOfWeek', 'NSNumberFormatter', 'MTLComputePipelineReflection', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'SCNSkinner', 'AVCaptureMetadataOutput', 'SCNAnimationEvent', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'WKBackForwardListItem', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKCalendar', 'HKWorkoutEvent', 'NSMutableURLRequest', 'UIVideoEditorController', 'HMTimerTrigger', 'AVAudioUnitVarispeed', 'UIDynamicAnimator', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 
'NSProxy', 'GLKBaseEffect', 'UIPushBehavior', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'WKUserScript', 'EKEvent', 'NSDateFormatter', 'NSAsynchronousFetchResult', 'AVAssetWriterInputPixelBufferAdaptor', 'UIVisualEffect', 'UICollectionViewCell', 'UITextField', 'CLPlacemark', 'MPPlayableContentManager', 'AVCaptureOutput', 'HMCharacteristicWriteAction', 'CKModifySubscriptionsOperation', 'NSPropertyDescription', 'GCGamepad', 'UIMarkupTextPrintFormatter', 'SCNTube', 'NSPersistentStoreCoordinator', 'AVAudioEnvironmentNode', 'GKMatchmaker', 'CIContext', 'NSThread', 'SLComposeSheetConfigurationItem', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 'GKGameCenterViewController', 'UIPrintPaper', 'SCNPhysicsBallSocketJoint', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'AVAudioIONode', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CKRecordZoneNotification', 'SCNScene', 'MPMovieAccessLogEvent', 'CKFetchSubscriptionsOperation', 'CAEmitterCell', 'AVAudioUnitTimeEffect', 'HMCharacteristicMetadata', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'UIUserNotificationCategory', 'SCNPhysicsVehicleWheel', 'HKCategoryType', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'MTLRenderPipelineColorAttachmentDescriptor', 'SCNPhysicsShape', 'UISearchController', 'SCNPhysicsBody', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'SKNode', 'UIMutableUserNotificationAction', 'SCNProgram', 'AVSpeechSynthesisVoice', 'CMAltimeter', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SCNPhysicsField', 'CKReference', 'LAContext', 'CKRecordID', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 
'AVAudioBuffer', 'CIColorKernel', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'CKQueryNotification', 'PHAdjustmentData', 'EASession', 'AVAssetResourceRenewalRequest', 'UIInputView', 'NSFileWrapper', 'UIResponder', 'NSPointerFunctions', 'UIKeyCommand', 'NSHTTPCookieStorage', 'AVMediaSelectionOption', 'NSRunLoop', 'NSFileAccessIntent', 'CAAnimationGroup', 'MKCircle', 'UIAlertController', 'NSMigrationManager', 'NSDateIntervalFormatter', 'UICollectionViewUpdateItem', 'CKDatabaseOperation', 'PHImageRequestOptions', 'SKReachConstraints', 'CKRecord', 'CAInterAppAudioSwitcherView', 'WKWindowFeatures', 'GKInvite', 'NSMutableData', 'PHAssetCollectionChangeRequest', 'NSMutableParagraphStyle', 'UIDynamicBehavior', 'GLKEffectProperty', 'CKFetchRecordChangesOperation', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'MPContentItem', 'HMAction', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'CKContainer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'AVCaptureMovieFileOutput', 'HMRoom', 'GKChallenge', 'UITextRange', 'NSURLProtectionSpace', 'ACAccountStore', 'MPSkipIntervalCommand', 'NSComparisonPredicate', 'HMHome', 'PHVideoRequestOptions', 'NSOutputStream', 'MPSkipIntervalCommandEvent', 'PKAddPassesViewController', 'UITextSelectionRect', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'UICollectionViewController', 'AVAudioNode', 'MCNearbyServiceAdvertiser', 'NSObject', 'PHAsset', 'GKLeaderboardViewController', 'CKQueryCursor', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'CMPedometerData', 'HMService', 'SKFieldNode', 'GKAchievement', 'WKUserContentController', 'AVAssetTrack', 'TWRequest', 'SKLabelNode', 'AVCaptureBracketedStillImageSettings', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'MTLTextureDescriptor', 'MTLVertexBufferLayoutDescriptor', 
'MPFeedbackCommandEvent', 'UIUserNotificationAction', 'HKStatisticsQuery', 'SCNParticleSystem', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'HKObserverQuery', 'UIPopoverPresentationController', 'CKQueryOperation', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'NSURLQueryItem', 'MTLStructMember', 'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'GKTurnBasedParticipant', 'NSURLProtocol', 'NSUserDefaults', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'CKModifyRecordZonesOperation', 'WKPreferences', 'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'PHContentEditingInputRequestOptions', 'GKMatch', 'CIColor', 'UIDictationPhrase']) +COCOA_PROTOCOLS = set(['SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'MTLComputeCommandEncoder', 'SCNSceneExportDelegate', 'UISearchResultsUpdating', 'MFMailComposeViewControllerDelegate', 'MTLBlitCommandEncoder', 'NSDecimalNumberBehaviors', 'PHContentEditingController', 'NSMutableCopying', 'UIActionSheetDelegate', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UIAdaptivePresentationControllerDelegate', 'GKAchievementViewControllerDelegate', 'UIViewControllerTransitionCoordinator', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'UITableViewDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'AVAudio3DMixing', 'AVPlayerItemLegibleOutputPushDelegate', 
'ADInterstitialAdDelegate', 'HMAccessoryBrowserDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'CKRecordValue', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'SKSceneDelegate', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'MyClassJavaScriptMethods', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'SCNBoundingVolume', 'NSPortDelegate', 'UIContentContainer', 'SCNNodeRendererDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'HMAccessoryDelegate', 'UIPageViewControllerDataSource', 'SCNSceneRendererDelegate', 'SCNPhysicsContactDelegate', 'MKMapViewDelegate', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'MTLRenderCommandEncoder', 'PKPaymentAuthorizationViewControllerDelegate', 'UIToolbarDelegate', 'WKUIDelegate', 'SCNActionable', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'PKPushRegistryDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSLayoutManagerDelegate', 'MTLLibrary', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'MTLResource', 'NSDiscardableContent', 'UITextFieldDelegate', 'MTLBuffer', 'MTLSamplerState', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UISplitViewControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UITraitEnvironment', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'MPPlayableContentDataSource', 'MTLComputePipelineState', 'NSURLSessionDelegate', 'MTLCommandBuffer', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 
'MTLDepthStencilState', 'GKTurnBasedMatchmakerViewControllerDelegate', 'MPPlayableContentDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'UITextDocumentProxy', 'MTLDrawable', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'AVAudioStereoMixing', 'UIViewControllerContextTransitioning', 'UITextInput', 'CBPeripheralManagerDelegate', 'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'SCNProgramDelegate', 'AVVideoCompositing', 'SCNAnimatable', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'NSExtensionRequestHandling', 'UITextSelecting', 'UIPrinterPickerControllerDelegate', 'NCWidgetProviding', 'MTLCommandEncoder', 'NSURLProtocolClient', 'MFMessageComposeViewControllerDelegate', 'UIVideoEditorControllerDelegate', 'WKNavigationDelegate', 'GKSavedGameListener', 'UITableViewDataSource', 'MTLFunction', 'EKCalendarChooserDelegate', 'NSUserActivityDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'HMHomeDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIDocumentPickerDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'SCNSceneRenderer', 'MTLTexture', 'GLKViewDelegate', 'EAAccessoryDelegate', 'WKScriptMessageHandler', 'PHPhotoLibraryChangeObserver', 'NSKeyedUnarchiverDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'NSMachPortDelegate', 'SCNShadable', 'UIPopoverBackgroundViewMethods', 'UIDocumentMenuDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'AVAudioMixing', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'MTLParallelRenderCommandEncoder', 
'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UIPopoverPresentationControllerDelegate', 'UIDynamicAnimatorDelegate', 'NSTextAttachmentContainer', 'MKAnnotation', 'UIAccessibilityIdentification', 'UICoordinateSpace', 'ABNewPersonViewControllerDelegate', 'MTLDevice', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'HMHomeManagerDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'UISearchControllerDelegate', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'UITextInputTraits', 'MTLRenderPipelineState', 'GKVoiceChatClient', 'UIKeyInput', 'UICollectionViewDataSource', 'SCNTechniqueSupport', 'NSLocking', 'AVCaptureFileOutputDelegate', 'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'MTLCommandQueue', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'CAMetalDrawable', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'NSURLSessionDataDelegate']) +COCOA_PRIMITIVES = set(['ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', '_GLKMatrix3', '_GLKMatrix2', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'OpaqueCMBlockBuffer', 'CGTextDrawingMode', 'EKErrorCode', 'gss_buffer_desc_struct', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'AudioFile_SMPTE_Time', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 
'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', '__CFBinaryHeap', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', 'SFNTLookupSegment', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__SecCertificate', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', 'AnchorPoint', 'JustTable', '__CFNetService', 'CF_BRIDGED_TYPE', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'KerxSubtableHeader', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', 'OpaqueJSClass', 'CGGradient', 'OpaqueMIDISetup', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', 'MIDIThruConnectionParams', 'BslnFormat0Part', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'AnkrTable', '__SCNetworkProtocol', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'SCNMatrix4', 'KerxControlPointEntry', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'KernStateEntry', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 
'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'AVAudio3DAngularOrientation', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'KernVersion0Header', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', 'MorxChain', '__CTFontCollection', 'STEntryOne', 'STXEntryTwo', 'ExtendedNoteOnEvent', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'GCQuaternion', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'CGImagePropertyOrientation', 'CGPDFScanner', 'OpaqueMusicEventIterator', 'sfntDescriptorHeader', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'BslnFormat1Part', 'sfntFontDescriptor', 'KernSimpleArrayHeader', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 
'OpaqueAudioQueue', 'PropLookupSingle', 'CGInterpolationQuality', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', '__CTRubyAnnotation', 'SCNVector4', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__GLsync', '__CFStringTokenizer', 'JustWidthDeltaEntry', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', '__CVMetalTextureCache', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'AVAudio3DPoint', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 
'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', 'CGDataConsumerCallbacks', 'ALMXHeader', 'CGLineCap', 'MIDIControlTransform', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'AVAudio3DVectorOrientation', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'OpaqueVTCompressionSession', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'VTDecompressionOutputCallbackRecord', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 
'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'OpaqueVTDecompressionSession', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'PropTable', 'KernOrderedListEntry', 'CF_BRIDGED_MUTABLE_TYPE', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'SCNVector3', 'OpaqueAudioConverter', 'MIDIRawData', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', '__SecAccessControl', 'JustPCAction', 'OpaqueVTFrameSilo', 'OpaqueVTMultiPassStorage', 'CGPathElementType', 'AudioFormatListItem', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'sfntCMapHeader', '__CFURLEnumerator', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader']) if __name__ == '__main__': # pragma: no cover import os import re - FRAMEWORKS_PATH = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.0.sdk/System/Library/Frameworks/' + FRAMEWORKS_PATH = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.1.sdk/System/Library/Frameworks/' frameworks = os.listdir(FRAMEWORKS_PATH) all_interfaces = set() -- cgit v1.2.1 From db55976896fc07240e1ad954f0c3ac96d85cfd6a Mon Sep 17 00:00:00 2001 From: Muthiah Annamalai Date: Mon, 2 Feb 2015 21:30:36 -0500 Subject: Add syntax highlighting for Ezhil - a Tamil-script based programming language. 
Ezhil - http://ezhillang.org --- pygments/lexers/_mapping.py | 1 + pygments/lexers/ezhil.py | 62 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 63 insertions(+) create mode 100644 pygments/lexers/ezhil.py diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 6ed4b620..02ea4c10 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -116,6 +116,7 @@ LEXERS = { 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), 'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)), 'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)), + 'EzhilLexer': ('pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)), 'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)), 'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)), 'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)), diff --git a/pygments/lexers/ezhil.py b/pygments/lexers/ezhil.py new file mode 100644 index 00000000..cb825270 --- /dev/null +++ b/pygments/lexers/ezhil.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.ezhil + ~~~~~~~~~~~~~~~~~~~~~~ + + Pygments lexers for Ezhil language. + + :copyright: Copyright 2015 Muthiah Annamalai + :license: BSD, see LICENSE for details. 
+""" + +import re +from pygments.lexer import RegexLexer, include, words +from pygments.token import Keyword, Text, Comment, Name +from pygments.token import String, Number, Punctuation, Operator + +__all__ = ['EzhilLexer'] + +class EzhilLexer(RegexLexer): + """ + *New in Pygments.* + """ + name = 'Ezhil' + aliases = ['ezhil'] + filenames = ['*.n'] + mimetypes = ['text/x-ezhil'] + + tokens = { + 'root': [ + include('keywords'), + (r'^#.*\n', Comment.Single), + (r'#.*?\n', Comment.Single), + (r'[@|+|/|*|!=|,|^|\-|%|<|>|<=|>=|=|==]',Operator), + (r'(assert|min|max)',Name.Builtin), + (r'(True|False)\b', Keyword.Constant), + (r'[^\S\n]+', Text), + include('identifier'), + include('whitespace'), + include('literal'), + (r'[(){}\[\],:&%;.]', Punctuation), + ], + 'keywords': [ + (u'பதிப்பி|தேர்ந்தெடு|தேர்வு|ஏதேனில்|ஆனால்|இல்லைஆனால்|இல்லை|ஆக|ஒவ்வொன்றாக|இல்|வரை|செய்|முடியேனில்|பின்கொடு|முடி|நிரல்பாகம்|தொடர்|நிறுத்து|நிரல்பாகம்', Keyword), + ], + 'identifier': [ + (u'[a-zA-Z_|அ|ஆ|இ|ஈ|உ|ஊ|எ|ஏ|ஐ|ஒ|ஓ|ஔ|ஃ|க்|ச்|ட்|த்|ப்|ற்|ங்|ஞ்|ண்|ந்|ம்|ன்|ய்|ர்|ல்|வ்|ழ்|ள்|க|ச|ட|த|ப|ற|ஞ|ங|ண|ந|ம|ன|ய|ர|ல|வ|ழ|ள|ஜ|ஷ|ஸ|ஹ|க|கா|கி|கீ|கு|கூ|கெ|கே|கை|கொ|கோ|கௌ|ச|சா|சி|சீ|சு|சூ|செ|சே|சை|சொ|சோ|சௌ|ட|டா|டி|டீ|டு|டூ|டெ|டே|டை|டொ|டோ|டௌ|த|தா|தி|தீ|து|தூ|தெ|தே|தை|தொ|தோ|தௌ|ப|பா|பி|பீ|பு|பூ|பெ|பே|பை|பொ|போ|பௌ|ற|றா|றி|றீ|று|றூ|றெ|றே|றை|றொ|றோ|றௌ|ஞ|ஞா|ஞி|ஞீ|ஞு|ஞூ|ஞெ|ஞே|ஞை|ஞொ|ஞோ|ஞௌ|ங|ஙா|ஙி|ஙீ|ஙு|ஙூ|ஙெ|ஙே|ஙை|ஙொ|ஙோ|ஙௌ|ண|ணா|ணி|ணீ|ணு|ணூ|ணெ|ணே|ணை|ணொ|ணோ|ணௌ|ந|நா|நி|நீ|நு|நூ|நெ|நே|நை|நொ|நோ|நௌ|ம|மா|மி|மீ|மு|மூ|மெ|மே|மை|மொ|மோ|மௌ|ன|னா|னி|னீ|னு|னூ|னெ|னே|னை|னொ|னோ|னௌ|ய|யா|யி|யீ|யு|யூ|யெ|யே|யை|யொ|யோ|யௌ|ர|ரா|ரி|ரீ|ரு|ரூ|ரெ|ரே|ரை|ரொ|ரோ|ரௌ|ல|லா|லி|லீ|லு|லூ|லெ|லே|லை|லொ|லோ|லௌ|வ|வா|வி|வீ|வு|வூ|வெ|வே|வை|வொ|வோ|வௌ|ழ|ழா|ழி|ழீ|ழு|ழூ|ழெ|ழே|ழை|ழொ|ழோ|ழௌ|ள|ளா|ளி|ளீ|ளு|ளூ|ளெ|ளே|ளை|ளொ|ளோ|ளௌ][a-zA-Z0-9.]*',Name), + ], + 'whitespace': [ + (r'\n', Text), + (r'\s+', Text), + ], + 'literal': [ + (r'".*"', String), + (r'0x[0-9A-Fa-f]+t?', Number.Hex), + (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), + (r'[0-9]+L?', Number.Integer), + ] + } + + def 
__init__(self, **options): + super(EzhilLexer, self).__init__(**options) + self.encoding = options.get('encoding', 'utf-8') -- cgit v1.2.1 From a9dc461ae44c37357ae6543944c09d06657a95bb Mon Sep 17 00:00:00 2001 From: Muthiah Annamalai Date: Mon, 2 Feb 2015 22:39:41 -0500 Subject: 1) add unittest for Ezhil Pygments 2) add example Ezhil script 'ezhil_primefactors.n' --- doc/languages.rst | 1 + tests/examplefiles/ezhil_primefactors.n | 150 ++++++++++++++++++++++++++ tests/test_ezhil.py | 186 ++++++++++++++++++++++++++++++++ 3 files changed, 337 insertions(+) create mode 100644 tests/examplefiles/ezhil_primefactors.n create mode 100644 tests/test_ezhil.py diff --git a/doc/languages.rst b/doc/languages.rst index 1d5c3155..e6070b77 100644 --- a/doc/languages.rst +++ b/doc/languages.rst @@ -32,6 +32,7 @@ Programming languages * Delphi * Dylan * Erlang +* `Ezhil `_ Ezhil - A Tamil programming language * Factor * Fancy * Fortran diff --git a/tests/examplefiles/ezhil_primefactors.n b/tests/examplefiles/ezhil_primefactors.n new file mode 100644 index 00000000..96c82b4c --- /dev/null +++ b/tests/examplefiles/ezhil_primefactors.n @@ -0,0 +1,150 @@ +# (C) முத்தையா அண்ணாமலை 2013 +# (A) என். 
சொக்கன் +# எழில் தமிழ் நிரலாக்க மொழி உதாரணம் + +## Prime Factors Example +## பகா எண் கூறுகளைக் கண்டறியும் உதாரணம் + +## இது நிரல் தரப்பட்ட எண்ணின் பகாஎண் கூறுகளைக் கண்டறியும் + +நிரல்பாகம் பகாஎண்ணா(எண்1) + + ## இது நிரல்பாகம் தரப்பட்ட எண் பகு எண்ணா அல்லது பகா எண்ணா என்று கண்டறிந்து சொல்லும் + ## பகுஎண் என்றால் 0 திரும்பத் தரப்படும் + ## பகாஎண் என்றால் 1 திரும்பத் தரப்படும் + + @(எண்1 < 0) ஆனால் + + ## எதிர்மறை எண்களை நேராக்குதல் + + எண்1 = எண்1 * (-1) + + முடி + + @(எண்1 < 2) ஆனால் + + ## பூஜ்ஜியம், ஒன்று ஆகியவை பகா எண்கள் அல்ல + + பின்கொடு 0 + + முடி + + @(எண்1 == 2) ஆனால் + + ## இரண்டு என்ற எண் ஒரு பகா எண் + + பின்கொடு 1 + + முடி + + மீதம் = எண்1%2 + + @(மீதம் == 0) ஆனால் + + ## இரட்டைப்படை எண், ஆகவே, இது பகா எண் அல்ல + + பின்கொடு 0 + + முடி + + எண்1வர்க்கமூலம் = எண்1^0.5 + + @(எண்2 = 3, எண்2 <= எண்1வர்க்கமூலம், எண்2 = எண்2 + 2) ஆக + + மீதம்1 = எண்1%எண்2 + + @(மீதம்1 == 0) ஆனால் + + ## ஏதேனும் ஓர் எண்ணால் முழுமையாக வகுபட்டுவிட்டது, ஆகவே அது பகா எண் அல்ல + + பின்கொடு 0 + + முடி + + முடி + + பின்கொடு 1 + +முடி + +நிரல்பாகம் பகுத்தெடு(எண்1) + + ## இது எண் தரப்பட்ட எண்ணின் பகா எண் கூறுகளைக் கண்டறிந்து பட்டியல் இடும் + + கூறுகள் = பட்டியல்() + + @(எண்1 < 0) ஆனால் + + ## எதிர்மறை எண்களை நேராக்குதல் + + எண்1 = எண்1 * (-1) + + முடி + + @(எண்1 <= 1) ஆனால் + + ## ஒன்று அல்லது அதற்குக் குறைவான எண்களுக்குப் பகா எண் விகிதம் கண்டறியமுடியாது + + பின்கொடு கூறுகள் + + முடி + + @(பகாஎண்ணா(எண்1) == 1) ஆனால் + + ## தரப்பட்ட எண்ணே பகா எண்ணாக அமைந்துவிட்டால், அதற்கு அதுவே பகாஎண் கூறு ஆகும் + + பின்இணை(கூறுகள், எண்1) + பின்கொடு கூறுகள் + + முடி + + தாற்காலிகஎண் = எண்1 + + எண்2 = 2 + + @(எண்2 <= தாற்காலிகஎண்) வரை + + விடை1 = பகாஎண்ணா(எண்2) + மீண்டும்தொடங்கு = 0 + + @(விடை1 == 1) ஆனால் + + விடை2 = தாற்காலிகஎண்%எண்2 + + @(விடை2 == 0) ஆனால் + + ## பகா எண்ணால் முழுமையாக வகுபட்டுள்ளது, அதனைப் பட்டியலில் இணைக்கிறோம் + + பின்இணை(கூறுகள், எண்2) + தாற்காலிகஎண் = தாற்காலிகஎண்/எண்2 + + ## மீண்டும் இரண்டில் தொடங்கி இதே கணக்கிடுதலைத் தொடரவேண்டும் + + எண்2 = 2 + மீண்டும்தொடங்கு = 1 + + முடி + + முடி 
+ + @(மீண்டும்தொடங்கு == 0) ஆனால் + + ## அடுத்த எண்ணைத் தேர்ந்தெடுத்துக் கணக்கிடுதலைத் தொடரவேண்டும் + + எண்2 = எண்2 + 1 + + முடி + + முடி + + பின்கொடு கூறுகள் + +முடி + +அ = int(உள்ளீடு("உங்களுக்குப் பிடித்த ஓர் எண்ணைத் தாருங்கள்: ")) + +பகாஎண்கூறுகள் = பட்டியல்() + +பகாஎண்கூறுகள் = பகுத்தெடு(அ) + +பதிப்பி "நீங்கள் தந்த எண்ணின் பகா எண் கூறுகள் இவை: ", பகாஎண்கூறுகள் diff --git a/tests/test_ezhil.py b/tests/test_ezhil.py new file mode 100644 index 00000000..042dcbb0 --- /dev/null +++ b/tests/test_ezhil.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +""" + Basic EzhilLexer Test + ~~~~~~~~~~~~~~~~~~~~ + + :copyright: Copyright 2015 Muthiah Annamalai + :license: BSD, see LICENSE for details. +""" + +import unittest + +from pygments.token import Operator, Number, Text, Token +from pygments.lexers import EzhilLexer + + +class EzhilTest(unittest.TestCase): + + def setUp(self): + self.lexer = EzhilLexer() + self.maxDiff = None + + def testSum(self): + fragment = u'1+3\n' + tokens = [ + (Number.Integer, u'1'), + (Operator, u'+'), + (Number.Integer, u'3'), + (Text, u'\n'), + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + + def testGCDExpr(self): + fragment = u'1^3+(5-5)*gcd(a,b)\n' + tokens = [ + (Token.Number.Integer,u'1'), + (Token.Operator,u'^'), + (Token.Literal.Number.Integer, u'3'), + (Token.Operator, u'+'), + (Token.Punctuation, u'('), + (Token.Literal.Number.Integer, u'5'), + (Token.Operator, u'-'), + (Token.Literal.Number.Integer, u'5'), + (Token.Punctuation, u')'), + (Token.Operator, u'*'), + (Token.Name, u'gcd'), + (Token.Punctuation, u'('), + (Token.Name, u'a'), + (Token.Operator, u','), + (Token.Name, u'b'), + (Token.Punctuation, u')'), + (Token.Text, u'\n') + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + + def testIfStatement(self): + fragment = u"""@( 0 > 3 ) ஆனால் + பதிப்பி "wont print" +முடி""" + tokens = [ + (Token.Operator, u'@'), + (Token.Punctuation, u'('), + (Token.Text, u' '), + 
(Token.Literal.Number.Integer,u'0'), + (Token.Text, u' '), + (Token.Operator,u'>'), + (Token.Text, u' '), + (Token.Literal.Number.Integer, u'3'), + (Token.Text, u' '), + (Token.Punctuation, u')'), + (Token.Text, u' '), + (Token.Keyword, u'ஆனால்'), + (Token.Text, u'\n'), + (Token.Text, u'\t'), + (Token.Keyword, u'பதிப்பி'), + (Token.Text, u' '), + (Token.Literal.String, u'"wont print"'), + (Token.Text, u'\t'), + (Token.Text, u'\n'), + (Token.Keyword, u'முடி'), + (Token.Text, u'\n') + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + + def testFunction(self): + fragment = u"""# (C) முத்தையா அண்ணாமலை 2013, 2015 +நிரல்பாகம் gcd ( x, y ) + மு = max(x,y) + q = min(x,y) + + @( q == 0 ) ஆனால் + பின்கொடு மு + முடி + பின்கொடு gcd( மு - q , q ) +முடி\n""" + tokens = [ + (Token.Comment.Single, + u'# (C) \u0bae\u0bc1\u0ba4\u0bcd\u0ba4\u0bc8\u0baf\u0bbe \u0b85\u0ba3\u0bcd\u0ba3\u0bbe\u0bae\u0bb2\u0bc8 2013, 2015\n'), + (Token.Keyword,u'நிரல்பாகம்'), + (Token.Text, u' '), + (Token.Name, u'gcd'), + (Token.Text, u' '), + (Token.Punctuation, u'('), + (Token.Text, u' '), + (Token.Name, u'x'), + (Token.Operator, u','), + (Token.Text, u' '), + (Token.Name, u'y'), + (Token.Text, u' '), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Name, u'\u0bae'), + (Token.Name, u'\u0bc1'), + (Token.Text, u' '), + (Token.Operator, u'='), + (Token.Text, u' '), + (Token.Name.Builtin, u'max'), + (Token.Punctuation, u'('), + (Token.Name, u'x'), + (Token.Operator, u','), + (Token.Name, u'y'), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Name, u'q'), + (Token.Text, u' '), + (Token.Operator, u'='), + (Token.Text, u' '), + (Token.Name.Builtin, u'min'), + (Token.Punctuation, u'('), + (Token.Name, u'x'), + (Token.Operator, u','), + (Token.Name, u'y'), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Operator, u'@'), + (Token.Punctuation, u'('), + (Token.Text, u' 
'), + (Token.Name, u'q'), + (Token.Text, u' '), + (Token.Operator, u'='), + (Token.Operator, u'='), + (Token.Text, u' '), + (Token.Literal.Number.Integer, u'0'), + (Token.Text, u' '), + (Token.Punctuation, u')'), + (Token.Text, u' '), + (Token.Keyword, u'ஆனால்'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'பின்கொடு'), + (Token.Text, u' '), + (Token.Name, u'\u0bae'), + (Token.Name, u'\u0bc1'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'முடி'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'\u0baa\u0bbf\u0ba9\u0bcd\u0b95\u0bca\u0b9f\u0bc1'), + (Token.Text, u' '), + (Token.Name, u'gcd'), + (Token.Punctuation, u'('), + (Token.Text, u' '), + (Token.Name, u'\u0bae'), + (Token.Name, u'\u0bc1'), + (Token.Text, u' '), + (Token.Operator, u'-'), + (Token.Text, u' '), + (Token.Name, u'q'), + (Token.Text, u' '), + (Token.Operator, u','), + (Token.Text, u' '), + (Token.Name, u'q'), + (Token.Text, u' '), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Keyword, u'முடி'), #u'\u0bae\u0bc1\u0b9f\u0bbf'), + (Token.Text, u'\n') + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + +if __name__ == "__main__": + unittest.main() -- cgit v1.2.1 From f90dca50d4015d456041280e24766d4fa657ca80 Mon Sep 17 00:00:00 2001 From: Muthiah Annamalai Date: Wed, 4 Feb 2015 03:02:06 -0500 Subject: 1) pygments/lexers/ezhil.py for the following in response to review by David Corbett a) lose the 2 types of comment lexical scanning regex. remove ? at end of comment. Add a type for '@' b) use words() function for builtins c) remove whitespace lexer class d) streamline punctuation class e) unable to modify keywords lexer class to use words() function, so keeping it f) streamline identifier use with _taletters variable g) remove hex lexer class h) update float and integer lexer class. N.B. 
Ezhil does not use Tamil-number formats - instead we use standard Arabic/Hindu system 2) Update tests for fallout from #1 --- pygments/lexers/ezhil.py | 39 +++++++++++++++++++++------------------ tests/test_ezhil.py | 12 ++++-------- 2 files changed, 25 insertions(+), 26 deletions(-) diff --git a/pygments/lexers/ezhil.py b/pygments/lexers/ezhil.py index cb825270..4fba0340 100644 --- a/pygments/lexers/ezhil.py +++ b/pygments/lexers/ezhil.py @@ -16,6 +16,9 @@ from pygments.token import String, Number, Punctuation, Operator __all__ = ['EzhilLexer'] +# see constant from open-tamil package: tamil.utf8.tamil_letters +_taletters= u'a-zA-Z_|அ|ஆ|இ|ஈ|உ|ஊ|எ|ஏ|ஐ|ஒ|ஓ|ஔ|ஃ|க்|ச்|ட்|த்|ப்|ற்|ங்|ஞ்|ண்|ந்|ம்|ன்|ய்|ர்|ல்|வ்|ழ்|ள்|க|ச|ட|த|ப|ற|ஞ|ங|ண|ந|ம|ன|ய|ர|ல|வ|ழ|ள|ஜ|ஷ|ஸ|ஹ|க|கா|கி|கீ|கு|கூ|கெ|கே|கை|கொ|கோ|கௌ|ச|சா|சி|சீ|சு|சூ|செ|சே|சை|சொ|சோ|சௌ|ட|டா|டி|டீ|டு|டூ|டெ|டே|டை|டொ|டோ|டௌ|த|தா|தி|தீ|து|தூ|தெ|தே|தை|தொ|தோ|தௌ|ப|பா|பி|பீ|பு|பூ|பெ|பே|பை|பொ|போ|பௌ|ற|றா|றி|றீ|று|றூ|றெ|றே|றை|றொ|றோ|றௌ|ஞ|ஞா|ஞி|ஞீ|ஞு|ஞூ|ஞெ|ஞே|ஞை|ஞொ|ஞோ|ஞௌ|ங|ஙா|ஙி|ஙீ|ஙு|ஙூ|ஙெ|ஙே|ஙை|ஙொ|ஙோ|ஙௌ|ண|ணா|ணி|ணீ|ணு|ணூ|ணெ|ணே|ணை|ணொ|ணோ|ணௌ|ந|நா|நி|நீ|நு|நூ|நெ|நே|நை|நொ|நோ|நௌ|ம|மா|மி|மீ|மு|மூ|மெ|மே|மை|மொ|மோ|மௌ|ன|னா|னி|னீ|னு|னூ|னெ|னே|னை|னொ|னோ|னௌ|ய|யா|யி|யீ|யு|யூ|யெ|யே|யை|யொ|யோ|யௌ|ர|ரா|ரி|ரீ|ரு|ரூ|ரெ|ரே|ரை|ரொ|ரோ|ரௌ|ல|லா|லி|லீ|லு|லூ|லெ|லே|லை|லொ|லோ|லௌ|வ|வா|வி|வீ|வு|வூ|வெ|வே|வை|வொ|வோ|வௌ|ழ|ழா|ழி|ழீ|ழு|ழூ|ழெ|ழே|ழை|ழொ|ழோ|ழௌ|ள|ளா|ளி|ளீ|ளு|ளூ|ளெ|ளே|ளை|ளொ|ளோ|ளௌ|ௐ|ஜ|ஜா|ஜி|ஜீ|ஜு|ஜூ|ஜெ|ஜே|ஜை|ஜொ|ஜோ|ஜௌ|ஷ|ஷா|ஷி|ஷீ|ஷு|ஷூ|ஷெ|ஷே|ஷை|ஷொ|ஷோ|ஷௌ|ஸ|ஸா|ஸி|ஸீ|ஸு|ஸூ|ஸெ|ஸே|ஸை|ஸொ|ஸோ|ஸௌ|ஹ|ஹா|ஹி|ஹீ|ஹு|ஹூ|ஹெ|ஹே|ஹை|ஹொ|ஹோ|ஹௌ' + class EzhilLexer(RegexLexer): """ *New in Pygments.* @@ -28,35 +31,35 @@ class EzhilLexer(RegexLexer): tokens = { 'root': [ include('keywords'), - (r'^#.*\n', Comment.Single), - (r'#.*?\n', Comment.Single), - (r'[@|+|/|*|!=|,|^|\-|%|<|>|<=|>=|=|==]',Operator), - (r'(assert|min|max)',Name.Builtin), + (r'#.*\n', Comment.Single), + (r'[@+/*,^\-%]|[!<>=]=?|&&?|\|\|?',Operator), + (u'இல்',Operator.Word), + (words(('assert', 'max', 'min', + 
'நீளம்','சரம்_இடமாற்று','சரம்_கண்டுபிடி', + 'பட்டியல்','பின்இணை','வரிசைப்படுத்து', + 'எடு','தலைகீழ்','நீட்டிக்க','நுழைக்க','வை', + 'கோப்பை_திற','கோப்பை_எழுது','கோப்பை_மூடு', + 'pi','sin','cos','tan','sqrt','hypot','pow','exp','log','log10' + 'min','max','exit', + ), suffix=r'\b'),Name.Builtin), (r'(True|False)\b', Keyword.Constant), (r'[^\S\n]+', Text), include('identifier'), - include('whitespace'), include('literal'), - (r'[(){}\[\],:&%;.]', Punctuation), + (r'[(){}\[\]:;.]', Punctuation), ], 'keywords': [ (u'பதிப்பி|தேர்ந்தெடு|தேர்வு|ஏதேனில்|ஆனால்|இல்லைஆனால்|இல்லை|ஆக|ஒவ்வொன்றாக|இல்|வரை|செய்|முடியேனில்|பின்கொடு|முடி|நிரல்பாகம்|தொடர்|நிறுத்து|நிரல்பாகம்', Keyword), - ], + ], 'identifier': [ - (u'[a-zA-Z_|அ|ஆ|இ|ஈ|உ|ஊ|எ|ஏ|ஐ|ஒ|ஓ|ஔ|ஃ|க்|ச்|ட்|த்|ப்|ற்|ங்|ஞ்|ண்|ந்|ம்|ன்|ய்|ர்|ல்|வ்|ழ்|ள்|க|ச|ட|த|ப|ற|ஞ|ங|ண|ந|ம|ன|ய|ர|ல|வ|ழ|ள|ஜ|ஷ|ஸ|ஹ|க|கா|கி|கீ|கு|கூ|கெ|கே|கை|கொ|கோ|கௌ|ச|சா|சி|சீ|சு|சூ|செ|சே|சை|சொ|சோ|சௌ|ட|டா|டி|டீ|டு|டூ|டெ|டே|டை|டொ|டோ|டௌ|த|தா|தி|தீ|து|தூ|தெ|தே|தை|தொ|தோ|தௌ|ப|பா|பி|பீ|பு|பூ|பெ|பே|பை|பொ|போ|பௌ|ற|றா|றி|றீ|று|றூ|றெ|றே|றை|றொ|றோ|றௌ|ஞ|ஞா|ஞி|ஞீ|ஞு|ஞூ|ஞெ|ஞே|ஞை|ஞொ|ஞோ|ஞௌ|ங|ஙா|ஙி|ஙீ|ஙு|ஙூ|ஙெ|ஙே|ஙை|ஙொ|ஙோ|ஙௌ|ண|ணா|ணி|ணீ|ணு|ணூ|ணெ|ணே|ணை|ணொ|ணோ|ணௌ|ந|நா|நி|நீ|நு|நூ|நெ|நே|நை|நொ|நோ|நௌ|ம|மா|மி|மீ|மு|மூ|மெ|மே|மை|மொ|மோ|மௌ|ன|னா|னி|னீ|னு|னூ|னெ|னே|னை|னொ|னோ|னௌ|ய|யா|யி|யீ|யு|யூ|யெ|யே|யை|யொ|யோ|யௌ|ர|ரா|ரி|ரீ|ரு|ரூ|ரெ|ரே|ரை|ரொ|ரோ|ரௌ|ல|லா|லி|லீ|லு|லூ|லெ|லே|லை|லொ|லோ|லௌ|வ|வா|வி|வீ|வு|வூ|வெ|வே|வை|வொ|வோ|வௌ|ழ|ழா|ழி|ழீ|ழு|ழூ|ழெ|ழே|ழை|ழொ|ழோ|ழௌ|ள|ளா|ளி|ளீ|ளு|ளூ|ளெ|ளே|ளை|ளொ|ளோ|ளௌ][a-zA-Z0-9.]*',Name), + (u'['+_taletters+']['+u'0-9'+_taletters+u']*',Name), ], - 'whitespace': [ - (r'\n', Text), - (r'\s+', Text), - ], 'literal': [ (r'".*"', String), - (r'0x[0-9A-Fa-f]+t?', Number.Hex), - (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), - (r'[0-9]+L?', Number.Integer), - ] - } - + (r'(?u)\d+((\.\d*)?[eE][+-]?\d+|\.\d*)', Number.Float), + (r'(?u)\d+',Number.Integer), + ]} + def __init__(self, **options): super(EzhilLexer, self).__init__(**options) self.encoding = options.get('encoding', 'utf-8') diff 
--git a/tests/test_ezhil.py b/tests/test_ezhil.py index 042dcbb0..23b9cb41 100644 --- a/tests/test_ezhil.py +++ b/tests/test_ezhil.py @@ -109,8 +109,7 @@ class EzhilTest(unittest.TestCase): (Token.Punctuation, u')'), (Token.Text, u'\n'), (Token.Text, u' '), - (Token.Name, u'\u0bae'), - (Token.Name, u'\u0bc1'), + (Token.Name, u'\u0bae\u0bc1'), (Token.Text, u' '), (Token.Operator, u'='), (Token.Text, u' '), @@ -140,8 +139,7 @@ class EzhilTest(unittest.TestCase): (Token.Text, u' '), (Token.Name, u'q'), (Token.Text, u' '), - (Token.Operator, u'='), - (Token.Operator, u'='), + (Token.Operator, u'=='), (Token.Text, u' '), (Token.Literal.Number.Integer, u'0'), (Token.Text, u' '), @@ -152,8 +150,7 @@ class EzhilTest(unittest.TestCase): (Token.Text, u' '), (Token.Keyword, u'பின்கொடு'), (Token.Text, u' '), - (Token.Name, u'\u0bae'), - (Token.Name, u'\u0bc1'), + (Token.Name, u'\u0bae\u0bc1'), (Token.Text, u'\n'), (Token.Text, u' '), (Token.Keyword, u'முடி'), @@ -164,8 +161,7 @@ class EzhilTest(unittest.TestCase): (Token.Name, u'gcd'), (Token.Punctuation, u'('), (Token.Text, u' '), - (Token.Name, u'\u0bae'), - (Token.Name, u'\u0bc1'), + (Token.Name, u'\u0bae\u0bc1'), (Token.Text, u' '), (Token.Operator, u'-'), (Token.Text, u' '), -- cgit v1.2.1 From 7d53204456ec85bb107eb23fea279b1073cb877d Mon Sep 17 00:00:00 2001 From: Muthiah Annamalai Date: Fri, 6 Feb 2015 23:27:26 -0500 Subject: 1. set flags to re.MULTILINE | re.UNICODE 2. Move TALETTERS as class constant. Due to 'grapheme' mapping to Tamil letter nature, I prefer to have the disjunction regexp 3. Ezhil lexical issues addressed within Ezhil implementation; done separately. 4. This pygments lexer is targeted toward canonical Ezhil language. 5. 
Update string regexp as reviewed r'".*?"' --- pygments/lexers/ezhil.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/pygments/lexers/ezhil.py b/pygments/lexers/ezhil.py index 4fba0340..f1d739ce 100644 --- a/pygments/lexers/ezhil.py +++ b/pygments/lexers/ezhil.py @@ -16,18 +16,18 @@ from pygments.token import String, Number, Punctuation, Operator __all__ = ['EzhilLexer'] -# see constant from open-tamil package: tamil.utf8.tamil_letters -_taletters= u'a-zA-Z_|அ|ஆ|இ|ஈ|உ|ஊ|எ|ஏ|ஐ|ஒ|ஓ|ஔ|ஃ|க்|ச்|ட்|த்|ப்|ற்|ங்|ஞ்|ண்|ந்|ம்|ன்|ய்|ர்|ல்|வ்|ழ்|ள்|க|ச|ட|த|ப|ற|ஞ|ங|ண|ந|ம|ன|ய|ர|ல|வ|ழ|ள|ஜ|ஷ|ஸ|ஹ|க|கா|கி|கீ|கு|கூ|கெ|கே|கை|கொ|கோ|கௌ|ச|சா|சி|சீ|சு|சூ|செ|சே|சை|சொ|சோ|சௌ|ட|டா|டி|டீ|டு|டூ|டெ|டே|டை|டொ|டோ|டௌ|த|தா|தி|தீ|து|தூ|தெ|தே|தை|தொ|தோ|தௌ|ப|பா|பி|பீ|பு|பூ|பெ|பே|பை|பொ|போ|பௌ|ற|றா|றி|றீ|று|றூ|றெ|றே|றை|றொ|றோ|றௌ|ஞ|ஞா|ஞி|ஞீ|ஞு|ஞூ|ஞெ|ஞே|ஞை|ஞொ|ஞோ|ஞௌ|ங|ஙா|ஙி|ஙீ|ஙு|ஙூ|ஙெ|ஙே|ஙை|ஙொ|ஙோ|ஙௌ|ண|ணா|ணி|ணீ|ணு|ணூ|ணெ|ணே|ணை|ணொ|ணோ|ணௌ|ந|நா|நி|நீ|நு|நூ|நெ|நே|நை|நொ|நோ|நௌ|ம|மா|மி|மீ|மு|மூ|மெ|மே|மை|மொ|மோ|மௌ|ன|னா|னி|னீ|னு|னூ|னெ|னே|னை|னொ|னோ|னௌ|ய|யா|யி|யீ|யு|யூ|யெ|யே|யை|யொ|யோ|யௌ|ர|ரா|ரி|ரீ|ரு|ரூ|ரெ|ரே|ரை|ரொ|ரோ|ரௌ|ல|லா|லி|லீ|லு|லூ|லெ|லே|லை|லொ|லோ|லௌ|வ|வா|வி|வீ|வு|வூ|வெ|வே|வை|வொ|வோ|வௌ|ழ|ழா|ழி|ழீ|ழு|ழூ|ழெ|ழே|ழை|ழொ|ழோ|ழௌ|ள|ளா|ளி|ளீ|ளு|ளூ|ளெ|ளே|ளை|ளொ|ளோ|ளௌ|ௐ|ஜ|ஜா|ஜி|ஜீ|ஜு|ஜூ|ஜெ|ஜே|ஜை|ஜொ|ஜோ|ஜௌ|ஷ|ஷா|ஷி|ஷீ|ஷு|ஷூ|ஷெ|ஷே|ஷை|ஷொ|ஷோ|ஷௌ|ஸ|ஸா|ஸி|ஸீ|ஸு|ஸூ|ஸெ|ஸே|ஸை|ஸொ|ஸோ|ஸௌ|ஹ|ஹா|ஹி|ஹீ|ஹு|ஹூ|ஹெ|ஹே|ஹை|ஹொ|ஹோ|ஹௌ' - class EzhilLexer(RegexLexer): """ - *New in Pygments.* + *New in Pygments. 
Lexical analyzer for Tamil script based programming language, Ezhil* + *See: http://ezhillang.org/ """ name = 'Ezhil' aliases = ['ezhil'] filenames = ['*.n'] mimetypes = ['text/x-ezhil'] - + flags = re.MULTILINE | re.UNICODE + # see constant from open-tamil package: tamil.utf8.tamil_letters + _TALETTERS = u'a-zA-Z_|அ|ஆ|இ|ஈ|உ|ஊ|எ|ஏ|ஐ|ஒ|ஓ|ஔ|ஃ|க்|ச்|ட்|த்|ப்|ற்|ங்|ஞ்|ண்|ந்|ம்|ன்|ய்|ர்|ல்|வ்|ழ்|ள்|க|ச|ட|த|ப|ற|ஞ|ங|ண|ந|ம|ன|ய|ர|ல|வ|ழ|ள|ஜ|ஷ|ஸ|ஹ|க|கா|கி|கீ|கு|கூ|கெ|கே|கை|கொ|கோ|கௌ|ச|சா|சி|சீ|சு|சூ|செ|சே|சை|சொ|சோ|சௌ|ட|டா|டி|டீ|டு|டூ|டெ|டே|டை|டொ|டோ|டௌ|த|தா|தி|தீ|து|தூ|தெ|தே|தை|தொ|தோ|தௌ|ப|பா|பி|பீ|பு|பூ|பெ|பே|பை|பொ|போ|பௌ|ற|றா|றி|றீ|று|றூ|றெ|றே|றை|றொ|றோ|றௌ|ஞ|ஞா|ஞி|ஞீ|ஞு|ஞூ|ஞெ|ஞே|ஞை|ஞொ|ஞோ|ஞௌ|ங|ஙா|ஙி|ஙீ|ஙு|ஙூ|ஙெ|ஙே|ஙை|ஙொ|ஙோ|ஙௌ|ண|ணா|ணி|ணீ|ணு|ணூ|ணெ|ணே|ணை|ணொ|ணோ|ணௌ|ந|நா|நி|நீ|நு|நூ|நெ|நே|நை|நொ|நோ|நௌ|ம|மா|மி|மீ|மு|மூ|மெ|மே|மை|மொ|மோ|மௌ|ன|னா|னி|னீ|னு|னூ|னெ|னே|னை|னொ|னோ|னௌ|ய|யா|யி|யீ|யு|யூ|யெ|யே|யை|யொ|யோ|யௌ|ர|ரா|ரி|ரீ|ரு|ரூ|ரெ|ரே|ரை|ரொ|ரோ|ரௌ|ல|லா|லி|லீ|லு|லூ|லெ|லே|லை|லொ|லோ|லௌ|வ|வா|வி|வீ|வு|வூ|வெ|வே|வை|வொ|வோ|வௌ|ழ|ழா|ழி|ழீ|ழு|ழூ|ழெ|ழே|ழை|ழொ|ழோ|ழௌ|ள|ளா|ளி|ளீ|ளு|ளூ|ளெ|ளே|ளை|ளொ|ளோ|ளௌ|ௐ|ஜ|ஜா|ஜி|ஜீ|ஜு|ஜூ|ஜெ|ஜே|ஜை|ஜொ|ஜோ|ஜௌ|ஷ|ஷா|ஷி|ஷீ|ஷு|ஷூ|ஷெ|ஷே|ஷை|ஷொ|ஷோ|ஷௌ|ஸ|ஸா|ஸி|ஸீ|ஸு|ஸூ|ஸெ|ஸே|ஸை|ஸொ|ஸோ|ஸௌ|ஹ|ஹா|ஹி|ஹீ|ஹு|ஹூ|ஹெ|ஹே|ஹை|ஹொ|ஹோ|ஹௌ' tokens = { 'root': [ include('keywords'), @@ -52,10 +52,10 @@ class EzhilLexer(RegexLexer): (u'பதிப்பி|தேர்ந்தெடு|தேர்வு|ஏதேனில்|ஆனால்|இல்லைஆனால்|இல்லை|ஆக|ஒவ்வொன்றாக|இல்|வரை|செய்|முடியேனில்|பின்கொடு|முடி|நிரல்பாகம்|தொடர்|நிறுத்து|நிரல்பாகம்', Keyword), ], 'identifier': [ - (u'['+_taletters+']['+u'0-9'+_taletters+u']*',Name), + (u'['+_TALETTERS+']['+u'0-9'+_TALETTERS+u']*',Name), ], 'literal': [ - (r'".*"', String), + (r'".*?"', String), (r'(?u)\d+((\.\d*)?[eE][+-]?\d+|\.\d*)', Number.Float), (r'(?u)\d+',Number.Integer), ]} -- cgit v1.2.1 From 12a789a4608049824edbc66376c6366ccdc401e5 Mon Sep 17 00:00:00 2001 From: VincentZurczak Date: Fri, 20 Feb 2015 14:10:08 +0100 Subject: Support Roboconf's DSL --- .hgignore | 1 + AUTHORS | 1 + pygments/formatters/_mapping.py | 1 - 
pygments/lexers/_mapping.py | 2 ++ pygments/lexers/roboconf.py | 57 +++++++++++++++++++++++++++++++++++ tests/examplefiles/roboconf.graph | 40 ++++++++++++++++++++++++ tests/examplefiles/roboconf.instances | 24 +++++++++++++++ 7 files changed, 125 insertions(+), 1 deletion(-) create mode 100644 pygments/lexers/roboconf.py create mode 100644 tests/examplefiles/roboconf.graph create mode 100644 tests/examplefiles/roboconf.instances diff --git a/.hgignore b/.hgignore index 6fd21b49..17e6d700 100644 --- a/.hgignore +++ b/.hgignore @@ -9,6 +9,7 @@ Pygments.egg-info/* .ropeproject tests/examplefiles/output .idea/ +.project .tags TAGS tests/.coverage diff --git a/AUTHORS b/AUTHORS index 06119231..3513a469 100644 --- a/AUTHORS +++ b/AUTHORS @@ -170,5 +170,6 @@ Other contributors, listed alphabetically, are: * Enrique Zamudio -- Ceylon lexer * Alex Zimin -- Nemerle lexer * Rob Zimmerman -- Kal lexer +* Vincent Zurczak -- Roboconf lexer Many thanks for all contributions! diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py index f01206e0..bfc82253 100755 --- a/pygments/formatters/_mapping.py +++ b/pygments/formatters/_mapping.py @@ -32,7 +32,6 @@ FORMATTERS = { 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.') } - if __name__ == '__main__': # pragma: no cover import sys import os diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index dcaefb48..bca29783 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -305,6 +305,8 @@ LEXERS = { 'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), ('*.txt',), ()), 'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)), 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)), + 'RoboconfGraphLexer': 
('pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()), + 'RoboconfInstancesLexer': ('pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()), 'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.txt', '*.robot'), ('text/x-robotframework',)), 'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)), 'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)), diff --git a/pygments/lexers/roboconf.py b/pygments/lexers/roboconf.py new file mode 100644 index 00000000..e3e8a836 --- /dev/null +++ b/pygments/lexers/roboconf.py @@ -0,0 +1,57 @@ +from pygments.lexer import RegexLexer, words, bygroups, re, include +from pygments.token import * + +__all__ = ['RoboconfGraphLexer', 'RoboconfInstancesLexer'] + +class RoboconfGraphLexer(RegexLexer): + name = 'Roboconf Graph' + aliases = ['roboconf-graph'] + filenames = ['*.graph'] + + flags = re.IGNORECASE | re.MULTILINE + tokens = { + 'root': [ + # Skip white spaces + (r'\s+', Text), + + # There is one operator + (r'=',Operator), + + # Keywords + (words(('facet', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword), + (words(('installer', 'extends', 'exports', 'imports', 'facets', 'children'), suffix=r'\s*:?', prefix=r'\b'), Name), + + # Comments + (r'#.*\n', Comment), + + # Default + (r'[^#]', Text), + (r'.*\n', Text) + ] + } + +class RoboconfInstancesLexer(RegexLexer): + name = 'Roboconf Instances' + aliases = ['roboconf-instances'] + filenames = ['*.instances'] + + flags = re.IGNORECASE | re.MULTILINE + tokens = { + 'root': [ + + # Skip white spaces + (r'\s+', Text), + + # Keywords + (words(('instance of', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword), + (words(('name', 'count'), suffix=r's*:?', prefix=r'\b'), Name), + (r'\s*[\w.-]+\s*:', Name), + + # Comments + (r'#.*\n', Comment), + + # Default + (r'[^#]', Text), + (r'.*\n', Text) + ] + } diff --git 
a/tests/examplefiles/roboconf.graph b/tests/examplefiles/roboconf.graph new file mode 100644 index 00000000..e5fdedff --- /dev/null +++ b/tests/examplefiles/roboconf.graph @@ -0,0 +1,40 @@ +################## +# A sample graph +################## + +import some-definition.graph; +import another-definition.graph; + +VM { + installer : target; + children: deployable; +} + +facet deployable { + # nothing +} + +# Sample deployables +mysql { + insTaller: puppet; + facets: deployable; + exports: ip, port = 3306; +} + +tomcat { + installer: bash; + facets: deployable; + exports: ip; + children: web-application; +} + +facet web-application { + exports: full-path = undefined; +} + +my-war-1 { + facets: web-application; + installer: file; + exports: full-path = apps/my-war-1; # the relative path + imports: mysql.*; +} diff --git a/tests/examplefiles/roboconf.instances b/tests/examplefiles/roboconf.instances new file mode 100644 index 00000000..c69a2ab0 --- /dev/null +++ b/tests/examplefiles/roboconf.instances @@ -0,0 +1,24 @@ + +# Deal with imports +import others.instances; + +instance of VM { + name: VM-mysql; + instance of mysql { + name: MySQL; + } +} + +instance of VM { + name: VM ; + count: 5; + + INSTANCE of tomcat { + name: Tomcat; + + instance of my-war-1 { + name: my-war-1; + full-path: apps/my-war; + } + } +} -- cgit v1.2.1 From 483deba6920adf5c56a318acffb88c890656bc70 Mon Sep 17 00:00:00 2001 From: Seiei Higa Date: Wed, 25 Feb 2015 01:45:05 +0900 Subject: Add Gemfile to RubyLexer filenames --- pygments/lexers/ruby.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/ruby.py b/pygments/lexers/ruby.py index 943fd715..74edd7ae 100644 --- a/pygments/lexers/ruby.py +++ b/pygments/lexers/ruby.py @@ -36,7 +36,7 @@ class RubyLexer(ExtendedRegexLexer): name = 'Ruby' aliases = ['rb', 'ruby', 'duby'] filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', - '*.rbx', '*.duby'] + '*.rbx', '*.duby', 'Gemfile'] mimetypes = ['text/x-ruby', 
'application/x-ruby'] flags = re.DOTALL | re.MULTILINE -- cgit v1.2.1 From 3e08cb38034cd14901d93e0c2b728049c80419f1 Mon Sep 17 00:00:00 2001 From: hoosieree Date: Mon, 2 Mar 2015 16:15:06 +0000 Subject: _mapping.py edited online with Bitbucket --- pygments/lexers/_mapping.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 05ada4f3..3dd4a82d 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -113,6 +113,7 @@ LEXERS = { 'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)), 'ElixirConsoleLexer': ('pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), 'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)), + 'ElmLexer': ('pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)), 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), 'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)), 'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)), -- cgit v1.2.1 From 84f4708a9415efa54c978b96747179c84f0d11a1 Mon Sep 17 00:00:00 2001 From: hoosieree Date: Mon, 2 Mar 2015 14:02:24 -0500 Subject: update mapping.py --- pygments/lexers/_mapping.py | 1 + pygments/lexers/elm.py | 119 +++++++++++++++++++++++++++++++++++++++++ tests/examplefiles/example.elm | 58 ++++++++++++++++++++ 3 files changed, 178 insertions(+) create mode 100644 pygments/lexers/elm.py create mode 100644 tests/examplefiles/example.elm diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 3dd4a82d..385ff44e 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -167,6 +167,7 @@ LEXERS = { 'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 
'ik'), ('*.ik',), ('text/x-iokesrc',)), 'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)), 'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)), + 'JLexer': ('pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)), 'JadeLexer': ('pygments.lexers.html', 'Jade', ('jade',), ('*.jade',), ('text/x-jade',)), 'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()), 'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()), diff --git a/pygments/lexers/elm.py b/pygments/lexers/elm.py new file mode 100644 index 00000000..a5143480 --- /dev/null +++ b/pygments/lexers/elm.py @@ -0,0 +1,119 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.elm + ~~~~~~~~~~~~~~~~~~~ + + Lexer for the Elm programming language. + +""" + +import re + +from pygments.lexer import RegexLexer, words, include +from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, String, Text + +__all__ = ['ElmLexer'] + +class ElmLexer(RegexLexer): + """ + For `Elm `_ source code. 
+ """ + + name = 'Elm' + aliases = ['elm'] + filenames = ['*.elm'] + mimetypes = ['text/x-elm'] + + validName = r'[a-z_][a-zA-Z_\']*' + + specialName = r'^main ' + + builtinOps = ( + '~', '||', '|>', '|', '`', '^', '\\', '\'', '>>', '>=', '>', '==', + '=', '<~', '<|', '<=', '<<', '<-', '<', '::', ':', '/=', '//', '/', + '..', '.', '->', '-', '++', '+', '*', '&&', '%', + ) + + reservedWords = words(( + 'alias', 'as', 'case', 'else', 'if', 'import', 'in', + 'let', 'module', 'of', 'port', 'then', 'type', 'where', + ), suffix=r'\b') + + tokens = { + 'root': [ + + # Comments + (r'{-', Comment.Multiline, 'comment'), + (r'--.*', Comment.Single), + + # Whitespace + (r'\s+', Text), + + # Strings + (r'"', String, 'doublequote'), + + # Modules + (r'^\s*module\s*', Keyword.Namespace, 'imports'), + + # Imports + (r'^\s*import\s*', Keyword.Namespace, 'imports'), + + # Shaders + (r'\[glsl\|.*', Name.Entity, 'shader'), + + # Keywords + (reservedWords, Keyword.Reserved), + + # Types + (r'[A-Z]\w*', Keyword.Type), + + # Main + (specialName, Keyword.Reserved), + + # Prefix Operators + (words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function), + + # Infix Operators + (words((builtinOps)), Name.Function), + + # Numbers + include('numbers'), + + # Variable Names + (validName, Name.Variable), + + # Parens + (r'[,\(\)\[\]{}]', Punctuation), + + ], + + 'comment': [ + (r'-(?!})', Comment.Multiline), + (r'{-', Comment.Multiline, 'comment'), + (r'[^-}]', Comment.Multiline), + (r'-}', Comment.Multiline, '#pop'), + ], + + 'doublequote': [ + (r'\\u[0-9a-fA-F]\{4}', String.Escape), + (r'\\[nrfvb\\\"]', String.Escape), + (r'[^"]', String), + (r'"', String, '#pop'), + ], + + 'imports': [ + (r'\w+(\.\w+)*', Name.Class, '#pop'), + ], + + 'numbers': [ + (r'_?\d+\.(?=\d+)', Number.Float), + (r'_?\d+', Number.Integer), + ], + + 'shader': [ + (r'\|(?!\])', Name.Entity), + (r'\|\]', Name.Entity, '#pop'), + (r'.*\n', Name.Entity), + ], + } + diff --git a/tests/examplefiles/example.elm 
b/tests/examplefiles/example.elm new file mode 100644 index 00000000..222d46e5 --- /dev/null +++ b/tests/examplefiles/example.elm @@ -0,0 +1,58 @@ +import Math.Vector3 (..) +import Math.Matrix4 (..) +import Graphics.WebGL (..) + +-- Create a mesh with two triangles + +type Vertex = { position:Vec3, color:Vec3 } + +mesh : [Triangle Vertex] +mesh = [ ( Vertex (vec3 0 0 0) (vec3 1 0 0) + , Vertex (vec3 1 1 0) (vec3 0 1 0) + , Vertex (vec3 1 -1 0) (vec3 0 0 1) + ) + ] + +-- Create the scene + +main : Signal Element +main = scene <~ foldp (+) 0 (fps 30) + +scene : Float -> Element +scene t = + webgl (400,400) + [ entity vertexShader fragmentShader mesh { view = view (t / 1000) } ] + +view : Float -> Mat4 +view t = + mul (makePerspective 45 1 0.01 100) + (makeLookAt (vec3 (4 * cos t) 0 (4 * sin t)) (vec3 0 0 0) (vec3 0 1 0)) + +-- Shaders + +vertexShader : Shader { attr | position:Vec3, color:Vec3 } { unif | view:Mat4 } { vcolor:Vec3 } +vertexShader = [glsl| + +attribute vec3 position; +attribute vec3 color; +uniform mat4 view; +varying vec3 vcolor; + +void main () { + gl_Position = view * vec4(position, 1.0); + vcolor = color; +} + +|] + +fragmentShader : Shader {} u { vcolor:Vec3 } +fragmentShader = [glsl| + +precision mediump float; +varying vec3 vcolor; + +void main () { + gl_FragColor = vec4(vcolor, 1.0); +} + +|] -- cgit v1.2.1 From fe9c75ae63b18c2acad5f6c901c86c2978bfe887 Mon Sep 17 00:00:00 2001 From: Trevor Bergeron Date: Mon, 16 Mar 2015 13:44:51 -0400 Subject: Add IRC formatter --- pygments/formatters/_mapping.py | 1 + pygments/formatters/irc.py | 182 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 183 insertions(+) create mode 100644 pygments/formatters/irc.py diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py index bfc82253..b2458d3f 100755 --- a/pygments/formatters/_mapping.py +++ b/pygments/formatters/_mapping.py @@ -21,6 +21,7 @@ FORMATTERS = { 'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), 
('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass` option."), 'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), + 'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color codes.'), 'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), 'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'), 'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'), diff --git a/pygments/formatters/irc.py b/pygments/formatters/irc.py new file mode 100644 index 00000000..44fe6c4a --- /dev/null +++ b/pygments/formatters/irc.py @@ -0,0 +1,182 @@ +# -*- coding: utf-8 -*- +""" + pygments.formatters.irc + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Formatter for IRC output + + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import sys + +from pygments.formatter import Formatter +from pygments.token import Keyword, Name, Comment, String, Error, \ + Number, Operator, Generic, Token, Whitespace +from pygments.util import get_choice_opt + + +__all__ = ['IRCFormatter'] + + +#: Map token types to a tuple of color values for light and dark +#: backgrounds.
+IRC_COLORS = { + Token: ('', ''), + + Whitespace: ('lightgray', 'darkgray'), + Comment: ('lightgray', 'darkgray'), + Comment.Preproc: ('teal', 'turquoise'), + Keyword: ('darkblue', 'blue'), + Keyword.Type: ('teal', 'turquoise'), + Operator.Word: ('purple', 'fuchsia'), + Name.Builtin: ('teal', 'turquoise'), + Name.Function: ('darkgreen', 'green'), + Name.Namespace: ('_teal_', '_turquoise_'), + Name.Class: ('_darkgreen_', '_green_'), + Name.Exception: ('teal', 'turquoise'), + Name.Decorator: ('darkgray', 'lightgray'), + Name.Variable: ('darkred', 'red'), + Name.Constant: ('darkred', 'red'), + Name.Attribute: ('teal', 'turquoise'), + Name.Tag: ('blue', 'blue'), + String: ('brown', 'brown'), + Number: ('darkblue', 'blue'), + + Generic.Deleted: ('red', 'red'), + Generic.Inserted: ('darkgreen', 'green'), + Generic.Heading: ('**', '**'), + Generic.Subheading: ('*purple*', '*fuchsia*'), + Generic.Error: ('red', 'red'), + + Error: ('_red_', '_red_'), +} + + +IRC_COLOR_MAP = { + 'white': 0, + 'black': 1, + 'darkblue': 2, + 'green': 3, + 'red': 4, + 'brown': 5, + 'purple': 6, + 'orange': 7, + 'darkgreen': 7, #compat w/ ansi + 'yellow': 8, + 'lightgreen': 9, + 'turquoise': 9, # compat w/ ansi + 'teal': 10, + 'lightblue': 11, + 'darkred': 11, # compat w/ ansi + 'blue': 12, + 'fuchsia': 13, + 'darkgray': 14, + 'lightgray': 15, +} + +def ircformat(color, text): + if len(color) < 1: + return text + add = sub = '' + if '_' in color: # italic + add += '\x1D' + sub = '\x1D' + sub + color = color.strip('_') + if '*' in color: # bold + add += '\x02' + sub = '\x02' + sub + color = color.strip('*') + # underline (\x1F) not supported + # backgrounds (\x03FF,BB) not supported + if len(color) > 0: # actual color - may have issues with ircformat("red", "blah")+"10" type stuff + add += '\x03' + str(IRC_COLOR_MAP[color]).zfill(2) + sub = '\x03' + sub + return add + text + sub + return '<'+add+'>'+text+'' + + +class IRCFormatter(Formatter): + r""" + Format tokens with IRC color sequences + + 
The `get_style_defs()` method doesn't do anything special since there is + no support for common styles. + + Options accepted: + + `bg` + Set to ``"light"`` or ``"dark"`` depending on the terminal's background + (default: ``"light"``). + + `colorscheme` + A dictionary mapping token types to (lightbg, darkbg) color names or + ``None`` (default: ``None`` = use builtin colorscheme). + + `linenos` + Set to ``True`` to have line numbers in the output as well + (default: ``False`` = no line numbers). + """ + name = 'IRC' + aliases = ['irc', 'IRC'] + filenames = [] + + def __init__(self, **options): + Formatter.__init__(self, **options) + self.darkbg = get_choice_opt(options, 'bg', + ['light', 'dark'], 'light') == 'dark' + self.colorscheme = options.get('colorscheme', None) or IRC_COLORS + self.linenos = options.get('linenos', False) + self._lineno = 0 + + def _write_lineno(self, outfile): + self._lineno += 1 + outfile.write("\n%04d: " % self._lineno) + + def _format_unencoded_with_lineno(self, tokensource, outfile): + self._write_lineno(outfile) + + for ttype, value in tokensource: + if value.endswith("\n"): + self._write_lineno(outfile) + value = value[:-1] + color = self.colorscheme.get(ttype) + while color is None: + ttype = ttype[:-1] + color = self.colorscheme.get(ttype) + if color: + color = color[self.darkbg] + spl = value.split('\n') + for line in spl[:-1]: + self._write_lineno(outfile) + if line: + outfile.write(ircformat(color, line[:-1])) + if spl[-1]: + outfile.write(ircformat(color, spl[-1])) + else: + outfile.write(value) + + outfile.write("\n") + + def format_unencoded(self, tokensource, outfile): + if self.linenos: + self._format_unencoded_with_lineno(tokensource, outfile) + return + + for ttype, value in tokensource: + color = self.colorscheme.get(ttype) + while color is None: + ttype = ttype[:-1] + color = self.colorscheme.get(ttype) + if color: + color = color[self.darkbg] + spl = value.split('\n') + for line in spl[:-1]: + if line: + 
outfile.write(ircformat(color, line)) + outfile.write('\n') + if spl[-1]: + outfile.write(ircformat(color, spl[-1])) + else: + outfile.write(value) -- cgit v1.2.1 From 1745df8a49d222d314d230161f55c411021ea439 Mon Sep 17 00:00:00 2001 From: unusual-thoughts Date: Thu, 19 Mar 2015 21:42:13 +0100 Subject: added strings and improved numbers --- pygments/lexers/idl.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/idl.py b/pygments/lexers/idl.py index a1ab1ad0..d745bcfd 100644 --- a/pygments/lexers/idl.py +++ b/pygments/lexers/idl.py @@ -12,7 +12,7 @@ import re from pygments.lexer import RegexLexer, words -from pygments.token import Text, Comment, Operator, Keyword, Name, Number +from pygments.token import Text, Comment, Operator, Keyword, Name, Number, String __all__ = ['IDLLexer'] @@ -256,7 +256,14 @@ class IDLLexer(RegexLexer): (r'\+\+|--|->|\+|-|##|#|\*|/|<|>|&&|\^|~|\|\|\?|:', Operator), (r'\b(mod=|lt=|le=|eq=|ne=|ge=|gt=|not=|and=|or=|xor=)', Operator), (r'\b(mod|lt|le|eq|ne|ge|gt|not|and|or|xor)\b', Operator), - (r'\b[0-9](L|B|S|UL|ULL|LL)?\b', Number), + (r'"[^\"]*"', String.Double), + (r"'[^\']*'", String.Single), + (r'\b[\+\-]?([0-9]*\.[0-9]+|[0-9]+\.[0-9]*)(D|E)?([\+\-]?[0-9]+)?\b', Number.Float), + (r'\b\'[\+\-]?[0-9A-F]+\'X(U?(S?|L{1,2})|B)\b', Number.Hex), + (r'\b\'[\+\-]?[0-7]+\'O(U?(S?|L{1,2})|B)\b', Number.Oct), + (r'\b[\+\-]?[0-9]+U?L{1,2}\b', Number.Integer.Long), + (r'\b[\+\-]?[0-9]+U?S?\b', Number.Integer), + (r'\b[\+\-]?[0-9]+B\b', Number), (r'.', Text), ] } -- cgit v1.2.1 From a5205cb7fad042a4bba84d02800f9797e8a256af Mon Sep 17 00:00:00 2001 From: Jan ?kr??ek Date: Tue, 24 Mar 2015 21:37:43 +0000 Subject: graph lexer: added support for non-directed relationship in cypher --- pygments/lexers/graph.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pygments/lexers/graph.py b/pygments/lexers/graph.py index d90f0278..8315898c 100644 --- a/pygments/lexers/graph.py +++ b/pygments/lexers/graph.py @@ -61,6 
+61,7 @@ class CypherLexer(RegexLexer): 'relations': [ (r'(-\[)(.*?)(\]->)', bygroups(Operator, using(this), Operator)), (r'(<-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)), + (r'(-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)), (r'-->|<--|\[|\]', Operator), (r'<|>|<>|=|<=|=>|\(|\)|\||:|,|;', Punctuation), (r'[.*{}]', Punctuation), -- cgit v1.2.1 From 915089a6ba9c9e4f65c0eeada21d69598eb7af51 Mon Sep 17 00:00:00 2001 From: Matthew Fernandez Date: Thu, 26 Mar 2015 11:21:00 +1100 Subject: Add a lexer for the CAmkES language. This changeset adds a lexer to support the input language for the CAmkES component platform (https://sel4.systems/CAmkES/). It is a basic stateless lexer that merely supports keywords and types without tracking context. --- pygments/lexers/_mapping.py | 1 + pygments/lexers/esoteric.py | 62 ++++++++++++++++++++++++++++++++++++++-- tests/examplefiles/simple.camkes | 38 ++++++++++++++++++++++++ 3 files changed, 99 insertions(+), 2 deletions(-) create mode 100644 tests/examplefiles/simple.camkes diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 2b836ac6..32998626 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -53,6 +53,7 @@ LEXERS = { 'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), 'BroLexer': ('pygments.lexers.dsls', 'Bro', ('bro',), ('*.bro',), ()), 'BugsLexer': ('pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()), + 'CAmkESLexer': ('pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()), 'CLexer': ('pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')), 'CMakeLexer': ('pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)), 'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)), diff --git 
a/pygments/lexers/esoteric.py b/pygments/lexers/esoteric.py index f61b292d..e8e002ba 100644 --- a/pygments/lexers/esoteric.py +++ b/pygments/lexers/esoteric.py @@ -9,11 +9,11 @@ :license: BSD, see LICENSE for details. """ -from pygments.lexer import RegexLexer, include +from pygments.lexer import RegexLexer, include, words from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Error -__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer'] +__all__ = ['BrainfuckLexer', 'BefungeLexer', 'CAmkESLexer', 'RedcodeLexer'] class BrainfuckLexer(RegexLexer): @@ -78,6 +78,64 @@ class BefungeLexer(RegexLexer): } +class CAmkESLexer(RegexLexer): + """ + Basic lexer for the input language for the + `CAmkES `_ component platform. + """ + name = 'CAmkES' + aliases = ['camkes', 'idl4'] + filenames = ['*.camkes', '*.idl4'] + + tokens = { + 'root':[ + # C pre-processor directive + (r'^\s*#.*\n', Comment.Preproc), + + # Whitespace, comments + (r'\s+', Text), + (r'/\*(.|\n)*?\*/', Comment), + (r'//.*\n', Comment), + + (r'[\[\(\){},\.;=\]]', Punctuation), + + (words(('assembly', 'attribute', 'component', 'composition', + 'configuration', 'connection', 'connector', 'consumes', + 'control', 'dataport', 'Dataport', 'emits', 'event', + 'Event', 'from', 'group', 'hardware', 'has', 'interface', + 'Interface', 'maybe', 'procedure', 'Procedure', 'provides', + 'template', 'to', 'uses'), suffix=r'\b'), Keyword), + + (words(('bool', 'boolean', 'Buf', 'char', 'character', 'double', + 'float', 'in', 'inout', 'int', 'int16_6', 'int32_t', + 'int64_t', 'int8_t', 'integer', 'mutex', 'out', 'real', + 'refin', 'semaphore', 'signed', 'string', 'uint16_t', + 'uint32_t', 'uint64_t', 'uint8_t', 'uintptr_t', 'unsigned', + 'void'), suffix=r'\b'), Keyword.Type), + + # Recognised attributes + (r'[a-zA-Z_]\w*_(priority|domain|buffer)', Keyword.Reserved), + (words(('dma_pool', 'from_access', 'to_access'), suffix=r'\b'), + Keyword.Reserved), + + # CAmkES-level include + 
(r'import\s+(<[^>]*>|"[^"]*");', Comment.Preproc), + + # C-level include + (r'include\s+(<[^>]*>|"[^"]*");', Comment.Preproc), + + # Literals + (r'0[xX][\da-fA-F]+', Number.Hex), + (r'-?[\d]+', Number), + (r'-?[\d]+\.[\d]+', Number.Float), + (r'"[^"]*"', String), + + # Identifiers + (r'[a-zA-Z_]\w*', Name), + ], + } + + class RedcodeLexer(RegexLexer): """ A simple Redcode lexer based on ICWS'94. diff --git a/tests/examplefiles/simple.camkes b/tests/examplefiles/simple.camkes new file mode 100644 index 00000000..43e11732 --- /dev/null +++ b/tests/examplefiles/simple.camkes @@ -0,0 +1,38 @@ +/* + * Example input for CAmkES lexer. + */ + +import ; + +// A single-line comment. + +import "components/Client/Client.camkes"; +import "components/Echo/Echo.camkes"; + +component Foo { + include "proc_defn.h"; + control; + dataport Buf my_port; +} + +#ifdef BAR_AVAILABLE + component Bar { + provides CharAccess ca; + } +#endif + + #define HASH_DEF_WITH_LEADING_SPACE + +assembly { /* Another multiline comment. */ + composition { + component Echo echo; + component Client client; + + connection seL4RPC simple(from client.s, to echo.s); + } + + configuration { + echo.dma_pool = 4096; + } +} + -- cgit v1.2.1 From bc20c9895ab135e2a270b99b425fe220dc0dbe34 Mon Sep 17 00:00:00 2001 From: Dejan Muhamedagic Date: Fri, 15 May 2015 16:26:37 +0200 Subject: crmsh lexer update --- pygments/lexers/dsls.py | 62 ++++++++++++++++++++++------------------- tests/examplefiles/example.pcmk | 22 ++++++++++++++- 2 files changed, 55 insertions(+), 29 deletions(-) diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py index 8d27726c..07cc8411 100644 --- a/pygments/lexers/dsls.py +++ b/pygments/lexers/dsls.py @@ -520,27 +520,31 @@ class CrmshLexer(RegexLexer): Lexer for `crmsh `_ configuration files for Pacemaker clusters. - .. versionadded:: 1.0 + .. 
versionadded:: 2.1 """ name = 'Crmsh' aliases = ['crmsh', 'pcmk'] filenames = ['*.crmsh', '*.pcmk'] mimetypes = [] - elem = (r'node|primitive|group|clone|ms|location|colocation|order|' - r'fencing_topology|' - r'rsc_ticket|rsc_template|property|rsc_defaults|op_defaults|' - r'acl_target|acl_group|user|role') - sub = (r'params|meta|operations|op|rule|attributes|utilization') - acl = (r'read|write|deny') - acl_mod = (r'tag|ref|xpath') - bin_rel=(r'and|or') - un_ops=(r'defined|not_defined') - bin_ops=(r'lt|gt|lte|gte|eq|ne') - val_qual=(r'string|version|number') - date_exp=(r'in_range|date|spec') - rsc_role_action=(r'Master|Started|Slave|Stopped|' - r'start|promote|demote|stop') + elem = words(( + 'node', 'primitive', 'group', 'clone', 'ms', 'location', + 'colocation', 'order', 'fencing_topology', 'rsc_ticket', + 'rsc_template', 'property', 'rsc_defaults', + 'op_defaults', 'acl_target', 'acl_group', 'user', 'role', + 'tag'), suffix=r'(?![\w#$-])') + sub = words(( + 'params', 'meta', 'operations', 'op', 'rule', + 'attributes', 'utilization'), suffix=r'(?![\w#$-])') + acl = words(('read', 'write', 'deny'), suffix=r'(?![\w#$-])') + bin_rel = words(('and', 'or'), suffix=r'(?![\w#$-])') + un_ops = words(('defined', 'not_defined'), suffix=r'(?![\w#$-])') + date_exp = words(('in_range', 'date', 'spec'), suffix=r'(?![\w#$-])') + acl_mod = (r'(?:tag|ref|xpath)') + bin_ops = (r'(?:lt|gt|lte|gte|eq|ne)') + val_qual = (r'(?:string|version|number)') + rsc_role_action=(r'(?:Master|Started|Slave|Stopped|' + r'start|promote|demote|stop)') tokens = { 'root': [ @@ -550,30 +554,32 @@ class CrmshLexer(RegexLexer): # need this construct, otherwise numeric node ids # are matched as scores # elem id: - (r'(%s)(\s+)([\w#$-]+)(:)' % elem, + (r'(node)(\s+)([\w#$-]+)(:)', bygroups(Keyword, Whitespace, Name, Punctuation)), # scores - (r'([0-9]+|[+-]?inf):', Number), + (r'([+-]?([0-9]+|inf)):', Number), # keywords (elements and other) - (r'(%s|%s|%s)(?![\w#$-])' % (elem,sub,acl), Keyword), + 
(elem, Keyword), + (sub, Keyword), + (acl, Keyword), # binary operators - (r'(?:%s:)?%s(?![\w#$-])' % (val_qual,bin_ops), \ + (r'(?:%s:)?(%s)(?![\w#$-])' % (val_qual,bin_ops), Operator.Word), # other operators - (r'(%s|%s|%s)(?![\w#$-])' % (bin_rel,un_ops,date_exp), \ - Operator.Word), + (bin_rel, Operator.Word), + (un_ops, Operator.Word), + (date_exp, Operator.Word), # builtin attributes (e.g. #uname) (r'#[a-z]+(?![\w#$-])', Name.Builtin), - # rsc_id[:(role|action)] - (r'([\w#$-]+)(?:(:)(%s))?' % rsc_role_action, \ - bygroups(Name, Punctuation, Operator.Word)), # acl_mod:blah (r'(%s)(:)("(?:""|[^"])*"|\S+)' % acl_mod, \ - bygroups(Operator.Word, Punctuation, Name)), - # ids, and everything else not matched above - (r'([\w#$-]+)(?![\w#$-])', Name), + bygroups(Keyword, Punctuation, Name)), + # rsc_id[:(role|action)] + # NB: this matches all other identifiers + (r'([\w#$-]+)(?:(:)(%s))?(?![\w#$-])' % rsc_role_action, \ + bygroups(Name, Punctuation, Operator.Word)), # punctuation - (r'(\\(?=\n)|[[\](){}/:])', Punctuation), + (r'(\\(?=\n)|[[\](){}/:@])', Punctuation), (r'#.*\n', Comment), (r'\s+|\n', Whitespace), ], diff --git a/tests/examplefiles/example.pcmk b/tests/examplefiles/example.pcmk index 7ec10686..7570a0b6 100644 --- a/tests/examplefiles/example.pcmk +++ b/tests/examplefiles/example.pcmk @@ -1,5 +1,7 @@ node 167906355: sle12-a -node 167906357: sle12-c +node 167906357: sle12-c \ + description="The second node" \ + utilization memory=64 primitive fs1 Filesystem \ params device="/dev/nfs-vg/fs1" directory="/srv/nfs" fstype=ext3 \ op monitor interval=10s @@ -19,6 +21,24 @@ primitive s-libvirt stonith:external/libvirt \ op monitor interval=5m timeout=60s primitive virtual-ip IPaddr2 \ params ip=10.2.12.100 +primitive xen0 @vm_scheme1 xmfile=/etc/xen/vm/xen0 +primitive very-primitive Dummy \ + params 3: rule #uname eq node1 interface=eth1 \ + params 2: rule #uname string:eq node2 interface=eth2 port=8888 \ + params 1: interface=eth0 port=9999 \ + operations 
$id-ref=those_other_ops +fencing_topology poison-pill power +fencing_topology \ + node-a: poison-pill power \ + node-b: ipmi serial +role nfs_admin \ + write meta:nfs-server:target-role \ + write meta:nfs-server:is-managed \ + write location:nfs-server \ + read ref:nfs-server +acl_target joe \ + nfs_admin +tag nfs: nfs-server nfs-vg group nfs-disk nfs-vg fs1 group nfs-srv virtual-ip nfs-server ms ms_drbd_nfs p_drbd_nfs \ -- cgit v1.2.1 From 4e8e8efbc37a9a3bdfdefa97679b8719395f40af Mon Sep 17 00:00:00 2001 From: Jon Parise Date: Sun, 17 May 2015 12:14:26 -0700 Subject: "information" is the correct word. --- doc/docs/lexerdevelopment.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/docs/lexerdevelopment.rst b/doc/docs/lexerdevelopment.rst index 08069889..5b621666 100644 --- a/doc/docs/lexerdevelopment.rst +++ b/doc/docs/lexerdevelopment.rst @@ -145,7 +145,7 @@ Regex Flags You can either define regex flags locally in the regex (``r'(?x)foo bar'``) or globally by adding a `flags` attribute to your lexer class. If no attribute is -defined, it defaults to `re.MULTILINE`. For more informations about regular +defined, it defaults to `re.MULTILINE`. For more information about regular expression flags see the page about `regular expressions`_ in the Python documentation. -- cgit v1.2.1 From 67831712531fd928f12c2940eac82c01e5c60ecc Mon Sep 17 00:00:00 2001 From: Jon Parise Date: Sun, 17 May 2015 14:26:39 -0700 Subject: Adding a lexer for the Thrift interface definition language. 
--- AUTHORS | 2 +- pygments/lexers/_mapping.py | 1 + pygments/lexers/dsls.py | 97 +++++++++++++++++++++++++++++++++++++++++++-- 3 files changed, 96 insertions(+), 4 deletions(-) diff --git a/AUTHORS b/AUTHORS index 4fa7e0da..d3e807b0 100644 --- a/AUTHORS +++ b/AUTHORS @@ -121,7 +121,7 @@ Other contributors, listed alphabetically, are: * Edward O'Callaghan -- Cryptol lexer * David Oliva -- Rebol lexer * Pat Pannuto -- nesC lexer -* Jon Parise -- Protocol buffers lexer +* Jon Parise -- Protocol buffers and Thrift lexers * Ronny Pfannschmidt -- BBCode lexer * Benjamin Peterson -- Test suite refactoring * Dominik Picheta -- Nimrod lexer diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 2b836ac6..13b9fc5e 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -344,6 +344,7 @@ LEXERS = { 'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)), 'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), 'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), + 'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ()), 'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), 'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), 'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)), diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py index 433287d4..554c8a87 100644 --- a/pygments/lexers/dsls.py +++ b/pygments/lexers/dsls.py @@ -11,12 +11,13 @@ import re -from pygments.lexer import RegexLexer, bygroups, words, include, default +from pygments.lexer import RegexLexer, bygroups, words, include, default, \ + this, using from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ 
Number, Punctuation, Literal -__all__ = ['ProtoBufLexer', 'BroLexer', 'PuppetLexer', 'RslLexer', - 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer'] +__all__ = ['ProtoBufLexer', 'ThriftLexer', 'BroLexer', 'PuppetLexer', + 'RslLexer', 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer'] class ProtoBufLexer(RegexLexer): @@ -81,6 +82,96 @@ class ProtoBufLexer(RegexLexer): } +class ThriftLexer(RegexLexer): + """ + For `Thrift `__ interface definitions. + + .. versionadded:: 2.1 + """ + name = 'Thrift' + aliases = ['thrift'] + filenames = ['*.thrift'] + mimetypes = ['application/x-thrift'] + + tokens = { + 'root': [ + include('whitespace'), + include('comments'), + (r'\".*?\"', String), + (r'\'.*?\'', String), + (r'(namespace)(\s+)', + bygroups(Keyword.Namespace, Text.Whitespace), 'namespace'), + (r'(enum|union|struct|service|exception)(\s+)', + bygroups(Keyword.Declaration, Text.Whitespace), 'class'), + (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments + r'((?:[^\W\d]|\$)[\w$]*)' # method name + r'(\s*)(\()', # signature start + bygroups(using(this), Name.Function, Text, Operator)), + include('keywords'), + include('numbers'), + (r'[&=]', Operator), + (r'[:;\,\{\}\(\)\<>\[\]]', Punctuation), + (r'[a-zA-Z_](\.[a-zA-Z_0-9]|[a-zA-Z_0-9])*', Name), + ], + 'whitespace': [ + (r'\n', Text.Whitespace), + (r'\s+', Text.Whitespace), + ], + 'comments': [ + (r'#.*$', Comment), + (r'//.*?\n', Comment), + (r'/\*[\w\W]*?\*/', Comment.Multiline), + ], + 'namespace': [ + (r'[a-z\*](\.[a-zA-Z_0-9]|[a-zA-Z_0-9])*', Name.Namespace, '#pop'), + default('#pop'), + ], + 'class': [ + (r'[a-zA-Z_]\w*', Name.Class, '#pop'), + default('#pop'), + ], + 'keywords': [ + (words(( + 'async', 'oneway', 'extends', 'throws', 'required', + 'optional'), suffix=r'\b'), + Keyword), + (words(( + 'typedef', 'const'), suffix=r'\b'), + Keyword.Declaration), + (words(( + 'void', 'bool', 'byte', 'i16', 'i32', 'i64', 'double', + 'string', 'binary', 'void', 'map', 'list', 'set', 'slist', + 'senum'), 
suffix=r'\b'), + Keyword.Type), + (words(( + 'BEGIN', 'END', '__CLASS__', '__DIR__', '__FILE__', + '__FUNCTION__', '__LINE__', '__METHOD__', '__NAMESPACE__', + 'abstract', 'alias', 'and', 'args', 'as', 'assert', 'begin', + 'break', 'case', 'catch', 'class', 'clone', 'continue', + 'declare', 'def', 'default', 'del', 'delete', 'do', 'dynamic', + 'elif', 'else', 'elseif', 'elsif', 'end', 'enddeclare', + 'endfor', 'endforeach', 'endif', 'endswitch', 'endwhile', + 'ensure', 'except', 'exec', 'finally', 'float', 'for', + 'foreach', 'function', 'global', 'goto', 'if', 'implements', + 'import', 'in', 'inline', 'instanceof', 'interface', 'is', + 'lambda', 'module', 'native', 'new', 'next', 'nil', 'not', + 'or', 'pass', 'public', 'print', 'private', 'protected', + 'raise', 'redo', 'rescue', 'retry', 'register', 'return', + 'self', 'sizeof', 'static', 'super', 'switch', 'synchronized', + 'then', 'this', 'throw', 'transient', 'try', 'undef', + 'unless', 'unsigned', 'until', 'use', 'var', 'virtual', + 'volatile', 'when', 'while', 'with', 'xor', 'yield'), + prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), + ], + 'numbers': [ + (r'[+-]?[0-9]+', Number.Integer), + (r'[+-]?"0x"[0-9A-Fa-f]+', Number.Hex), + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float), + ], + } + + class BroLexer(RegexLexer): """ For `Bro `_ scripts. -- cgit v1.2.1 From 1120d019bbefae319b68a01ded40f575d7442f4f Mon Sep 17 00:00:00 2001 From: Jon Parise Date: Mon, 18 May 2015 07:23:27 -0700 Subject: Improve the hex and float patterns to avoid conflicts. 
--- pygments/lexers/dsls.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py index 554c8a87..ea1cefa3 100644 --- a/pygments/lexers/dsls.py +++ b/pygments/lexers/dsls.py @@ -165,9 +165,9 @@ class ThriftLexer(RegexLexer): Keyword.Reserved), ], 'numbers': [ + (r'[+-]?0x[0-9A-Fa-f]+', Number.Hex), + (r'[+-]?[0-9]+(\.[0-9]+)?([eE][+-]?[0-9]+)?', Number.Float), (r'[+-]?[0-9]+', Number.Integer), - (r'[+-]?"0x"[0-9A-Fa-f]+', Number.Hex), - (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float), ], } -- cgit v1.2.1 From e1d131eee9eab0c4202130fc50516061c3ac0bb7 Mon Sep 17 00:00:00 2001 From: Hong Xu Date: Mon, 18 May 2015 16:25:40 -0700 Subject: Add 'flush' as a keyword in the mysql lexer --- pygments/lexers/sql.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py index f575ed38..646a9f31 100644 --- a/pygments/lexers/sql.py +++ b/pygments/lexers/sql.py @@ -489,8 +489,8 @@ class MySqlLexer(RegexLexer): r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|' r'declare|default|delayed|delete|desc|describe|deterministic|' r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|' - r'enclosed|escaped|exists|exit|explain|fetch|float|float4|float8' - r'|for|force|foreign|from|fulltext|grant|group|having|' + r'enclosed|escaped|exists|exit|explain|fetch|flush|float|float4|' + r'float8|for|force|foreign|from|fulltext|grant|group|having|' r'high_priority|hour_microsecond|hour_minute|hour_second|if|' r'ignore|in|index|infile|inner|inout|insensitive|insert|int|' r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|' -- cgit v1.2.1 From 26510022a4f1676c0f7e97261aa4a3766f21301b Mon Sep 17 00:00:00 2001 From: Dejan Muhamedagic Date: Tue, 19 May 2015 18:21:42 +0200 Subject: crmsh lexer update (2) --- pygments/lexers/dsls.py | 12 ++++----- tests/examplefiles/example.pcmk | 57 ++++++++++++++++++++++++++++++++++++++++- 2 files changed, 61 
insertions(+), 8 deletions(-) diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py index 07cc8411..01aad058 100644 --- a/pygments/lexers/dsls.py +++ b/pygments/lexers/dsls.py @@ -539,8 +539,8 @@ class CrmshLexer(RegexLexer): acl = words(('read', 'write', 'deny'), suffix=r'(?![\w#$-])') bin_rel = words(('and', 'or'), suffix=r'(?![\w#$-])') un_ops = words(('defined', 'not_defined'), suffix=r'(?![\w#$-])') - date_exp = words(('in_range', 'date', 'spec'), suffix=r'(?![\w#$-])') - acl_mod = (r'(?:tag|ref|xpath)') + date_exp = words(('in_range', 'date', 'spec', 'in'), suffix=r'(?![\w#$-])') + acl_mod = (r'(?:tag|ref|reference|attribute|type|xpath)') bin_ops = (r'(?:lt|gt|lte|gte|eq|ne)') val_qual = (r'(?:string|version|number)') rsc_role_action=(r'(?:Master|Started|Slave|Stopped|' @@ -548,6 +548,7 @@ class CrmshLexer(RegexLexer): tokens = { 'root': [ + (r'^#.*\n?', Comment), # attr=value (nvpair) (r'([\w#$-]+)(=)("(?:""|[^"])*"|\S+)', bygroups(Name.Attribute, Punctuation, String)), @@ -572,17 +573,14 @@ class CrmshLexer(RegexLexer): # builtin attributes (e.g. 
#uname) (r'#[a-z]+(?![\w#$-])', Name.Builtin), # acl_mod:blah - (r'(%s)(:)("(?:""|[^"])*"|\S+)' % acl_mod, \ + (r'(%s)(:)("(?:""|[^"])*"|\S+)' % acl_mod, bygroups(Keyword, Punctuation, Name)), # rsc_id[:(role|action)] # NB: this matches all other identifiers - (r'([\w#$-]+)(?:(:)(%s))?(?![\w#$-])' % rsc_role_action, \ + (r'([\w#$-]+)(?:(:)(%s))?(?![\w#$-])' % rsc_role_action, bygroups(Name, Punctuation, Operator.Word)), # punctuation (r'(\\(?=\n)|[[\](){}/:@])', Punctuation), - (r'#.*\n', Comment), (r'\s+|\n', Whitespace), ], } - -# vim:ts=4:sw=4:et: diff --git a/tests/examplefiles/example.pcmk b/tests/examplefiles/example.pcmk index 7570a0b6..22cc60e4 100644 --- a/tests/examplefiles/example.pcmk +++ b/tests/examplefiles/example.pcmk @@ -2,6 +2,18 @@ node 167906355: sle12-a node 167906357: sle12-c \ description="The second node" \ utilization memory=64 +node node1 \ + attributes mem=16G +node node2 utilization cpu=4 +primitive st stonith:ssh \ + params hostlist="node1 node2" \ + meta target-role="Started" \ + op start requires=nothing timeout=60s \ + op monitor interval=60m timeout=60s +primitive d1 ocf:pacemaker:Dummy \ + operations $id=d1-ops \ + op monitor interval=60m \ + op monitor interval=120m OCF_CHECK_LEVEL=10 primitive fs1 Filesystem \ params device="/dev/nfs-vg/fs1" directory="/srv/nfs" fstype=ext3 \ op monitor interval=10s @@ -22,6 +34,9 @@ primitive s-libvirt stonith:external/libvirt \ primitive virtual-ip IPaddr2 \ params ip=10.2.12.100 primitive xen0 @vm_scheme1 xmfile=/etc/xen/vm/xen0 +primitive d7 Dummy \ + params rule inf: #uname eq node1 fake=1 \ + params rule inf: #uname eq node2 fake=2 primitive very-primitive Dummy \ params 3: rule #uname eq node1 interface=eth1 \ params 2: rule #uname string:eq node2 interface=eth2 port=8888 \ @@ -36,18 +51,52 @@ role nfs_admin \ write meta:nfs-server:is-managed \ write location:nfs-server \ read ref:nfs-server +role basic-read \ + read status \ + read type:node attribute:uname \ + read type:node 
attribute:type \ + read property +role basic-read-basic \ + read cib +role d0-admin \ + write meta:d0:target-role \ + write meta:d0:is-managed \ + read xpath:"//nodes//attributes" \ + read ref:d0 acl_target joe \ - nfs_admin + nfs_admin tag nfs: nfs-server nfs-vg group nfs-disk nfs-vg fs1 group nfs-srv virtual-ip nfs-server ms ms_drbd_nfs p_drbd_nfs \ meta notify=true clone-max=2 location nfs-pref virtual-ip 100: sle12-a +location l1 nfs-srv 100: node1 +location l2 d1 \ + rule 100: #uname eq node1 +location l3 d1 \ + rule inf: #uname eq node1 and pingd gt 0 +location l4 d1 \ + rule -inf: not_defined pingd or pingd lte 0 +location l5 fs1 \ + rule -inf: not_defined pingd or pingd lte 0 \ + rule #uname eq node1 and pingd gt 0 \ + rule date lt 2009-05-26 and date in start=2009-05-26 end=2009-07-26 and date in start=2009-05-26 years=2009 and date spec years=2009 hours=09-17 +location l6 d1 \ + rule $id-ref=l2-rule1 +location l7 d1 \ + rule $id-ref=l2 colocation c-nfs inf: nfs-server fs1 colocation vg-with-drbd inf: nfs-vg ms_drbd_nfs:Master +# drbd device is the nfs-vg PV order drbd-before-vg inf: ms_drbd_nfs:promote nfs-vg:start +# need fs1 for the NFS server order o-nfs inf: fs1 nfs-server +rsc_ticket ticket-A_m6 ticket-A: d1 +rsc_ticket ticket-B_m6_m5 ticket-B: d1 d7 loss-policy=fence +rsc_ticket ticket-C_master ticket-C: d1 ms_drbd_nfs:Master loss-policy=fence +property cpset2: \ + maintenance-mode=true property cib-bootstrap-options: \ dc-version=1.1.12-ad083a8 \ cluster-infrastructure=corosync \ @@ -58,3 +107,9 @@ property cib-bootstrap-options: \ maintenance-mode=false op_defaults op-options: \ timeout=120s +rsc_defaults rsc-options: \ + failure-timeout=10m +op_defaults opsdef2: \ + rule 100: #uname eq node1 \ + record-pending=true +tag t1: d1 d7 opsdef2 -- cgit v1.2.1 From 549e0b5a7d972c20ce87a02b505e8cc45afa1f73 Mon Sep 17 00:00:00 2001 From: Jon Parise Date: Sat, 23 May 2015 12:48:44 -0700 Subject: Addressing the shortcomings of the previous Thift lexer: - 
Strings are now classified as String.Double or String.Single. - Strings now support embedded escape sequences. - All known namespaces (even deprecated ones) are now recognized. - The double and integer patterns now longer collide. - `true` and `false` are recognized as constants. --- pygments/lexers/dsls.py | 35 ++++++++++++++++++++++++++--------- 1 file changed, 26 insertions(+), 9 deletions(-) diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py index ea1cefa3..60ffeae5 100644 --- a/pygments/lexers/dsls.py +++ b/pygments/lexers/dsls.py @@ -97,8 +97,8 @@ class ThriftLexer(RegexLexer): 'root': [ include('whitespace'), include('comments'), - (r'\".*?\"', String), - (r'\'.*?\'', String), + (r'"', String.Double, 'dqs'), + (r'\'', String.Single, 'sqs'), (r'(namespace)(\s+)', bygroups(Keyword.Namespace, Text.Whitespace), 'namespace'), (r'(enum|union|struct|service|exception)(\s+)', @@ -122,6 +122,19 @@ class ThriftLexer(RegexLexer): (r'//.*?\n', Comment), (r'/\*[\w\W]*?\*/', Comment.Multiline), ], + 'string': [ + (r'\\([\\nrt"\'])', String.Escape), # escape characters + (r'[^\\"\\\'\n]+', String), # all other characters + (r'\\', String), # literal backslash + ], + 'dqs': [ + (r'"', String, '#pop'), + include('string') + ], + 'sqs': [ + (r"'", String, '#pop'), + include('string') + ], 'namespace': [ (r'[a-z\*](\.[a-zA-Z_0-9]|[a-zA-Z_0-9])*', Name.Namespace, '#pop'), default('#pop'), @@ -131,13 +144,17 @@ class ThriftLexer(RegexLexer): default('#pop'), ], 'keywords': [ + (r'(async|oneway|extends|throws|required|optional)\b', Keyword), + (r'(true|false)\b', Keyword.Constant), + (r'(const|typedef)\b', Keyword.Declaration), (words(( - 'async', 'oneway', 'extends', 'throws', 'required', - 'optional'), suffix=r'\b'), - Keyword), - (words(( - 'typedef', 'const'), suffix=r'\b'), - Keyword.Declaration), + 'cpp_namespace', 'cpp_include', 'cpp_type', 'java_package', + 'cocoa_prefix', 'csharp_namespace', 'delphi_namespace', + 'php_namespace', 'py_module', 'perl_package', 
+ 'ruby_namespace', 'smalltalk_category', 'smalltalk_prefix', + 'xsd_all', 'xsd_optional', 'xsd_nillable', 'xsd_namespace', + 'xsd_attrs', 'include'), suffix=r'\b'), + Keyword.Namespace), (words(( 'void', 'bool', 'byte', 'i16', 'i32', 'i64', 'double', 'string', 'binary', 'void', 'map', 'list', 'set', 'slist', @@ -165,8 +182,8 @@ class ThriftLexer(RegexLexer): Keyword.Reserved), ], 'numbers': [ + (r'[+-]?(\d+\.\d+([eE][+-]?\d+)?|\.?\d+[eE][+-]?\d+)', Number.Float), (r'[+-]?0x[0-9A-Fa-f]+', Number.Hex), - (r'[+-]?[0-9]+(\.[0-9]+)?([eE][+-]?[0-9]+)?', Number.Float), (r'[+-]?[0-9]+', Number.Integer), ], } -- cgit v1.2.1 From f7a597a4ae3537b5a75c564e00972cec0f95aa34 Mon Sep 17 00:00:00 2001 From: Jon Parise Date: Sat, 23 May 2015 15:30:41 -0700 Subject: Apply String.(Single|Double) to the entire string. --- pygments/lexers/dsls.py | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py index 60ffeae5..4b7a50b0 100644 --- a/pygments/lexers/dsls.py +++ b/pygments/lexers/dsls.py @@ -12,7 +12,7 @@ import re from pygments.lexer import RegexLexer, bygroups, words, include, default, \ - this, using + this, using, combined from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Literal @@ -97,8 +97,8 @@ class ThriftLexer(RegexLexer): 'root': [ include('whitespace'), include('comments'), - (r'"', String.Double, 'dqs'), - (r'\'', String.Single, 'sqs'), + (r'"', String.Double, combined('stringescape', 'dqs')), + (r'\'', String.Single, combined('stringescape', 'sqs')), (r'(namespace)(\s+)', bygroups(Keyword.Namespace, Text.Whitespace), 'namespace'), (r'(enum|union|struct|service|exception)(\s+)', @@ -122,18 +122,16 @@ class ThriftLexer(RegexLexer): (r'//.*?\n', Comment), (r'/\*[\w\W]*?\*/', Comment.Multiline), ], - 'string': [ - (r'\\([\\nrt"\'])', String.Escape), # escape characters - (r'[^\\"\\\'\n]+', String), # all other characters - (r'\\', String), # 
literal backslash + 'stringescape': [ + (r'\\([\\nrt"\'])', String.Escape), ], 'dqs': [ - (r'"', String, '#pop'), - include('string') + (r'"', String.Double, '#pop'), + (r'[^\\"\n]+', String.Double), ], 'sqs': [ - (r"'", String, '#pop'), - include('string') + (r"'", String.String, '#pop'), + (r'[^\\\'\n]+', String.Single), ], 'namespace': [ (r'[a-z\*](\.[a-zA-Z_0-9]|[a-zA-Z_0-9])*', Name.Namespace, '#pop'), -- cgit v1.2.1 From 1cfb77d1ae18ddda3cbdf5316d437d6bbd83d98f Mon Sep 17 00:00:00 2001 From: Jon Parise Date: Sat, 23 May 2015 20:20:35 -0700 Subject: s/String.String/String.Single/ --- pygments/lexers/dsls.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py index 4b7a50b0..5d087200 100644 --- a/pygments/lexers/dsls.py +++ b/pygments/lexers/dsls.py @@ -130,7 +130,7 @@ class ThriftLexer(RegexLexer): (r'[^\\"\n]+', String.Double), ], 'sqs': [ - (r"'", String.String, '#pop'), + (r"'", String.Single, '#pop'), (r'[^\\\'\n]+', String.Single), ], 'namespace': [ -- cgit v1.2.1 From 457e329a988719f2cd51c8be332a9d4e80008f8d Mon Sep 17 00:00:00 2001 From: Colin Sullivan Date: Thu, 28 May 2015 17:01:55 -0700 Subject: Added SuperCollider language spec pretty much just copied from JavaScript with a few modifications. A good start though. 
--- pygments/lexers/_mapping.py | 1 + pygments/lexers/supercollider.py | 79 ++++++++++++++++++++++++++++++++++++++++ tests/examplefiles/example.scd | 76 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 156 insertions(+) create mode 100644 pygments/lexers/supercollider.py create mode 100644 tests/examplefiles/example.scd diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 2b836ac6..41ebfdac 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -335,6 +335,7 @@ LEXERS = { 'SquidConfLexer': ('pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)), 'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)), 'StanLexer': ('pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()), + 'SuperColliderLexer': ('pygments.lexers.supercollider', 'SuperCollider', ('sc', 'supercollider'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')), 'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)), 'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), 'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), diff --git a/pygments/lexers/supercollider.py b/pygments/lexers/supercollider.py new file mode 100644 index 00000000..da553e5f --- /dev/null +++ b/pygments/lexers/supercollider.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.supercollider + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexer for SuperCollider + + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, default, using, this +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Other +from pygments.util import get_bool_opt, iteritems +import pygments.unistring as uni + +__all__ = ['SuperColliderLexer'] + +class SuperColliderLexer(RegexLexer): + """ + For SuperCollider source code. + """ + + name = 'SuperCollider' + aliases = ['sc', 'supercollider'] + filenames = ['*.sc', '*.scd'] + mimetypes = ['application/supercollider', 'text/supercollider', ] + + flags = re.DOTALL + tokens = { + 'commentsandwhitespace': [ + (r'\s+', Text), + (r'0, " wing units."; - ]; - -Object -> "thimble" with name 'thimble'; - -Object -> pebble "pebble" with name 'pebble'; - -Ifdef TARGET_ZCODE; Trace objects; Endif; - -Statusline score; - -Stub StubR 3; - -Ifdef TARGET_ZCODE; -Zcharacter "abcdefghijklmnopqrstuvwxyz" - "ABCDEFGHIJKLMNOPQRSTUVWXYZ" - "123456789.,!?_#'0/@{005C}-:()"; -Zcharacter table '@!!' '@<<' '@'A'; -Zcharacter table + '@AE' '@{dc}' '@et' '@:y'; -Ifnot; -Ifdef TARGET_GLULX; -Message "Glulx doesn't use ~Zcharacter~.^Oh well."; ! '~' and '^' work here. -Ifnot; -Message warning "Uh-oh! ^~^"; ! They don't work in other Messages. 
-Endif; -Endif; - -Include "Grammar"; - -Verb"acquire"'collect'='take'; - -[ NounFilter; return noun ofclass Bird; ]; - -[ ScopeFilter obj; - switch (scope_stage) { - 1: rtrue; - 2: objectloop (obj in compass) PlaceInScope(obj); - 3: "Nothing is in scope."; - } -]; - -Verb meta "t" 'test' - * 'held' held -> TestHeld - * number -> TestNumber - * reversed -> TestAttribute - * 'creature' creature -> TestCreature - * 'multiheld' multiheld -> TestMultiheld - * 'm' multiexcept 'into'/"in" noun -> TestMultiexcept - * 'm' multiinside 'from' noun -> TestMultiinside - * multi -> TestMulti - * 'filter'/'f' noun=NounFilter -> TestNounFilter - * 'filter'/'f' scope=ScopeFilter -> TestScopeFilter - * 'special' special -> TestSpecial - * topic -> TestTopic; - -Verb 'reverse' 'swap' 'exchange' - * held 'for' noun -> reverse - * noun 'with' noun -> reverse reverse; - -Extend "t" last * noun -> TestNoun; - -Extend 't' first * -> Test; - -Extend 'wave' replace * -> NewWave; - -Extend only 'feel' 'touch' replace * noun -> Feel; - -[ TestSub a b o; - string 25 low_string; - print "Test what?> "; - table->0 = 260; - parse->0 = 61; - #Ifdef TARGET_ZCODE; - read buffer parse; - #Ifnot; ! TARGET_GLULX - KeyboardPrimitive(buffer, parse); - #Endif; ! 
TARGET_ - switch (parse-->1) { - 'save': - #Ifdef TARGET_ZCODE; - #Ifv3; - @save ?saved; - #Ifnot; - save saved; - #Endif; - #Endif; - print "Saving failed.^"; - 'restore': - #Ifdef TARGET_ZCODE; - restore saved; - #Endif; - print "Restoring failed.^"; - 'restart': - @restart; - 'quit', 'q//': - quit; - return 2; rtrue; rfalse; return; - 'print', 'p//': - print "Print:^", - " (string): ", (string) "xyzzy^", - " (number): ", (number) 123, "^", - " (char): ", (char) 'x', "^", - " (address): ", (address) 'plugh//p', "^", - " (The): ", (The) person, "^", - " (the): ", (the) person, "^", - " (A): ", (A) person, "^", - " (a): ", (a) person, "^", - " (an): ", (an) person, "^", - " (name): ", (name) person, "^", - " (object): ", (object) person, "^", - " (property): ", (property) alias, "^", - " (): ", (LanguageNumber) 123, "^", - " : ", a * 2 - 1, "^", - " (): ", (a + person), "^"; - print "Escapes:^", - " by mnemonic: @!! @<< @'A @AE @et @:y^", - " by decimal value: @@64 @@126^", - " by Unicode value: @{DC}@{002b}^", - " by string variable: @25^"; - 'font', 'style': - font off; print "font off^"; - font on; print "font on^"; - style reverse; print "style reverse^"; style roman; - style bold; print "style bold^"; - style underline; print "style underline^"; - style fixed; print "style fixed^"; - style roman; print "style roman^"; - 'statements': - spaces 8; - objectloop (o) { - print "objectloop (o): ", (the) o, "^"; - } - objectloop (o in compass) { ! 'in' is a keyword - print "objectloop (o in compass): ", (the) o, "^"; - } - objectloop (o in compass && true) { ! 'in' is an operator - print "objectloop (o in compass && true): ", (the) o, "^"; - } - objectloop (o from se_obj) { - print "objectloop (o from se_obj): ", (the) o, "^"; - } - objectloop (o near person) { - print "objectloop (o near person): ", (the) o, "^"; - } - #Ifdef TARGET_ZCODE; - #Trace assembly on; -@ ! This is assembly. 
- add -4 ($$1+$3)*2 -> b; - @get_sibling test_tube -> b ?saved; - @inc [b]; - @je sp (1+3*0) ? equal; - @je 1 ((sp)) ?~ different; - .! This is a label: - equal; - print "sp == 1^"; - jump label; - .different; - print "sp @@126= 1^"; - .label; - #Trace off; #Endif; ! TARGET_ZCODE - a = random(10); - switch (a) { - 1, 9: - box "Testing oneself is best when done alone." - " -- Jimmy Carter"; - 2, 6, to, 3 to 5, to to to: - ; - #Ifdef VN_1633; - ; - #Endif; - a = ##Drop; - < ! The angle brackets may be separated by whitespace. - < (a) pigeon > >; - default: - do { - give person general ~general; - } until (person provides life && ~~false); - if (a == 7) a = 4; - else a = 5; - } - 'expressions': - a = 1+1-1*1/1%1&1|1&&1||1==(1~=(1>(1<(1>=(1<=1))))); - a++; ++a; a--; --a; - a = person.life; - a = kitchen.&arr; - a = kitchen.#arr; - a = Bird::wingspan; - a = kitchen has general; - a = kitchen hasnt general; - a = kitchen provides arr; - a = person in kitchen; - a = person notin kitchen; - a = person ofclass Bird; - a = a == 0 or 1; - a = StubR(); - a = StubR(a); - a = StubR(, a); - a = "string"; - a = 'word'; - a = '''; ! 
character - a = $09afAF; - a = $$01; - a = ##Eat; a = #a$Eat; - a = #g$self; - a = #n$!word; - a = #r$StubR; - a = #dict_par1; - default: - for (a = 2, b = a; (a < buffer->1 + 2) && (Bird::wingspan): ++a, b--) { - print (char) buffer->a; - } - new_line; - for (::) break; - } - .saved;; -]; - -[ TestNumberSub; - print_ret parsed_number, " is ", (number) parsed_number, "."; -]; - -[ TestAttributeSub; print_ret (The) noun, " has been reversed."; ]; - -[ CreatureTest obj; return obj has animate; ]; - -[ TestCreatureSub; print_ret (The) noun, " is a creature."; ]; - -[ TestMultiheldSub; print_ret "You are holding ", (the) noun, "."; ]; - -[ TestMultiexceptSub; "You test ", (the) noun, " with ", (the) second, "."; ]; - -[ TestMultiinsideSub; "You test ", (the) noun, " from ", (the) second, "."; ]; - -[ TestMultiSub; print_ret (The) noun, " is a thing."; ]; - -[ TestNounFilterSub; print_ret (The) noun, " is a bird."; ]; - -[ TestScopeFilterSub; print_ret (The) noun, " is a direction."; ]; - -[ TestSpecialSub; "Your lucky number is ", parsed_number, "."; ]; - -[ TestTopicSub; "You discuss a topic."; ]; - -[ TestNounSub; "That is ", (a) noun, "."; ]; - -[ TestHeldSub; "You are holding ", (a) noun, "."; ]; - -[ NewWaveSub; "That would be foolish."; ]; - -[ FeelSub; print_ret (The) noun, " feels normal."; ]; - -[ ReverseSub from; - from = parent(noun); - move noun to parent(second); - if (from == to) - move second to to; - else - move second to from; - give noun to; - from = to; - give second from; - "You swap ", (the) noun, " and ", (the) second, "."; -]; - -End: The End directive ends the source code. diff --git a/tests/examplefiles/inform6_example b/tests/examplefiles/inform6_example new file mode 100644 index 00000000..73cdd087 --- /dev/null +++ b/tests/examplefiles/inform6_example @@ -0,0 +1,374 @@ +!% $SMALL ! This is ICL, not a comment. +!% -w + +!% A comprehensive test of Inform6Lexer. 
+ +Switches d2SDq; + +Constant Story "Informal Testing"; +Constant Headline "^Not a game.^";!% This is a comment, not ICL. + +Release 2; +Serial "140308"; +Version 5; + +Ifndef TARGET_ZCODE; +Ifndef TARGET_GLULX; +Ifndef WORDSIZE; +Default WORDSIZE 2; +Constant TARGET_ZCODE; +Endif; +Endif; +Endif; + +Ifv3; Message "Compiling to version 3"; Endif; +Ifv5; Message "Not compiling to version 3"; endif; +ifdef TARGET_ZCODE; +#IFTRUE (#version_number == 5); +Message "Compiling to version 5"; +#ENDIF; +endif ; + +Replace CreatureTest; + +Include "Parser"; +Include "VerbLib"; + +# ! A hash is optional at the top level. +Object kitchen "Kitchen" + with description "You are in a kitchen.", + arr 1 2 3 4, + has light; + +#[ Initialise; + location = kitchen; + print "v"; inversion; "^"; +]; + +Ifdef VN_1633; +Replace IsSeeThrough IsSeeThroughOrig; +[ IsSeeThrough * o; + return o hasnt opaque || IsSeeThroughOrig(o); +]; +Endif; + +Abbreviate "test"; + +Array table buffer 260; + +Attribute reversed; +Attribute opaque alias locked; +Constant to reversed; + +Property long additive additive long alias; +Property long long long wingspan alias alias; + +Class Flier with wingspan 5; +Class Bird(10) has animate class Flier with wingspan 2; + +Constant Constant1; +Constant Constant2 Constant1; +Constant Constant3 = Constant2; +Ifdef VN_1633; Undef Constant; Endif; + +Ifdef VN_1633; +Dictionary 'word' 1 2; +Ifnot; +Dictionary dict_word "word"; +Endif; + +Fake_action NotReal; + +Global global1; +Global global2 = 69105; + +Lowstring low_string "low string"; + +Iftrue false; +Message error "Uh-oh!^~false~ shouldn't be ~true~."; +Endif; +Iffalse true; +Message fatalerror "Uh-oh!^~true~ shouldn't be ~false~."; +Endif; + +Nearby person "person" + with name 'person', + description "This person is barely implemented.", + life [ * x y z; + Ask: print_ret (The) self, " says nothing."; + Answer: print (The) self, " didn't say anything.^"; rfalse; + ] + has has animate transparent; + +Object -> -> 
test_tube "test tube" + with name 'test' "tube" 'testtube', + has ~openable ~opaque container; + +Bird -> pigeon + with name 'pigeon', + description [; + "The pigeon has a wingspan of ", self.&wingspan-->0, " wing units."; + ]; + +Object -> "thimble" with name 'thimble'; + +Object -> pebble "pebble" with name 'pebble'; + +Ifdef TARGET_ZCODE; Trace objects; Endif; + +Statusline score; + +Stub StubR 3; + +Ifdef TARGET_ZCODE; +Zcharacter "abcdefghijklmnopqrstuvwxyz" + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "123456789.,!?_#'0/@{005C}-:()"; +Zcharacter table '@!!' '@<<' '@'A'; +Zcharacter table + '@AE' '@{dc}' '@et' '@:y'; +Ifnot; +Ifdef TARGET_GLULX; +Message "Glulx doesn't use ~Zcharacter~.^Oh well."; ! '~' and '^' work here. +Ifnot; +Message warning "Uh-oh! ^~^"; ! They don't work in other Messages. +Endif; +Endif; + +Include "Grammar"; + +Verb"acquire"'collect'='take'; + +[ NounFilter; return noun ofclass Bird; ]; + +[ ScopeFilter obj; + switch (scope_stage) { + 1: rtrue; + 2: objectloop (obj in compass) PlaceInScope(obj); + 3: "Nothing is in scope."; + } +]; + +Verb meta "t" 'test' + * 'held' held -> TestHeld + * number -> TestNumber + * reversed -> TestAttribute + * 'creature' creature -> TestCreature + * 'multiheld' multiheld -> TestMultiheld + * 'm' multiexcept 'into'/"in" noun -> TestMultiexcept + * 'm' multiinside 'from' noun -> TestMultiinside + * multi -> TestMulti + * 'filter'/'f' noun=NounFilter -> TestNounFilter + * 'filter'/'f' scope=ScopeFilter -> TestScopeFilter + * 'special' special -> TestSpecial + * topic -> TestTopic; + +Verb 'reverse' 'swap' 'exchange' + * held 'for' noun -> reverse + * noun 'with' noun -> reverse reverse; + +Extend "t" last * noun -> TestNoun; + +Extend 't' first * -> Test; + +Extend 'wave' replace * -> NewWave; + +Extend only 'feel' 'touch' replace * noun -> Feel; + +[ TestSub a b o; + string 25 low_string; + print "Test what?> "; + table->0 = 260; + parse->0 = 61; + #Ifdef TARGET_ZCODE; + read buffer parse; + #Ifnot; ! 
TARGET_GLULX + KeyboardPrimitive(buffer, parse); + #Endif; ! TARGET_ + switch (parse-->1) { + 'save': + #Ifdef TARGET_ZCODE; + #Ifv3; + @save ?saved; + #Ifnot; + save saved; + #Endif; + #Endif; + print "Saving failed.^"; + 'restore': + #Ifdef TARGET_ZCODE; + restore saved; + #Endif; + print "Restoring failed.^"; + 'restart': + @restart; + 'quit', 'q//': + quit; + return 2; rtrue; rfalse; return; + 'print', 'p//': + print "Print:^", + " (string): ", (string) "xyzzy^", + " (number): ", (number) 123, "^", + " (char): ", (char) 'x', "^", + " (address): ", (address) 'plugh//p', "^", + " (The): ", (The) person, "^", + " (the): ", (the) person, "^", + " (A): ", (A) person, "^", + " (a): ", (a) person, "^", + " (an): ", (an) person, "^", + " (name): ", (name) person, "^", + " (object): ", (object) person, "^", + " (property): ", (property) alias, "^", + " (): ", (LanguageNumber) 123, "^", + " : ", a * 2 - 1, "^", + " (): ", (a + person), "^"; + print "Escapes:^", + " by mnemonic: @!! @<< @'A @AE @et @:y^", + " by decimal value: @@64 @@126^", + " by Unicode value: @{DC}@{002b}^", + " by string variable: @25^"; + 'font', 'style': + font off; print "font off^"; + font on; print "font on^"; + style reverse; print "style reverse^"; style roman; + style bold; print "style bold^"; + style underline; print "style underline^"; + style fixed; print "style fixed^"; + style roman; print "style roman^"; + 'statements': + spaces 8; + objectloop (o) { + print "objectloop (o): ", (the) o, "^"; + } + objectloop (o in compass) { ! 'in' is a keyword + print "objectloop (o in compass): ", (the) o, "^"; + } + objectloop (o in compass && true) { ! 'in' is an operator + print "objectloop (o in compass && true): ", (the) o, "^"; + } + objectloop (o from se_obj) { + print "objectloop (o from se_obj): ", (the) o, "^"; + } + objectloop (o near person) { + print "objectloop (o near person): ", (the) o, "^"; + } + #Ifdef TARGET_ZCODE; + #Trace assembly on; +@ ! This is assembly. 
+ add -4 ($$1+$3)*2 -> b; + @get_sibling test_tube -> b ?saved; + @inc [b]; + @je sp (1+3*0) ? equal; + @je 1 ((sp)) ?~ different; + .! This is a label: + equal; + print "sp == 1^"; + jump label; + .different; + print "sp @@126= 1^"; + .label; + #Trace off; #Endif; ! TARGET_ZCODE + a = random(10); + switch (a) { + 1, 9: + box "Testing oneself is best when done alone." + " -- Jimmy Carter"; + 2, 6, to, 3 to 5, to to to: + ; + #Ifdef VN_1633; + ; + #Endif; + a = ##Drop; + < ! The angle brackets may be separated by whitespace. + < (a) pigeon > >; + default: + do { + give person general ~general; + } until (person provides life && ~~false); + if (a == 7) a = 4; + else a = 5; + } + 'expressions': + a = 1+1-1*1/1%1&1|1&&1||1==(1~=(1>(1<(1>=(1<=1))))); + a++; ++a; a--; --a; + a = person.life; + a = kitchen.&arr; + a = kitchen.#arr; + a = Bird::wingspan; + a = kitchen has general; + a = kitchen hasnt general; + a = kitchen provides arr; + a = person in kitchen; + a = person notin kitchen; + a = person ofclass Bird; + a = a == 0 or 1; + a = StubR(); + a = StubR(a); + a = StubR(, a); + a = "string"; + a = 'word'; + a = '''; ! 
character + a = $09afAF; + a = $$01; + a = ##Eat; a = #a$Eat; + a = #g$self; + a = #n$!word; + a = #r$StubR; + a = #dict_par1; + default: + for (a = 2, b = a; (a < buffer->1 + 2) && (Bird::wingspan): ++a, b--) { + print (char) buffer->a; + } + new_line; + for (::) break; + } + .saved;; +]; + +[ TestNumberSub; + print_ret parsed_number, " is ", (number) parsed_number, "."; +]; + +[ TestAttributeSub; print_ret (The) noun, " has been reversed."; ]; + +[ CreatureTest obj; return obj has animate; ]; + +[ TestCreatureSub; print_ret (The) noun, " is a creature."; ]; + +[ TestMultiheldSub; print_ret "You are holding ", (the) noun, "."; ]; + +[ TestMultiexceptSub; "You test ", (the) noun, " with ", (the) second, "."; ]; + +[ TestMultiinsideSub; "You test ", (the) noun, " from ", (the) second, "."; ]; + +[ TestMultiSub; print_ret (The) noun, " is a thing."; ]; + +[ TestNounFilterSub; print_ret (The) noun, " is a bird."; ]; + +[ TestScopeFilterSub; print_ret (The) noun, " is a direction."; ]; + +[ TestSpecialSub; "Your lucky number is ", parsed_number, "."; ]; + +[ TestTopicSub; "You discuss a topic."; ]; + +[ TestNounSub; "That is ", (a) noun, "."; ]; + +[ TestHeldSub; "You are holding ", (a) noun, "."; ]; + +[ NewWaveSub; "That would be foolish."; ]; + +[ FeelSub; print_ret (The) noun, " feels normal."; ]; + +[ ReverseSub from; + from = parent(noun); + move noun to parent(second); + if (from == to) + move second to to; + else + move second to from; + give noun to; + from = to; + give second from; + "You swap ", (the) noun, " and ", (the) second, "."; +]; + +End: The End directive ends the source code. 
-- cgit v1.2.1 From 6a842fa939e69bc5c713dc85d30f6b2179610ae7 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Wed, 14 Oct 2015 17:47:11 -0700 Subject: Fix out of order alternation in Earl Grey --- pygments/lexers/javascript.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py index 3cc20e01..ef92034d 100644 --- a/pygments/lexers/javascript.py +++ b/pygments/lexers/javascript.py @@ -1409,13 +1409,13 @@ class EarlGreyLexer(RegexLexer): 'bt': [ (r'`', String.Backtick, '#pop'), (r'(? Date: Wed, 14 Oct 2015 18:04:28 -0700 Subject: Update changelog --- CHANGES | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES b/CHANGES index 8c71dbe8..68c29370 100644 --- a/CHANGES +++ b/CHANGES @@ -30,7 +30,7 @@ Version 2.1 * Component Pascal (PR#437) * SuperCollider (PR#472) * Shell consoles (Tcsh, PowerShell, MSDOS) (PR#479) - + * Elm and J (PR#452) - Added styles: -- cgit v1.2.1 From f70045a95c41687a989cb5920315f0633912f3c2 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Wed, 14 Oct 2015 18:13:00 -0700 Subject: Update changelog --- AUTHORS | 1 + CHANGES | 1 + 2 files changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index 9990aeff..8f1f948a 100644 --- a/AUTHORS +++ b/AUTHORS @@ -123,6 +123,7 @@ Other contributors, listed alphabetically, are: * Jon Morton -- Rust lexer * Paulo Moura -- Logtalk lexer * Mher Movsisyan -- DTD lexer +* Dejan Muhamedagic -- Crmsh lexer * Ana Nelson -- Ragel, ANTLR, R console lexers * Nam T. 
Nguyen -- Monokai style * Jesper Noehr -- HTML formatter "anchorlinenos" diff --git a/CHANGES b/CHANGES index 68c29370..0b48c998 100644 --- a/CHANGES +++ b/CHANGES @@ -31,6 +31,7 @@ Version 2.1 * SuperCollider (PR#472) * Shell consoles (Tcsh, PowerShell, MSDOS) (PR#479) * Elm and J (PR#452) + * Crmsh (PR#440) - Added styles: -- cgit v1.2.1 From 5b1e6e2e8cf2ec213615aa55f2dc4b47b05db7dc Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Wed, 14 Oct 2015 18:19:58 -0700 Subject: Update mapfiles, TrafficScript formatting. --- pygments/lexers/_mapping.py | 10 +--------- pygments/lexers/trafficscript.py | 12 +++++++----- 2 files changed, 8 insertions(+), 14 deletions(-) diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index fd65b98b..0e0e610a 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -330,21 +330,13 @@ LEXERS = { 'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)), 'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)), 'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), + 'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()), 'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')), 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust',), ('*.rs',), ('text/rust',)), 'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), 'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', 
'*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), 'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)), - 'RobotFrameworkLexer': ('pygments.lexers.other', 'RobotFramework', ('RobotFramework', 'robotframework'), ('*.txt', '*.robot'), ('text/x-robotframework',)), - 'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), - 'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('trafficscript', 'rts'), ('*.rts'), ('text/x-trafficscript', 'application/x-trafficscript')), - 'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), - 'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')), - 'RustLexer': ('pygments.lexers.compiled', 'Rust', ('rust',), ('*.rs', '*.rc'), ('text/x-rustsrc',)), - 'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), - 'SMLLexer': ('pygments.lexers.functional', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), - 'SassLexer': ('pygments.lexers.web', 'Sass', ('sass', 'SASS'), ('*.sass',), ('text/x-sass',)), 'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)), 'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)), 'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')), diff --git a/pygments/lexers/trafficscript.py b/pygments/lexers/trafficscript.py index ddc4aa50..34ca7d5b 100644 --- a/pygments/lexers/trafficscript.py +++ 
b/pygments/lexers/trafficscript.py @@ -4,26 +4,28 @@ pygments.lexers.trafficscript ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - Lexer for RiverBed's TrafficScript (RTS) language. - :copyright: Copyright 2013 by Alex Gosse + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re + from pygments.lexer import RegexLexer -from pygments.token import * +from pygments.token import String, Number, Name, Keyword, Operator, Text, Comment __all__ = ['RtsLexer'] class RtsLexer(RegexLexer): """ - For `Riverbed Stingray Traffic Manager - ` + For `Riverbed Stingray Traffic Manager `_ + + .. versionadded:: 2.1 """ name = 'TrafficScript' aliases = ['rts','trafficscript'] filenames = ['*.rts'] + tokens = { 'root' : [ (r"'(\\\\|\\[^\\]|[^'\\])*'", String), -- cgit v1.2.1 From 0b3bba88ef898e555cb79db5b4f5749c6ef6f704 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Wed, 14 Oct 2015 18:26:16 -0700 Subject: Properly skip when latex isn't installed --- tests/test_latex_formatter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_latex_formatter.py b/tests/test_latex_formatter.py index 56b5db2e..05a6c3ac 100644 --- a/tests/test_latex_formatter.py +++ b/tests/test_latex_formatter.py @@ -44,7 +44,7 @@ class LatexFormatterTest(unittest.TestCase): po.stdout.close() except OSError: # latex not available - pass + raise support.SkipTest else: if ret: print(output) -- cgit v1.2.1 From 6d0b5ca281d57ec6b7bf8e3bc94c6ff78e82cb35 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Wed, 14 Oct 2015 18:31:55 -0700 Subject: Add basic test for irc formatter --- tests/test_irc_formatter.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 tests/test_irc_formatter.py diff --git a/tests/test_irc_formatter.py b/tests/test_irc_formatter.py new file mode 100644 index 00000000..649e430d --- /dev/null +++ b/tests/test_irc_formatter.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +""" + Pygments HTML 
formatter tests + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from __future__ import print_function + +import io +import os +import re +import unittest +import tempfile +from os.path import join, dirname, isfile + +from pygments.util import StringIO +from pygments.lexers import PythonLexer +from pygments.formatters import IRCFormatter + +import support + +tokensource = list(PythonLexer().get_tokens("lambda x: 123")) + +class HtmlFormatterTest(unittest.TestCase): + def test_correct_output(self): + hfmt = IRCFormatter() + houtfile = StringIO() + hfmt.format(tokensource, houtfile) + + self.assertEqual(u'\x0302lambda\x03 x: \x0302123\x03\n', houtfile.getvalue()) + -- cgit v1.2.1 From 56e49bdf1fc057ac87d55eb51a8dc4ceaca83ca4 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Wed, 14 Oct 2015 18:33:43 -0700 Subject: Fixup naming of IRC test; add to docs --- AUTHORS | 1 + CHANGES | 6 ++++++ tests/test_irc_formatter.py | 10 +++------- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/AUTHORS b/AUTHORS index 471c91e2..5101af12 100644 --- a/AUTHORS +++ b/AUTHORS @@ -23,6 +23,7 @@ Other contributors, listed alphabetically, are: * Michael Bayer -- Myghty lexers * Thomas Beale -- Archetype lexers * John Benediktsson -- Factor lexer +* Trevor Bergeron -- mIRC formatter * Vincent Bernat -- LessCSS lexer * Christopher Bertels -- Fancy lexer * Jarrett Billingsley -- MiniD lexer diff --git a/CHANGES b/CHANGES index 0b48c998..837a2ab6 100644 --- a/CHANGES +++ b/CHANGES @@ -39,6 +39,12 @@ Version 2.1 * Lovelace (PR#456) * Algol and Algol-nu (#1090) + +- Added formatters: + + * IRC (PR#458) + + - Updated autopygmentize script (PR#445) - Fixed style inheritance for non-standard token types in HTML output. 
diff --git a/tests/test_irc_formatter.py b/tests/test_irc_formatter.py index 649e430d..16a8fd30 100644 --- a/tests/test_irc_formatter.py +++ b/tests/test_irc_formatter.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """ - Pygments HTML formatter tests - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Pygments IRC formatter tests + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. @@ -9,12 +9,8 @@ from __future__ import print_function -import io -import os import re import unittest -import tempfile -from os.path import join, dirname, isfile from pygments.util import StringIO from pygments.lexers import PythonLexer @@ -24,7 +20,7 @@ import support tokensource = list(PythonLexer().get_tokens("lambda x: 123")) -class HtmlFormatterTest(unittest.TestCase): +class IRCFormatterTest(unittest.TestCase): def test_correct_output(self): hfmt = IRCFormatter() houtfile = StringIO() -- cgit v1.2.1 From a18b96f18b2bc421f9172a42982e22f5149ac432 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Fri, 16 Oct 2015 07:55:00 -0700 Subject: Avoid the shell entirely when finding fonts. Manually tested on OS X. 
--- pygments/formatters/img.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/pygments/formatters/img.py b/pygments/formatters/img.py index cd51bb82..60f33fa6 100644 --- a/pygments/formatters/img.py +++ b/pygments/formatters/img.py @@ -10,12 +10,13 @@ """ import sys -import shlex from pygments.formatter import Formatter from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \ get_choice_opt, xrange +import subprocess + # Import this carefully try: from PIL import Image, ImageDraw, ImageFont @@ -76,14 +77,11 @@ class FontManager(object): self._create_nix() def _get_nix_font_path(self, name, style): - try: - from commands import getstatusoutput - except ImportError: - from subprocess import getstatusoutput - exit, out = getstatusoutput('fc-list %s file' % - shlex.quote("%s:style=%s" % (name, style))) - if not exit: - lines = out.splitlines() + proc = subprocess.Popen(['fc-list', "%s:style=%s" % (name, style), 'file'], + stdout=subprocess.PIPE, stderr=None) + stdout, _ = proc.communicate() + if proc.returncode == 0: + lines = stdout.splitlines() if lines: path = lines[0].strip().strip(':') return path @@ -198,7 +196,7 @@ class ImageFormatter(Formatter): bold and italic fonts will be generated. This really should be a monospace font to look sane. - Default: "Bitstream Vera Sans Mono" + Default: "Bitstream Vera Sans Mono" on Windows, Courier New on *nix `font_size` The font size in points to be used. -- cgit v1.2.1 From 2a74e18cb1b8b7cc2848e236a636e49be46ed165 Mon Sep 17 00:00:00 2001 From: Giedrius Dubinskas Date: Fri, 16 Oct 2015 21:27:23 +0300 Subject: Optimized HtmlFormatter to run ~10x faster for long lines Changed string concatenation to list extend/join to reduce memory allocations. 
How to reproduce: Test data: $ echo '{' $(for i in {1..10000}; do printf '"key%d":"value %d", ' $i $i; done) '}' > /tmp/one-line.json Before: $ time ./pygmentize -O encoding=utf-8 -O stripnl=False -f html -l 'json' > /dev/null < /tmp/one-line.json real 0m5.597s user 0m4.400s sys 0m1.158s After: $ time ./pygmentize -O encoding=utf-8 -O stripnl=False -f html -l 'json' > /dev/null < /tmp/one-line.json real 0m0.347s user 0m0.309s sys 0m0.029s --- pygments/formatters/html.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/pygments/formatters/html.py b/pygments/formatters/html.py index b22be54f..b03a4bd5 100644 --- a/pygments/formatters/html.py +++ b/pygments/formatters/html.py @@ -711,7 +711,7 @@ class HtmlFormatter(Formatter): tagsfile = self.tagsfile lspan = '' - line = '' + line = [] for ttype, value in tokensource: if nocls: cclass = getcls(ttype) @@ -742,30 +742,31 @@ class HtmlFormatter(Formatter): for part in parts[:-1]: if line: if lspan != cspan: - line += (lspan and '') + cspan + part + \ - (cspan and '') + lsep + line.extend(((lspan and ''), cspan, part, + (cspan and ''), lsep)) else: # both are the same - line += part + (lspan and '') + lsep - yield 1, line - line = '' + line.extend((part, (lspan and ''), lsep)) + yield 1, ''.join(line) + line = [] elif part: - yield 1, cspan + part + (cspan and '') + lsep + yield 1, ''.join((cspan, part, (cspan and ''), lsep)) else: yield 1, lsep # for the last line if line and parts[-1]: if lspan != cspan: - line += (lspan and '') + cspan + parts[-1] + line.extend(((lspan and ''), cspan, parts[-1])) lspan = cspan else: - line += parts[-1] + line.append(parts[-1]) elif parts[-1]: - line = cspan + parts[-1] + line = [cspan, parts[-1]] lspan = cspan # else we neither have to open a new span nor set lspan if line: - yield 1, line + (lspan and '') + lsep + line.extend(((lspan and ''), lsep)) + yield 1, ''.join(line) def _lookup_ctag(self, token): entry = ctags.TagEntry() -- cgit 
v1.2.1 From 1cf28f96951c1ac58353800840af45940b3952a9 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Fri, 16 Oct 2015 18:51:01 -0700 Subject: Fixes #1148: Trigger using CLexer off string "#ifndef" --- pygments/lexers/c_cpp.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/c_cpp.py b/pygments/lexers/c_cpp.py index 35ea517f..624ebb71 100644 --- a/pygments/lexers/c_cpp.py +++ b/pygments/lexers/c_cpp.py @@ -181,7 +181,7 @@ class CLexer(CFamilyLexer): def analyse_text(text): if re.search('^\s*#include [<"]', text, re.MULTILINE): return 0.1 - if re.search('^\s*#ifdef ', text, re.MULTILINE): + if re.search('^\s*#ifn?def ', text, re.MULTILINE): return 0.1 -- cgit v1.2.1 From a8a57c9257af61cffbc098701392b4f62c2f5c50 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Fri, 16 Oct 2015 19:01:32 -0700 Subject: Fixes #1139: Correctly order symbol and char literals in ScalaLexer --- pygments/lexers/jvm.py | 2 +- tests/examplefiles/char.scala | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 tests/examplefiles/char.scala diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py index 4d3c9159..2a4909c8 100644 --- a/pygments/lexers/jvm.py +++ b/pygments/lexers/jvm.py @@ -252,7 +252,6 @@ class ScalaLexer(RegexLexer): 'root': [ # method names (r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'), - (u"'%s" % idrest, Text.Symbol), (r'[^\S\n]+', Text), (r'//.*?\n', Comment.Single), (r'/\*', Comment.Multiline, 'comment'), @@ -271,6 +270,7 @@ class ScalaLexer(RegexLexer): (r'""".*?"""(?!")', String), (r'"(\\\\|\\"|[^"])*"', String), (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char), + (u"'%s" % idrest, Text.Symbol), (r'[fs]"""', String, 'interptriplestring'), # interpolated strings (r'[fs]"', String, 'interpstring'), # interpolated strings (r'raw"(\\\\|\\"|[^"])*"', String), # raw strings diff --git a/tests/examplefiles/char.scala b/tests/examplefiles/char.scala new file mode 100644 index 00000000..0032612f --- 
/dev/null +++ b/tests/examplefiles/char.scala @@ -0,0 +1,4 @@ +'symbol +'a' +'\u1234' +'\n' -- cgit v1.2.1 From 625fef7ceabe157912234f3d6c4a64edebe960fc Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Fri, 16 Oct 2015 19:26:11 -0700 Subject: Refs #1125: improve detection of builtins. It's still too greedy, e.g. 'jobs jobs jobs' highlights all as builtins, while only the first should be. --- pygments/lexers/shell.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py index adb7744c..041bdbe1 100644 --- a/pygments/lexers/shell.py +++ b/pygments/lexers/shell.py @@ -62,7 +62,7 @@ class BashLexer(RegexLexer): r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|' r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|' r'shopt|source|suspend|test|time|times|trap|true|type|typeset|' - r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)', + r'ulimit|umask|unalias|unset|wait)(?=[\s)`])', Name.Builtin), (r'\A#!.+\n', Comment.Hashbang), (r'#.*\n', Comment.Single), -- cgit v1.2.1 From 1d79c15719c7ca004e139a285725cba253f1fe86 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Fri, 16 Oct 2015 20:05:08 -0700 Subject: Remove IPython from TODO per #938 --- TODO | 3 --- 1 file changed, 3 deletions(-) diff --git a/TODO b/TODO index 3ff63a3f..88076f3d 100644 --- a/TODO +++ b/TODO @@ -1,9 +1,6 @@ Todo ==== -- suggested new lexers - * IPython sessions - - lexers that need work: * review perl lexer (numerous bugs, but so far no one had complaints ;) * readd property support for C# lexer? 
that is, find a regex that doesn't -- cgit v1.2.1 From c071960e07b8b877662904095ecb9eb584e82cbe Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 17 Oct 2015 07:46:00 +0200 Subject: Closes #1134: remove nonworking links from docs/integrate --- doc/docs/integrate.rst | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/doc/docs/integrate.rst b/doc/docs/integrate.rst index 03fc268f..73e02acd 100644 --- a/doc/docs/integrate.rst +++ b/doc/docs/integrate.rst @@ -4,21 +4,6 @@ Using Pygments in various scenarios =================================== -PyGtk ------ - -Armin has written a piece of sample code that shows how to create a Gtk -`TextBuffer` object containing Pygments-highlighted text. - -See the article here: http://lucumr.pocoo.org/cogitations/2007/05/30/pygments-gtk-rendering/ - -Wordpress ---------- - -He also has a snippet that shows how to use Pygments in WordPress: - -http://lucumr.pocoo.org/cogitations/2007/05/30/pygments-in-wordpress/ - Markdown -------- -- cgit v1.2.1 From 4aaf4a05cec747886e93e1e87c3b5069e3c40fa5 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 17 Oct 2015 08:03:57 +0200 Subject: Changelog update for PR#504. --- AUTHORS | 1 + CHANGES | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 945fc962..1a1428a1 100644 --- a/AUTHORS +++ b/AUTHORS @@ -40,6 +40,7 @@ Other contributors, listed alphabetically, are: * Pete Curry -- bugfixes * Bryan Davis -- EBNF lexer * Bruno Deferrari -- Shen lexer +* Giedrius Dubinskas -- HTML formatter improvements * Owen Durni -- Haxe lexer * Alexander Dutton, Oxford University Computing Services -- SPARQL lexer * James Edwards -- Terraform lexer diff --git a/CHANGES b/CHANGES index fc358867..4b353539 100644 --- a/CHANGES +++ b/CHANGES @@ -40,11 +40,11 @@ Version 2.1 * Lovelace (PR#456) * Algol and Algol-nu (#1090) - - Added formatters: * IRC (PR#458) +- Improved performance of the HTML formatter for long lines (PR#504). 
- Updated autopygmentize script (PR#445) -- cgit v1.2.1 From 2a23ca2d327465b8542471b36edb97249283ce90 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 17 Oct 2015 08:12:03 +0200 Subject: Praat: style fixes --- CHANGES | 1 - pygments/lexers/praat.py | 167 +++++++++++++++++++++++++---------------------- 2 files changed, 88 insertions(+), 80 deletions(-) diff --git a/CHANGES b/CHANGES index 4b353539..6543a515 100644 --- a/CHANGES +++ b/CHANGES @@ -34,7 +34,6 @@ Version 2.1 * Crmsh (PR#440) * Praat (PR#492) - - Added styles: * Lovelace (PR#456) diff --git a/pygments/lexers/praat.py b/pygments/lexers/praat.py index e430ce79..776c38b8 100644 --- a/pygments/lexers/praat.py +++ b/pygments/lexers/praat.py @@ -9,10 +9,9 @@ :license: BSD, see LICENSE for details. """ -import re - -from pygments.lexer import RegexLexer, words, bygroups, default, include -from pygments.token import * +from pygments.lexer import RegexLexer, words, bygroups, include +from pygments.token import Name, Text, Comment, Keyword, String, Punctuation, Number, \ + Operator __all__ = ['PraatLexer'] @@ -29,42 +28,45 @@ class PraatLexer(RegexLexer): filenames = ['*.praat', '*.proc', '*.psc'] keywords = [ - 'if', 'then', 'else', 'elsif', 'elif', 'endif', 'fi', 'for', 'from', 'to', 'endfor', 'endproc', - 'while', 'endwhile', 'repeat', 'until', 'select', 'plus', 'minus', 'demo', 'assert', 'stopwatch', - 'nocheck', 'nowarn', 'noprogress', 'editor', 'endeditor', 'clearinfo' + 'if', 'then', 'else', 'elsif', 'elif', 'endif', 'fi', 'for', 'from', 'to', + 'endfor', 'endproc', 'while', 'endwhile', 'repeat', 'until', 'select', 'plus', + 'minus', 'demo', 'assert', 'stopwatch', 'nocheck', 'nowarn', 'noprogress', + 'editor', 'endeditor', 'clearinfo', ] functions_string = [ 'backslashTrigraphsToUnicode', 'chooseDirectory', 'chooseReadFile', - 'chooseWriteFile', 'date', 'demoKey', 'do', 'environment', 'extractLine', 'extractWord', - 'fixed', 'info', 'left', 'mid', 'percent', 'readFile', 'replace', 'replace_regex', 
'right', - 'selected', 'string', 'unicodeToBackslashTrigraphs', + 'chooseWriteFile', 'date', 'demoKey', 'do', 'environment', 'extractLine', + 'extractWord', 'fixed', 'info', 'left', 'mid', 'percent', 'readFile', 'replace', + 'replace_regex', 'right', 'selected', 'string', 'unicodeToBackslashTrigraphs', ] functions_numeric = [ - 'abs', 'appendFile', 'appendFileLine', 'appendInfo', 'appendInfoLine', 'arccos', 'arccosh', - 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'barkToHertz', 'beginPause', - 'beginSendPraat', 'besselI', 'besselK', 'beta', 'beta2', 'binomialP', 'binomialQ', 'boolean', - 'ceiling', 'chiSquareP', 'chiSquareQ', 'choice', 'comment', 'cos', 'cosh', 'createDirectory', - 'deleteFile', 'demoClicked', 'demoClickedIn', 'demoCommandKeyPressed', + 'abs', 'appendFile', 'appendFileLine', 'appendInfo', 'appendInfoLine', 'arccos', + 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'barkToHertz', + 'beginPause', 'beginSendPraat', 'besselI', 'besselK', 'beta', 'beta2', + 'binomialP', 'binomialQ', 'boolean', 'ceiling', 'chiSquareP', 'chiSquareQ', + 'choice', 'comment', 'cos', 'cosh', 'createDirectory', 'deleteFile', + 'demoClicked', 'demoClickedIn', 'demoCommandKeyPressed', 'demoExtraControlKeyPressed', 'demoInput', 'demoKeyPressed', 'demoOptionKeyPressed', 'demoShiftKeyPressed', 'demoShow', 'demoWaitForInput', - 'demoWindowTitle', 'demoX', 'demoY', 'differenceLimensToPhon', 'do', 'editor', 'endPause', - 'endSendPraat', 'endsWith', 'erb', 'erbToHertz', 'erf', 'erfc', 'exitScript', 'exp', - 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ', 'floor', 'gaussP', 'gaussQ', - 'hertzToBark', 'hertzToErb', 'hertzToMel', 'hertzToSemitones', 'imax', 'imin', - 'incompleteBeta', 'incompleteGammaP', 'index', 'index_regex', 'invBinomialP', - 'invBinomialQ', 'invChiSquareQ', 'invFisherQ', 'invGaussQ', 'invSigmoid', 'invStudentQ', - 'length', 'ln', 'lnBeta', 'lnGamma', 'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', - 'natural', 'number', 
'numberOfColumns', 'numberOfRows', 'numberOfSelected', - 'objectsAreIdentical', 'option', 'optionMenu', 'pauseScript', - 'phonToDifferenceLimens', 'plusObject', 'positive', 'randomBinomial', 'randomGauss', - 'randomInteger', 'randomPoisson', 'randomUniform', 'real', 'readFile', 'removeObject', - 'rindex', 'rindex_regex', 'round', 'runScript', 'runSystem', 'runSystem_nocheck', - 'selectObject', 'selected', 'semitonesToHertz', 'sentencetext', 'sigmoid', 'sin', 'sinc', - 'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP', 'studentQ', 'tan', - 'tanh', 'variableExists', 'word', 'writeFile', 'writeFileLine', 'writeInfo', - 'writeInfoLine', + 'demoWindowTitle', 'demoX', 'demoY', 'differenceLimensToPhon', 'do', 'editor', + 'endPause', 'endSendPraat', 'endsWith', 'erb', 'erbToHertz', 'erf', 'erfc', + 'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ', + 'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel', + 'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index', + 'index_regex', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ', + 'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma', + 'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number', + 'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical', + 'option', 'optionMenu', 'pauseScript', 'phonToDifferenceLimens', 'plusObject', + 'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson', + 'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex', + 'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject', + 'selected', 'semitonesToHertz', 'sentencetext', 'sigmoid', 'sin', 'sinc', + 'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP', + 'studentQ', 'tan', 'tanh', 'variableExists', 'word', 'writeFile', 'writeFileLine', + 'writeInfo', 'writeInfoLine', ] functions_array = [ @@ 
-72,29 +74,32 @@ class PraatLexer(RegexLexer): ] objects = [ - 'Activation', 'AffineTransform', 'AmplitudeTier', 'Art', 'Artword', 'Autosegment', - 'BarkFilter', 'BarkSpectrogram', 'CCA', 'Categories', 'Cepstrogram', 'Cepstrum', - 'Cepstrumc', 'ChebyshevSeries', 'ClassificationTable', 'Cochleagram', 'Collection', - 'ComplexSpectrogram', 'Configuration', 'Confusion', 'ContingencyTable', 'Corpus', - 'Correlation', 'Covariance', 'CrossCorrelationTable', 'CrossCorrelationTables', 'DTW', - 'DataModeler', 'Diagonalizer', 'Discriminant', 'Dissimilarity', 'Distance', - 'Distributions', 'DurationTier', 'EEG', 'ERP', 'ERPTier', 'EditCostsTable', - 'EditDistanceTable', 'Eigen', 'Excitation', 'Excitations', 'ExperimentMFC', 'FFNet', - 'FeatureWeights', 'FileInMemory', 'FilesInMemory', 'Formant', 'FormantFilter', - 'FormantGrid', 'FormantModeler', 'FormantPoint', 'FormantTier', 'GaussianMixture', 'HMM', + 'Activation', 'AffineTransform', 'AmplitudeTier', 'Art', 'Artword', + 'Autosegment', 'BarkFilter', 'BarkSpectrogram', 'CCA', 'Categories', + 'Cepstrogram', 'Cepstrum', 'Cepstrumc', 'ChebyshevSeries', 'ClassificationTable', + 'Cochleagram', 'Collection', 'ComplexSpectrogram', 'Configuration', 'Confusion', + 'ContingencyTable', 'Corpus', 'Correlation', 'Covariance', + 'CrossCorrelationTable', 'CrossCorrelationTables', 'DTW', 'DataModeler', + 'Diagonalizer', 'Discriminant', 'Dissimilarity', 'Distance', 'Distributions', + 'DurationTier', 'EEG', 'ERP', 'ERPTier', 'EditCostsTable', 'EditDistanceTable', + 'Eigen', 'Excitation', 'Excitations', 'ExperimentMFC', 'FFNet', 'FeatureWeights', + 'FileInMemory', 'FilesInMemory', 'Formant', 'FormantFilter', 'FormantGrid', + 'FormantModeler', 'FormantPoint', 'FormantTier', 'GaussianMixture', 'HMM', 'HMM_Observation', 'HMM_ObservationSequence', 'HMM_State', 'HMM_StateSequence', - 'Harmonicity', 'ISpline', 'Index', 'Intensity', 'IntensityTier', 'IntervalTier', 'KNN', - 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries', 
'LinearRegression', - 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline', 'ManPages', 'Manipulation', - 'Matrix', 'MelFilter', 'MelSpectrogram', 'MixingMatrix', 'Movie', 'Network', 'OTGrammar', - 'OTHistory', 'OTMulti', 'PCA', 'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', - 'Photo', 'Pitch', 'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial', - 'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier', 'ResultsMFC', - 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct', 'Similarity', 'SimpleString', - 'SortedSetOfString', 'Sound', 'Speaker', 'Spectrogram', 'Spectrum', 'SpectrumTier', - 'SpeechSynthesizer', 'SpellingChecker', 'Strings', 'StringsIndex', 'Table', - 'TableOfReal', 'TextGrid', 'TextInterval', 'TextPoint', 'TextTier', 'Tier', 'Transition', - 'VocalTract', 'VocalTractTier', 'Weight', 'WordList', + 'Harmonicity', 'ISpline', 'Index', 'Intensity', 'IntensityTier', 'IntervalTier', + 'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries', + 'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline', + 'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram', + 'MixingMatrix', 'Movie', 'Network', 'OTGrammar', 'OTHistory', 'OTMulti', 'PCA', + 'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo', 'Pitch', + 'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial', + 'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier', + 'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct', + 'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker', + 'Spectrogram', 'Spectrum', 'SpectrumTier', 'SpeechSynthesizer', 'SpellingChecker', + 'Strings', 'StringsIndex', 'Table', 'TableOfReal', 'TextGrid', 'TextInterval', + 'TextPoint', 'TextTier', 'Tier', 'Transition', 'VocalTract', 'VocalTractTier', + 'Weight', 'WordList', ] variables_numeric = [ @@ -123,10 +128,11 @@ class 
PraatLexer(RegexLexer): (words(keywords, suffix=r'\b'), Keyword), (r'(\bform\b)(\s+)([^\n]+)', - bygroups(Keyword, Text, String), 'old_form'), + bygroups(Keyword, Text, String), 'old_form'), - (r'(print(?:line|tab)?|echo|exit|asserterror|pause|send(?:praat|socket)|include|execute|system(?:_nocheck)?)(\s+)', - bygroups(Keyword, Text), 'string_unquoted'), + (r'(print(?:line|tab)?|echo|exit|asserterror|pause|send(?:praat|socket)|' + r'include|execute|system(?:_nocheck)?)(\s+)', + bygroups(Keyword, Text), 'string_unquoted'), (r'(goto|label)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)), @@ -150,20 +156,20 @@ class PraatLexer(RegexLexer): 'procedure_call': [ (r'\s+', Text), (r'([\w.]+)(:|\s*\()', - bygroups(Name.Function, Text), '#pop'), + bygroups(Name.Function, Text), '#pop'), (r'([\w.]+)', Name.Function, ('#pop', 'old_arguments')), ], 'procedure_definition': [ (r'\s', Text), (r'([\w.]+)(\s*?[(:])', - bygroups(Name.Function, Text), '#pop'), + bygroups(Name.Function, Text), '#pop'), (r'([\w.]+)([^\n]*)', - bygroups(Name.Function, Text), '#pop'), + bygroups(Name.Function, Text), '#pop'), ], 'function_call': [ - (words(functions_string , suffix=r'\$(?=\s*[:(])'), Name.Function, 'function'), - (words(functions_array , suffix=r'#(?=\s*[:(])'), Name.Function, 'function'), - (words(functions_numeric, suffix=r'(?=\s*[:(])'), Name.Function, 'function'), + (words(functions_string, suffix=r'\$(?=\s*[:(])'), Name.Function, 'function'), + (words(functions_array, suffix=r'#(?=\s*[:(])'), Name.Function, 'function'), + (words(functions_numeric, suffix=r'(?=\s*[:(])'), Name.Function, 'function'), ], 'function': [ (r'\s+', Text), @@ -202,9 +208,9 @@ class PraatLexer(RegexLexer): 'object_attributes': [ (r'\.?(n(col|row)|[xy]min|[xy]max|[nd][xy])\b', Name.Builtin, '#pop'), (r'(\.?(?:col|row)\$)(\[)', - bygroups(Name.Builtin, Text), 'variable_name'), + bygroups(Name.Builtin, Text), 'variable_name'), (r'(\$?)(\[)', - bygroups(Name.Builtin, Text), ('#pop', 'comma_list')), + 
bygroups(Name.Builtin, Text), ('#pop', 'comma_list')), ], 'variable_name': [ include('operator'), @@ -214,14 +220,15 @@ class PraatLexer(RegexLexer): (words(variables_numeric, suffix=r'\b'), Name.Variable.Global), (r'\bObject_\w+', Name.Builtin, 'object_attributes'), - (words(objects, prefix=r'\b', suffix=r'_\w+'), Name.Builtin, 'object_attributes'), + (words(objects, prefix=r'\b', suffix=r'_\w+'), + Name.Builtin, 'object_attributes'), (r"\b(Object_)(')", - bygroups(Name.Builtin, String.Interpol), - ('object_attributes', 'string_interpolated')), + bygroups(Name.Builtin, String.Interpol), + ('object_attributes', 'string_interpolated')), (words(objects, prefix=r'\b', suffix=r"(_)(')"), - bygroups(Name.Builtin, Name.Builtin, String.Interpol), - ('object_attributes', 'string_interpolated')), + bygroups(Name.Builtin, Name.Builtin, String.Interpol), + ('object_attributes', 'string_interpolated')), (r'\.?_?[a-z][a-zA-Z0-9_.]*(\$|#)?', Text), (r'[\[\]]', Punctuation, 'comma_list'), @@ -232,7 +239,8 @@ class PraatLexer(RegexLexer): (r'\b(and|or|not|div|mod)\b', Operator.Word), ], 'string_interpolated': [ - (r'\.?[_a-z][a-zA-Z0-9_.]*[\$#]?(?:\[[a-zA-Z0-9,]+\])?(:[0-9]+)?', String.Interpol), + (r'\.?[_a-z][a-zA-Z0-9_.]*[\$#]?(?:\[[a-zA-Z0-9,]+\])?(:[0-9]+)?', + String.Interpol), (r"'", String.Interpol, '#pop'), ], 'string_unquoted': [ @@ -256,30 +264,31 @@ class PraatLexer(RegexLexer): (r'\s+', Text), (r'(optionmenu|choice)([ \t]+\S+:[ \t]+)', - bygroups(Keyword, Text), 'number'), + bygroups(Keyword, Text), 'number'), (r'(option|button)([ \t]+)', - bygroups(Keyword, Text), 'number'), + bygroups(Keyword, Text), 'number'), (r'(option|button)([ \t]+)', - bygroups(Keyword, Text), 'string_unquoted'), + bygroups(Keyword, Text), 'string_unquoted'), (r'(sentence|text)([ \t]+\S+)', - bygroups(Keyword, Text), 'string_unquoted'), + bygroups(Keyword, Text), 'string_unquoted'), (r'(word)([ \t]+\S+[ \t]*)(\S+)?([ \t]+.*)?', - bygroups(Keyword, Text, String, Text)), + bygroups(Keyword, 
Text, String, Text)), (r'(boolean)(\s+\S+\s*)(0|1|"?(?:yes|no)"?)', - bygroups(Keyword, Text, Name.Variable)), + bygroups(Keyword, Text, Name.Variable)), # Ideally processing of the number would happend in the 'number' # but that doesn't seem to work - (r'(real|natural|positive|integer)([ \t]+\S+[ \t]*)([+-]?)(\d+(?:\.\d*)?(?:[eE][-+]?\d+)?%?)', - bygroups(Keyword, Text, Operator, Number)), + (r'(real|natural|positive|integer)([ \t]+\S+[ \t]*)([+-]?)(\d+(?:\.\d*)?' + r'(?:[eE][-+]?\d+)?%?)', + bygroups(Keyword, Text, Operator, Number)), (r'(comment)(\s+)', - bygroups(Keyword, Text), 'string_unquoted'), + bygroups(Keyword, Text), 'string_unquoted'), (r'\bendform\b', Keyword, '#pop'), ] -- cgit v1.2.1 From 6d788f6655e464b49fab41e5ebb1ea71f6feacfa Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 17 Oct 2015 08:13:41 +0200 Subject: Crmsh/dsls: style fixes --- pygments/lexers/dsls.py | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py index 01aad058..c0671130 100644 --- a/pygments/lexers/dsls.py +++ b/pygments/lexers/dsls.py @@ -472,19 +472,22 @@ class PanLexer(RegexLexer): ], 'basic': [ (words(( - 'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final', 'prefix', - 'unique', 'object', 'foreach', 'include', 'template', 'function', 'variable', - 'structure', 'extensible', 'declaration'), prefix=r'\b', suffix=r'\s*\b'), + 'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final', + 'prefix', 'unique', 'object', 'foreach', 'include', 'template', + 'function', 'variable', 'structure', 'extensible', 'declaration'), + prefix=r'\b', suffix=r'\s*\b'), Keyword), (words(( - 'file_contents', 'format', 'index', 'length', 'match', 'matches', 'replace', - 'splice', 'split', 'substr', 'to_lowercase', 'to_uppercase', 'debug', 'error', - 'traceback', 'deprecated', 'base64_decode', 'base64_encode', 'digest', 'escape', - 'unescape', 'append', 'create', 
'first', 'nlist', 'key', 'list', 'merge', 'next', - 'prepend', 'is_boolean', 'is_defined', 'is_double', 'is_list', 'is_long', - 'is_nlist', 'is_null', 'is_number', 'is_property', 'is_resource', 'is_string', - 'to_boolean', 'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists', - 'path_exists', 'if_exists', 'return', 'value'), prefix=r'\b', suffix=r'\s*\b'), + 'file_contents', 'format', 'index', 'length', 'match', 'matches', + 'replace', 'splice', 'split', 'substr', 'to_lowercase', 'to_uppercase', + 'debug', 'error', 'traceback', 'deprecated', 'base64_decode', + 'base64_encode', 'digest', 'escape', 'unescape', 'append', 'create', + 'first', 'nlist', 'key', 'list', 'merge', 'next', 'prepend', 'is_boolean', + 'is_defined', 'is_double', 'is_list', 'is_long', 'is_nlist', 'is_null', + 'is_number', 'is_property', 'is_resource', 'is_string', 'to_boolean', + 'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists', + 'path_exists', 'if_exists', 'return', 'value'), + prefix=r'\b', suffix=r'\s*\b'), Name.Builtin), (r'#.*', Comment), (r'\\[\w\W]', String.Escape), @@ -543,8 +546,8 @@ class CrmshLexer(RegexLexer): acl_mod = (r'(?:tag|ref|reference|attribute|type|xpath)') bin_ops = (r'(?:lt|gt|lte|gte|eq|ne)') val_qual = (r'(?:string|version|number)') - rsc_role_action=(r'(?:Master|Started|Slave|Stopped|' - r'start|promote|demote|stop)') + rsc_role_action = (r'(?:Master|Started|Slave|Stopped|' + r'start|promote|demote|stop)') tokens = { 'root': [ @@ -564,8 +567,7 @@ class CrmshLexer(RegexLexer): (sub, Keyword), (acl, Keyword), # binary operators - (r'(?:%s:)?(%s)(?![\w#$-])' % (val_qual,bin_ops), - Operator.Word), + (r'(?:%s:)?(%s)(?![\w#$-])' % (val_qual, bin_ops), Operator.Word), # other operators (bin_rel, Operator.Word), (un_ops, Operator.Word), @@ -574,11 +576,11 @@ class CrmshLexer(RegexLexer): (r'#[a-z]+(?![\w#$-])', Name.Builtin), # acl_mod:blah (r'(%s)(:)("(?:""|[^"])*"|\S+)' % acl_mod, - bygroups(Keyword, Punctuation, Name)), + bygroups(Keyword, 
Punctuation, Name)), # rsc_id[:(role|action)] # NB: this matches all other identifiers (r'([\w#$-]+)(?:(:)(%s))?(?![\w#$-])' % rsc_role_action, - bygroups(Name, Punctuation, Operator.Word)), + bygroups(Name, Punctuation, Operator.Word)), # punctuation (r'(\\(?=\n)|[[\](){}/:@])', Punctuation), (r'\s+|\n', Whitespace), -- cgit v1.2.1 From 9d8e900209fcabc82b56b9a2f9cefc9cb4f64e9c Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 17 Oct 2015 08:15:14 +0200 Subject: Elm/J: style, versionadded --- pygments/lexers/elm.py | 8 ++++---- pygments/lexers/j.py | 15 +++++++++------ 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/pygments/lexers/elm.py b/pygments/lexers/elm.py index a5143480..b8206c6d 100644 --- a/pygments/lexers/elm.py +++ b/pygments/lexers/elm.py @@ -7,16 +7,17 @@ """ -import re - from pygments.lexer import RegexLexer, words, include -from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, String, Text +from pygments.token import Comment, Keyword, Name, Number, Punctuation, String, Text __all__ = ['ElmLexer'] + class ElmLexer(RegexLexer): """ For `Elm `_ source code. + + .. versionadded:: 2.1 """ name = 'Elm' @@ -116,4 +117,3 @@ class ElmLexer(RegexLexer): (r'.*\n', Name.Entity), ], } - diff --git a/pygments/lexers/j.py b/pygments/lexers/j.py index 5eb85332..20176d0d 100644 --- a/pygments/lexers/j.py +++ b/pygments/lexers/j.py @@ -7,16 +7,18 @@ """ -import re - from pygments.lexer import RegexLexer, words, include -from pygments.token import Comment, Error, Keyword, Name, Number, Operator, Punctuation, String, Text +from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, \ + String, Text __all__ = ['JLexer'] + class JLexer(RegexLexer): """ For `J `_ source code. + + .. 
versionadded:: 2.1 """ name = 'J' @@ -44,7 +46,8 @@ class JLexer(RegexLexer): # Definitions (r'0\s+:\s*0|noun\s+define\s*$', Name.Entity, 'nounDefinition'), - (r'\b(([1-4]|13)\s+:\s*0)|((adverb|conjunction|dyad|monad|verb)\s+define)\b', Name.Function, 'explicitDefinition'), + (r'\b(([1-4]|13)\s+:\s*0)|((adverb|conjunction|dyad|monad|verb)\s+define)\b', + Name.Function, 'explicitDefinition'), # Flow Control (words(('for_', 'goto_', 'label_'), suffix=validName+'\.'), Name.Label), @@ -128,7 +131,7 @@ class JLexer(RegexLexer): 'parentheses': [ (r'\)', Punctuation, '#pop'), - #include('nounDefinition'), + # include('nounDefinition'), include('explicitDefinition'), include('root'), ], @@ -138,4 +141,4 @@ class JLexer(RegexLexer): (r"''", String), (r"'", String, '#pop'), ], - } \ No newline at end of file + } -- cgit v1.2.1 From 435e5d1d93b3ffa486ff18da36ae615ab385ca73 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 17 Oct 2015 08:17:34 +0200 Subject: Shell/supercollider: style fixes --- pygments/lexers/shell.py | 47 ++++++++++++++++++++-------------------- pygments/lexers/supercollider.py | 3 ++- 2 files changed, 26 insertions(+), 24 deletions(-) diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py index 041bdbe1..dc23d018 100644 --- a/pygments/lexers/shell.py +++ b/pygments/lexers/shell.py @@ -12,9 +12,9 @@ import re from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, \ - include, default, this, using, words + include, default, this, using, words from pygments.token import Punctuation, \ - Text, Comment, Operator, Keyword, Name, String, Number, Generic + Text, Comment, Operator, Keyword, Name, String, Number, Generic from pygments.util import shebang_matches @@ -22,7 +22,7 @@ __all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer', 'MSDOSSessionLexer', 'PowerShellLexer', 'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer'] -line_re = re.compile('.*?\n') +line_re = re.compile('.*?\n') class 
BashLexer(RegexLexer): @@ -49,8 +49,8 @@ class BashLexer(RegexLexer): (r'\$\(\(', Keyword, 'math'), (r'\$\(', Keyword, 'paren'), (r'\$\{#?', String.Interpol, 'curly'), - (r'\$[a-fA-F_][a-fA-F0-9_]*', Name.Variable), # user variable - (r'\$(?:\d+|[#$?!_*@-])', Name.Variable), # builtin + (r'\$[a-fA-F_][a-fA-F0-9_]*', Name.Variable), # user variable + (r'\$(?:\d+|[#$?!_*@-])', Name.Variable), # builtin (r'\$', Text), ], 'basic': [ @@ -578,6 +578,7 @@ class TcshLexer(RegexLexer): ], } + class TcshSessionLexer(ShellSessionBaseLexer): """ Lexer for Tcsh sessions. @@ -603,7 +604,7 @@ class PowerShellLexer(RegexLexer): """ name = 'PowerShell' aliases = ['powershell', 'posh', 'ps1', 'psm1'] - filenames = ['*.ps1','*.psm1'] + filenames = ['*.ps1', '*.psm1'] mimetypes = ['text/x-powershell'] flags = re.DOTALL | re.IGNORECASE | re.MULTILINE @@ -692,6 +693,23 @@ class PowerShellLexer(RegexLexer): } +class PowerShellSessionLexer(ShellSessionBaseLexer): + """ + Lexer for simplistic Windows PowerShell sessions. + + .. versionadded:: 2.1 + """ + + name = 'PowerShell Session' + aliases = ['ps1con'] + filenames = [] + mimetypes = [] + + _innerLexerCls = PowerShellLexer + _ps1rgx = r'^(PS [^>]+> )(.*\n?)' + _ps2 = '>> ' + + class FishShellLexer(RegexLexer): """ Lexer for Fish shell scripts. @@ -763,20 +781,3 @@ class FishShellLexer(RegexLexer): include('root'), ], } - - -class PowerShellSessionLexer(ShellSessionBaseLexer): - """ - Lexer for simplistic Windows PowerShell sessions. - - .. 
versionadded:: 2.1 - """ - - name = 'PowerShell Session' - aliases = ['ps1con'] - filenames = [] - mimetypes = [] - - _innerLexerCls = PowerShellLexer - _ps1rgx = r'^(PS [^>]+> )(.*\n?)' - _ps2 = '>> ' diff --git a/pygments/lexers/supercollider.py b/pygments/lexers/supercollider.py index 70417f59..d3e4c460 100644 --- a/pygments/lexers/supercollider.py +++ b/pygments/lexers/supercollider.py @@ -13,10 +13,11 @@ import re from pygments.lexer import RegexLexer, include, words from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Other + Number, Punctuation __all__ = ['SuperColliderLexer'] + class SuperColliderLexer(RegexLexer): """ For `SuperCollider `_ source code. -- cgit v1.2.1 From c046c010103f178df2183bd9ea0508f54431dc1a Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 17 Oct 2015 08:20:27 +0200 Subject: More style fixes. --- pygments/lexers/lisp.py | 33 +++++++++++++++++---------------- pygments/lexers/oberon.py | 16 ++++++++-------- pygments/lexers/pascal.py | 13 ++++++++----- 3 files changed, 33 insertions(+), 29 deletions(-) diff --git a/pygments/lexers/lisp.py b/pygments/lexers/lisp.py index 39741a22..bd59d2b6 100644 --- a/pygments/lexers/lisp.py +++ b/pygments/lexers/lisp.py @@ -269,8 +269,8 @@ class CommonLispLexer(RegexLexer): # decimal numbers (r'[-+]?\d+\.?' + terminated, Number.Integer), (r'[-+]?\d+/\d+' + terminated, Number), - (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' - + terminated, Number.Float), + (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' + + terminated, Number.Float), # sharpsign strings and characters (r"#\\." 
+ terminated, String.Char), @@ -1550,7 +1550,8 @@ class EmacsLispLexer(RegexLexer): 'with-syntax-table', 'with-temp-buffer', 'with-temp-file', 'with-temp-message', 'with-timeout', 'with-tramp-connection-property', 'with-tramp-file-property', 'with-tramp-progress-reporter', - 'with-wrapper-hook', 'load-time-value', 'locally', 'macrolet', 'progv', 'return-from' + 'with-wrapper-hook', 'load-time-value', 'locally', 'macrolet', 'progv', + 'return-from', )) special_forms = set(( @@ -2066,8 +2067,8 @@ class EmacsLispLexer(RegexLexer): # decimal numbers (r'[-+]?\d+\.?' + terminated, Number.Integer), (r'[-+]?\d+/\d+' + terminated, Number), - (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' - + terminated, Number.Float), + (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' + + terminated, Number.Float), # vectors (r'\[|\]', Punctuation), @@ -2189,9 +2190,9 @@ class ShenLexer(RegexLexer): (r'(?s).', String), ], - 'root' : [ - (r'(?s)\\\*.*?\*\\', Comment.Multiline), # \* ... *\ - (r'\\\\.*', Comment.Single), # \\ ... + 'root': [ + (r'(?s)\\\*.*?\*\\', Comment.Multiline), # \* ... *\ + (r'\\\\.*', Comment.Single), # \\ ... 
(r'\s+', Text), (r'_{5,}', Punctuation), (r'={5,}', Punctuation), @@ -2223,8 +2224,8 @@ class ShenLexer(RegexLexer): if self._relevant(token): if opening_paren and token == Keyword and value in self.DECLARATIONS: declaration = value - for index, token, value \ - in self._process_declaration(declaration, tokens): + for index, token, value in \ + self._process_declaration(declaration, tokens): yield index, token, value opening_paren = value == '(' and token == Punctuation @@ -2243,7 +2244,7 @@ class ShenLexer(RegexLexer): if self._relevant(token): break yield index, token, value - + if declaration == 'datatype': prev_was_colon = False token = Keyword.Type if token == Literal else token @@ -2259,7 +2260,7 @@ class ShenLexer(RegexLexer): yield index, token, value elif declaration == 'define': token = Name.Function if token == Literal else token - yield index, token , value + yield index, token, value for index, token, value in tokens: if self._relevant(token): break @@ -2272,13 +2273,13 @@ class ShenLexer(RegexLexer): yield index, token, value else: token = Name.Function if token == Literal else token - yield index, token , value + yield index, token, value raise StopIteration def _process_signature(self, tokens): for index, token, value in tokens: - if token == Literal and value == '}': + if token == Literal and value == '}': yield index, Punctuation, value raise StopIteration elif token in (Literal, Name.Function): @@ -2315,7 +2316,7 @@ class CPSALexer(SchemeLexer): valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~|-]+' tokens = { - 'root' : [ + 'root': [ # the comments - always starting with semicolon # and going to the end of the line (r';.*$', Comment.Single), @@ -2328,7 +2329,7 @@ class CPSALexer(SchemeLexer): (r'-?\d+', Number.Integer), # support for uncommon kinds of numbers - # have to figure out what the characters mean - #(r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number), + # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number), # strings, symbols and characters (r'"(\\\\|\\"|[^"])*"', 
String), diff --git a/pygments/lexers/oberon.py b/pygments/lexers/oberon.py index df914358..db18259d 100644 --- a/pygments/lexers/oberon.py +++ b/pygments/lexers/oberon.py @@ -43,8 +43,8 @@ class ComponentPascalLexer(RegexLexer): include('identifiers'), ], 'whitespace': [ - (r'\n+', Text), # blank lines - (r'\s+', Text), # whitespace + (r'\n+', Text), # blank lines + (r'\s+', Text), # whitespace ], 'comments': [ (r'\(\*([^\$].*?)\*\)', Comment.Multiline), @@ -56,13 +56,13 @@ class ComponentPascalLexer(RegexLexer): 'numliterals': [ (r'[0-9A-F]+X\b', Number.Hex), # char code (r'[0-9A-F]+[HL]\b', Number.Hex), # hexadecimal number - (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number - (r'[0-9]+\.[0-9]+', Number.Float), # real number - (r'[0-9]+', Number.Integer), # decimal whole number + (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number + (r'[0-9]+\.[0-9]+', Number.Float), # real number + (r'[0-9]+', Number.Integer), # decimal whole number ], 'strings': [ - (r"'[^\n']*'", String), # single quoted string - (r'"[^\n"]*"', String), # double quoted string + (r"'[^\n']*'", String), # single quoted string + (r'"[^\n"]*"', String), # double quoted string ], 'operators': [ # Arithmetic Operators @@ -97,7 +97,7 @@ class ComponentPascalLexer(RegexLexer): 'HALT', 'IF', 'IMPORT', 'IN', 'INC', 'INCL', 'IS', 'LEN', 'LIMITED', 'LONG', 'LOOP', 'MAX', 'MIN', 'MOD', 'MODULE', 'NEW', 'ODD', 'OF', 'OR', 'ORD', 'OUT', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN', - 'SHORT','SHORTCHAR', 'SHORTINT', 'SIZE', 'THEN', 'TYPE', 'TO', 'UNTIL', + 'SHORT', 'SHORTCHAR', 'SHORTINT', 'SIZE', 'THEN', 'TYPE', 'TO', 'UNTIL', 'VAR', 'WHILE', 'WITH' ), suffix=r'\b'), Keyword.Reserved), (r'(TRUE|FALSE|NIL|INF)\b', Keyword.Constant), diff --git a/pygments/lexers/pascal.py b/pygments/lexers/pascal.py index d3ce6a3a..ce991a77 100644 --- a/pygments/lexers/pascal.py +++ b/pygments/lexers/pascal.py @@ -18,6 +18,7 @@ from pygments.token import Text, Comment, Operator, Keyword, Name, 
String, \ Number, Punctuation, Error from pygments.scanner import Scanner +# compatibility import from pygments.lexers.modula2 import Modula2Lexer __all__ = ['DelphiLexer', 'AdaLexer'] @@ -536,11 +537,13 @@ class AdaLexer(RegexLexer): Comment.Preproc)), (r'(true|false|null)\b', Keyword.Constant), (words(( - 'Address', 'Byte', 'Boolean', 'Character', 'Controlled', 'Count', 'Cursor', - 'Duration', 'File_Mode', 'File_Type', 'Float', 'Generator', 'Integer', 'Long_Float', - 'Long_Integer', 'Long_Long_Float', 'Long_Long_Integer', 'Natural', 'Positive', - 'Reference_Type', 'Short_Float', 'Short_Integer', 'Short_Short_Float', - 'Short_Short_Integer', 'String', 'Wide_Character', 'Wide_String'), suffix=r'\b'), + 'Address', 'Byte', 'Boolean', 'Character', 'Controlled', 'Count', + 'Cursor', 'Duration', 'File_Mode', 'File_Type', 'Float', 'Generator', + 'Integer', 'Long_Float', 'Long_Integer', 'Long_Long_Float', + 'Long_Long_Integer', 'Natural', 'Positive', 'Reference_Type', + 'Short_Float', 'Short_Integer', 'Short_Short_Float', + 'Short_Short_Integer', 'String', 'Wide_Character', 'Wide_String'), + suffix=r'\b'), Keyword.Type), (r'(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b', Operator.Word), (r'generic|private', Keyword.Declaration), -- cgit v1.2.1 From 3672fed6a4d21b39075e2764295534c0be2e5ef5 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 17 Oct 2015 08:22:31 +0200 Subject: Move TAP to testing module, roboconf style fixes. 
--- pygments/lexers/_mapping.py | 2 +- pygments/lexers/roboconf.py | 8 ++-- pygments/lexers/tap.py | 91 --------------------------------------------- pygments/lexers/testing.py | 86 +++++++++++++++++++++++++++++++++++++++--- 4 files changed, 86 insertions(+), 101 deletions(-) delete mode 100644 pygments/lexers/tap.py diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index a25e3af2..03a3c96e 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -363,7 +363,7 @@ LEXERS = { 'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)), 'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), 'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), - 'TAPLexer': ('pygments.lexers.tap', 'TAP', ('tap',), ('*.tap',), ()), + 'TAPLexer': ('pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()), 'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()), 'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')), 'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)), diff --git a/pygments/lexers/roboconf.py b/pygments/lexers/roboconf.py index ec525c73..59755a68 100644 --- a/pygments/lexers/roboconf.py +++ b/pygments/lexers/roboconf.py @@ -9,7 +9,7 @@ :license: BSD, see LICENSE for details. """ -from pygments.lexer import RegexLexer, words, bygroups, re, include +from pygments.lexer import RegexLexer, words, re from pygments.token import Text, Operator, Keyword, Name, Comment __all__ = ['RoboconfGraphLexer', 'RoboconfInstancesLexer'] @@ -19,7 +19,7 @@ class RoboconfGraphLexer(RegexLexer): """ Lexer for `Roboconf `_ graph files. - .. versadded:: 2.1 + .. 
versionadded:: 2.1 """ name = 'Roboconf Graph' aliases = ['roboconf-graph'] @@ -32,7 +32,7 @@ class RoboconfGraphLexer(RegexLexer): (r'\s+', Text), # There is one operator - (r'=',Operator), + (r'=', Operator), # Keywords (words(('facet', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword), @@ -54,7 +54,7 @@ class RoboconfInstancesLexer(RegexLexer): """ Lexer for `Roboconf `_ instances files. - .. versadded:: 2.1 + .. versionadded:: 2.1 """ name = 'Roboconf Instances' aliases = ['roboconf-instances'] diff --git a/pygments/lexers/tap.py b/pygments/lexers/tap.py deleted file mode 100644 index 777dfdf0..00000000 --- a/pygments/lexers/tap.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -""" - pygments.lexers.tap - ~~~~~~~~~~~~~~~~~~~ - - Lexer for the Test Anything Protocol (TAP). - - :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -from pygments.lexer import bygroups, RegexLexer -from pygments.token import Comment, Generic, Keyword, Name, Number, Text - -__all__ = ['TAPLexer'] - - -class TAPLexer(RegexLexer): - """ - For Test Anything Protocol (TAP) output. - - .. versionadded:: 2.1 - """ - name = 'TAP' - aliases = ['tap'] - filenames = ['*.tap'] - - tokens = { - 'root': [ - # A TAP version may be specified. - (r'^TAP version \d+\n', Name.Namespace), - - # Specify a plan with a plan line. - (r'^1..\d+', Keyword.Declaration, 'plan'), - - # A test failure - (r'^(not ok)([^\S\n]*)(\d*)', - bygroups(Generic.Error, Text, Number.Integer), 'test'), - - # A test success - (r'^(ok)([^\S\n]*)(\d*)', - bygroups(Keyword.Reserved, Text, Number.Integer), 'test'), - - # Diagnostics start with a hash. - (r'^#.*\n', Comment), - - # TAP's version of an abort statement. - (r'^Bail out!.*\n', Generic.Error), - - # TAP ignores any unrecognized lines. - (r'^.*\n', Text), - ], - 'plan': [ - # Consume whitespace (but not newline). - (r'[^\S\n]+', Text), - - # A plan may have a directive with it. 
- (r'#', Comment, 'directive'), - - # Or it could just end. - (r'\n', Comment, '#pop'), - - # Anything else is wrong. - (r'.*\n', Generic.Error, '#pop'), - ], - 'test': [ - # Consume whitespace (but not newline). - (r'[^\S\n]+', Text), - - # A test may have a directive with it. - (r'#', Comment, 'directive'), - - (r'\S+', Text), - - (r'\n', Text, '#pop'), - ], - 'directive': [ - # Consume whitespace (but not newline). - (r'[^\S\n]+', Comment), - - # Extract todo items. - (r'(?i)\bTODO\b', Comment.Preproc), - - # Extract skip items. - (r'(?i)\bSKIP\S*', Comment.Preproc), - - (r'\S+', Comment), - - (r'\n', Comment, '#pop:2'), - ], - } diff --git a/pygments/lexers/testing.py b/pygments/lexers/testing.py index 4a91c5b1..0bdebe74 100644 --- a/pygments/lexers/testing.py +++ b/pygments/lexers/testing.py @@ -10,9 +10,9 @@ """ from pygments.lexer import RegexLexer, include, bygroups -from pygments.token import Comment, Keyword, Name, String +from pygments.token import Comment, Keyword, Name, String, Number, Generic, Text -__all__ = ['GherkinLexer'] +__all__ = ['GherkinLexer', 'TAPLexer'] class GherkinLexer(RegexLexer): @@ -26,10 +26,10 @@ class GherkinLexer(RegexLexer): filenames = ['*.feature'] mimetypes = ['text/x-gherkin'] - feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$' + feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH 
HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$' feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$' - examples_keywords = 
u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$' - step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )' + examples_keywords = 
u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$' + step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )' tokens = { 'comments': [ @@ -129,3 +129,79 @@ class GherkinLexer(RegexLexer): (r'(\s|.)', Name.Function), ] } + + +class TAPLexer(RegexLexer): + """ + For Test Anything 
Protocol (TAP) output. + + .. versionadded:: 2.1 + """ + name = 'TAP' + aliases = ['tap'] + filenames = ['*.tap'] + + tokens = { + 'root': [ + # A TAP version may be specified. + (r'^TAP version \d+\n', Name.Namespace), + + # Specify a plan with a plan line. + (r'^1..\d+', Keyword.Declaration, 'plan'), + + # A test failure + (r'^(not ok)([^\S\n]*)(\d*)', + bygroups(Generic.Error, Text, Number.Integer), 'test'), + + # A test success + (r'^(ok)([^\S\n]*)(\d*)', + bygroups(Keyword.Reserved, Text, Number.Integer), 'test'), + + # Diagnostics start with a hash. + (r'^#.*\n', Comment), + + # TAP's version of an abort statement. + (r'^Bail out!.*\n', Generic.Error), + + # TAP ignores any unrecognized lines. + (r'^.*\n', Text), + ], + 'plan': [ + # Consume whitespace (but not newline). + (r'[^\S\n]+', Text), + + # A plan may have a directive with it. + (r'#', Comment, 'directive'), + + # Or it could just end. + (r'\n', Comment, '#pop'), + + # Anything else is wrong. + (r'.*\n', Generic.Error, '#pop'), + ], + 'test': [ + # Consume whitespace (but not newline). + (r'[^\S\n]+', Text), + + # A test may have a directive with it. + (r'#', Comment, 'directive'), + + (r'\S+', Text), + + (r'\n', Text, '#pop'), + ], + 'directive': [ + # Consume whitespace (but not newline). + (r'[^\S\n]+', Comment), + + # Extract todo items. + (r'(?i)\bTODO\b', Comment.Preproc), + + # Extract skip items. + (r'(?i)\bSKIP\S*', Comment.Preproc), + + (r'\S+', Comment), + + (r'\n', Comment, '#pop:2'), + ], + } -- cgit v1.2.1 From 19e68575b070b842fdcc234213d6bef446c76149 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 17 Oct 2015 08:28:15 +0200 Subject: More style and markup fixes. 
--- pygments/lexers/configs.py | 56 +++++++++++++++++++------------------- pygments/lexers/javascript.py | 24 ++++++++++------- pygments/lexers/parasail.py | 24 ++++++++--------- pygments/lexers/rdf.py | 12 ++++++--- pygments/lexers/scripting.py | 63 ++++++++++++++++++++++++------------------- 5 files changed, 96 insertions(+), 83 deletions(-) diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py index 545c5f72..f5a67bc4 100644 --- a/pygments/lexers/configs.py +++ b/pygments/lexers/configs.py @@ -541,7 +541,7 @@ class DockerLexer(RegexLexer): bygroups(Name.Keyword, Whitespace, Keyword)), (r'^(%s)\b(.*)' % (_keywords,), bygroups(Keyword, String)), (r'#.*', Comment), - (r'RUN', Keyword), # Rest of line falls through + (r'RUN', Keyword), # Rest of line falls through (r'(.*\\\n)*.+', using(BashLexer)), ], } @@ -549,7 +549,7 @@ class DockerLexer(RegexLexer): class TerraformLexer(RegexLexer): """ - Lexer for `terraformi .tf files `_ + Lexer for `terraformi .tf files `_. .. versionadded:: 2.1 """ @@ -560,29 +560,29 @@ class TerraformLexer(RegexLexer): mimetypes = ['application/x-tf', 'application/x-terraform'] tokens = { - 'root': [ - include('string'), - include('punctuation'), - include('curly'), - include('basic'), - include('whitespace'), - (r'[0-9]+', Number), - ], - 'basic': [ - (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type), - (r'\s*/\*', Comment.Multiline, 'comment'), - (r'\s*#.*\n', Comment.Single), - (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)), - (words(('variable', 'resource', 'provider', 'provisioner', 'module'), - prefix=r'\b', suffix=r'\b'), Keyword.Reserved, 'function'), - (words(('ingress', 'egress', 'listener', 'default', 'connection'), - prefix=r'\b', suffix=r'\b'), Keyword.Declaration), - ('\$\{', String.Interpol, 'var_builtin'), - ], - 'function': [ - (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)), - include('punctuation'), - include('curly'), + 'root': [ + include('string'), + 
include('punctuation'), + include('curly'), + include('basic'), + include('whitespace'), + (r'[0-9]+', Number), + ], + 'basic': [ + (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type), + (r'\s*/\*', Comment.Multiline, 'comment'), + (r'\s*#.*\n', Comment.Single), + (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)), + (words(('variable', 'resource', 'provider', 'provisioner', 'module'), + prefix=r'\b', suffix=r'\b'), Keyword.Reserved, 'function'), + (words(('ingress', 'egress', 'listener', 'default', 'connection'), + prefix=r'\b', suffix=r'\b'), Keyword.Declaration), + ('\$\{', String.Interpol, 'var_builtin'), + ], + 'function': [ + (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)), + include('punctuation'), + include('curly'), ], 'var_builtin': [ (r'\$\{', String.Interpol, '#push'), @@ -593,15 +593,15 @@ class TerraformLexer(RegexLexer): (r'\s+', Text), (r'\}', String.Interpol, '#pop'), ], - 'string':[ + 'string': [ (r'(".*")', bygroups(String.Double)), ], - 'punctuation':[ + 'punctuation': [ (r'[\[\]\(\),.]', Punctuation), ], # Keep this seperate from punctuation - we sometimes want to use different # Tokens for { } - 'curly':[ + 'curly': [ (r'\{', Text.Punctuation), (r'\}', Text.Punctuation), ], diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py index c35cd3f6..3982c91e 100644 --- a/pygments/lexers/javascript.py +++ b/pygments/lexers/javascript.py @@ -106,7 +106,7 @@ class JavascriptLexer(RegexLexer): (r'}', String.Interpol, '#pop'), include('root'), ], - #(\\\\|\\`|[^`])*`', String.Backtick), + # (\\\\|\\`|[^`])*`', String.Backtick), } @@ -1222,6 +1222,7 @@ class MaskLexer(RegexLexer): ], } + class EarlGreyLexer(RegexLexer): """ For `Earl-Grey`_ source code. 
@@ -1253,7 +1254,7 @@ class EarlGreyLexer(RegexLexer): Operator.Word), (r'[\*@]?->', Name.Function), (r'[+\-*/~^<>%&|?!@#.]*=', Operator.Word), - (r'\.{2,3}', Operator.Word), # Range Operator + (r'\.{2,3}', Operator.Word), # Range Operator (r'([+*/~^<>&|?!]+)|([#\-](?=\s))|@@+(?=\s)|=+', Operator), (r'(?%&|?!@#.]*\=\s)', + (r'(\.)?([a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)' + r'(?=\s+[+\-*/~^<>%&|?!@#.]*\=\s)', bygroups(Punctuation, Name.Variable)) ], 'errors': [ (words(('Error', 'TypeError', 'ReferenceError'), - prefix=r'(?%&|?!@#.])?[a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)''', Keyword.Control), - (r'([a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)(?!\n)\s+(?=[\'"\d\{\[\(])', Keyword.Control), + (r'([a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)(?!\n)\s+(?=[\'"\d\{\[\(])', + Keyword.Control), (r'''(?x) (?: (?<=[%=])| @@ -1352,7 +1355,7 @@ class EarlGreyLexer(RegexLexer): prefix=r'(?"{}|^`\\\x00-\x20])*>' - BLANK_NODE_LABEL = '_:(?:' + PN_CHARS_U + '|[0-9])(?:(?:' + PN_CHARS + '|\.)*' + PN_CHARS + ')?' + BLANK_NODE_LABEL = '_:(?:' + PN_CHARS_U + '|[0-9])(?:(?:' + PN_CHARS + '|\.)*' + \ + PN_CHARS + ')?' PN_PREFIX = PN_CHARS_BASE + '(?:(?:' + PN_CHARS + '|\.)*' + PN_CHARS + ')?' 
- VARNAME = '(?:' + PN_CHARS_U + '|[0-9])(?:' + PN_CHARS_U + u'|[0-9\u00b7\u0300-\u036f\u203f-\u2040])*' + VARNAME = '(?:' + PN_CHARS_U + '|[0-9])(?:' + PN_CHARS_U + \ + u'|[0-9\u00b7\u0300-\u036f\u203f-\u2040])*' PERCENT = '%' + HEX + HEX @@ -72,7 +74,8 @@ class SparqlLexer(RegexLexer): PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')' PN_LOCAL = ('(?:(?:' + PN_CHARS_U + '|[:0-9])|' + PLX + ')' + - '(?:(?:(?:' + PN_CHARS + '|[.:])|' + PLX + ')*(?:(?:' + PN_CHARS + '|:)|' + PLX + '))?') + '(?:(?:(?:' + PN_CHARS + '|[.:])|' + PLX + ')*(?:(?:' + + PN_CHARS + '|:)|' + PLX + '))?') EXPONENT = r'[eE][+-]?\d+' @@ -247,7 +250,8 @@ class TurtleLexer(RegexLexer): bygroups(Operator, Generic.Emph), '#pop:2'), (r'(\^\^)%(IRIREF)s' % patterns, bygroups(Operator, Generic.Emph), '#pop:2'), - (r'(\^\^)%(PrefixedName)s' % patterns, bygroups(Operator, Generic.Emph, Generic.Emph), '#pop:2'), + (r'(\^\^)%(PrefixedName)s' % patterns, + bygroups(Operator, Generic.Emph, Generic.Emph), '#pop:2'), default('#pop:2'), diff --git a/pygments/lexers/scripting.py b/pygments/lexers/scripting.py index c09c5ba9..4dd9594b 100644 --- a/pygments/lexers/scripting.py +++ b/pygments/lexers/scripting.py @@ -878,25 +878,29 @@ class HybrisLexer(RegexLexer): bygroups(Keyword.Namespace, Text), 'import'), (words(( 'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold', - 'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32', 'sha2', - 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos', 'cosh', 'exp', - 'fabs', 'floor', 'fmod', 'log', 'log10', 'pow', 'sin', 'sinh', 'sqrt', 'tan', 'tanh', - 'isint', 'isfloat', 'ischar', 'isstring', 'isarray', 'ismap', 'isalias', 'typeof', - 'sizeof', 'toint', 'tostring', 'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval', - 'var_names', 'var_values', 'user_functions', 'dyn_functions', 'methods', 'call', - 'call_method', 'mknod', 'mkfifo', 'mount', 'umount2', 'umount', 'ticks', 'usleep', - 'sleep', 'time', 'strtime', 'strdate', 
'dllopen', 'dlllink', 'dllcall', 'dllcall_argv', - 'dllclose', 'env', 'exec', 'fork', 'getpid', 'wait', 'popen', 'pclose', 'exit', 'kill', - 'pthread_create', 'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill', - 'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind', 'listen', - 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect', 'server', 'recv', - 'send', 'close', 'print', 'println', 'printf', 'input', 'readline', 'serial_open', - 'serial_fcntl', 'serial_get_attr', 'serial_get_ispeed', 'serial_get_ospeed', - 'serial_set_attr', 'serial_set_ispeed', 'serial_set_ospeed', 'serial_write', - 'serial_read', 'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell', - 'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir', 'pcre_replace', 'size', - 'pop', 'unmap', 'has', 'keys', 'values', 'length', 'find', 'substr', 'replace', 'split', - 'trim', 'remove', 'contains', 'join'), suffix=r'\b'), + 'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32', + 'sha2', 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos', + 'cosh', 'exp', 'fabs', 'floor', 'fmod', 'log', 'log10', 'pow', 'sin', + 'sinh', 'sqrt', 'tan', 'tanh', 'isint', 'isfloat', 'ischar', 'isstring', + 'isarray', 'ismap', 'isalias', 'typeof', 'sizeof', 'toint', 'tostring', + 'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval', 'var_names', + 'var_values', 'user_functions', 'dyn_functions', 'methods', 'call', + 'call_method', 'mknod', 'mkfifo', 'mount', 'umount2', 'umount', 'ticks', + 'usleep', 'sleep', 'time', 'strtime', 'strdate', 'dllopen', 'dlllink', + 'dllcall', 'dllcall_argv', 'dllclose', 'env', 'exec', 'fork', 'getpid', + 'wait', 'popen', 'pclose', 'exit', 'kill', 'pthread_create', + 'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill', + 'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind', + 'listen', 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect', + 
'server', 'recv', 'send', 'close', 'print', 'println', 'printf', 'input', + 'readline', 'serial_open', 'serial_fcntl', 'serial_get_attr', + 'serial_get_ispeed', 'serial_get_ospeed', 'serial_set_attr', + 'serial_set_ispeed', 'serial_set_ospeed', 'serial_write', 'serial_read', + 'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell', + 'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir', + 'pcre_replace', 'size', 'pop', 'unmap', 'has', 'keys', 'values', + 'length', 'find', 'substr', 'replace', 'split', 'trim', 'remove', + 'contains', 'join'), suffix=r'\b'), Name.Builtin), (words(( 'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process', @@ -996,7 +1000,8 @@ class EasytrieveLexer(RegexLexer): (r'\*.*\n', Comment.Single), (r'\n+', Whitespace), # Macro argument - (r'&' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+\.', Name.Variable, 'after_macro_argument'), + (r'&' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+\.', Name.Variable, + 'after_macro_argument'), # Macro call (r'%' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Variable), (r'(FILE|MACRO|REPORT)(\s+)', @@ -1008,12 +1013,14 @@ class EasytrieveLexer(RegexLexer): (_OPERATORS_PATTERN, Operator), # Procedure declaration (r'(' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+)(\s*)(\.?)(\s*)(PROC)(\s*\n)', - bygroups(Name.Function, Whitespace, Operator, Whitespace, Keyword.Declaration, Whitespace)), + bygroups(Name.Function, Whitespace, Operator, Whitespace, + Keyword.Declaration, Whitespace)), (r'[0-9]+\.[0-9]*', Number.Float), (r'[0-9]+', Number.Integer), (r"'(''|[^'])*'", String), (r'\s+', Whitespace), - (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name) # Everything else just belongs to a name + # Everything else just belongs to a name + (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name) ], 'after_declaration': [ (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Function), @@ -1024,7 +1031,8 @@ class EasytrieveLexer(RegexLexer): (r'\s+', Whitespace, '#pop'), (_OPERATORS_PATTERN, Operator, '#pop'), 
(r"'(''|[^'])*'", String, '#pop'), - (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name) # Everything else just belongs to a name + # Everything else just belongs to a name + (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name) ], } _COMMENT_LINE_REGEX = re.compile(r'^\s*\*') @@ -1155,7 +1163,7 @@ class JclLexer(RegexLexer): r'setup|signoff|xeq|xmit)\b', Keyword, 'option'), ], 'option': [ - #(r'\n', Text, 'root'), + # (r'\n', Text, 'root'), (r'\*', Name.Builtin), (r'[\[\](){}<>;,]', Punctuation), (r'[-+*/=&%]', Operator), @@ -1174,12 +1182,13 @@ class JclLexer(RegexLexer): (r"'", String, '#pop'), ], 'option_comment': [ - #(r'\n', Text, 'root'), + # (r'\n', Text, 'root'), (r'.+', Comment.Single), ] } - _JOB_HEADER_PATTERN = re.compile(r'^//[a-z#$@][a-z0-9#$@]{0,7}\s+job(\s+.*)?$', re.IGNORECASE) + _JOB_HEADER_PATTERN = re.compile(r'^//[a-z#$@][a-z0-9#$@]{0,7}\s+job(\s+.*)?$', + re.IGNORECASE) def analyse_text(text): """ @@ -1192,5 +1201,3 @@ class JclLexer(RegexLexer): result = 1.0 assert 0.0 <= result <= 1.0 return result - - -- cgit v1.2.1 From 21248af58d8c5900fa74aad8a4b2ff15c8e51bb3 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 17 Oct 2015 08:29:48 +0200 Subject: Style fixes. 
--- pygments/lexers/archetype.py | 10 ++++++---- pygments/lexers/fortran.py | 17 +++++++++-------- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/pygments/lexers/archetype.py b/pygments/lexers/archetype.py index 4f1b2645..e596b7be 100644 --- a/pygments/lexers/archetype.py +++ b/pygments/lexers/archetype.py @@ -70,7 +70,8 @@ class AtomsLexer(RegexLexer): (r'[a-z][a-z0-9+.-]*:', Literal, 'uri'), # term code (r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)(\w[\w-]*)(\])', - bygroups(Punctuation, Name.Decorator, Punctuation, Name.Decorator, Punctuation)), + bygroups(Punctuation, Name.Decorator, Punctuation, Name.Decorator, + Punctuation)), (r'\|', Punctuation, 'interval'), # list continuation (r'\.\.\.', Punctuation), @@ -223,7 +224,8 @@ class CadlLexer(AtomsLexer): bygroups(Punctuation, String.Regex, Punctuation)), (r'/', Punctuation, 'path'), # for cardinality etc - (r'(\{)((?:\d+\.\.)?(?:\d+|\*))((?:\s*;\s*(?:ordered|unordered|unique)){,2})(\})', + (r'(\{)((?:\d+\.\.)?(?:\d+|\*))' + r'((?:\s*;\s*(?:ordered|unordered|unique)){,2})(\})', bygroups(Punctuation, Number, Number, Punctuation)), # [{ is start of a tuple value (r'\[\{', Punctuation), @@ -267,8 +269,8 @@ class AdlLexer(AtomsLexer): (r'^[ \t]*--.*$', Comment), ], 'odin_section': [ - # repeating the following two rules from the root state enable multi-line strings - # that start in the first column to be dealt with + # repeating the following two rules from the root state enable multi-line + # strings that start in the first column to be dealt with (r'^(language|description|ontology|terminology|annotations|' r'component_terminologies|revision_history)[ \t]*\n', Generic.Heading), (r'^(definition)[ \t]*\n', Generic.Heading, 'cadl_section'), diff --git a/pygments/lexers/fortran.py b/pygments/lexers/fortran.py index d822160f..4c22139d 100644 --- a/pygments/lexers/fortran.py +++ b/pygments/lexers/fortran.py @@ -73,13 +73,14 @@ class FortranLexer(RegexLexer): # Data Types (words(( 'CHARACTER', 'COMPLEX', 
'DOUBLE PRECISION', 'DOUBLE COMPLEX', 'INTEGER', - 'LOGICAL', 'REAL', 'C_INT', 'C_SHORT', 'C_LONG', 'C_LONG_LONG', 'C_SIGNED_CHAR', - 'C_SIZE_T', 'C_INT8_T', 'C_INT16_T', 'C_INT32_T', 'C_INT64_T', 'C_INT_LEAST8_T', - 'C_INT_LEAST16_T', 'C_INT_LEAST32_T', 'C_INT_LEAST64_T', 'C_INT_FAST8_T', - 'C_INT_FAST16_T', 'C_INT_FAST32_T', 'C_INT_FAST64_T', 'C_INTMAX_T', - 'C_INTPTR_T', 'C_FLOAT', 'C_DOUBLE', 'C_LONG_DOUBLE', 'C_FLOAT_COMPLEX', - 'C_DOUBLE_COMPLEX', 'C_LONG_DOUBLE_COMPLEX', 'C_BOOL', 'C_CHAR', 'C_PTR', - 'C_FUNPTR'), prefix=r'\b', suffix=r'\s*\b'), + 'LOGICAL', 'REAL', 'C_INT', 'C_SHORT', 'C_LONG', 'C_LONG_LONG', + 'C_SIGNED_CHAR', 'C_SIZE_T', 'C_INT8_T', 'C_INT16_T', 'C_INT32_T', + 'C_INT64_T', 'C_INT_LEAST8_T', 'C_INT_LEAST16_T', 'C_INT_LEAST32_T', + 'C_INT_LEAST64_T', 'C_INT_FAST8_T', 'C_INT_FAST16_T', 'C_INT_FAST32_T', + 'C_INT_FAST64_T', 'C_INTMAX_T', 'C_INTPTR_T', 'C_FLOAT', 'C_DOUBLE', + 'C_LONG_DOUBLE', 'C_FLOAT_COMPLEX', 'C_DOUBLE_COMPLEX', + 'C_LONG_DOUBLE_COMPLEX', 'C_BOOL', 'C_CHAR', 'C_PTR', 'C_FUNPTR'), + prefix=r'\b', suffix=r'\s*\b'), Keyword.Type), # Operators @@ -171,7 +172,7 @@ class FortranFixedLexer(RegexLexer): aliases = ['fortranfixed'] filenames = ['*.f', '*.F'] - flags = re.IGNORECASE + flags = re.IGNORECASE def _lex_fortran(self, match, ctx=None): """Lex a line just as free form fortran without line break.""" -- cgit v1.2.1 From 131047f8e3e72160e25868cda4e25cc08bc3b88b Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 17 Oct 2015 08:39:33 +0200 Subject: Closes #685: use String.{Single,Double} in Python lexer. 
--- pygments/lexers/python.py | 65 ++++++++++++++++++++++++----------------------- 1 file changed, 33 insertions(+), 32 deletions(-) diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py index ea97b855..903078d8 100644 --- a/pygments/lexers/python.py +++ b/pygments/lexers/python.py @@ -35,6 +35,19 @@ class PythonLexer(RegexLexer): filenames = ['*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'] mimetypes = ['text/x-python', 'application/x-python'] + def innerstring_rules(ttype): + return [ + # the old style '%s' % (...) string formatting + (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?' + '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol), + # backslashes, quotes and formatting signs must be parsed one at a time + (r'[^\\\'"%\n]+', ttype), + (r'[\'"\\]', ttype), + # unhandled string formatting sign + (r'%', ttype), + # newlines are an error (use "nl" state) + ] + tokens = { 'root': [ (r'\n', Text), @@ -57,14 +70,14 @@ class PythonLexer(RegexLexer): 'import'), include('builtins'), include('backtick'), - ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'), - ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'), - ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'), - ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'), - ('[uU]?"""', String, combined('stringescape', 'tdqs')), - ("[uU]?'''", String, combined('stringescape', 'tsqs')), - ('[uU]?"', String, combined('stringescape', 'dqs')), - ("[uU]?'", String, combined('stringescape', 'sqs')), + ('(?:[rR]|[uU][rR]|[rR][uU])"""', String.Double, 'tdqs'), + ("(?:[rR]|[uU][rR]|[rR][uU])'''", String.Single, 'tsqs'), + ('(?:[rR]|[uU][rR]|[rR][uU])"', String.Double, 'dqs'), + ("(?:[rR]|[uU][rR]|[rR][uU])'", String.Single, 'sqs'), + ('[uU]?"""', String.Double, combined('stringescape', 'tdqs')), + ("[uU]?'''", String.Single, combined('stringescape', 'tsqs')), + ('[uU]?"', String.Double, combined('stringescape', 'dqs')), + ("[uU]?'", String.Single, combined('stringescape', 'sqs')), include('name'), 
include('numbers'), ], @@ -155,39 +168,27 @@ class PythonLexer(RegexLexer): (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|' r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape) ], - 'strings': [ - # the old style '%s' % (...) string formatting - (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?' - '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol), - # backslashes, quotes and formatting signs must be parsed one at a time - (r'[^\\\'"%\n]+', String), - (r'[\'"\\]', String), - # unhandled string formatting sign - (r'%', String) - # newlines are an error (use "nl" state) - ], - 'nl': [ - (r'\n', String) - ], + 'strings-single': innerstring_rules(String.Single), + 'strings-double': innerstring_rules(String.Double), 'dqs': [ - (r'"', String, '#pop'), + (r'"', String.Double, '#pop'), (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings - include('strings') + include('strings-double') ], 'sqs': [ - (r"'", String, '#pop'), + (r"'", String.Single, '#pop'), (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings - include('strings') + include('strings-single') ], 'tdqs': [ - (r'"""', String, '#pop'), - include('strings'), - include('nl') + (r'"""', String.Double, '#pop'), + include('strings-double'), + (r'\n', String.Double) ], 'tsqs': [ - (r"'''", String, '#pop'), - include('strings'), - include('nl') + (r"'''", String.Single, '#pop'), + include('strings-single'), + (r'\n', String.Single) ], } -- cgit v1.2.1 From e23a91d9384494f6543313df8a702ef1c4bec3f1 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 17 Oct 2015 08:44:13 +0200 Subject: Closes #1146: add "unsigned" as a cython builtin. 
--- pygments/lexers/python.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py index 903078d8..ccbd4c15 100644 --- a/pygments/lexers/python.py +++ b/pygments/lexers/python.py @@ -550,7 +550,7 @@ class CythonLexer(RegexLexer): 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod', - 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', + 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'unsigned', 'vars', 'xrange', 'zip'), prefix=r'(? Date: Sat, 17 Oct 2015 08:54:08 +0200 Subject: Closes #1142: add 16-million color terminal formatter. --- CHANGES | 2 ++ pygments/formatters/_mapping.py | 3 +- pygments/formatters/terminal256.py | 64 ++++++++++++++++++++++++++++++++++++-- 3 files changed, 65 insertions(+), 4 deletions(-) diff --git a/CHANGES b/CHANGES index 6543a515..432485f4 100644 --- a/CHANGES +++ b/CHANGES @@ -42,6 +42,8 @@ Version 2.1 - Added formatters: * IRC (PR#458) + * True color (24-bit) terminal ANSI sequences (#1142) + (formatter alias: "16m") - Improved performance of the HTML formatter for long lines (PR#504). diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py index 76f1b13f..a2e612ad 100755 --- a/pygments/formatters/_mapping.py +++ b/pygments/formatters/_mapping.py @@ -28,8 +28,9 @@ FORMATTERS = { 'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'), 'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. 
Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'), 'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ```` element with explicit ``x`` and ``y`` coordinates containing ```` elements with the individual token styles.'), - 'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'), + 'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'), 'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'), + 'TerminalTrueColorFormatter': ('pygments.formatters.terminal256', 'TerminalTrueColor', ('terminal16m', 'console16m', '16m'), (), 'Format tokens with ANSI color sequences, for output in a true-color terminal or console. 
Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'), 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.') } diff --git a/pygments/formatters/terminal256.py b/pygments/formatters/terminal256.py index 5d794f4e..af311955 100644 --- a/pygments/formatters/terminal256.py +++ b/pygments/formatters/terminal256.py @@ -29,7 +29,7 @@ import sys from pygments.formatter import Formatter -__all__ = ['Terminal256Formatter'] +__all__ = ['Terminal256Formatter', 'TerminalTrueColorFormatter'] class EscapeSequence: @@ -56,6 +56,18 @@ class EscapeSequence: attrs.append("04") return self.escape(attrs) + def true_color_string(self): + attrs = [] + if self.fg: + attrs.extend(("38", "2", str(self.fg[0]), str(self.fg[1]), str(self.fg[2]))) + if self.bg: + attrs.extend(("48", "2", str(self.bg[0]), str(self.bg[1]), str(self.bg[2]))) + if self.bold: + attrs.append("01") + if self.underline: + attrs.append("04") + return self.escape(attrs) + def reset_string(self): attrs = [] if self.fg is not None: @@ -68,9 +80,9 @@ class EscapeSequence: class Terminal256Formatter(Formatter): - r""" + """ Format tokens with ANSI color sequences, for output in a 256-color - terminal or console. Like in `TerminalFormatter` color sequences + terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly. The formatter takes colors from a style defined by the `style` option @@ -221,3 +233,49 @@ class Terminal256Formatter(Formatter): if not_found: outfile.write(value) + + +class TerminalTrueColorFormatter(Terminal256Formatter): + r""" + Format tokens with ANSI color sequences, for output in a true-color + terminal or console. Like in `TerminalFormatter` color sequences + are terminated at newlines, so that paging the output works correctly. + + .. 
versionadded:: 2.1 + + Options accepted: + + `style` + The style to use, can be a string or a Style subclass (default: + ``'default'``). + """ + name = 'TerminalTrueColor' + aliases = ['terminal16m', 'console16m', '16m'] + filenames = [] + + def _build_color_table(self): + pass + + def _color_tuple(self, color): + try: + rgb = int(str(color), 16) + except ValueError: + return None + r = (rgb >> 16) & 0xff + g = (rgb >> 8) & 0xff + b = rgb & 0xff + return (r, g, b) + + def _setup_styles(self): + for ttype, ndef in self.style: + escape = EscapeSequence() + if ndef['color']: + escape.fg = self._color_tuple(ndef['color']) + if ndef['bgcolor']: + escape.bg = self._color_tuple(ndef['bgcolor']) + if self.usebold and ndef['bold']: + escape.bold = True + if self.useunderline and ndef['underline']: + escape.underline = True + self.style_string[str(ttype)] = (escape.true_color_string(), + escape.reset_string()) -- cgit v1.2.1 From 6f703dfda8ff616f466fea58fd469f62dac0d177 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 17 Oct 2015 09:12:22 +0200 Subject: Closes #1141: C: remove blanket *_t highlighting, add some Linux specific _t types --- pygments/lexers/c_cpp.py | 33 +++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/pygments/lexers/c_cpp.py b/pygments/lexers/c_cpp.py index 624ebb71..2b875093 100644 --- a/pygments/lexers/c_cpp.py +++ b/pygments/lexers/c_cpp.py @@ -65,8 +65,7 @@ class CFamilyLexer(RegexLexer): 'restricted', 'return', 'sizeof', 'static', 'struct', 'switch', 'typedef', 'union', 'volatile', 'while'), suffix=r'\b'), Keyword), - (r'(bool|int|long|float|short|double|char|unsigned|signed|void|' - r'[a-z_][a-z0-9_]*_t)\b', + (r'(bool|int|long|float|short|double|char|unsigned|signed|void)\b', Keyword.Type), (words(('inline', '_inline', '__inline', 'naked', 'restrict', 'thread', 'typename'), suffix=r'\b'), Keyword.Reserved), @@ -139,22 +138,26 @@ class CFamilyLexer(RegexLexer): ] } - stdlib_types = ['size_t', 
'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t', - 'sig_atomic_t', 'fpos_t', 'clock_t', 'time_t', 'va_list', - 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t', 'mbstate_t', - 'wctrans_t', 'wint_t', 'wctype_t'] - c99_types = ['_Bool', '_Complex', 'int8_t', 'int16_t', 'int32_t', 'int64_t', - 'uint8_t', 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t', - 'int_least16_t', 'int_least32_t', 'int_least64_t', - 'uint_least8_t', 'uint_least16_t', 'uint_least32_t', - 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t', - 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t', - 'uint_fast64_t', 'intptr_t', 'uintptr_t', 'intmax_t', - 'uintmax_t'] + stdlib_types = set(( + 'size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t', 'sig_atomic_t', 'fpos_t', + 'clock_t', 'time_t', 'va_list', 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t', + 'mbstate_t', 'wctrans_t', 'wint_t', 'wctype_t')) + c99_types = set(( + '_Bool', '_Complex', 'int8_t', 'int16_t', 'int32_t', 'int64_t', 'uint8_t', + 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t', 'int_least16_t', + 'int_least32_t', 'int_least64_t', 'uint_least8_t', 'uint_least16_t', + 'uint_least32_t', 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t', + 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t', 'uint_fast64_t', + 'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t')) + linux_types = set(( + 'clockid_t', 'cpu_set_t', 'cpumask_t', 'dev_t', 'gid_t', 'id_t', 'ino_t', 'key_t', + 'mode_t', 'nfds_t', 'pid_t', 'rlim_t', 'sig_t', 'sighandler_t', 'siginfo_t', + 'sigset_t', 'sigval_t', 'socklen_t', 'timer_t', 'uid_t')) def __init__(self, **options): self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True) self.c99highlighting = get_bool_opt(options, 'c99highlighting', True) + self.platformhighlighting = get_bool_opt(options, 'platformhighlighting', True) RegexLexer.__init__(self, **options) def get_tokens_unprocessed(self, text): @@ -165,6 +168,8 @@ class CFamilyLexer(RegexLexer): token = 
Keyword.Type elif self.c99highlighting and value in self.c99_types: token = Keyword.Type + elif self.platformhighlighting and value in self.linux_types: + token = Keyword.Type yield index, token, value -- cgit v1.2.1 From 6d584433c67716b6c01bd2e6c5948baecc13e39a Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 17 Oct 2015 07:48:25 -0700 Subject: Resolves #643: Remove 'here' as link text in docs. --- doc/faq.rst | 3 +-- doc/languages.rst | 6 +++--- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/doc/faq.rst b/doc/faq.rst index aeba9259..5458e655 100644 --- a/doc/faq.rst +++ b/doc/faq.rst @@ -12,8 +12,7 @@ processed version (in different formats) containing syntax highlighting markup. Its features include: -* a wide range of common languages and markup formats is supported (look here - for a list) +* a wide range of common :doc:`languages and markup formats ` is supported * new languages and formats are added easily * a number of output formats is available, including: diff --git a/doc/languages.rst b/doc/languages.rst index 13555ccf..683b8752 100644 --- a/doc/languages.rst +++ b/doc/languages.rst @@ -143,9 +143,9 @@ Other markup ... that's all? --------------- -Well, why not write your own? Contributing to Pygments is easy and fun. Look -:doc:`here ` for the docs on lexer development and -:ref:`here ` for contact details. +Well, why not write your own? Contributing to Pygments is easy and fun. Take a look at the +:doc:`docs on lexer development ` and +:ref:`contact details `. Note: the languages listed here are supported in the development version. The latest release may lack a few of them. -- cgit v1.2.1 From cd8f6c987c64a37a29d4443b0c3e2183c5f94882 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 17 Oct 2015 07:48:49 -0700 Subject: Correct versionadded directive. 
--- pygments/lexers/roboconf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/roboconf.py b/pygments/lexers/roboconf.py index ec525c73..bfb7e3b9 100644 --- a/pygments/lexers/roboconf.py +++ b/pygments/lexers/roboconf.py @@ -19,7 +19,7 @@ class RoboconfGraphLexer(RegexLexer): """ Lexer for `Roboconf `_ graph files. - .. versadded:: 2.1 + .. versionadded:: 2.1 """ name = 'Roboconf Graph' aliases = ['roboconf-graph'] @@ -54,7 +54,7 @@ class RoboconfInstancesLexer(RegexLexer): """ Lexer for `Roboconf `_ instances files. - .. versadded:: 2.1 + .. versionadded:: 2.1 """ name = 'Roboconf Instances' aliases = ['roboconf-instances'] -- cgit v1.2.1 From fd5e9cc469dcea7e745ab40cc516dce820e5570f Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 17 Oct 2015 07:50:47 -0700 Subject: Sphinxext: Provide a better error on missing docstrings. --- pygments/sphinxext.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pygments/sphinxext.py b/pygments/sphinxext.py index e63d3d35..2dc9810f 100644 --- a/pygments/sphinxext.py +++ b/pygments/sphinxext.py @@ -113,6 +113,8 @@ class PygmentsDoc(Directive): moduledocstrings[module] = moddoc for module, lexers in sorted(modules.items(), key=lambda x: x[0]): + if moduledocstrings[module] is None: + raise Exception("Missing docstring for %s" % (module,)) heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.') out.append(MODULEDOC % (module, heading, '-'*len(heading))) for data in lexers: -- cgit v1.2.1 From 52fb518e8211e678e873e779a0cbbe35985d8c61 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 17 Oct 2015 07:51:00 -0700 Subject: Add missing docstring for CSound; add author --- AUTHORS | 1 + CHANGES | 1 + pygments/lexers/csound.py | 9 +++++++++ 3 files changed, 11 insertions(+) diff --git a/AUTHORS b/AUTHORS index 945fc962..7b207aea 100644 --- a/AUTHORS +++ b/AUTHORS @@ -183,6 +183,7 @@ Other contributors, listed alphabetically, are: * Matthias Vallentin -- Bro lexer * Linh Vu Hong -- 
RSL lexer * Nathan Weizenbaum -- Haml and Sass lexers +* Nathan Whetsell -- CSound lexers * Dietmar Winkler -- Modelica lexer * Nils Winter -- Smalltalk lexer * Davy Wybiral -- Clojure lexer diff --git a/CHANGES b/CHANGES index fc358867..3a525d20 100644 --- a/CHANGES +++ b/CHANGES @@ -33,6 +33,7 @@ Version 2.1 * Elm and J (PR#452) * Crmsh (PR#440) * Praat (PR#492) + * CSound (PR#494) - Added styles: diff --git a/pygments/lexers/csound.py b/pygments/lexers/csound.py index 32b804ee..2d83e8d2 100644 --- a/pygments/lexers/csound.py +++ b/pygments/lexers/csound.py @@ -1,4 +1,13 @@ # -*- coding: utf-8 -*- +""" + pygments.lexers.csound + ~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for CSound languages. + + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" import re -- cgit v1.2.1 From 1b141702b2a6d7a50273028628bee29f6400f713 Mon Sep 17 00:00:00 2001 From: yole Date: Tue, 20 Oct 2015 18:37:48 +0200 Subject: update list of Kotlin keywords according to language changes up to 1.0 beta --- pygments/lexers/jvm.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py index 2a4909c8..14647616 100644 --- a/pygments/lexers/jvm.py +++ b/pygments/lexers/jvm.py @@ -990,7 +990,7 @@ class CeylonLexer(RegexLexer): class KotlinLexer(RegexLexer): """ - For `Kotlin `_ + For `Kotlin `_ source code. .. 
versionadded:: 1.5 @@ -1025,15 +1025,17 @@ class KotlinLexer(RegexLexer): (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|" r"0[xX][0-9a-fA-F]+[Ll]?", Number), (r'(class)(\s+)(object)', bygroups(Keyword, Text, Keyword)), - (r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'), + (r'(class|interface|object)(\s+)', bygroups(Keyword, Text), 'class'), (r'(package|import)(\s+)', bygroups(Keyword, Text), 'package'), (r'(val|var)(\s+)', bygroups(Keyword, Text), 'property'), (r'(fun)(\s+)', bygroups(Keyword, Text), 'function'), - (r'(abstract|annotation|as|break|by|catch|class|continue|do|else|' - r'enum|false|final|finally|for|fun|get|if|import|in|inner|' - r'internal|is|null|object|open|out|override|package|private|' - r'protected|public|reified|return|set|super|this|throw|trait|' - r'true|try|type|val|var|vararg|when|where|while|This)\b', Keyword), + (r'(abstract|annotation|as|break|by|catch|class|companion|const|' + r'constructor|continue|crossinline|data|do|dynamic|else|enum|' + r'external|false|final|finally|for|fun|get|if|import|in|infix|' + r'inline|inner|interface|internal|is|lateinit|noinline|null|' + r'object|open|operator|out|override|package|private|protected|' + r'public|reified|return|sealed|set|super|tailrec|this|throw|' + r'true|try|val|var|vararg|when|where|while)\b', Keyword), (kt_id, Name), ], 'package': [ -- cgit v1.2.1 From 955dde8e4c633cd73894d776edcfa5a8a6211af4 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 20 Oct 2015 19:03:43 +0200 Subject: Csound: fix code style nits. 
--- pygments/lexers/csound.py | 86 ++++++++++++++++++++++++++++++----------------- 1 file changed, 56 insertions(+), 30 deletions(-) diff --git a/pygments/lexers/csound.py b/pygments/lexers/csound.py index 2d83e8d2..361f048a 100644 --- a/pygments/lexers/csound.py +++ b/pygments/lexers/csound.py @@ -12,10 +12,14 @@ import re from pygments.lexer import RegexLexer, bygroups, default, include, using, words -from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, String, Text, Whitespace +from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, \ + String, Text +from pygments.lexers._csound_builtins import OPCODES +from pygments.lexers.python import PythonLexer +from pygments.lexers.scripting import LuaLexer # The CsoundDocumentLexer casuses a Pygments test to fail. -__all__ = ['CsoundScoreLexer', 'CsoundOrchestraLexer']#, 'CsoundDocumentLexer'] +__all__ = ['CsoundScoreLexer', 'CsoundOrchestraLexer'] # , 'CsoundDocumentLexer'] newline = (r'((?:;|//).*)*(\n)', bygroups(Comment.Single, Text)) @@ -28,21 +32,22 @@ class CsoundLexer(RegexLexer): (r'\\\n', Text), (r'/[*](.|\n)*?[*]/', Comment.Multiline) ], - + 'macro call': [ - (r'(\$\w+\.?)(\()', bygroups(Comment.Preproc, Punctuation), 'function macro call'), + (r'(\$\w+\.?)(\()', bygroups(Comment.Preproc, Punctuation), + 'function macro call'), (r'\$\w+(\.|\b)', Comment.Preproc) ], 'function macro call': [ (r"((?:\\['\)]|[^'\)])+)(')", bygroups(Comment.Preproc, Punctuation)), (r"([^'\)]+)(\))", bygroups(Comment.Preproc, Punctuation), '#pop') ], - + 'whitespace or macro call': [ include('whitespace'), include('macro call') ], - + 'preprocessor directives': [ (r'#(e(nd(if)?|lse)|ifn?def|undef)\b|##', Comment.Preproc), (r'#include\b', Comment.Preproc, 'include'), @@ -57,7 +62,8 @@ class CsoundLexer(RegexLexer): 'macro name': [ include('whitespace'), - (r'(\w+)(\()', bygroups(Comment.Preproc, Text), 'function macro argument list'), + (r'(\w+)(\()', bygroups(Comment.Preproc, 
Text), + 'function macro argument list'), (r'\w+', Comment.Preproc, 'object macro definition after name') ], 'object macro definition after name': [ @@ -70,7 +76,8 @@ class CsoundLexer(RegexLexer): ], 'function macro argument list': [ (r"(\w+)(['#])", bygroups(Comment.Preproc, Punctuation)), - (r'(\w+)(\))', bygroups(Comment.Preproc, Punctuation), 'function macro definition after name') + (r'(\w+)(\))', bygroups(Comment.Preproc, Punctuation), + 'function macro definition after name') ], 'function macro definition after name': [ (r'[ \t]+', Text), @@ -86,6 +93,8 @@ class CsoundLexer(RegexLexer): class CsoundScoreLexer(CsoundLexer): """ For `Csound `_ scores. + + .. versionadded:: 2.1 """ name = 'Csound Score' @@ -108,7 +117,7 @@ class CsoundScoreLexer(CsoundLexer): newline + ('#pop',), include('partial statement') ], - + 'root': [ newline, include('whitespace or macro call'), @@ -123,20 +132,18 @@ class CsoundScoreLexer(CsoundLexer): } -from pygments.lexers._csound_builtins import OPCODES -from pygments.lexers.python import PythonLexer -from pygments.lexers.scripting import LuaLexer - class CsoundOrchestraLexer(CsoundLexer): """ For `Csound `_ orchestras. + + .. 
versionadded:: 2.1 """ name = 'Csound Orchestra' filenames = ['*.orc'] user_defined_opcodes = set() - + def opcode_name_callback(lexer, match): opcode = match.group(0) lexer.user_defined_opcodes.add(opcode) @@ -173,13 +180,19 @@ class CsoundOrchestraLexer(CsoundLexer): (r'[](),?:[]', Punctuation), (words(( # Keywords - 'do', 'else', 'elseif', 'endif', 'enduntil', 'fi', 'if', 'ithen', 'kthen', 'od', 'then', 'until', 'while', + 'do', 'else', 'elseif', 'endif', 'enduntil', 'fi', 'if', 'ithen', 'kthen', + 'od', 'then', 'until', 'while', # Opcodes that act as control structures 'return', 'timout' ), prefix=r'\b', suffix=r'\b'), Keyword), - (words(('goto', 'igoto', 'kgoto', 'rigoto', 'tigoto'), prefix=r'\b', suffix=r'\b'), Keyword, 'goto label'), - (words(('cggoto', 'cigoto', 'cingoto', 'ckgoto', 'cngoto'), prefix=r'\b', suffix=r'\b'), Keyword, ('goto label', 'goto expression')), - (words(('loop_ge', 'loop_gt', 'loop_le', 'loop_lt'), prefix=r'\b', suffix=r'\b'), Keyword, ('goto label', 'goto expression', 'goto expression', 'goto expression')), + (words(('goto', 'igoto', 'kgoto', 'rigoto', 'tigoto'), + prefix=r'\b', suffix=r'\b'), Keyword, 'goto label'), + (words(('cggoto', 'cigoto', 'cingoto', 'ckgoto', 'cngoto'), + prefix=r'\b', suffix=r'\b'), Keyword, + ('goto label', 'goto expression')), + (words(('loop_ge', 'loop_gt', 'loop_le', 'loop_lt'), + prefix=r'\b', suffix=r'\b'), Keyword, + ('goto label', 'goto expression', 'goto expression', 'goto expression')), (r'\bscoreline(_i)?\b', Name.Builtin, 'scoreline opcode'), (r'\bpyl?run[it]?\b', Name.Builtin, 'python opcode'), (r'\blua_(exec|opdef)\b', Name.Builtin, 'lua opcode'), @@ -196,7 +209,8 @@ class CsoundOrchestraLexer(CsoundLexer): newline, include('whitespace or macro call'), (r'\binstr\b', Keyword, ('instrument block', 'instrument name list')), - (r'\bopcode\b', Keyword, ('opcode block', 'opcode parameter list', 'opcode types', 'opcode types', 'opcode name')), + (r'\bopcode\b', Keyword, ('opcode block', 'opcode 
parameter list', + 'opcode types', 'opcode types', 'opcode name')), include('label'), default('expression') ], @@ -246,7 +260,7 @@ class CsoundOrchestraLexer(CsoundLexer): (r',', Punctuation, '#pop'), include('partial expression') ], - + 'single-line string': [ include('macro call'), (r'"', String, '#pop'), @@ -299,33 +313,45 @@ class CsoundOrchestraLexer(CsoundLexer): # import copy # from pygments.lexers.html import HtmlLexer, XmlLexer -# +# # class CsoundDocumentLexer(XmlLexer): # """ # For `Csound `_ documents. # """ -# +# # name = 'Csound Document' # aliases = ['csound'] # filenames = ['*.csd'] -# +# # tokens = copy.deepcopy(XmlLexer.tokens) # for i, item in enumerate(tokens['root']): # if len(item) > 2 and item[2] == 'tag': -# (tokens['root']).insert(i, (r'(<)(\s*)(CsInstruments)(\s*)', bygroups(Name.Tag, Text, Name.Tag, Text), ('orchestra content', 'tag'))) -# (tokens['root']).insert(i, (r'(<)(\s*)(CsScore)(\s*)', bygroups(Name.Tag, Text, Name.Tag, Text), ('score content', 'tag'))) -# (tokens['root']).insert(i, (r'(<)(\s*)(html)(\s*)', bygroups(Name.Tag, Text, Name.Tag, Text), ('HTML', 'tag'))) +# (tokens['root']).insert(i, (r'(<)(\s*)(CsInstruments)(\s*)', +# bygroups(Name.Tag, Text, Name.Tag, Text), +# ('orchestra content', 'tag'))) +# (tokens['root']).insert(i, (r'(<)(\s*)(CsScore)(\s*)', +# bygroups(Name.Tag, Text, Name.Tag, Text), +# ('score content', 'tag'))) +# (tokens['root']).insert(i, (r'(<)(\s*)(html)(\s*)', +# bygroups(Name.Tag, Text, Name.Tag, Text), +# ('HTML', 'tag'))) # break -# +# # tokens['orchestra content'] = [ -# (r'(<)(\s*)(/)(\s*)(CsInstruments)(\s*)(>)', bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), '#pop'), +# (r'(<)(\s*)(/)(\s*)(CsInstruments)(\s*)(>)', +# bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), +# '#pop'), # (r'.+?(?=<\s*/\s*CsInstruments\s*>)', using(CsoundOrchestraLexer)) # ] # tokens['score content'] = [ -# (r'(<)(\s*)(/)(\s*)(CsScore)(\s*)(>)', bygroups(Name.Tag, Text, Name.Tag, 
Text, Name.Tag, Text, Name.Tag), '#pop'), +# (r'(<)(\s*)(/)(\s*)(CsScore)(\s*)(>)', +# bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), +# '#pop'), # (r'.+?(?=<\s*/\s*CsScore\s*>)', using(CsoundScoreLexer)) # ] # tokens['HTML'] = [ -# (r'(<)(\s*)(/)(\s*)(html)(\s*)(>)', bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), '#pop'), +# (r'(<)(\s*)(/)(\s*)(html)(\s*)(>)', +# bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), +# '#pop'), # (r'.+?(?=<\s*/\s*html\s*>)', using(HtmlLexer)) # ] -- cgit v1.2.1 From fe4a4f54c113f5471f946e3784ed1735c9c82b37 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 20 Oct 2015 19:09:19 +0200 Subject: Modula2: code style, doc markup fix. --- pygments/lexers/modula2.py | 313 ++++++++++++++++++++++----------------------- 1 file changed, 154 insertions(+), 159 deletions(-) diff --git a/pygments/lexers/modula2.py b/pygments/lexers/modula2.py index d32bb5bb..a5fcbf78 100644 --- a/pygments/lexers/modula2.py +++ b/pygments/lexers/modula2.py @@ -77,20 +77,20 @@ class Modula2Lexer(RegexLexer): A dialect option may be embedded in a source file in form of a dialect tag, a specially formatted comment that specifies a dialect option. - Dialect Tag EBNF: + Dialect Tag EBNF:: - dialectTag : - OpeningCommentDelim Prefix dialectOption ClosingCommentDelim ; + dialectTag : + OpeningCommentDelim Prefix dialectOption ClosingCommentDelim ; - dialectOption : - 'm2pim' | 'm2iso' | 'm2r10' | 'objm2' | - 'm2iso+aglet' | 'm2pim+gm2' | 'm2iso+p1' | 'm2iso+xds' ; + dialectOption : + 'm2pim' | 'm2iso' | 'm2r10' | 'objm2' | + 'm2iso+aglet' | 'm2pim+gm2' | 'm2iso+p1' | 'm2iso+xds' ; - Prefix : '!' ; + Prefix : '!' ; - OpeningCommentDelim : '(*' ; + OpeningCommentDelim : '(*' ; - ClosingCommentDelim : '*)' ; + ClosingCommentDelim : '*)' ; No whitespace is permitted between the tokens of a dialect tag. 
@@ -103,9 +103,9 @@ class Modula2Lexer(RegexLexer): Examples: - `(*!m2r10*) DEFINITION MODULE Foobar; ...` + ``(*!m2r10*) DEFINITION MODULE Foobar; ...`` Use Modula2 R10 dialect to render this source file. - `(*!m2pim+gm2*) DEFINITION MODULE Bazbam; ...` + ``(*!m2pim+gm2*) DEFINITION MODULE Bazbam; ...`` Use PIM dialect with GNU extensions to render this source file. @@ -128,7 +128,7 @@ class Modula2Lexer(RegexLexer): Example: - `$ pygmentize -O full,style=algol -f latex -o /path/to/output /path/to/input` + ``$ pygmentize -O full,style=algol -f latex -o /path/to/output /path/to/input`` Render input file in Algol publication mode to LaTeX output. @@ -151,7 +151,7 @@ class Modula2Lexer(RegexLexer): Example: - `$ pygmentize -O full,dialect=m2r10,treat_stdlib_adts_as_builtins=Off ...` + ``$ pygmentize -O full,dialect=m2r10,treat_stdlib_adts_as_builtins=Off ...`` Render standard library ADTs as ordinary library types. .. versionadded:: 1.3 @@ -203,14 +203,14 @@ class Modula2Lexer(RegexLexer): 'plain_number_literals': [ # # Base-10, real number with exponent - (r'[0-9]+(\'[0-9]+)*' # integral part \ - r'\.[0-9]+(\'[0-9]+)*' # fractional part \ - r'[eE][+-]?[0-9]+(\'[0-9]+)*', # exponent \ + (r'[0-9]+(\'[0-9]+)*' # integral part + r'\.[0-9]+(\'[0-9]+)*' # fractional part + r'[eE][+-]?[0-9]+(\'[0-9]+)*', # exponent Number.Float), # # Base-10, real number without exponent - (r'[0-9]+(\'[0-9]+)*' # integral part \ - r'\.[0-9]+(\'[0-9]+)*', # fractional part \ + (r'[0-9]+(\'[0-9]+)*' # integral part + r'\.[0-9]+(\'[0-9]+)*', # fractional part Number.Float), # # Base-10, whole number @@ -235,52 +235,52 @@ class Modula2Lexer(RegexLexer): # Dot Product Operator (r'\*\.', Operator), # Array Concatenation Operator - (r'\+>', Operator), # M2R10 + ObjM2 + (r'\+>', Operator), # M2R10 + ObjM2 # Inequality Operator - (r'<>', Operator), # ISO + PIM + (r'<>', Operator), # ISO + PIM # Less-Or-Equal, Subset (r'<=', Operator), # Greater-Or-Equal, Superset (r'>=', Operator), # Identity 
Operator - (r'==', Operator), # M2R10 + ObjM2 + (r'==', Operator), # M2R10 + ObjM2 # Type Conversion Operator - (r'::', Operator), # M2R10 + ObjM2 + (r'::', Operator), # M2R10 + ObjM2 # Assignment Symbol (r':=', Operator), # Postfix Increment Mutator - (r'\+\+', Operator), # M2R10 + ObjM2 + (r'\+\+', Operator), # M2R10 + ObjM2 # Postfix Decrement Mutator - (r'--', Operator), # M2R10 + ObjM2 + (r'--', Operator), # M2R10 + ObjM2 ], 'unigraph_operators': [ # Arithmetic Operators (r'[+-]', Operator), (r'[*/]', Operator), # ISO 80000-2 compliant Set Difference Operator - (r'\\', Operator), # M2R10 + ObjM2 + (r'\\', Operator), # M2R10 + ObjM2 # Relational Operators (r'[=#<>]', Operator), # Dereferencing Operator (r'\^', Operator), # Dereferencing Operator Synonym - (r'@', Operator), # ISO + (r'@', Operator), # ISO # Logical AND Operator Synonym - (r'&', Operator), # PIM + ISO + (r'&', Operator), # PIM + ISO # Logical NOT Operator Synonym - (r'~', Operator), # PIM + ISO + (r'~', Operator), # PIM + ISO # Smalltalk Message Prefix - (r'`', Operator), # ObjM2 + (r'`', Operator), # ObjM2 ], 'digraph_punctuation': [ # Range Constructor (r'\.\.', Punctuation), # Opening Chevron Bracket - (r'<<', Punctuation), # M2R10 + ISO + (r'<<', Punctuation), # M2R10 + ISO # Closing Chevron Bracket - (r'>>', Punctuation), # M2R10 + ISO + (r'>>', Punctuation), # M2R10 + ISO # Blueprint Punctuation - (r'->', Punctuation), # M2R10 + ISO + (r'->', Punctuation), # M2R10 + ISO # Distinguish |# and # in M2 R10 (r'\|#', Punctuation), # Distinguish ## and # in M2 R10 @@ -292,23 +292,23 @@ class Modula2Lexer(RegexLexer): # Common Punctuation (r'[\(\)\[\]{},.:;\|]', Punctuation), # Case Label Separator Synonym - (r'!', Punctuation), # ISO + (r'!', Punctuation), # ISO # Blueprint Punctuation - (r'\?', Punctuation), # M2R10 + ObjM2 + (r'\?', Punctuation), # M2R10 + ObjM2 ], 'comments': [ # Single Line Comment - (r'^//.*?\n', Comment.Single), # M2R10 + ObjM2 + (r'^//.*?\n', Comment.Single), # M2R10 + 
ObjM2 # Block Comment (r'\(\*([^$].*?)\*\)', Comment.Multiline), # Template Block Comment - (r'/\*(.*?)\*/', Comment.Multiline), # M2R10 + ObjM2 + (r'/\*(.*?)\*/', Comment.Multiline), # M2R10 + ObjM2 ], 'pragmas': [ # ISO Style Pragmas - (r'<\*.*?\*>', Comment.Preproc), # ISO, M2R10 + ObjM2 + (r'<\*.*?\*>', Comment.Preproc), # ISO, M2R10 + ObjM2 # Pascal Style Pragmas - (r'\(\*\$.*?\*\)', Comment.Preproc), # PIM + (r'\(\*\$.*?\*\)', Comment.Preproc), # PIM ], 'root': [ include('whitespace'), @@ -316,8 +316,8 @@ class Modula2Lexer(RegexLexer): include('pragmas'), include('comments'), include('identifiers'), - include('suffixed_number_literals'), # PIM + ISO - include('prefixed_number_literals'), # M2R10 + ObjM2 + include('suffixed_number_literals'), # PIM + ISO + include('prefixed_number_literals'), # M2R10 + ObjM2 include('plain_number_literals'), include('string_literals'), include('digraph_punctuation'), @@ -634,7 +634,7 @@ class Modula2Lexer(RegexLexer): 'CloseOutput', 'ReadString', 'ReadInt', 'ReadCard', 'ReadWrd', 'WriteInt', 'WriteCard', 'WriteOct', 'WriteHex', 'WriteWrd', 'ReadReal', 'WriteReal', 'WriteFixPt', 'WriteRealOct', 'sqrt', 'exp', - 'ln', 'sin', 'cos', 'arctan', 'entier','ALLOCATE', 'DEALLOCATE', + 'ln', 'sin', 'cos', 'arctan', 'entier', 'ALLOCATE', 'DEALLOCATE', ) # PIM Modula-2 Standard Library Variables Dataset @@ -707,7 +707,7 @@ class Modula2Lexer(RegexLexer): 'LongRealIO', 'BCDIO', 'LongBCDIO', 'CardMath', 'LongCardMath', 'IntMath', 'LongIntMath', 'RealMath', 'LongRealMath', 'BCDMath', 'LongBCDMath', 'FileIO', 'FileSystem', 'Storage', 'IOSupport', - ) + ) # Modula-2 R10 Standard Library Types Dataset m2r10_stdlib_type_identifiers = ( @@ -733,7 +733,6 @@ class Modula2Lexer(RegexLexer): # D i a l e c t s - # Dialect modes dialects = ( 'unknown', @@ -746,39 +745,39 @@ class Modula2Lexer(RegexLexer): # Lexemes to Mark as Errors Database lexemes_to_reject_db = { # Lexemes to reject for unknown dialect - 'unknown' : ( + 'unknown': ( # LEAVE THIS 
EMPTY ), # Lexemes to reject for PIM Modula-2 - 'm2pim' : ( + 'm2pim': ( pim_lexemes_to_reject, ), # Lexemes to reject for ISO Modula-2 - 'm2iso' : ( + 'm2iso': ( iso_lexemes_to_reject, ), # Lexemes to reject for Modula-2 R10 - 'm2r10' : ( + 'm2r10': ( m2r10_lexemes_to_reject, ), # Lexemes to reject for Objective Modula-2 - 'objm2' : ( + 'objm2': ( objm2_lexemes_to_reject, ), # Lexemes to reject for Aglet Modula-2 - 'm2iso+aglet' : ( + 'm2iso+aglet': ( iso_lexemes_to_reject, ), # Lexemes to reject for GNU Modula-2 - 'm2pim+gm2' : ( + 'm2pim+gm2': ( pim_lexemes_to_reject, ), # Lexemes to reject for p1 Modula-2 - 'm2iso+p1' : ( + 'm2iso+p1': ( iso_lexemes_to_reject, ), # Lexemes to reject for XDS Modula-2 - 'm2iso+xds' : ( + 'm2iso+xds': ( iso_lexemes_to_reject, ), } @@ -786,7 +785,7 @@ class Modula2Lexer(RegexLexer): # Reserved Words Database reserved_words_db = { # Reserved words for unknown dialect - 'unknown' : ( + 'unknown': ( common_reserved_words, pim_additional_reserved_words, iso_additional_reserved_words, @@ -794,53 +793,53 @@ class Modula2Lexer(RegexLexer): ), # Reserved words for PIM Modula-2 - 'm2pim' : ( + 'm2pim': ( common_reserved_words, pim_additional_reserved_words, ), # Reserved words for Modula-2 R10 - 'm2iso' : ( + 'm2iso': ( common_reserved_words, iso_additional_reserved_words, ), # Reserved words for ISO Modula-2 - 'm2r10' : ( + 'm2r10': ( common_reserved_words, m2r10_additional_reserved_words, ), # Reserved words for Objective Modula-2 - 'objm2' : ( + 'objm2': ( common_reserved_words, m2r10_additional_reserved_words, objm2_additional_reserved_words, ), # Reserved words for Aglet Modula-2 Extensions - 'm2iso+aglet' : ( + 'm2iso+aglet': ( common_reserved_words, iso_additional_reserved_words, aglet_additional_reserved_words, ), # Reserved words for GNU Modula-2 Extensions - 'm2pim+gm2' : ( + 'm2pim+gm2': ( common_reserved_words, pim_additional_reserved_words, gm2_additional_reserved_words, ), # Reserved words for p1 Modula-2 Extensions - 
'm2iso+p1' : ( + 'm2iso+p1': ( common_reserved_words, iso_additional_reserved_words, p1_additional_reserved_words, ), # Reserved words for XDS Modula-2 Extensions - 'm2iso+xds' : ( + 'm2iso+xds': ( common_reserved_words, iso_additional_reserved_words, xds_additional_reserved_words, @@ -850,7 +849,7 @@ class Modula2Lexer(RegexLexer): # Builtins Database builtins_db = { # Builtins for unknown dialect - 'unknown' : ( + 'unknown': ( common_builtins, pim_additional_builtins, iso_additional_builtins, @@ -858,53 +857,53 @@ class Modula2Lexer(RegexLexer): ), # Builtins for PIM Modula-2 - 'm2pim' : ( + 'm2pim': ( common_builtins, pim_additional_builtins, ), # Builtins for ISO Modula-2 - 'm2iso' : ( + 'm2iso': ( common_builtins, iso_additional_builtins, ), # Builtins for ISO Modula-2 - 'm2r10' : ( + 'm2r10': ( common_builtins, m2r10_additional_builtins, ), # Builtins for Objective Modula-2 - 'objm2' : ( + 'objm2': ( common_builtins, m2r10_additional_builtins, objm2_additional_builtins, ), # Builtins for Aglet Modula-2 Extensions - 'm2iso+aglet' : ( + 'm2iso+aglet': ( common_builtins, iso_additional_builtins, aglet_additional_builtins, ), # Builtins for GNU Modula-2 Extensions - 'm2pim+gm2' : ( + 'm2pim+gm2': ( common_builtins, pim_additional_builtins, gm2_additional_builtins, ), # Builtins for p1 Modula-2 Extensions - 'm2iso+p1' : ( + 'm2iso+p1': ( common_builtins, iso_additional_builtins, p1_additional_builtins, ), # Builtins for XDS Modula-2 Extensions - 'm2iso+xds' : ( + 'm2iso+xds': ( common_builtins, iso_additional_builtins, xds_additional_builtins, @@ -914,7 +913,7 @@ class Modula2Lexer(RegexLexer): # Pseudo-Module Builtins Database pseudo_builtins_db = { # Builtins for unknown dialect - 'unknown' : ( + 'unknown': ( common_pseudo_builtins, pim_additional_pseudo_builtins, iso_additional_pseudo_builtins, @@ -922,53 +921,53 @@ class Modula2Lexer(RegexLexer): ), # Builtins for PIM Modula-2 - 'm2pim' : ( + 'm2pim': ( common_pseudo_builtins, pim_additional_pseudo_builtins, 
), # Builtins for ISO Modula-2 - 'm2iso' : ( + 'm2iso': ( common_pseudo_builtins, iso_additional_pseudo_builtins, ), # Builtins for ISO Modula-2 - 'm2r10' : ( + 'm2r10': ( common_pseudo_builtins, m2r10_additional_pseudo_builtins, ), # Builtins for Objective Modula-2 - 'objm2' : ( + 'objm2': ( common_pseudo_builtins, m2r10_additional_pseudo_builtins, objm2_additional_pseudo_builtins, ), # Builtins for Aglet Modula-2 Extensions - 'm2iso+aglet' : ( + 'm2iso+aglet': ( common_pseudo_builtins, iso_additional_pseudo_builtins, aglet_additional_pseudo_builtins, ), # Builtins for GNU Modula-2 Extensions - 'm2pim+gm2' : ( + 'm2pim+gm2': ( common_pseudo_builtins, pim_additional_pseudo_builtins, gm2_additional_pseudo_builtins, ), # Builtins for p1 Modula-2 Extensions - 'm2iso+p1' : ( + 'm2iso+p1': ( common_pseudo_builtins, iso_additional_pseudo_builtins, p1_additional_pseudo_builtins, ), # Builtins for XDS Modula-2 Extensions - 'm2iso+xds' : ( + 'm2iso+xds': ( common_pseudo_builtins, iso_additional_pseudo_builtins, xds_additional_pseudo_builtins, @@ -978,46 +977,46 @@ class Modula2Lexer(RegexLexer): # Standard Library ADTs Database stdlib_adts_db = { # Empty entry for unknown dialect - 'unknown' : ( + 'unknown': ( # LEAVE THIS EMPTY ), # Standard Library ADTs for PIM Modula-2 - 'm2pim' : ( + 'm2pim': ( # No first class library types ), # Standard Library ADTs for ISO Modula-2 - 'm2iso' : ( + 'm2iso': ( # No first class library types ), # Standard Library ADTs for Modula-2 R10 - 'm2r10' : ( + 'm2r10': ( m2r10_stdlib_adt_identifiers, ), # Standard Library ADTs for Objective Modula-2 - 'objm2' : ( + 'objm2': ( m2r10_stdlib_adt_identifiers, ), # Standard Library ADTs for Aglet Modula-2 - 'm2iso+aglet' : ( + 'm2iso+aglet': ( # No first class library types ), # Standard Library ADTs for GNU Modula-2 - 'm2pim+gm2' : ( + 'm2pim+gm2': ( # No first class library types ), # Standard Library ADTs for p1 Modula-2 - 'm2iso+p1' : ( + 'm2iso+p1': ( # No first class library types ), # Standard 
Library ADTs for XDS Modula-2 - 'm2iso+xds' : ( + 'm2iso+xds': ( # No first class library types ), } @@ -1025,49 +1024,49 @@ class Modula2Lexer(RegexLexer): # Standard Library Modules Database stdlib_modules_db = { # Empty entry for unknown dialect - 'unknown' : ( + 'unknown': ( # LEAVE THIS EMPTY ), # Standard Library Modules for PIM Modula-2 - 'm2pim' : ( + 'm2pim': ( pim_stdlib_module_identifiers, ), # Standard Library Modules for ISO Modula-2 - 'm2iso' : ( + 'm2iso': ( iso_stdlib_module_identifiers, ), # Standard Library Modules for Modula-2 R10 - 'm2r10' : ( + 'm2r10': ( m2r10_stdlib_blueprint_identifiers, m2r10_stdlib_module_identifiers, m2r10_stdlib_adt_identifiers, ), # Standard Library Modules for Objective Modula-2 - 'objm2' : ( + 'objm2': ( m2r10_stdlib_blueprint_identifiers, m2r10_stdlib_module_identifiers, ), # Standard Library Modules for Aglet Modula-2 - 'm2iso+aglet' : ( + 'm2iso+aglet': ( iso_stdlib_module_identifiers, ), # Standard Library Modules for GNU Modula-2 - 'm2pim+gm2' : ( + 'm2pim+gm2': ( pim_stdlib_module_identifiers, ), # Standard Library Modules for p1 Modula-2 - 'm2iso+p1' : ( + 'm2iso+p1': ( iso_stdlib_module_identifiers, ), # Standard Library Modules for XDS Modula-2 - 'm2iso+xds' : ( + 'm2iso+xds': ( iso_stdlib_module_identifiers, ), } @@ -1075,46 +1074,46 @@ class Modula2Lexer(RegexLexer): # Standard Library Types Database stdlib_types_db = { # Empty entry for unknown dialect - 'unknown' : ( + 'unknown': ( # LEAVE THIS EMPTY ), # Standard Library Types for PIM Modula-2 - 'm2pim' : ( + 'm2pim': ( pim_stdlib_type_identifiers, ), # Standard Library Types for ISO Modula-2 - 'm2iso' : ( + 'm2iso': ( iso_stdlib_type_identifiers, ), # Standard Library Types for Modula-2 R10 - 'm2r10' : ( + 'm2r10': ( m2r10_stdlib_type_identifiers, ), # Standard Library Types for Objective Modula-2 - 'objm2' : ( + 'objm2': ( m2r10_stdlib_type_identifiers, ), # Standard Library Types for Aglet Modula-2 - 'm2iso+aglet' : ( + 'm2iso+aglet': ( 
iso_stdlib_type_identifiers, ), # Standard Library Types for GNU Modula-2 - 'm2pim+gm2' : ( + 'm2pim+gm2': ( pim_stdlib_type_identifiers, ), # Standard Library Types for p1 Modula-2 - 'm2iso+p1' : ( + 'm2iso+p1': ( iso_stdlib_type_identifiers, ), # Standard Library Types for XDS Modula-2 - 'm2iso+xds' : ( + 'm2iso+xds': ( iso_stdlib_type_identifiers, ), } @@ -1122,46 +1121,46 @@ class Modula2Lexer(RegexLexer): # Standard Library Procedures Database stdlib_procedures_db = { # Empty entry for unknown dialect - 'unknown' : ( + 'unknown': ( # LEAVE THIS EMPTY ), # Standard Library Procedures for PIM Modula-2 - 'm2pim' : ( + 'm2pim': ( pim_stdlib_proc_identifiers, ), # Standard Library Procedures for ISO Modula-2 - 'm2iso' : ( + 'm2iso': ( iso_stdlib_proc_identifiers, ), # Standard Library Procedures for Modula-2 R10 - 'm2r10' : ( + 'm2r10': ( m2r10_stdlib_proc_identifiers, ), # Standard Library Procedures for Objective Modula-2 - 'objm2' : ( + 'objm2': ( m2r10_stdlib_proc_identifiers, ), # Standard Library Procedures for Aglet Modula-2 - 'm2iso+aglet' : ( + 'm2iso+aglet': ( iso_stdlib_proc_identifiers, ), # Standard Library Procedures for GNU Modula-2 - 'm2pim+gm2' : ( + 'm2pim+gm2': ( pim_stdlib_proc_identifiers, ), # Standard Library Procedures for p1 Modula-2 - 'm2iso+p1' : ( + 'm2iso+p1': ( iso_stdlib_proc_identifiers, ), # Standard Library Procedures for XDS Modula-2 - 'm2iso+xds' : ( + 'm2iso+xds': ( iso_stdlib_proc_identifiers, ), } @@ -1169,46 +1168,46 @@ class Modula2Lexer(RegexLexer): # Standard Library Variables Database stdlib_variables_db = { # Empty entry for unknown dialect - 'unknown' : ( + 'unknown': ( # LEAVE THIS EMPTY ), # Standard Library Variables for PIM Modula-2 - 'm2pim' : ( + 'm2pim': ( pim_stdlib_var_identifiers, ), # Standard Library Variables for ISO Modula-2 - 'm2iso' : ( + 'm2iso': ( iso_stdlib_var_identifiers, ), # Standard Library Variables for Modula-2 R10 - 'm2r10' : ( + 'm2r10': ( m2r10_stdlib_var_identifiers, ), # Standard Library 
Variables for Objective Modula-2 - 'objm2' : ( + 'objm2': ( m2r10_stdlib_var_identifiers, ), # Standard Library Variables for Aglet Modula-2 - 'm2iso+aglet' : ( + 'm2iso+aglet': ( iso_stdlib_var_identifiers, ), # Standard Library Variables for GNU Modula-2 - 'm2pim+gm2' : ( + 'm2pim+gm2': ( pim_stdlib_var_identifiers, ), # Standard Library Variables for p1 Modula-2 - 'm2iso+p1' : ( + 'm2iso+p1': ( iso_stdlib_var_identifiers, ), # Standard Library Variables for XDS Modula-2 - 'm2iso+xds' : ( + 'm2iso+xds': ( iso_stdlib_var_identifiers, ), } @@ -1216,46 +1215,46 @@ class Modula2Lexer(RegexLexer): # Standard Library Constants Database stdlib_constants_db = { # Empty entry for unknown dialect - 'unknown' : ( + 'unknown': ( # LEAVE THIS EMPTY ), # Standard Library Constants for PIM Modula-2 - 'm2pim' : ( + 'm2pim': ( pim_stdlib_const_identifiers, ), # Standard Library Constants for ISO Modula-2 - 'm2iso' : ( + 'm2iso': ( iso_stdlib_const_identifiers, ), # Standard Library Constants for Modula-2 R10 - 'm2r10' : ( + 'm2r10': ( m2r10_stdlib_const_identifiers, ), # Standard Library Constants for Objective Modula-2 - 'objm2' : ( + 'objm2': ( m2r10_stdlib_const_identifiers, ), # Standard Library Constants for Aglet Modula-2 - 'm2iso+aglet' : ( + 'm2iso+aglet': ( iso_stdlib_const_identifiers, ), # Standard Library Constants for GNU Modula-2 - 'm2pim+gm2' : ( + 'm2pim+gm2': ( pim_stdlib_const_identifiers, ), # Standard Library Constants for p1 Modula-2 - 'm2iso+p1' : ( + 'm2iso+p1': ( iso_stdlib_const_identifiers, ), # Standard Library Constants for XDS Modula-2 - 'm2iso+xds' : ( + 'm2iso+xds': ( iso_stdlib_const_identifiers, ), } @@ -1264,10 +1263,6 @@ class Modula2Lexer(RegexLexer): # initialise a lexer instance def __init__(self, **options): - # - # Alias for unknown dialect - global UNKNOWN - UNKNOWN = self.dialects[0] # # check dialect options # @@ -1281,8 +1276,8 @@ class Modula2Lexer(RegexLexer): # # Fallback Mode (DEFAULT) else: - # no valid dialect option - 
self.set_dialect(UNKNOWN) + # no valid dialect option + self.set_dialect('unknown') # self.dialect_set_by_tag = False # @@ -1298,8 +1293,8 @@ class Modula2Lexer(RegexLexer): # # Check option flags # - self.treat_stdlib_adts_as_builtins = \ - get_bool_opt(options, 'treat_stdlib_adts_as_builtins', True) + self.treat_stdlib_adts_as_builtins = get_bool_opt( + options, 'treat_stdlib_adts_as_builtins', True) # # call superclass initialiser RegexLexer.__init__(self, **options) @@ -1307,12 +1302,12 @@ class Modula2Lexer(RegexLexer): # Set lexer to a specified dialect def set_dialect(self, dialect_id): # - #if __debug__: + # if __debug__: # print 'entered set_dialect with arg: ', dialect_id # # check dialect name against known dialects if dialect_id not in self.dialects: - dialect = UNKNOWN # default + dialect = 'unknown' # default else: dialect = dialect_id # @@ -1389,7 +1384,7 @@ class Modula2Lexer(RegexLexer): self.variables = variables_set self.constants = constants_set # - #if __debug__: + # if __debug__: # print 'exiting set_dialect' # print ' self.dialect: ', self.dialect # print ' self.lexemes_to_reject: ', self.lexemes_to_reject @@ -1409,7 +1404,7 @@ class Modula2Lexer(RegexLexer): # matching name is returned, otherwise dialect id 'unknown' is returned def get_dialect_from_dialect_tag(self, dialect_tag): # - #if __debug__: + # if __debug__: # print 'entered get_dialect_from_dialect_tag with arg: ', dialect_tag # # constants @@ -1422,37 +1417,37 @@ class Modula2Lexer(RegexLexer): # # check comment string for dialect indicator if len(dialect_tag) > (left_tag_delim_len + right_tag_delim_len) \ - and dialect_tag.startswith(left_tag_delim) \ - and dialect_tag.endswith(right_tag_delim): + and dialect_tag.startswith(left_tag_delim) \ + and dialect_tag.endswith(right_tag_delim): # - #if __debug__: + # if __debug__: # print 'dialect tag found' # # extract dialect indicator indicator = dialect_tag[indicator_start:indicator_end] # - #if __debug__: + # if __debug__: # print 
'extracted: ', indicator # # check against known dialects for index in range(1, len(self.dialects)): # - #if __debug__: + # if __debug__: # print 'dialects[', index, ']: ', self.dialects[index] # if indicator == self.dialects[index]: # - #if __debug__: + # if __debug__: # print 'matching dialect found' # # indicator matches known dialect return indicator else: # indicator does not match any dialect - return UNKNOWN # default + return 'unknown' # default else: # invalid indicator string - return UNKNOWN # default + return 'unknown' # default # intercept the token stream, modify token attributes and return them def get_tokens_unprocessed(self, text): @@ -1461,7 +1456,7 @@ class Modula2Lexer(RegexLexer): # check for dialect tag if dialect has not been set by tag if not self.dialect_set_by_tag and token == Comment.Special: indicated_dialect = self.get_dialect_from_dialect_tag(value) - if indicated_dialect != UNKNOWN: + if indicated_dialect != 'unknown': # token is a dialect indicator # reset reserved words and builtins self.set_dialect(indicated_dialect) @@ -1510,7 +1505,7 @@ class Modula2Lexer(RegexLexer): elif token in Number: # # mark prefix number literals as error for PIM and ISO dialects - if self.dialect not in (UNKNOWN, 'm2r10', 'objm2'): + if self.dialect not in ('unknown', 'm2r10', 'objm2'): if "'" in value or value[0:2] in ('0b', '0x', '0u'): token = Error # @@ -1529,7 +1524,7 @@ class Modula2Lexer(RegexLexer): # # mark single line comment as error for PIM and ISO dialects if token is Comment.Single: - if self.dialect not in [UNKNOWN, 'm2r10', 'objm2']: + if self.dialect not in ('unknown', 'm2r10', 'objm2'): token = Error # if token is Comment.Preproc: @@ -1539,11 +1534,11 @@ class Modula2Lexer(RegexLexer): token = Error # mark PIM pragma as comment for other dialects elif value.startswith('(*$') and \ - self.dialect != UNKNOWN and \ - not self.dialect.startswith('m2pim'): + self.dialect != 'unknown' and \ + not self.dialect.startswith('m2pim'): token = 
Comment.Multiline # - else: # token is neither Name nor Comment + else: # token is neither Name nor Comment # # mark lexemes matching the dialect's error token set as errors if value in self.lexemes_to_reject: -- cgit v1.2.1 From 7bc19a5fd1fb7e40c8ec4937a0fe45ddb56b5509 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 20 Oct 2015 19:10:35 +0200 Subject: Closes #1160: fix docstring markup. --- pygments/formatters/img.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/formatters/img.py b/pygments/formatters/img.py index 60f33fa6..667a8697 100644 --- a/pygments/formatters/img.py +++ b/pygments/formatters/img.py @@ -196,7 +196,7 @@ class ImageFormatter(Formatter): bold and italic fonts will be generated. This really should be a monospace font to look sane. - Default: "Bitstream Vera Sans Mono" on Windows, Courier New on *nix + Default: "Bitstream Vera Sans Mono" on Windows, Courier New on \*nix `font_size` The font size in points to be used. -- cgit v1.2.1 From bd1d943efc27031032e2f8c911c0909381b577ca Mon Sep 17 00:00:00 2001 From: Nathan Whetsell Date: Fri, 23 Oct 2015 21:13:55 -0400 Subject: Ignore instr opcode --- pygments/lexers/_csound_builtins.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pygments/lexers/_csound_builtins.py b/pygments/lexers/_csound_builtins.py index ddf87a64..5f7a798a 100644 --- a/pygments/lexers/_csound_builtins.py +++ b/pygments/lexers/_csound_builtins.py @@ -12,6 +12,7 @@ # endop # igoto +# instr # kgoto # loop_ge # loop_gt @@ -463,7 +464,7 @@ OPCODES = set(( 'ins', 'insglobal', 'insremot', - 'instr', + #'instr', 'int', 'integ', 'interp', -- cgit v1.2.1 From e21965d42660c6bb58d62dffed01085ba230e4c6 Mon Sep 17 00:00:00 2001 From: Nathan Whetsell Date: Fri, 23 Oct 2015 21:14:50 -0400 Subject: Add Csound Document lexer --- pygments/lexers/_mapping.py | 1 + pygments/lexers/csound.py | 93 +++++++++++++++++++++------------------------ 2 files changed, 45 insertions(+), 49 deletions(-) diff 
--git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 03a3c96e..e715af2e 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -90,6 +90,7 @@ LEXERS = { 'CrmshLexer': ('pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()), 'CrocLexer': ('pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)), 'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)), + 'CsoundDocumentLexer': ('pygments.lexers.csound', 'Csound Document', (), ('*.csd',), ()), 'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', (), ('*.orc',), ()), 'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', (), ('*.sco',), ()), 'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')), diff --git a/pygments/lexers/csound.py b/pygments/lexers/csound.py index 361f048a..b9613bdf 100644 --- a/pygments/lexers/csound.py +++ b/pygments/lexers/csound.py @@ -9,17 +9,17 @@ :license: BSD, see LICENSE for details. """ -import re +import copy, re from pygments.lexer import RegexLexer, bygroups, default, include, using, words from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, \ String, Text from pygments.lexers._csound_builtins import OPCODES +from pygments.lexers.html import HtmlLexer, XmlLexer from pygments.lexers.python import PythonLexer from pygments.lexers.scripting import LuaLexer -# The CsoundDocumentLexer casuses a Pygments test to fail. -__all__ = ['CsoundScoreLexer', 'CsoundOrchestraLexer'] # , 'CsoundDocumentLexer'] +__all__ = ['CsoundScoreLexer', 'CsoundOrchestraLexer', 'CsoundDocumentLexer'] newline = (r'((?:;|//).*)*(\n)', bygroups(Comment.Single, Text)) @@ -309,49 +309,44 @@ class CsoundOrchestraLexer(CsoundLexer): } -# Below is a lexer for Csound documents, but it causes a Pygments test to fail. 
- -# import copy -# from pygments.lexers.html import HtmlLexer, XmlLexer -# -# class CsoundDocumentLexer(XmlLexer): -# """ -# For `Csound `_ documents. -# """ -# -# name = 'Csound Document' -# aliases = ['csound'] -# filenames = ['*.csd'] -# -# tokens = copy.deepcopy(XmlLexer.tokens) -# for i, item in enumerate(tokens['root']): -# if len(item) > 2 and item[2] == 'tag': -# (tokens['root']).insert(i, (r'(<)(\s*)(CsInstruments)(\s*)', -# bygroups(Name.Tag, Text, Name.Tag, Text), -# ('orchestra content', 'tag'))) -# (tokens['root']).insert(i, (r'(<)(\s*)(CsScore)(\s*)', -# bygroups(Name.Tag, Text, Name.Tag, Text), -# ('score content', 'tag'))) -# (tokens['root']).insert(i, (r'(<)(\s*)(html)(\s*)', -# bygroups(Name.Tag, Text, Name.Tag, Text), -# ('HTML', 'tag'))) -# break -# -# tokens['orchestra content'] = [ -# (r'(<)(\s*)(/)(\s*)(CsInstruments)(\s*)(>)', -# bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), -# '#pop'), -# (r'.+?(?=<\s*/\s*CsInstruments\s*>)', using(CsoundOrchestraLexer)) -# ] -# tokens['score content'] = [ -# (r'(<)(\s*)(/)(\s*)(CsScore)(\s*)(>)', -# bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), -# '#pop'), -# (r'.+?(?=<\s*/\s*CsScore\s*>)', using(CsoundScoreLexer)) -# ] -# tokens['HTML'] = [ -# (r'(<)(\s*)(/)(\s*)(html)(\s*)(>)', -# bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), -# '#pop'), -# (r'.+?(?=<\s*/\s*html\s*>)', using(HtmlLexer)) -# ] +class CsoundDocumentLexer(XmlLexer): + """ + For `Csound `_ documents. 
+ + + """ + + name = 'Csound Document' + aliases = [] + filenames = ['*.csd'] + mimetypes = [] + + tokens = copy.deepcopy(XmlLexer.tokens) + for i, item in enumerate(tokens['root']): + if len(item) > 2 and item[2] == 'tag': + (tokens['root']).insert(i, (r'(<)(\s*)(CsInstruments)(\s*)', + bygroups(Name.Tag, Text, Name.Tag, Text), + ('orchestra content', 'tag'))) + (tokens['root']).insert(i, (r'(<)(\s*)(CsScore)(\s*)', + bygroups(Name.Tag, Text, Name.Tag, Text), + ('score content', 'tag'))) + (tokens['root']).insert(i, (r'(<)(\s*)(html)(\s*)', + bygroups(Name.Tag, Text, Name.Tag, Text), + ('HTML', 'tag'))) + break + + tokens['orchestra content'] = [ + (r'(<)(\s*)(/)(\s*)(CsInstruments)(\s*)(>)', + bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), '#pop'), + (r'.+?(?=<\s*/\s*CsInstruments\s*>)', using(CsoundOrchestraLexer)) + ] + tokens['score content'] = [ + (r'(<)(\s*)(/)(\s*)(CsScore)(\s*)(>)', + bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), '#pop'), + (r'.+?(?=<\s*/\s*CsScore\s*>)', using(CsoundScoreLexer)) + ] + tokens['HTML'] = [ + (r'(<)(\s*)(/)(\s*)(html)(\s*)(>)', + bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), '#pop'), + (r'.+?(?=<\s*/\s*html\s*>)', using(HtmlLexer)) + ] -- cgit v1.2.1 From 46ac848624162c04022f0c1c10831d95878ba57f Mon Sep 17 00:00:00 2001 From: Nathan Whetsell Date: Fri, 23 Oct 2015 21:15:27 -0400 Subject: Fix minor nit in AUTHORS --- AUTHORS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 30b88082..f84ef2a0 100644 --- a/AUTHORS +++ b/AUTHORS @@ -184,7 +184,7 @@ Other contributors, listed alphabetically, are: * Matthias Vallentin -- Bro lexer * Linh Vu Hong -- RSL lexer * Nathan Weizenbaum -- Haml and Sass lexers -* Nathan Whetsell -- CSound lexers +* Nathan Whetsell -- Csound lexers * Dietmar Winkler -- Modelica lexer * Nils Winter -- Smalltalk lexer * Davy Wybiral -- Clojure lexer -- cgit v1.2.1 From 
b8f3945ff8955bc5adb77277cda3341edc58e33d Mon Sep 17 00:00:00 2001 From: Miikka Salminen Date: Sat, 24 Oct 2015 22:30:07 +0300 Subject: Added lexer for typical hexdump outputs. --- pygments/lexers/_mapping.py | 1 + pygments/lexers/hexdump.py | 74 ++++++++++ tests/examplefiles/hexdump_test | 310 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 385 insertions(+) create mode 100644 pygments/lexers/hexdump.py create mode 100644 tests/examplefiles/hexdump_test diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 03a3c96e..aaf6d606 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -161,6 +161,7 @@ LEXERS = { 'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()), 'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)), 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')), + 'HexdumpLexer': ('pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()), 'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')), 'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)), 'HtmlLexer': ('pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')), diff --git a/pygments/lexers/hexdump.py b/pygments/lexers/hexdump.py new file mode 100644 index 00000000..f20244ae --- /dev/null +++ b/pygments/lexers/hexdump.py @@ -0,0 +1,74 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.hexdump + ~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for hexadecimal dumps. + + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +import re + +from pygments.lexer import RegexLexer, bygroups, include +from pygments.token import Text, Name, Number, String, Punctuation + +__all__ = ['HexdumpLexer'] + + +class HexdumpLexer(RegexLexer): + """ + For hex dumps output by the UNIX hexdump and hexcat tools. + """ + name = 'Hexdump' + aliases = ['hexdump'] + + hd = r'[0-9A-Ha-h]' + + tokens = { + 'root': [ + (r'\n', Text), + include('offset'), + (hd+r'{4}', Number.Hex, 'stringless-mode'), + (hd+r'{2}', Number.Hex), + (r'(\s+)(\|)(.{16})(\|)$', bygroups(Text, Punctuation, String, Punctuation), 'piped-strings'), + (r'(\s+)(\|)(.{1,15})(\|)$', bygroups(Text, Punctuation, String, Punctuation)), + (r'(\s+)(.{1,15})$', bygroups(Text, String)), + (r'(\s+)(.{16})$', bygroups(Text, String), 'nonpiped-strings'), + (r'\s', Text), + (r'^\*', Punctuation), + ], + 'offset': [ + (r'^('+hd+'+)(:)', bygroups(Name.Label, Punctuation), 'offset-mode'), + (r'^'+hd+'+', Name.Label), + ], + 'offset-mode': [ + (r'\s', Text, '#pop'), + (hd+'+', Name.Label), + (r':', Punctuation) + ], + 'stringless-mode': [ + (r'\n', Text), + include('offset'), + (hd+r'{2}', Number.Hex), + (r'\s', Text), + (r'^\*', Punctuation), + ], + 'piped-strings': [ + (r'\n', Text), + include('offset'), + (hd+r'{2}', Number.Hex), + (r'(\s)(\|)(.{1,16})(\|)$', bygroups(Text, Punctuation, String, Punctuation)), + (r'\s', Text), + (r'^\*', Punctuation), + ], + 'nonpiped-strings': [ + (r'\n', Text), + include('offset'), + (hd+r'{2}', Number.Hex), + (r'(\s+)(.{1,16})$', bygroups(Text, String)), + (r'\s', Text), + (r'^\*', Punctuation), + ], + } diff --git a/tests/examplefiles/hexdump_test b/tests/examplefiles/hexdump_test new file mode 100644 index 00000000..4af46fcb --- /dev/null +++ b/tests/examplefiles/hexdump_test @@ -0,0 +1,310 @@ +00000000 2f 2f 20 43 72 65 61 74 65 64 20 62 79 20 4c 69 |// Created by Li| +00000010 6f 6e 65 6c 6c 6f 20 4c 75 6e 65 73 75 20 61 6e |onello Lunesu an| +00000020 64 20 70 6c 61 63 65 64 20 69 6e 20 74 68 65 20 |d placed 
in the | +00000030 70 75 62 6c 69 63 20 64 6f 6d 61 69 6e 2e 0a 2f |public domain../| +00000040 2f 20 54 68 69 73 20 66 69 6c 65 20 68 61 73 20 |/ This file has | +00000050 62 65 65 6e 20 6d 6f 64 69 66 69 65 64 20 66 72 |been modified fr| +00000060 6f 6d 20 69 74 73 20 6f 72 69 67 69 6e 61 6c 20 |om its original | +00000070 76 65 72 73 69 6f 6e 2e 0a 2f 2f 20 49 74 20 68 |version..// It h| +00000080 61 73 20 62 65 65 6e 20 66 6f 72 6d 61 74 74 65 |as been formatte| +00000090 64 20 74 6f 20 66 69 74 20 79 6f 75 72 20 73 63 |d to fit your sc| +000000a0 72 65 65 6e 2e 0a 6d 6f 64 75 6c 65 20 70 68 6f |reen..module pho| +000000b0 6e 65 6e 6f 3b 20 20 20 20 20 2f 2f 20 6f 70 74 |neno; // opt| +000000c0 69 6f 6e 61 6c 0a 69 6d 70 6f 72 74 20 73 74 64 |ional.import std| +000000d0 2e 73 74 64 69 6f 3b 20 20 20 2f 2f 20 77 72 69 |.stdio; // wri| +000000e0 74 65 66 6c 6e 20 20 20 20 20 0a 69 6d 70 6f 72 |tefln .impor| +000000f0 74 20 73 74 64 2e 63 74 79 70 65 3b 20 20 20 2f |t std.ctype; /| +00000100 2f 20 69 73 64 69 67 69 74 20 20 20 20 20 0a 69 |/ isdigit .i| +00000110 6d 70 6f 72 74 20 73 74 64 2e 73 74 72 65 61 6d |mport std.stream| +00000120 3b 20 20 2f 2f 20 42 75 66 66 65 72 65 64 46 69 |; // BufferedFi| +00000130 6c 65 0a 0a 2f 2f 20 4a 75 73 74 20 66 6f 72 20 |le..// Just for | +00000140 72 65 61 64 61 62 69 6c 69 74 79 20 28 69 6d 61 |readability (ima| +00000150 67 69 6e 65 20 63 68 61 72 5b 5d 5b 5d 5b 63 68 |gine char[][][ch| +00000160 61 72 5b 5d 5d 29 20 20 20 20 0a 61 6c 69 61 73 |ar[]]) .alias| +00000170 20 63 68 61 72 5b 5d 20 73 74 72 69 6e 67 3b 0a | char[] string;.| +00000180 61 6c 69 61 73 20 73 74 72 69 6e 67 5b 5d 20 73 |alias string[] s| +00000190 74 72 69 6e 67 61 72 72 61 79 3b 0a 0a 2f 2f 2f |tringarray;..///| +000001a0 20 53 74 72 69 70 73 20 6e 6f 6e 2d 64 69 67 69 | Strips non-digi| +000001b0 74 20 63 68 61 72 61 63 74 65 72 73 20 66 72 6f |t characters fro| +000001c0 6d 20 74 68 65 20 73 74 72 69 6e 67 20 28 43 4f |m the string (CO| 
+000001d0 57 29 0a 73 74 72 69 6e 67 20 73 74 72 69 70 4e |W).string stripN| +000001e0 6f 6e 44 69 67 69 74 28 20 69 6e 20 73 74 72 69 |onDigit( in stri| +000001f0 6e 67 20 6c 69 6e 65 20 29 20 0a 7b 0a 20 20 20 |ng line ) .{. | +00000200 20 73 74 72 69 6e 67 20 72 65 74 3b 0a 20 20 20 | string ret;. | +00000210 20 66 6f 72 65 61 63 68 28 75 69 6e 74 20 69 2c | foreach(uint i,| +00000220 20 63 3b 20 6c 69 6e 65 29 20 7b 0a 20 20 20 20 | c; line) {. | +00000230 20 20 20 20 2f 2f 20 45 72 72 6f 72 3a 20 73 74 | // Error: st| +00000240 64 2e 63 74 79 70 65 2e 69 73 64 69 67 69 74 20 |d.ctype.isdigit | +00000250 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 5c 70 68 |at C:\dmd\src\ph| +00000260 6f 62 6f 73 5c 73 74 64 5c 63 74 79 70 65 2e 64 |obos\std\ctype.d| +00000270 28 33 37 29 20 0a 20 20 20 20 20 20 20 20 2f 2f |(37) . //| +00000280 20 63 6f 6e 66 6c 69 63 74 73 20 77 69 74 68 20 | conflicts with | +00000290 73 74 64 2e 73 74 72 65 61 6d 2e 69 73 64 69 67 |std.stream.isdig| +000002a0 69 74 20 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 |it at C:\dmd\src| +000002b0 5c 70 68 6f 62 6f 73 5c 73 74 64 5c 73 74 72 65 |\phobos\std\stre| +000002c0 61 6d 2e 64 28 32 39 32 34 29 0a 20 20 20 20 20 |am.d(2924). | +000002d0 20 20 20 69 66 20 28 21 73 74 64 2e 63 74 79 70 | if (!std.ctyp| +000002e0 65 2e 69 73 64 69 67 69 74 28 63 29 29 20 7b 0a |e.isdigit(c)) {.| +000002f0 20 20 20 20 20 20 20 20 20 20 20 20 69 66 20 28 | if (| +00000300 21 72 65 74 29 0a 20 20 20 20 20 20 20 20 20 20 |!ret). | +00000310 20 20 20 20 20 20 72 65 74 20 3d 20 6c 69 6e 65 | ret = line| +00000320 5b 30 2e 2e 69 5d 3b 20 20 20 20 0a 20 20 20 20 |[0..i]; . | +00000330 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 20 20 | } . | +00000340 20 20 65 6c 73 65 20 69 66 20 28 72 65 74 29 0a | else if (ret).| +00000350 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 | ret | +00000360 7e 3d 20 63 3b 20 20 20 20 0a 20 20 20 20 7d 20 |~= c; . } | +00000370 20 20 20 0a 20 20 20 20 72 65 74 75 72 6e 20 72 | . 
return r| +00000380 65 74 3f 72 65 74 3a 6c 69 6e 65 3b 0a 7d 0a 0a |et?ret:line;.}..| +00000390 75 6e 69 74 74 65 73 74 20 7b 0a 20 20 20 20 61 |unittest {. a| +000003a0 73 73 65 72 74 28 20 73 74 72 69 70 4e 6f 6e 44 |ssert( stripNonD| +000003b0 69 67 69 74 28 22 61 73 64 66 22 29 20 3d 3d 20 |igit("asdf") == | +000003c0 22 22 20 20 29 3b 0a 20 20 20 20 61 73 73 65 72 |"" );. asser| +000003d0 74 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 69 74 |t( stripNonDigit| +000003e0 28 22 5c 27 31 33 2d 3d 32 20 34 6b 6f 70 22 29 |("\'13-=2 4kop")| +000003f0 20 3d 3d 20 20 22 31 33 32 34 22 20 20 29 3b 0a | == "1324" );.| +00000400 7d 0a 0a 2f 2f 2f 20 43 6f 6e 76 65 72 74 73 20 |}../// Converts | +00000410 61 20 77 6f 72 64 20 69 6e 74 6f 20 61 20 6e 75 |a word into a nu| +00000420 6d 62 65 72 2c 20 69 67 6e 6f 72 69 6e 67 20 61 |mber, ignoring a| +00000430 6c 6c 20 6e 6f 6e 20 61 6c 70 68 61 20 63 68 61 |ll non alpha cha| +00000440 72 61 63 74 65 72 73 20 20 0a 73 74 72 69 6e 67 |racters .string| +00000450 20 77 6f 72 64 54 6f 4e 75 6d 28 20 69 6e 20 73 | wordToNum( in s| +00000460 74 72 69 6e 67 20 77 6f 72 64 20 29 0a 7b 0a 2f |tring word ).{./| +00000470 2f 20 74 72 61 6e 73 6c 61 74 69 6f 6e 20 74 61 |/ translation ta| +00000480 62 6c 65 20 66 6f 72 20 74 68 65 20 74 61 73 6b |ble for the task| +00000490 20 61 74 20 68 61 6e 64 0a 63 6f 6e 73 74 20 63 | at hand.const c| +000004a0 68 61 72 5b 32 35 36 5d 20 54 52 41 4e 53 4c 41 |har[256] TRANSLA| +000004b0 54 45 20 3d 20 20 20 20 0a 20 20 20 20 22 20 20 |TE = . " | +000004c0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +000004d0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 22 20 | " | +000004e0 20 2f 2f 20 30 20 20 20 0a 20 20 20 20 22 20 20 | // 0 . " | +000004f0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 30 31 | 01| +00000500 32 33 34 35 36 37 38 39 20 20 20 20 20 20 22 20 |23456789 " | +00000510 20 2f 2f 20 33 32 20 20 20 20 20 0a 20 20 20 20 | // 32 . 
| +00000520 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 31 |" 57630499617851| +00000530 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 20 |881234762239 | +00000540 20 22 20 20 2f 2f 20 36 34 20 20 20 0a 20 20 20 | " // 64 . | +00000550 20 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 | " 5763049961785| +00000560 31 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 |1881234762239 | +00000570 20 20 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 | ". " | +00000580 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000590 20 20 20 20 20 20 20 20 20 22 0a 20 20 20 20 22 | ". "| +000005a0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +* +000005c0 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 20 20 |". " | +000005d0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +000005e0 20 20 20 20 20 20 20 22 20 20 20 20 0a 20 20 20 | " . | +000005f0 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | " | +00000600 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000610 20 20 22 3b 0a 20 20 20 20 73 74 72 69 6e 67 20 | ";. string | +00000620 72 65 74 3b 0a 20 20 20 20 66 6f 72 65 61 63 68 |ret;. foreach| +00000630 28 63 3b 20 63 61 73 74 28 75 62 79 74 65 5b 5d |(c; cast(ubyte[]| +00000640 29 77 6f 72 64 29 0a 20 20 20 20 20 20 20 20 69 |)word). i| +00000650 66 20 28 54 52 41 4e 53 4c 41 54 45 5b 63 5d 20 |f (TRANSLATE[c] | +00000660 21 3d 20 27 20 27 29 0a 20 20 20 20 20 20 20 20 |!= ' '). | +00000670 20 20 20 20 72 65 74 20 7e 3d 20 54 52 41 4e 53 | ret ~= TRANS| +00000680 4c 41 54 45 5b 63 5d 3b 0a 20 20 20 20 72 65 74 |LATE[c];. ret| +00000690 75 72 6e 20 72 65 74 3b 0a 7d 0a 0a 75 6e 69 74 |urn ret;.}..unit| +000006a0 74 65 73 74 20 7b 0a 20 2f 2f 20 54 65 73 74 20 |test {. // Test | +000006b0 77 6f 72 64 54 6f 4e 75 6d 20 75 73 69 6e 67 20 |wordToNum using | +000006c0 74 68 65 20 74 61 62 6c 65 20 66 72 6f 6d 20 74 |the table from t| +000006d0 68 65 20 74 61 73 6b 20 64 65 73 63 72 69 70 74 |he task descript| +000006e0 69 6f 6e 2e 0a 20 61 73 73 65 72 74 28 20 22 30 |ion.. 
assert( "0| +000006f0 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 36 |1112223334455666| +00000700 37 37 37 38 38 38 39 39 39 22 20 3d 3d 0a 20 20 |777888999" ==. | +00000710 20 77 6f 72 64 54 6f 4e 75 6d 28 22 45 20 7c 20 | wordToNum("E | | +00000720 4a 20 4e 20 51 20 7c 20 52 20 57 20 58 20 7c 20 |J N Q | R W X | | +00000730 44 20 53 20 59 20 7c 20 46 20 54 20 7c 20 41 20 |D S Y | F T | A | +00000740 4d 20 7c 20 43 20 49 20 56 20 7c 20 42 20 4b 20 |M | C I V | B K | +00000750 55 20 7c 20 4c 20 4f 20 50 20 7c 20 47 20 48 20 |U | L O P | G H | +00000760 5a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 20 22 |Z"));. assert( "| +00000770 30 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 |0111222333445566| +00000780 36 37 37 37 38 38 38 39 39 39 22 20 3d 3d 20 0a |6777888999" == .| +00000790 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 65 20 | wordToNum("e | +000007a0 7c 20 6a 20 6e 20 71 20 7c 20 72 20 77 20 78 20 || j n q | r w x | +000007b0 7c 20 64 20 73 20 79 20 7c 20 66 20 74 20 7c 20 || d s y | f t | | +000007c0 61 20 6d 20 7c 20 63 20 69 20 76 20 7c 20 62 20 |a m | c i v | b | +000007d0 6b 20 75 20 7c 20 6c 20 6f 20 70 20 7c 20 67 20 |k u | l o p | g | +000007e0 68 20 7a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 |h z"));. assert(| +000007f0 20 22 30 31 32 33 34 35 36 37 38 39 22 20 3d 3d | "0123456789" ==| +00000800 20 0a 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 | . wordToNum("| +00000810 30 20 7c 20 20 20 31 20 20 20 7c 20 20 20 32 20 |0 | 1 | 2 | +00000820 20 20 7c 20 20 20 33 20 20 20 7c 20 20 34 20 20 | | 3 | 4 | +00000830 7c 20 20 35 20 20 7c 20 20 20 36 20 20 20 7c 20 || 5 | 6 | | +00000840 20 20 37 20 20 20 7c 20 20 20 38 20 20 20 7c 20 | 7 | 8 | | +00000850 20 20 39 22 29 29 3b 0a 7d 0a 0a 76 6f 69 64 20 | 9"));.}..void | +00000860 6d 61 69 6e 28 20 73 74 72 69 6e 67 5b 5d 20 61 |main( string[] a| +00000870 72 67 73 20 29 0a 7b 0a 20 20 20 20 2f 2f 20 54 |rgs ).{. 
// T| +00000880 68 69 73 20 61 73 73 6f 63 69 61 74 69 76 65 20 |his associative | +00000890 61 72 72 61 79 20 6d 61 70 73 20 61 20 6e 75 6d |array maps a num| +000008a0 62 65 72 20 74 6f 20 61 6e 20 61 72 72 61 79 20 |ber to an array | +000008b0 6f 66 20 77 6f 72 64 73 2e 20 20 20 20 0a 20 20 |of words. . | +000008c0 20 20 73 74 72 69 6e 67 61 72 72 61 79 5b 73 74 | stringarray[st| +000008d0 72 69 6e 67 5d 20 20 20 20 6e 75 6d 32 77 6f 72 |ring] num2wor| +000008e0 64 73 3b 0a 0a 20 20 20 20 66 6f 72 65 61 63 68 |ds;.. foreach| +000008f0 28 73 74 72 69 6e 67 20 77 6f 72 64 3b 20 6e 65 |(string word; ne| +00000900 77 20 42 75 66 66 65 72 65 64 46 69 6c 65 28 22 |w BufferedFile("| +00000910 64 69 63 74 69 6f 6e 61 72 79 2e 74 78 74 22 20 |dictionary.txt" | +00000920 29 20 29 0a 20 20 20 20 20 20 20 20 6e 75 6d 32 |) ). num2| +00000930 77 6f 72 64 73 5b 20 77 6f 72 64 54 6f 4e 75 6d |words[ wordToNum| +00000940 28 77 6f 72 64 29 20 5d 20 7e 3d 20 77 6f 72 64 |(word) ] ~= word| +00000950 2e 64 75 70 3b 20 20 20 20 20 20 20 20 2f 2f 20 |.dup; // | +00000960 6d 75 73 74 20 64 75 70 0a 0a 20 20 20 20 2f 2f |must dup.. //| +00000970 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 6c 74 65 |/ Finds all alte| +00000980 72 6e 61 74 69 76 65 73 20 66 6f 72 20 74 68 65 |rnatives for the| +00000990 20 67 69 76 65 6e 20 6e 75 6d 62 65 72 0a 20 20 | given number. | +000009a0 20 20 2f 2f 2f 20 28 73 68 6f 75 6c 64 20 68 61 | /// (should ha| +000009b0 76 65 20 62 65 65 6e 20 73 74 72 69 70 70 65 64 |ve been stripped| +000009c0 20 66 72 6f 6d 20 6e 6f 6e 2d 64 69 67 69 74 20 | from non-digit | +000009d0 63 68 61 72 61 63 74 65 72 73 29 0a 20 20 20 20 |characters). | +000009e0 73 74 72 69 6e 67 61 72 72 61 79 20 5f 46 69 6e |stringarray _Fin| +000009f0 64 57 6f 72 64 73 28 20 73 74 72 69 6e 67 20 6e |dWords( string n| +00000a00 75 6d 62 65 72 73 2c 20 62 6f 6f 6c 20 64 69 67 |umbers, bool dig| +00000a10 69 74 6f 6b 20 29 0a 20 20 20 20 69 6e 20 7b 0a |itok ). 
in {.| +00000a20 20 20 20 20 20 20 20 20 61 73 73 65 72 74 28 6e | assert(n| +00000a30 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 20 3e 20 |umbers.length > | +00000a40 20 30 29 3b 20 20 20 20 0a 20 20 20 20 7d 20 20 | 0); . } | +00000a50 20 20 0a 20 20 20 20 6f 75 74 28 72 65 73 75 6c | . out(resul| +00000a60 74 29 20 7b 0a 20 20 20 20 20 20 20 20 66 6f 72 |t) {. for| +00000a70 65 61 63 68 20 28 61 3b 20 72 65 73 75 6c 74 29 |each (a; result)| +00000a80 0a 20 20 20 20 20 20 20 20 20 20 20 20 61 73 73 |. ass| +00000a90 65 72 74 28 20 77 6f 72 64 54 6f 4e 75 6d 28 61 |ert( wordToNum(a| +00000aa0 29 20 3d 3d 20 6e 75 6d 62 65 72 73 20 29 3b 0a |) == numbers );.| +00000ab0 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 62 6f | } . bo| +00000ac0 64 79 20 7b 0a 20 20 20 20 20 20 20 20 73 74 72 |dy {. str| +00000ad0 69 6e 67 61 72 72 61 79 20 72 65 74 3b 0a 20 20 |ingarray ret;. | +00000ae0 20 20 20 20 20 20 62 6f 6f 6c 20 66 6f 75 6e 64 | bool found| +00000af0 77 6f 72 64 20 3d 20 66 61 6c 73 65 3b 0a 20 20 |word = false;. | +00000b00 20 20 20 20 20 20 66 6f 72 20 28 75 69 6e 74 20 | for (uint | +00000b10 74 3d 31 3b 20 74 3c 3d 6e 75 6d 62 65 72 73 2e |t=1; t<=numbers.| +00000b20 6c 65 6e 67 74 68 3b 20 2b 2b 74 29 20 7b 0a 20 |length; ++t) {. | +00000b30 20 20 20 20 20 20 20 20 20 20 20 61 75 74 6f 20 | auto | +00000b40 61 6c 74 65 72 6e 61 74 69 76 65 73 20 3d 20 6e |alternatives = n| +00000b50 75 6d 62 65 72 73 5b 30 2e 2e 74 5d 20 69 6e 20 |umbers[0..t] in | +00000b60 6e 75 6d 32 77 6f 72 64 73 3b 0a 20 20 20 20 20 |num2words;. | +00000b70 20 20 20 20 20 20 20 69 66 20 28 21 61 6c 74 65 | if (!alte| +00000b80 72 6e 61 74 69 76 65 73 29 0a 20 20 20 20 20 20 |rnatives). | +00000b90 20 20 20 20 20 20 20 20 20 20 63 6f 6e 74 69 6e | contin| +00000ba0 75 65 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 |ue;. | +00000bb0 66 6f 75 6e 64 77 6f 72 64 20 3d 20 74 72 75 65 |foundword = true| +00000bc0 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 69 66 |;. 
if| +00000bd0 20 28 6e 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 | (numbers.length| +00000be0 20 3e 20 20 74 29 20 7b 0a 20 20 20 20 20 20 20 | > t) {. | +00000bf0 20 20 20 20 20 20 20 20 20 2f 2f 20 43 6f 6d 62 | // Comb| +00000c00 69 6e 65 20 61 6c 6c 20 63 75 72 72 65 6e 74 20 |ine all current | +00000c10 61 6c 74 65 72 6e 61 74 69 76 65 73 20 77 69 74 |alternatives wit| +00000c20 68 20 61 6c 6c 20 61 6c 74 65 72 6e 61 74 69 76 |h all alternativ| +00000c30 65 73 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 |es . | +00000c40 20 20 20 20 20 20 20 20 2f 2f 20 6f 66 20 74 68 | // of th| +00000c50 65 20 72 65 73 74 20 28 6e 65 78 74 20 70 69 65 |e rest (next pie| +00000c60 63 65 20 63 61 6e 20 73 74 61 72 74 20 77 69 74 |ce can start wit| +00000c70 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 |h a digit) | +00000c80 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 | . | +00000c90 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 63 68 | foreach| +00000ca0 20 28 61 32 3b 20 5f 46 69 6e 64 57 6f 72 64 73 | (a2; _FindWords| +00000cb0 28 20 6e 75 6d 62 65 72 73 5b 74 2e 2e 24 5d 2c |( numbers[t..$],| +00000cc0 20 74 72 75 65 20 20 20 20 20 29 20 29 0a 20 20 | true ) ). | +00000cd0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000ce0 20 20 66 6f 72 65 61 63 68 28 61 31 3b 20 2a 61 | foreach(a1; *a| +00000cf0 6c 74 65 72 6e 61 74 69 76 65 73 29 0a 20 20 20 |lternatives). | +00000d00 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000d10 20 20 20 20 72 65 74 20 7e 3d 20 61 31 20 7e 20 | ret ~= a1 ~ | +00000d20 22 20 22 20 7e 20 61 32 3b 0a 20 20 20 20 20 20 |" " ~ a2;. | +00000d30 20 20 20 20 20 20 7d 0a 20 20 20 20 20 20 20 20 | }. | +00000d40 20 20 20 20 65 6c 73 65 20 20 20 20 0a 20 20 20 | else . 
| +00000d50 20 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 | ret| +00000d60 20 7e 3d 20 2a 61 6c 74 65 72 6e 61 74 69 76 65 | ~= *alternative| +00000d70 73 3b 20 20 20 20 2f 2f 20 61 70 70 65 6e 64 20 |s; // append | +00000d80 74 68 65 73 65 20 61 6c 74 65 72 6e 61 74 69 76 |these alternativ| +00000d90 65 73 0a 20 20 20 20 20 20 20 20 7d 0a 20 20 20 |es. }. | +00000da0 20 20 20 20 20 2f 2f 20 54 72 79 20 74 6f 20 6b | // Try to k| +00000db0 65 65 70 20 31 20 64 69 67 69 74 2c 20 6f 6e 6c |eep 1 digit, onl| +00000dc0 79 20 69 66 20 77 65 27 72 65 20 61 6c 6c 6f 77 |y if we're allow| +00000dd0 65 64 20 61 6e 64 20 6e 6f 20 6f 74 68 65 72 0a |ed and no other.| +00000de0 20 20 20 20 20 20 20 20 2f 2f 20 61 6c 74 65 72 | // alter| +00000df0 6e 61 74 69 76 65 73 20 77 65 72 65 20 66 6f 75 |natives were fou| +00000e00 6e 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 54 65 |nd. // Te| +00000e10 73 74 69 6e 67 20 22 72 65 74 2e 6c 65 6e 67 74 |sting "ret.lengt| +00000e20 68 22 20 6d 61 6b 65 73 20 6d 6f 72 65 20 73 65 |h" makes more se| +00000e30 6e 73 65 20 74 68 61 6e 20 74 65 73 74 69 6e 67 |nse than testing| +00000e40 20 22 66 6f 75 6e 64 77 6f 72 64 22 2c 0a 20 20 | "foundword",. | +00000e50 20 20 20 20 20 20 2f 2f 20 62 75 74 20 74 68 65 | // but the| +00000e60 20 6f 74 68 65 72 20 69 6d 70 6c 65 6d 65 6e 74 | other implement| +00000e70 61 74 69 6f 6e 73 20 73 65 65 6d 20 74 6f 20 64 |ations seem to d| +00000e80 6f 20 6a 75 73 74 20 74 68 69 73 2e 0a 20 20 20 |o just this.. | +00000e90 20 20 20 20 20 69 66 20 28 64 69 67 69 74 6f 6b | if (digitok| +00000ea0 20 26 26 20 21 66 6f 75 6e 64 77 6f 72 64 29 20 | && !foundword) | +00000eb0 7b 20 2f 2f 72 65 74 2e 6c 65 6e 67 74 68 20 3d |{ //ret.length =| +00000ec0 3d 20 30 20 20 0a 20 20 20 20 20 20 20 20 20 20 |= 0 . | +00000ed0 20 20 69 66 28 6e 75 6d 62 65 72 73 2e 6c 65 6e | if(numbers.len| +00000ee0 67 74 68 20 3e 20 20 31 29 20 7b 0a 20 20 20 20 |gth > 1) {. 
| +00000ef0 20 20 20 20 20 20 20 20 20 20 20 20 2f 2f 20 43 | // C| +00000f00 6f 6d 62 69 6e 65 20 31 20 64 69 67 69 74 20 77 |ombine 1 digit w| +00000f10 69 74 68 20 61 6c 6c 20 61 6c 74 65 6e 61 74 69 |ith all altenati| +00000f20 76 65 73 20 66 72 6f 6d 20 74 68 65 20 72 65 73 |ves from the res| +00000f30 74 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 20 |t . | +00000f40 20 20 20 20 20 20 2f 2f 20 28 6e 65 78 74 20 70 | // (next p| +00000f50 69 65 63 65 20 63 61 6e 20 6e 6f 74 20 73 74 61 |iece can not sta| +00000f60 72 74 20 77 69 74 68 20 61 20 64 69 67 69 74 29 |rt with a digit)| +00000f70 20 20 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 | . | +00000f80 20 20 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 | forea| +00000f90 63 68 20 28 61 3b 20 5f 46 69 6e 64 57 6f 72 64 |ch (a; _FindWord| +00000fa0 73 28 20 6e 75 6d 62 65 72 73 5b 31 2e 2e 24 5d |s( numbers[1..$]| +00000fb0 2c 20 66 61 6c 73 65 20 29 20 29 0a 20 20 20 20 |, false ) ). | +00000fc0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000fd0 72 65 74 20 7e 3d 20 6e 75 6d 62 65 72 73 5b 30 |ret ~= numbers[0| +00000fe0 2e 2e 31 5d 20 7e 20 22 20 22 20 7e 20 61 3b 0a |..1] ~ " " ~ a;.| +00000ff0 20 20 20 20 20 20 20 20 20 20 20 20 7d 20 20 20 | } | +00001000 20 0a 20 20 20 20 20 20 20 20 20 20 20 20 65 6c | . el| +00001010 73 65 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 |se . | +00001020 20 20 20 20 20 20 20 72 65 74 20 7e 3d 20 6e 75 | ret ~= nu| +00001030 6d 62 65 72 73 5b 30 2e 2e 31 5d 3b 20 20 20 20 |mbers[0..1]; | +00001040 2f 2f 20 6a 75 73 74 20 61 70 70 65 6e 64 20 74 |// just append t| +00001050 68 69 73 20 64 69 67 69 74 20 20 20 20 20 20 20 |his digit | +00001060 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 7d | . }| +00001070 20 20 20 20 0a 20 20 20 20 20 20 20 20 72 65 74 | . ret| +00001080 75 72 6e 20 72 65 74 3b 0a 20 20 20 20 7d 0a 0a |urn ret;. 
}..| +00001090 20 20 20 20 2f 2f 2f 20 28 54 68 69 73 20 66 75 | /// (This fu| +000010a0 6e 63 74 69 6f 6e 20 77 61 73 20 69 6e 6c 69 6e |nction was inlin| +000010b0 65 64 20 69 6e 20 74 68 65 20 6f 72 69 67 69 6e |ed in the origin| +000010c0 61 6c 20 70 72 6f 67 72 61 6d 29 20 0a 20 20 20 |al program) . | +000010d0 20 2f 2f 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 | /// Finds all a| +000010e0 6c 74 65 72 6e 61 74 69 76 65 73 20 66 6f 72 20 |lternatives for | +000010f0 74 68 65 20 67 69 76 65 6e 20 70 68 6f 6e 65 20 |the given phone | +00001100 6e 75 6d 62 65 72 20 0a 20 20 20 20 2f 2f 2f 20 |number . /// | +00001110 52 65 74 75 72 6e 73 3a 20 61 72 72 61 79 20 6f |Returns: array o| +00001120 66 20 73 74 72 69 6e 67 73 20 0a 20 20 20 20 73 |f strings . s| +00001130 74 72 69 6e 67 61 72 72 61 79 20 46 69 6e 64 57 |tringarray FindW| +00001140 6f 72 64 73 28 20 73 74 72 69 6e 67 20 70 68 6f |ords( string pho| +00001150 6e 65 5f 6e 75 6d 62 65 72 20 29 0a 20 20 20 20 |ne_number ). | +00001160 7b 0a 20 20 20 20 20 20 20 20 69 66 20 28 21 70 |{. if (!p| +00001170 68 6f 6e 65 5f 6e 75 6d 62 65 72 2e 6c 65 6e 67 |hone_number.leng| +00001180 74 68 29 0a 20 20 20 20 20 20 20 20 20 20 20 20 |th). | +00001190 72 65 74 75 72 6e 20 6e 75 6c 6c 3b 0a 20 20 20 |return null;. | +000011a0 20 20 20 20 20 2f 2f 20 53 74 72 69 70 20 74 68 | // Strip th| +000011b0 65 20 6e 6f 6e 2d 64 69 67 69 74 20 63 68 61 72 |e non-digit char| +000011c0 61 63 74 65 72 73 20 66 72 6f 6d 20 74 68 65 20 |acters from the | +000011d0 70 68 6f 6e 65 20 6e 75 6d 62 65 72 2c 20 61 6e |phone number, an| +000011e0 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 70 61 73 |d. // pas| +000011f0 73 20 69 74 20 74 6f 20 74 68 65 20 72 65 63 75 |s it to the recu| +00001200 72 73 69 76 65 20 66 75 6e 63 74 69 6f 6e 20 28 |rsive function (| +00001210 6c 65 61 64 69 6e 67 20 64 69 67 69 74 20 69 73 |leading digit is| +00001220 20 61 6c 6c 6f 77 65 64 29 0a 20 20 20 20 20 20 | allowed). 
| +00001230 20 20 72 65 74 75 72 6e 20 5f 46 69 6e 64 57 6f | return _FindWo| +00001240 72 64 73 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 |rds( stripNonDig| +00001250 69 74 28 70 68 6f 6e 65 5f 6e 75 6d 62 65 72 29 |it(phone_number)| +00001260 2c 20 74 72 75 65 20 29 3b 20 20 20 20 0a 20 20 |, true ); . | +00001270 20 20 7d 20 20 20 20 0a 20 20 20 20 0a 20 20 20 | } . . | +00001280 20 2f 2f 20 52 65 61 64 20 74 68 65 20 70 68 6f | // Read the pho| +00001290 6e 65 20 6e 75 6d 62 65 72 73 20 20 20 20 20 0a |ne numbers .| +000012a0 20 20 20 20 66 6f 72 65 61 63 68 28 73 74 72 69 | foreach(stri| +000012b0 6e 67 20 70 68 6f 6e 65 3b 20 6e 65 77 20 42 75 |ng phone; new Bu| +000012c0 66 66 65 72 65 64 46 69 6c 65 28 22 69 6e 70 75 |fferedFile("inpu| +000012d0 74 2e 74 78 74 22 20 20 20 29 20 29 0a 20 20 20 |t.txt" ) ). | +000012e0 20 20 20 20 20 66 6f 72 65 61 63 68 28 61 6c 74 | foreach(alt| +000012f0 65 72 6e 61 74 69 76 65 3b 20 46 69 6e 64 57 6f |ernative; FindWo| +00001300 72 64 73 28 20 70 68 6f 6e 65 20 29 20 29 0a 20 |rds( phone ) ). 
| +00001310 20 20 20 20 20 20 20 20 20 20 20 77 72 69 74 65 | write| +00001320 66 6c 6e 28 70 68 6f 6e 65 2c 20 22 3a 20 22 2c |fln(phone, ": ",| +00001330 20 61 6c 74 65 72 6e 61 74 69 76 65 20 29 3b 0a | alternative );.| +00001340 7d 0a 0a |}..| +00001343 -- cgit v1.2.1 From 51ae7c86e921223b808a85dca72ca1622be713d7 Mon Sep 17 00:00:00 2001 From: Wolfgang Mayr Date: Mon, 26 Oct 2015 16:31:01 +0100 Subject: Added Decorators to TypeScript lexer --- pygments/lexers/javascript.py | 2 ++ tests/examplefiles/example.ts | 11 +++++++++++ 2 files changed, 13 insertions(+) diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py index 3982c91e..8e2d9797 100644 --- a/pygments/lexers/javascript.py +++ b/pygments/lexers/javascript.py @@ -511,6 +511,8 @@ class TypeScriptLexer(RegexLexer): (r'[0-9]+', Number.Integer), (r'"(\\\\|\\"|[^"])*"', String.Double), (r"'(\\\\|\\'|[^'])*'", String.Single), + # Match stuff like: Decorators + (r'@\w+', Keyword.Declaration), ] } diff --git a/tests/examplefiles/example.ts b/tests/examplefiles/example.ts index 545c6cf5..760e2543 100644 --- a/tests/examplefiles/example.ts +++ b/tests/examplefiles/example.ts @@ -21,6 +21,17 @@ class Horse extends Animal { } } +@View({ + templateUrl: "app/components/LoginForm.html", + directives: [FORM_DIRECTIVES, NgIf] +}) +@Component({ + selector: "login-form" +}) +class LoginForm { + +} + var sam = new Snake("Sammy the Python") var tom: Animal = new Horse("Tommy the Palomino") -- cgit v1.2.1 From 0c6c714008e42416f3ae92c4427da0d9f28d8d89 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Tue, 27 Oct 2015 08:08:09 -0700 Subject: Fixes #1153: Change Windows+Py3 workaround code to flush when finished. This avoids a case where the last chunk might not be output in iPython Notebook on Windows. 
--- pygments/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/util.py b/pygments/util.py index 0859c05d..07b662d0 100644 --- a/pygments/util.py +++ b/pygments/util.py @@ -372,7 +372,7 @@ else: class UnclosingTextIOWrapper(TextIOWrapper): # Don't close underlying buffer on destruction. def close(self): - pass + self.flush() def add_metaclass(metaclass): -- cgit v1.2.1 From 7c53811825bd87f9125b4eb3da9f740084e084aa Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Tue, 27 Oct 2015 08:10:49 -0700 Subject: Add #1153 to changelog. --- CHANGES | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGES b/CHANGES index 6a83a3e0..7056d178 100644 --- a/CHANGES +++ b/CHANGES @@ -66,6 +66,9 @@ Version 2.1 - Added option to pygmentize to show a full traceback on exceptions. +- Fixed incomplete output on Windows and Python 3 (e.g. when using iPython + Notebook). (#1153) + Version 2.0.3 ------------- -- cgit v1.2.1 From 9c30355db8fb8c7822b1c27eeee2396954c3fe9a Mon Sep 17 00:00:00 2001 From: hhsprings Date: Wed, 28 Oct 2015 18:38:06 +0900 Subject: Add the lexer for traditional `bc: an arbitrary precision calculator language'. 
--- pygments/lexers/_mapping.py | 1 + pygments/lexers/algebra.py | 36 +++++++++++++++++++++- tests/examplefiles/example.bc | 70 +++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 106 insertions(+), 1 deletion(-) create mode 100644 tests/examplefiles/example.bc diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 03a3c96e..f10743dc 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -43,6 +43,7 @@ LEXERS = { 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)), 'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)), 'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)), + 'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()), 'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()), 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')), 'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')), diff --git a/pygments/lexers/algebra.py b/pygments/lexers/algebra.py index 873b1bf2..a0d6b4ce 100644 --- a/pygments/lexers/algebra.py +++ b/pygments/lexers/algebra.py @@ -15,7 +15,7 @@ from pygments.lexer import RegexLexer, bygroups, words from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation -__all__ = ['GAPLexer', 'MathematicaLexer', 'MuPADLexer'] +__all__ = ['GAPLexer', 'MathematicaLexer', 'MuPADLexer', 'BCLexer'] class GAPLexer(RegexLexer): @@ -185,3 +185,37 @@ class MuPADLexer(RegexLexer): (r'[*/]', Comment.Multiline) ] } + + +class 
BCLexer(RegexLexer): + """ + A `BC `_ lexer. + Contributed by Hiroaki Itoh . + + .. versionadded:: 2.1 + """ + name = 'BC' + aliases = ['bc'] + filenames = ['*.bc'] + + tokens = { + 'root': [ + (r'/\*', Comment.Multiline, 'comment'), + (r'"(?:[^"\\]|\\.)*"', String), + (r'[{}();,]', Punctuation), + (r'(if|else|while|for|break|continue|halt|' + r'return|define|auto|print|' + r'read|length|scale|sqrt|limits|quit|warranty)\b', Keyword), + (r'\+\+|--|\|\||&&|' + r'([-<>+*%\^/!=])=?', Operator), + (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number), + (r'\.[0-9]+(?:e[0-9]+)?', Number), + (r'.', Text) + ], + 'comment': [ + (r'[^*/]', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[*/]', Comment.Multiline) + ] + } diff --git a/tests/examplefiles/example.bc b/tests/examplefiles/example.bc new file mode 100644 index 00000000..fe248e58 --- /dev/null +++ b/tests/examplefiles/example.bc @@ -0,0 +1,70 @@ +scale = 20 + +/* Uses the fact that e^x = (e^(x/2))^2 + When x is small enough, we use the series: + e^x = 1 + x + x^2/2! + x^3/3! + ... +*/ + +define e(x) { + auto a, d, e, f, i, m, v, z + + /* Check the sign of x. */ + if (x<0) { + m = 1 + x = -x + } + + /* Precondition x. */ + z = scale; + scale = 4 + z + .44*x; + while (x > 1) { + f += 1; + x /= 2; + } + + /* Initialize the variables. */ + v = 1+x + a = x + d = 1 + + for (i=2; 1; i++) { + e = (a *= x) / (d *= i) + if (e == 0) { + if (f>0) while (f--) v = v*v; + scale = z + if (m) return (1/v); + return (v/1); + } + v += e + } +} + +"e(2)=" ; e(2) + +/* assign operators */ +a = 10 +a += 1 +a++ +++a +a-- +--a +a += 5 +a -= 5 +a *= 2 +a /= 3 +a ^= 2 +a %= 2 + +/* comparison */ +if (a > 2) { +} +if (a >= 2) { +} +if (a == 2) { +} +if (a != 2) { +} +if (a <= 2) { +} +if (a < 2) { +} -- cgit v1.2.1 From ef0518913142c9c87f6fddba695e77d873211a43 Mon Sep 17 00:00:00 2001 From: Miikka Salminen Date: Wed, 28 Oct 2015 16:39:01 +0200 Subject: Added support for od and xxd. 
Minor enhancements. --- pygments/lexers/hexdump.py | 43 +++-- tests/examplefiles/hexdump_debugexe | 309 +++++++++++++++++++++++++++++++++++ tests/examplefiles/hexdump_hd | 310 ++++++++++++++++++++++++++++++++++++ tests/examplefiles/hexdump_hexcat | 247 ++++++++++++++++++++++++++++ tests/examplefiles/hexdump_hexdump | 310 ++++++++++++++++++++++++++++++++++++ tests/examplefiles/hexdump_od | 310 ++++++++++++++++++++++++++++++++++++ tests/examplefiles/hexdump_test | 310 ------------------------------------ tests/examplefiles/hexdump_xxd | 309 +++++++++++++++++++++++++++++++++++ 8 files changed, 1828 insertions(+), 320 deletions(-) create mode 100644 tests/examplefiles/hexdump_debugexe create mode 100644 tests/examplefiles/hexdump_hd create mode 100644 tests/examplefiles/hexdump_hexcat create mode 100644 tests/examplefiles/hexdump_hexdump create mode 100644 tests/examplefiles/hexdump_od delete mode 100644 tests/examplefiles/hexdump_test create mode 100644 tests/examplefiles/hexdump_xxd diff --git a/pygments/lexers/hexdump.py b/pygments/lexers/hexdump.py index f20244ae..efe16fa7 100644 --- a/pygments/lexers/hexdump.py +++ b/pygments/lexers/hexdump.py @@ -19,7 +19,25 @@ __all__ = ['HexdumpLexer'] class HexdumpLexer(RegexLexer): """ - For hex dumps output by the UNIX hexdump and hexcat tools. + For typical hex dump output formats by the UNIX and GNU/Linux tools ``hexdump``, + ``hd``, ``hexcat``, ``od`` and ``xxd``, and the DOS tool ``DEBUG``. For example: + + .. sourcecode:: hexdump + + 00000000 7f 45 4c 46 02 01 01 00 00 00 00 00 00 00 00 00 |.ELF............| + 00000010 02 00 3e 00 01 00 00 00 c5 48 40 00 00 00 00 00 |..>......H@.....| + + The specific supported formats are the outputs of: + + * ``hexdump FILE`` + * ``hexdump -C FILE`` -- the `canonical` format used in the example. + * ``hd FILE`` -- same as ``hexdump -C FILE``. + * ``hexcat FILE`` + * ``od -t x1z FILE`` + * ``xxd FILE`` + * ``DEBUG.EXE FILE.COM`` and entering ``d`` to the prompt. + + .. 
versionadded:: 2.1 """ name = 'Hexdump' aliases = ['hexdump'] @@ -30,12 +48,14 @@ class HexdumpLexer(RegexLexer): 'root': [ (r'\n', Text), include('offset'), - (hd+r'{4}', Number.Hex, 'stringless-mode'), + (r'('+hd+r'{2})(\-)('+hd+r'{2})', bygroups(Number.Hex, Punctuation, Number.Hex)), (hd+r'{2}', Number.Hex), - (r'(\s+)(\|)(.{16})(\|)$', bygroups(Text, Punctuation, String, Punctuation), 'piped-strings'), - (r'(\s+)(\|)(.{1,15})(\|)$', bygroups(Text, Punctuation, String, Punctuation)), - (r'(\s+)(.{1,15})$', bygroups(Text, String)), - (r'(\s+)(.{16})$', bygroups(Text, String), 'nonpiped-strings'), + (r'(\s{2,3})(\>)(.{16})(\<)$', bygroups(Text, Punctuation, String, Punctuation), 'bracket-strings'), + (r'(\s{2,3})(\|)(.{16})(\|)$', bygroups(Text, Punctuation, String, Punctuation), 'piped-strings'), + (r'(\s{2,3})(\>)(.{1,15})(\<)$', bygroups(Text, Punctuation, String, Punctuation)), + (r'(\s{2,3})(\|)(.{1,15})(\|)$', bygroups(Text, Punctuation, String, Punctuation)), + (r'(\s{2,3})(.{1,15})$', bygroups(Text, String)), + (r'(\s{2,3})(.{16}|.{20})$', bygroups(Text, String), 'nonpiped-strings'), (r'\s', Text), (r'^\*', Punctuation), ], @@ -48,26 +68,29 @@ class HexdumpLexer(RegexLexer): (hd+'+', Name.Label), (r':', Punctuation) ], - 'stringless-mode': [ + 'piped-strings': [ (r'\n', Text), include('offset'), (hd+r'{2}', Number.Hex), + (r'(\s{2,3})(\|)(.{1,16})(\|)$', bygroups(Text, Punctuation, String, Punctuation)), (r'\s', Text), (r'^\*', Punctuation), ], - 'piped-strings': [ + 'bracket-strings': [ (r'\n', Text), include('offset'), (hd+r'{2}', Number.Hex), - (r'(\s)(\|)(.{1,16})(\|)$', bygroups(Text, Punctuation, String, Punctuation)), + (r'(\s{2,3})(\>)(.{1,16})(\<)$', bygroups(Text, Punctuation, String, Punctuation)), (r'\s', Text), (r'^\*', Punctuation), ], 'nonpiped-strings': [ (r'\n', Text), include('offset'), + (r'('+hd+r'{2})(\-)('+hd+r'{2})', bygroups(Number.Hex, Punctuation, Number.Hex)), (hd+r'{2}', Number.Hex), - (r'(\s+)(.{1,16})$', bygroups(Text, 
String)), + (r'(\s{19,})(.{1,20}?)$', bygroups(Text, String)), + (r'(\s{2,3})(.{1,20})$', bygroups(Text, String)), (r'\s', Text), (r'^\*', Punctuation), ], diff --git a/tests/examplefiles/hexdump_debugexe b/tests/examplefiles/hexdump_debugexe new file mode 100644 index 00000000..31fefdb7 --- /dev/null +++ b/tests/examplefiles/hexdump_debugexe @@ -0,0 +1,309 @@ +0000:0000 2F 2F 20 43 72 65 61 74-65 64 20 62 79 20 4C 69 // Created by Li +0000:0010 6F 6E 65 6C 6C 6F 20 4C-75 6E 65 73 75 20 61 6E onello Lunesu an +0000:0020 64 20 70 6C 61 63 65 64-20 69 6E 20 74 68 65 20 d placed in the +0000:0030 70 75 62 6C 69 63 20 64-6F 6D 61 69 6E 2E 0A 2F public domain.◙/ +0000:0040 2F 20 54 68 69 73 20 66-69 6C 65 20 68 61 73 20 / This file has +0000:0050 62 65 65 6E 20 6D 6F 64-69 66 69 65 64 20 66 72 been modified fr +0000:0060 6F 6D 20 69 74 73 20 6F-72 69 67 69 6E 61 6C 20 om its original +0000:0070 76 65 72 73 69 6F 6E 2E-0A 2F 2F 20 49 74 20 68 version.◙// It h +0000:0080 61 73 20 62 65 65 6E 20-66 6F 72 6D 61 74 74 65 as been formatte +0000:0090 64 20 74 6F 20 66 69 74-20 79 6F 75 72 20 73 63 d to fit your sc +0000:00A0 72 65 65 6E 2E 0A 6D 6F-64 75 6C 65 20 70 68 6F reen.◙module pho +0000:00B0 6E 65 6E 6F 3B 20 20 20-20 20 2F 2F 20 6F 70 74 neno; // opt +0000:00C0 69 6F 6E 61 6C 0A 69 6D-70 6F 72 74 20 73 74 64 ional◙import std +0000:00D0 2E 73 74 64 69 6F 3B 20-20 20 2F 2F 20 77 72 69 .stdio; // wri +0000:00E0 74 65 66 6C 6E 20 20 20-20 20 0A 69 6D 70 6F 72 tefln ◙impor +0000:00F0 74 20 73 74 64 2E 63 74-79 70 65 3B 20 20 20 2F t std.ctype; / +0000:0100 2F 20 69 73 64 69 67 69-74 20 20 20 20 20 0A 69 / isdigit ◙i +0000:0110 6D 70 6F 72 74 20 73 74-64 2E 73 74 72 65 61 6D mport std.stream +0000:0120 3B 20 20 2F 2F 20 42 75-66 66 65 72 65 64 46 69 ; // BufferedFi +0000:0130 6C 65 0A 0A 2F 2F 20 4A-75 73 74 20 66 6F 72 20 le◙◙// Just for +0000:0140 72 65 61 64 61 62 69 6C-69 74 79 20 28 69 6D 61 readability (ima +0000:0150 67 69 6E 65 20 63 68 61-72 5B 5D 5B 5D 5B 63 68 
gine char[][][ch +0000:0160 61 72 5B 5D 5D 29 20 20-20 20 0A 61 6C 69 61 73 ar[]]) ◙alias +0000:0170 20 63 68 61 72 5B 5D 20-73 74 72 69 6E 67 3B 0A char[] string;◙ +0000:0180 61 6C 69 61 73 20 73 74-72 69 6E 67 5B 5D 20 73 alias string[] s +0000:0190 74 72 69 6E 67 61 72 72-61 79 3B 0A 0A 2F 2F 2F tringarray;◙◙/// +0000:01A0 20 53 74 72 69 70 73 20-6E 6F 6E 2D 64 69 67 69 Strips non-digi +0000:01B0 74 20 63 68 61 72 61 63-74 65 72 73 20 66 72 6F t characters fro +0000:01C0 6D 20 74 68 65 20 73 74-72 69 6E 67 20 28 43 4F m the string (CO +0000:01D0 57 29 0A 73 74 72 69 6E-67 20 73 74 72 69 70 4E W)◙string stripN +0000:01E0 6F 6E 44 69 67 69 74 28-20 69 6E 20 73 74 72 69 onDigit( in stri +0000:01F0 6E 67 20 6C 69 6E 65 20-29 20 0A 7B 0A 20 20 20 ng line ) ◙{◙ +0000:0200 20 73 74 72 69 6E 67 20-72 65 74 3B 0A 20 20 20 string ret;◙ +0000:0210 20 66 6F 72 65 61 63 68-28 75 69 6E 74 20 69 2C foreach(uint i, +0000:0220 20 63 3B 20 6C 69 6E 65-29 20 7B 0A 20 20 20 20 c; line) {◙ +0000:0230 20 20 20 20 2F 2F 20 45-72 72 6F 72 3A 20 73 74 // Error: st +0000:0240 64 2E 63 74 79 70 65 2E-69 73 64 69 67 69 74 20 d.ctype.isdigit +0000:0250 61 74 20 43 3A 5C 64 6D-64 5C 73 72 63 5C 70 68 at C:\dmd\src\ph +0000:0260 6F 62 6F 73 5C 73 74 64-5C 63 74 79 70 65 2E 64 obos\std\ctype.d +0000:0270 28 33 37 29 20 0A 20 20-20 20 20 20 20 20 2F 2F (37) ◙ // +0000:0280 20 63 6F 6E 66 6C 69 63-74 73 20 77 69 74 68 20 conflicts with +0000:0290 73 74 64 2E 73 74 72 65-61 6D 2E 69 73 64 69 67 std.stream.isdig +0000:02A0 69 74 20 61 74 20 43 3A-5C 64 6D 64 5C 73 72 63 it at C:\dmd\src +0000:02B0 5C 70 68 6F 62 6F 73 5C-73 74 64 5C 73 74 72 65 \phobos\std\stre +0000:02C0 61 6D 2E 64 28 32 39 32-34 29 0A 20 20 20 20 20 am.d(2924)◙ +0000:02D0 20 20 20 69 66 20 28 21-73 74 64 2E 63 74 79 70 if (!std.ctyp +0000:02E0 65 2E 69 73 64 69 67 69-74 28 63 29 29 20 7B 0A e.isdigit(c)) {◙ +0000:02F0 20 20 20 20 20 20 20 20-20 20 20 20 69 66 20 28 if ( +0000:0300 21 72 65 74 29 0A 20 20-20 20 20 20 20 20 20 20 
!ret)◙ +0000:0310 20 20 20 20 20 20 72 65-74 20 3D 20 6C 69 6E 65 ret = line +0000:0320 5B 30 2E 2E 69 5D 3B 20-20 20 20 0A 20 20 20 20 [0..i]; ◙ +0000:0330 20 20 20 20 7D 20 20 20-20 0A 20 20 20 20 20 20 } ◙ +0000:0340 20 20 65 6C 73 65 20 69-66 20 28 72 65 74 29 0A else if (ret)◙ +0000:0350 20 20 20 20 20 20 20 20-20 20 20 20 72 65 74 20 ret +0000:0360 7E 3D 20 63 3B 20 20 20-20 0A 20 20 20 20 7D 20 ~= c; ◙ } +0000:0370 20 20 20 0A 20 20 20 20-72 65 74 75 72 6E 20 72 ◙ return r +0000:0380 65 74 3F 72 65 74 3A 6C-69 6E 65 3B 0A 7D 0A 0A et?ret:line;◙}◙◙ +0000:0390 75 6E 69 74 74 65 73 74-20 7B 0A 20 20 20 20 61 unittest {◙ a +0000:03A0 73 73 65 72 74 28 20 73-74 72 69 70 4E 6F 6E 44 ssert( stripNonD +0000:03B0 69 67 69 74 28 22 61 73-64 66 22 29 20 3D 3D 20 igit("asdf") == +0000:03C0 22 22 20 20 29 3B 0A 20-20 20 20 61 73 73 65 72 "" );◙ asser +0000:03D0 74 28 20 73 74 72 69 70-4E 6F 6E 44 69 67 69 74 t( stripNonDigit +0000:03E0 28 22 5C 27 31 33 2D 3D-32 20 34 6B 6F 70 22 29 ("\'13-=2 4kop") +0000:03F0 20 3D 3D 20 20 22 31 33-32 34 22 20 20 29 3B 0A == "1324" );◙ +0000:0400 7D 0A 0A 2F 2F 2F 20 43-6F 6E 76 65 72 74 73 20 }◙◙/// Converts +0000:0410 61 20 77 6F 72 64 20 69-6E 74 6F 20 61 20 6E 75 a word into a nu +0000:0420 6D 62 65 72 2C 20 69 67-6E 6F 72 69 6E 67 20 61 mber, ignoring a +0000:0430 6C 6C 20 6E 6F 6E 20 61-6C 70 68 61 20 63 68 61 ll non alpha cha +0000:0440 72 61 63 74 65 72 73 20-20 0A 73 74 72 69 6E 67 racters ◙string +0000:0450 20 77 6F 72 64 54 6F 4E-75 6D 28 20 69 6E 20 73 wordToNum( in s +0000:0460 74 72 69 6E 67 20 77 6F-72 64 20 29 0A 7B 0A 2F tring word )◙{◙/ +0000:0470 2F 20 74 72 61 6E 73 6C-61 74 69 6F 6E 20 74 61 / translation ta +0000:0480 62 6C 65 20 66 6F 72 20-74 68 65 20 74 61 73 6B ble for the task +0000:0490 20 61 74 20 68 61 6E 64-0A 63 6F 6E 73 74 20 63 at hand◙const c +0000:04A0 68 61 72 5B 32 35 36 5D-20 54 52 41 4E 53 4C 41 har[256] TRANSLA +0000:04B0 54 45 20 3D 20 20 20 20-0A 20 20 20 20 22 20 20 TE = ◙ " +0000:04C0 20 20 
20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:04D0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 22 20 " +0000:04E0 20 2F 2F 20 30 20 20 20-0A 20 20 20 20 22 20 20 // 0 ◙ " +0000:04F0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 30 31 01 +0000:0500 32 33 34 35 36 37 38 39-20 20 20 20 20 20 22 20 23456789 " +0000:0510 20 2F 2F 20 33 32 20 20-20 20 20 0A 20 20 20 20 // 32 ◙ +0000:0520 22 20 35 37 36 33 30 34-39 39 36 31 37 38 35 31 " 57630499617851 +0000:0530 38 38 31 32 33 34 37 36-32 32 33 39 20 20 20 20 881234762239 +0000:0540 20 22 20 20 2F 2F 20 36-34 20 20 20 0A 20 20 20 " // 64 ◙ +0000:0550 20 22 20 35 37 36 33 30-34 39 39 36 31 37 38 35 " 5763049961785 +0000:0560 31 38 38 31 32 33 34 37-36 32 32 33 39 20 20 20 1881234762239 +0000:0570 20 20 22 0A 20 20 20 20-22 20 20 20 20 20 20 20 "◙ " +0000:0580 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:0590 20 20 20 20 20 20 20 20-20 22 0A 20 20 20 20 22 "◙ " +0000:05A0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:05B0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:05C0 22 0A 20 20 20 20 22 20-20 20 20 20 20 20 20 20 "◙ " +0000:05D0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:05E0 20 20 20 20 20 20 20 22-20 20 20 20 0A 20 20 20 " ◙ +0000:05F0 20 22 20 20 20 20 20 20-20 20 20 20 20 20 20 20 " +0000:0600 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:0610 20 20 22 3B 0A 20 20 20-20 73 74 72 69 6E 67 20 ";◙ string +0000:0620 72 65 74 3B 0A 20 20 20-20 66 6F 72 65 61 63 68 ret;◙ foreach +0000:0630 28 63 3B 20 63 61 73 74-28 75 62 79 74 65 5B 5D (c; cast(ubyte[] +0000:0640 29 77 6F 72 64 29 0A 20-20 20 20 20 20 20 20 69 )word)◙ i +0000:0650 66 20 28 54 52 41 4E 53-4C 41 54 45 5B 63 5D 20 f (TRANSLATE[c] +0000:0660 21 3D 20 27 20 27 29 0A-20 20 20 20 20 20 20 20 != ' ')◙ +0000:0670 20 20 20 20 72 65 74 20-7E 3D 20 54 52 41 4E 53 ret ~= TRANS +0000:0680 4C 41 54 45 5B 63 5D 3B-0A 20 20 20 20 72 65 74 LATE[c];◙ ret +0000:0690 75 72 6E 20 72 65 74 3B-0A 7D 0A 0A 75 6E 69 74 urn ret;◙}◙◙unit 
+0000:06A0 74 65 73 74 20 7B 0A 20-2F 2F 20 54 65 73 74 20 test {◙ // Test +0000:06B0 77 6F 72 64 54 6F 4E 75-6D 20 75 73 69 6E 67 20 wordToNum using +0000:06C0 74 68 65 20 74 61 62 6C-65 20 66 72 6F 6D 20 74 the table from t +0000:06D0 68 65 20 74 61 73 6B 20-64 65 73 63 72 69 70 74 he task descript +0000:06E0 69 6F 6E 2E 0A 20 61 73-73 65 72 74 28 20 22 30 ion.◙ assert( "0 +0000:06F0 31 31 31 32 32 32 33 33-33 34 34 35 35 36 36 36 1112223334455666 +0000:0700 37 37 37 38 38 38 39 39-39 22 20 3D 3D 0A 20 20 777888999" ==◙ +0000:0710 20 77 6F 72 64 54 6F 4E-75 6D 28 22 45 20 7C 20 wordToNum("E | +0000:0720 4A 20 4E 20 51 20 7C 20-52 20 57 20 58 20 7C 20 J N Q | R W X | +0000:0730 44 20 53 20 59 20 7C 20-46 20 54 20 7C 20 41 20 D S Y | F T | A +0000:0740 4D 20 7C 20 43 20 49 20-56 20 7C 20 42 20 4B 20 M | C I V | B K +0000:0750 55 20 7C 20 4C 20 4F 20-50 20 7C 20 47 20 48 20 U | L O P | G H +0000:0760 5A 22 29 29 3B 0A 20 61-73 73 65 72 74 28 20 22 Z"));◙ assert( " +0000:0770 30 31 31 31 32 32 32 33-33 33 34 34 35 35 36 36 0111222333445566 +0000:0780 36 37 37 37 38 38 38 39-39 39 22 20 3D 3D 20 0A 6777888999" == ◙ +0000:0790 20 20 20 77 6F 72 64 54-6F 4E 75 6D 28 22 65 20 wordToNum("e +0000:07A0 7C 20 6A 20 6E 20 71 20-7C 20 72 20 77 20 78 20 | j n q | r w x +0000:07B0 7C 20 64 20 73 20 79 20-7C 20 66 20 74 20 7C 20 | d s y | f t | +0000:07C0 61 20 6D 20 7C 20 63 20-69 20 76 20 7C 20 62 20 a m | c i v | b +0000:07D0 6B 20 75 20 7C 20 6C 20-6F 20 70 20 7C 20 67 20 k u | l o p | g +0000:07E0 68 20 7A 22 29 29 3B 0A-20 61 73 73 65 72 74 28 h z"));◙ assert( +0000:07F0 20 22 30 31 32 33 34 35-36 37 38 39 22 20 3D 3D "0123456789" == +0000:0800 20 0A 20 20 20 77 6F 72-64 54 6F 4E 75 6D 28 22 ◙ wordToNum(" +0000:0810 30 20 7C 20 20 20 31 20-20 20 7C 20 20 20 32 20 0 | 1 | 2 +0000:0820 20 20 7C 20 20 20 33 20-20 20 7C 20 20 34 20 20 | 3 | 4 +0000:0830 7C 20 20 35 20 20 7C 20-20 20 36 20 20 20 7C 20 | 5 | 6 | +0000:0840 20 20 37 20 20 20 7C 20-20 20 38 20 20 20 7C 20 7 | 8 | 
+0000:0850 20 20 39 22 29 29 3B 0A-7D 0A 0A 76 6F 69 64 20 9"));◙}◙◙void +0000:0860 6D 61 69 6E 28 20 73 74-72 69 6E 67 5B 5D 20 61 main( string[] a +0000:0870 72 67 73 20 29 0A 7B 0A-20 20 20 20 2F 2F 20 54 rgs )◙{◙ // T +0000:0880 68 69 73 20 61 73 73 6F-63 69 61 74 69 76 65 20 his associative +0000:0890 61 72 72 61 79 20 6D 61-70 73 20 61 20 6E 75 6D array maps a num +0000:08A0 62 65 72 20 74 6F 20 61-6E 20 61 72 72 61 79 20 ber to an array +0000:08B0 6F 66 20 77 6F 72 64 73-2E 20 20 20 20 0A 20 20 of words. ◙ +0000:08C0 20 20 73 74 72 69 6E 67-61 72 72 61 79 5B 73 74 stringarray[st +0000:08D0 72 69 6E 67 5D 20 20 20-20 6E 75 6D 32 77 6F 72 ring] num2wor +0000:08E0 64 73 3B 0A 0A 20 20 20-20 66 6F 72 65 61 63 68 ds;◙◙ foreach +0000:08F0 28 73 74 72 69 6E 67 20-77 6F 72 64 3B 20 6E 65 (string word; ne +0000:0900 77 20 42 75 66 66 65 72-65 64 46 69 6C 65 28 22 w BufferedFile(" +0000:0910 64 69 63 74 69 6F 6E 61-72 79 2E 74 78 74 22 20 dictionary.txt" +0000:0920 29 20 29 0A 20 20 20 20-20 20 20 20 6E 75 6D 32 ) )◙ num2 +0000:0930 77 6F 72 64 73 5B 20 77-6F 72 64 54 6F 4E 75 6D words[ wordToNum +0000:0940 28 77 6F 72 64 29 20 5D-20 7E 3D 20 77 6F 72 64 (word) ] ~= word +0000:0950 2E 64 75 70 3B 20 20 20-20 20 20 20 20 2F 2F 20 .dup; // +0000:0960 6D 75 73 74 20 64 75 70-0A 0A 20 20 20 20 2F 2F must dup◙◙ // +0000:0970 2F 20 46 69 6E 64 73 20-61 6C 6C 20 61 6C 74 65 / Finds all alte +0000:0980 72 6E 61 74 69 76 65 73-20 66 6F 72 20 74 68 65 rnatives for the +0000:0990 20 67 69 76 65 6E 20 6E-75 6D 62 65 72 0A 20 20 given number◙ +0000:09A0 20 20 2F 2F 2F 20 28 73-68 6F 75 6C 64 20 68 61 /// (should ha +0000:09B0 76 65 20 62 65 65 6E 20-73 74 72 69 70 70 65 64 ve been stripped +0000:09C0 20 66 72 6F 6D 20 6E 6F-6E 2D 64 69 67 69 74 20 from non-digit +0000:09D0 63 68 61 72 61 63 74 65-72 73 29 0A 20 20 20 20 characters)◙ +0000:09E0 73 74 72 69 6E 67 61 72-72 61 79 20 5F 46 69 6E stringarray _Fin +0000:09F0 64 57 6F 72 64 73 28 20-73 74 72 69 6E 67 20 6E dWords( string 
n +0000:0A00 75 6D 62 65 72 73 2C 20-62 6F 6F 6C 20 64 69 67 umbers, bool dig +0000:0A10 69 74 6F 6B 20 29 0A 20-20 20 20 69 6E 20 7B 0A itok )◙ in {◙ +0000:0A20 20 20 20 20 20 20 20 20-61 73 73 65 72 74 28 6E assert(n +0000:0A30 75 6D 62 65 72 73 2E 6C-65 6E 67 74 68 20 3E 20 umbers.length > +0000:0A40 20 30 29 3B 20 20 20 20-0A 20 20 20 20 7D 20 20 0); ◙ } +0000:0A50 20 20 0A 20 20 20 20 6F-75 74 28 72 65 73 75 6C ◙ out(resul +0000:0A60 74 29 20 7B 0A 20 20 20-20 20 20 20 20 66 6F 72 t) {◙ for +0000:0A70 65 61 63 68 20 28 61 3B-20 72 65 73 75 6C 74 29 each (a; result) +0000:0A80 0A 20 20 20 20 20 20 20-20 20 20 20 20 61 73 73 ◙ ass +0000:0A90 65 72 74 28 20 77 6F 72-64 54 6F 4E 75 6D 28 61 ert( wordToNum(a +0000:0AA0 29 20 3D 3D 20 6E 75 6D-62 65 72 73 20 29 3B 0A ) == numbers );◙ +0000:0AB0 20 20 20 20 7D 20 20 20-20 0A 20 20 20 20 62 6F } ◙ bo +0000:0AC0 64 79 20 7B 0A 20 20 20-20 20 20 20 20 73 74 72 dy {◙ str +0000:0AD0 69 6E 67 61 72 72 61 79-20 72 65 74 3B 0A 20 20 ingarray ret;◙ +0000:0AE0 20 20 20 20 20 20 62 6F-6F 6C 20 66 6F 75 6E 64 bool found +0000:0AF0 77 6F 72 64 20 3D 20 66-61 6C 73 65 3B 0A 20 20 word = false;◙ +0000:0B00 20 20 20 20 20 20 66 6F-72 20 28 75 69 6E 74 20 for (uint +0000:0B10 74 3D 31 3B 20 74 3C 3D-6E 75 6D 62 65 72 73 2E t=1; t<=numbers. 
+0000:0B20 6C 65 6E 67 74 68 3B 20-2B 2B 74 29 20 7B 0A 20 length; ++t) {◙ +0000:0B30 20 20 20 20 20 20 20 20-20 20 20 61 75 74 6F 20 auto +0000:0B40 61 6C 74 65 72 6E 61 74-69 76 65 73 20 3D 20 6E alternatives = n +0000:0B50 75 6D 62 65 72 73 5B 30-2E 2E 74 5D 20 69 6E 20 umbers[0..t] in +0000:0B60 6E 75 6D 32 77 6F 72 64-73 3B 0A 20 20 20 20 20 num2words;◙ +0000:0B70 20 20 20 20 20 20 20 69-66 20 28 21 61 6C 74 65 if (!alte +0000:0B80 72 6E 61 74 69 76 65 73-29 0A 20 20 20 20 20 20 rnatives)◙ +0000:0B90 20 20 20 20 20 20 20 20-20 20 63 6F 6E 74 69 6E contin +0000:0BA0 75 65 3B 0A 20 20 20 20-20 20 20 20 20 20 20 20 ue;◙ +0000:0BB0 66 6F 75 6E 64 77 6F 72-64 20 3D 20 74 72 75 65 foundword = true +0000:0BC0 3B 0A 20 20 20 20 20 20-20 20 20 20 20 20 69 66 ;◙ if +0000:0BD0 20 28 6E 75 6D 62 65 72-73 2E 6C 65 6E 67 74 68 (numbers.length +0000:0BE0 20 3E 20 20 74 29 20 7B-0A 20 20 20 20 20 20 20 > t) {◙ +0000:0BF0 20 20 20 20 20 20 20 20-20 2F 2F 20 43 6F 6D 62 // Comb +0000:0C00 69 6E 65 20 61 6C 6C 20-63 75 72 72 65 6E 74 20 ine all current +0000:0C10 61 6C 74 65 72 6E 61 74-69 76 65 73 20 77 69 74 alternatives wit +0000:0C20 68 20 61 6C 6C 20 61 6C-74 65 72 6E 61 74 69 76 h all alternativ +0000:0C30 65 73 20 20 20 20 20 0A-20 20 20 20 20 20 20 20 es ◙ +0000:0C40 20 20 20 20 20 20 20 20-2F 2F 20 6F 66 20 74 68 // of th +0000:0C50 65 20 72 65 73 74 20 28-6E 65 78 74 20 70 69 65 e rest (next pie +0000:0C60 63 65 20 63 61 6E 20 73-74 61 72 74 20 77 69 74 ce can start wit +0000:0C70 68 20 61 20 64 69 67 69-74 29 20 20 20 20 20 20 h a digit) +0000:0C80 20 20 20 20 20 20 20 20-0A 20 20 20 20 20 20 20 ◙ +0000:0C90 20 20 20 20 20 20 20 20-20 66 6F 72 65 61 63 68 foreach +0000:0CA0 20 28 61 32 3B 20 5F 46-69 6E 64 57 6F 72 64 73 (a2; _FindWords +0000:0CB0 28 20 6E 75 6D 62 65 72-73 5B 74 2E 2E 24 5D 2C ( numbers[t..$], +0000:0CC0 20 74 72 75 65 20 20 20-20 20 29 20 29 0A 20 20 true ) )◙ +0000:0CD0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:0CE0 20 20 66 6F 72 65 61 
63-68 28 61 31 3B 20 2A 61 foreach(a1; *a +0000:0CF0 6C 74 65 72 6E 61 74 69-76 65 73 29 0A 20 20 20 lternatives)◙ +0000:0D00 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:0D10 20 20 20 20 72 65 74 20-7E 3D 20 61 31 20 7E 20 ret ~= a1 ~ +0000:0D20 22 20 22 20 7E 20 61 32-3B 0A 20 20 20 20 20 20 " " ~ a2;◙ +0000:0D30 20 20 20 20 20 20 7D 0A-20 20 20 20 20 20 20 20 }◙ +0000:0D40 20 20 20 20 65 6C 73 65-20 20 20 20 0A 20 20 20 else ◙ +0000:0D50 20 20 20 20 20 20 20 20-20 20 20 20 20 72 65 74 ret +0000:0D60 20 7E 3D 20 2A 61 6C 74-65 72 6E 61 74 69 76 65 ~= *alternative +0000:0D70 73 3B 20 20 20 20 2F 2F-20 61 70 70 65 6E 64 20 s; // append +0000:0D80 74 68 65 73 65 20 61 6C-74 65 72 6E 61 74 69 76 these alternativ +0000:0D90 65 73 0A 20 20 20 20 20-20 20 20 7D 0A 20 20 20 es◙ }◙ +0000:0DA0 20 20 20 20 20 2F 2F 20-54 72 79 20 74 6F 20 6B // Try to k +0000:0DB0 65 65 70 20 31 20 64 69-67 69 74 2C 20 6F 6E 6C eep 1 digit, onl +0000:0DC0 79 20 69 66 20 77 65 27-72 65 20 61 6C 6C 6F 77 y if we're allow +0000:0DD0 65 64 20 61 6E 64 20 6E-6F 20 6F 74 68 65 72 0A ed and no other◙ +0000:0DE0 20 20 20 20 20 20 20 20-2F 2F 20 61 6C 74 65 72 // alter +0000:0DF0 6E 61 74 69 76 65 73 20-77 65 72 65 20 66 6F 75 natives were fou +0000:0E00 6E 64 0A 20 20 20 20 20-20 20 20 2F 2F 20 54 65 nd◙ // Te +0000:0E10 73 74 69 6E 67 20 22 72-65 74 2E 6C 65 6E 67 74 sting "ret.lengt +0000:0E20 68 22 20 6D 61 6B 65 73-20 6D 6F 72 65 20 73 65 h" makes more se +0000:0E30 6E 73 65 20 74 68 61 6E-20 74 65 73 74 69 6E 67 nse than testing +0000:0E40 20 22 66 6F 75 6E 64 77-6F 72 64 22 2C 0A 20 20 "foundword",◙ +0000:0E50 20 20 20 20 20 20 2F 2F-20 62 75 74 20 74 68 65 // but the +0000:0E60 20 6F 74 68 65 72 20 69-6D 70 6C 65 6D 65 6E 74 other implement +0000:0E70 61 74 69 6F 6E 73 20 73-65 65 6D 20 74 6F 20 64 ations seem to d +0000:0E80 6F 20 6A 75 73 74 20 74-68 69 73 2E 0A 20 20 20 o just this.◙ +0000:0E90 20 20 20 20 20 69 66 20-28 64 69 67 69 74 6F 6B if (digitok +0000:0EA0 20 26 26 20 21 
66 6F 75-6E 64 77 6F 72 64 29 20 && !foundword) +0000:0EB0 7B 20 2F 2F 72 65 74 2E-6C 65 6E 67 74 68 20 3D { //ret.length = +0000:0EC0 3D 20 30 20 20 0A 20 20-20 20 20 20 20 20 20 20 = 0 ◙ +0000:0ED0 20 20 69 66 28 6E 75 6D-62 65 72 73 2E 6C 65 6E if(numbers.len +0000:0EE0 67 74 68 20 3E 20 20 31-29 20 7B 0A 20 20 20 20 gth > 1) {◙ +0000:0EF0 20 20 20 20 20 20 20 20-20 20 20 20 2F 2F 20 43 // C +0000:0F00 6F 6D 62 69 6E 65 20 31-20 64 69 67 69 74 20 77 ombine 1 digit w +0000:0F10 69 74 68 20 61 6C 6C 20-61 6C 74 65 6E 61 74 69 ith all altenati +0000:0F20 76 65 73 20 66 72 6F 6D-20 74 68 65 20 72 65 73 ves from the res +0000:0F30 74 20 20 20 20 0A 20 20-20 20 20 20 20 20 20 20 t ◙ +0000:0F40 20 20 20 20 20 20 2F 2F-20 28 6E 65 78 74 20 70 // (next p +0000:0F50 69 65 63 65 20 63 61 6E-20 6E 6F 74 20 73 74 61 iece can not sta +0000:0F60 72 74 20 77 69 74 68 20-61 20 64 69 67 69 74 29 rt with a digit) +0000:0F70 20 20 20 20 20 20 20 20-20 20 0A 20 20 20 20 20 ◙ +0000:0F80 20 20 20 20 20 20 20 20-20 20 20 66 6F 72 65 61 forea +0000:0F90 63 68 20 28 61 3B 20 5F-46 69 6E 64 57 6F 72 64 ch (a; _FindWord +0000:0FA0 73 28 20 6E 75 6D 62 65-72 73 5B 31 2E 2E 24 5D s( numbers[1..$] +0000:0FB0 2C 20 66 61 6C 73 65 20-29 20 29 0A 20 20 20 20 , false ) )◙ +0000:0FC0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:0FD0 72 65 74 20 7E 3D 20 6E-75 6D 62 65 72 73 5B 30 ret ~= numbers[0 +0000:0FE0 2E 2E 31 5D 20 7E 20 22-20 22 20 7E 20 61 3B 0A ..1] ~ " " ~ a;◙ +0000:0FF0 20 20 20 20 20 20 20 20-20 20 20 20 7D 20 20 20 } +0000:1000 20 0A 20 20 20 20 20 20-20 20 20 20 20 20 65 6C ◙ el +0000:1010 73 65 20 20 20 20 0A 20-20 20 20 20 20 20 20 20 se ◙ +0000:1020 20 20 20 20 20 20 20 72-65 74 20 7E 3D 20 6E 75 ret ~= nu +0000:1030 6D 62 65 72 73 5B 30 2E-2E 31 5D 3B 20 20 20 20 mbers[0..1]; +0000:1040 2F 2F 20 6A 75 73 74 20-61 70 70 65 6E 64 20 74 // just append t +0000:1050 68 69 73 20 64 69 67 69-74 20 20 20 20 20 20 20 his digit +0000:1060 20 20 20 20 20 20 0A 20-20 20 20 20 20 20 
20 7D ◙ } +0000:1070 20 20 20 20 0A 20 20 20-20 20 20 20 20 72 65 74 ◙ ret +0000:1080 75 72 6E 20 72 65 74 3B-0A 20 20 20 20 7D 0A 0A urn ret;◙ }◙◙ +0000:1090 20 20 20 20 2F 2F 2F 20-28 54 68 69 73 20 66 75 /// (This fu +0000:10A0 6E 63 74 69 6F 6E 20 77-61 73 20 69 6E 6C 69 6E nction was inlin +0000:10B0 65 64 20 69 6E 20 74 68-65 20 6F 72 69 67 69 6E ed in the origin +0000:10C0 61 6C 20 70 72 6F 67 72-61 6D 29 20 0A 20 20 20 al program) ◙ +0000:10D0 20 2F 2F 2F 20 46 69 6E-64 73 20 61 6C 6C 20 61 /// Finds all a +0000:10E0 6C 74 65 72 6E 61 74 69-76 65 73 20 66 6F 72 20 lternatives for +0000:10F0 74 68 65 20 67 69 76 65-6E 20 70 68 6F 6E 65 20 the given phone +0000:1100 6E 75 6D 62 65 72 20 0A-20 20 20 20 2F 2F 2F 20 number ◙ /// +0000:1110 52 65 74 75 72 6E 73 3A-20 61 72 72 61 79 20 6F Returns: array o +0000:1120 66 20 73 74 72 69 6E 67-73 20 0A 20 20 20 20 73 f strings ◙ s +0000:1130 74 72 69 6E 67 61 72 72-61 79 20 46 69 6E 64 57 tringarray FindW +0000:1140 6F 72 64 73 28 20 73 74-72 69 6E 67 20 70 68 6F ords( string pho +0000:1150 6E 65 5F 6E 75 6D 62 65-72 20 29 0A 20 20 20 20 ne_number )◙ +0000:1160 7B 0A 20 20 20 20 20 20-20 20 69 66 20 28 21 70 {◙ if (!p +0000:1170 68 6F 6E 65 5F 6E 75 6D-62 65 72 2E 6C 65 6E 67 hone_number.leng +0000:1180 74 68 29 0A 20 20 20 20-20 20 20 20 20 20 20 20 th)◙ +0000:1190 72 65 74 75 72 6E 20 6E-75 6C 6C 3B 0A 20 20 20 return null;◙ +0000:11A0 20 20 20 20 20 2F 2F 20-53 74 72 69 70 20 74 68 // Strip th +0000:11B0 65 20 6E 6F 6E 2D 64 69-67 69 74 20 63 68 61 72 e non-digit char +0000:11C0 61 63 74 65 72 73 20 66-72 6F 6D 20 74 68 65 20 acters from the +0000:11D0 70 68 6F 6E 65 20 6E 75-6D 62 65 72 2C 20 61 6E phone number, an +0000:11E0 64 0A 20 20 20 20 20 20-20 20 2F 2F 20 70 61 73 d◙ // pas +0000:11F0 73 20 69 74 20 74 6F 20-74 68 65 20 72 65 63 75 s it to the recu +0000:1200 72 73 69 76 65 20 66 75-6E 63 74 69 6F 6E 20 28 rsive function ( +0000:1210 6C 65 61 64 69 6E 67 20-64 69 67 69 74 20 69 73 leading digit is 
+0000:1220 20 61 6C 6C 6F 77 65 64-29 0A 20 20 20 20 20 20 allowed)◙ +0000:1230 20 20 72 65 74 75 72 6E-20 5F 46 69 6E 64 57 6F return _FindWo +0000:1240 72 64 73 28 20 73 74 72-69 70 4E 6F 6E 44 69 67 rds( stripNonDig +0000:1250 69 74 28 70 68 6F 6E 65-5F 6E 75 6D 62 65 72 29 it(phone_number) +0000:1260 2C 20 74 72 75 65 20 29-3B 20 20 20 20 0A 20 20 , true ); ◙ +0000:1270 20 20 7D 20 20 20 20 0A-20 20 20 20 0A 20 20 20 } ◙ ◙ +0000:1280 20 2F 2F 20 52 65 61 64-20 74 68 65 20 70 68 6F // Read the pho +0000:1290 6E 65 20 6E 75 6D 62 65-72 73 20 20 20 20 20 0A ne numbers ◙ +0000:12A0 20 20 20 20 66 6F 72 65-61 63 68 28 73 74 72 69 foreach(stri +0000:12B0 6E 67 20 70 68 6F 6E 65-3B 20 6E 65 77 20 42 75 ng phone; new Bu +0000:12C0 66 66 65 72 65 64 46 69-6C 65 28 22 69 6E 70 75 fferedFile("inpu +0000:12D0 74 2E 74 78 74 22 20 20-20 29 20 29 0A 20 20 20 t.txt" ) )◙ +0000:12E0 20 20 20 20 20 66 6F 72-65 61 63 68 28 61 6C 74 foreach(alt +0000:12F0 65 72 6E 61 74 69 76 65-3B 20 46 69 6E 64 57 6F ernative; FindWo +0000:1300 72 64 73 28 20 70 68 6F-6E 65 20 29 20 29 0A 20 rds( phone ) )◙ +0000:1310 20 20 20 20 20 20 20 20-20 20 20 77 72 69 74 65 write +0000:1320 66 6C 6E 28 70 68 6F 6E-65 2C 20 22 3A 20 22 2C fln(phone, ": ", +0000:1330 20 61 6C 74 65 72 6E 61-74 69 76 65 20 29 3B 0A alternative );◙ +0000:1340 7D 0A 0A }◙◙ diff --git a/tests/examplefiles/hexdump_hd b/tests/examplefiles/hexdump_hd new file mode 100644 index 00000000..4af46fcb --- /dev/null +++ b/tests/examplefiles/hexdump_hd @@ -0,0 +1,310 @@ +00000000 2f 2f 20 43 72 65 61 74 65 64 20 62 79 20 4c 69 |// Created by Li| +00000010 6f 6e 65 6c 6c 6f 20 4c 75 6e 65 73 75 20 61 6e |onello Lunesu an| +00000020 64 20 70 6c 61 63 65 64 20 69 6e 20 74 68 65 20 |d placed in the | +00000030 70 75 62 6c 69 63 20 64 6f 6d 61 69 6e 2e 0a 2f |public domain../| +00000040 2f 20 54 68 69 73 20 66 69 6c 65 20 68 61 73 20 |/ This file has | +00000050 62 65 65 6e 20 6d 6f 64 69 66 69 65 64 20 66 72 |been modified fr| +00000060 6f 
6d 20 69 74 73 20 6f 72 69 67 69 6e 61 6c 20 |om its original | +00000070 76 65 72 73 69 6f 6e 2e 0a 2f 2f 20 49 74 20 68 |version..// It h| +00000080 61 73 20 62 65 65 6e 20 66 6f 72 6d 61 74 74 65 |as been formatte| +00000090 64 20 74 6f 20 66 69 74 20 79 6f 75 72 20 73 63 |d to fit your sc| +000000a0 72 65 65 6e 2e 0a 6d 6f 64 75 6c 65 20 70 68 6f |reen..module pho| +000000b0 6e 65 6e 6f 3b 20 20 20 20 20 2f 2f 20 6f 70 74 |neno; // opt| +000000c0 69 6f 6e 61 6c 0a 69 6d 70 6f 72 74 20 73 74 64 |ional.import std| +000000d0 2e 73 74 64 69 6f 3b 20 20 20 2f 2f 20 77 72 69 |.stdio; // wri| +000000e0 74 65 66 6c 6e 20 20 20 20 20 0a 69 6d 70 6f 72 |tefln .impor| +000000f0 74 20 73 74 64 2e 63 74 79 70 65 3b 20 20 20 2f |t std.ctype; /| +00000100 2f 20 69 73 64 69 67 69 74 20 20 20 20 20 0a 69 |/ isdigit .i| +00000110 6d 70 6f 72 74 20 73 74 64 2e 73 74 72 65 61 6d |mport std.stream| +00000120 3b 20 20 2f 2f 20 42 75 66 66 65 72 65 64 46 69 |; // BufferedFi| +00000130 6c 65 0a 0a 2f 2f 20 4a 75 73 74 20 66 6f 72 20 |le..// Just for | +00000140 72 65 61 64 61 62 69 6c 69 74 79 20 28 69 6d 61 |readability (ima| +00000150 67 69 6e 65 20 63 68 61 72 5b 5d 5b 5d 5b 63 68 |gine char[][][ch| +00000160 61 72 5b 5d 5d 29 20 20 20 20 0a 61 6c 69 61 73 |ar[]]) .alias| +00000170 20 63 68 61 72 5b 5d 20 73 74 72 69 6e 67 3b 0a | char[] string;.| +00000180 61 6c 69 61 73 20 73 74 72 69 6e 67 5b 5d 20 73 |alias string[] s| +00000190 74 72 69 6e 67 61 72 72 61 79 3b 0a 0a 2f 2f 2f |tringarray;..///| +000001a0 20 53 74 72 69 70 73 20 6e 6f 6e 2d 64 69 67 69 | Strips non-digi| +000001b0 74 20 63 68 61 72 61 63 74 65 72 73 20 66 72 6f |t characters fro| +000001c0 6d 20 74 68 65 20 73 74 72 69 6e 67 20 28 43 4f |m the string (CO| +000001d0 57 29 0a 73 74 72 69 6e 67 20 73 74 72 69 70 4e |W).string stripN| +000001e0 6f 6e 44 69 67 69 74 28 20 69 6e 20 73 74 72 69 |onDigit( in stri| +000001f0 6e 67 20 6c 69 6e 65 20 29 20 0a 7b 0a 20 20 20 |ng line ) .{. 
| +00000200 20 73 74 72 69 6e 67 20 72 65 74 3b 0a 20 20 20 | string ret;. | +00000210 20 66 6f 72 65 61 63 68 28 75 69 6e 74 20 69 2c | foreach(uint i,| +00000220 20 63 3b 20 6c 69 6e 65 29 20 7b 0a 20 20 20 20 | c; line) {. | +00000230 20 20 20 20 2f 2f 20 45 72 72 6f 72 3a 20 73 74 | // Error: st| +00000240 64 2e 63 74 79 70 65 2e 69 73 64 69 67 69 74 20 |d.ctype.isdigit | +00000250 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 5c 70 68 |at C:\dmd\src\ph| +00000260 6f 62 6f 73 5c 73 74 64 5c 63 74 79 70 65 2e 64 |obos\std\ctype.d| +00000270 28 33 37 29 20 0a 20 20 20 20 20 20 20 20 2f 2f |(37) . //| +00000280 20 63 6f 6e 66 6c 69 63 74 73 20 77 69 74 68 20 | conflicts with | +00000290 73 74 64 2e 73 74 72 65 61 6d 2e 69 73 64 69 67 |std.stream.isdig| +000002a0 69 74 20 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 |it at C:\dmd\src| +000002b0 5c 70 68 6f 62 6f 73 5c 73 74 64 5c 73 74 72 65 |\phobos\std\stre| +000002c0 61 6d 2e 64 28 32 39 32 34 29 0a 20 20 20 20 20 |am.d(2924). | +000002d0 20 20 20 69 66 20 28 21 73 74 64 2e 63 74 79 70 | if (!std.ctyp| +000002e0 65 2e 69 73 64 69 67 69 74 28 63 29 29 20 7b 0a |e.isdigit(c)) {.| +000002f0 20 20 20 20 20 20 20 20 20 20 20 20 69 66 20 28 | if (| +00000300 21 72 65 74 29 0a 20 20 20 20 20 20 20 20 20 20 |!ret). | +00000310 20 20 20 20 20 20 72 65 74 20 3d 20 6c 69 6e 65 | ret = line| +00000320 5b 30 2e 2e 69 5d 3b 20 20 20 20 0a 20 20 20 20 |[0..i]; . | +00000330 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 20 20 | } . | +00000340 20 20 65 6c 73 65 20 69 66 20 28 72 65 74 29 0a | else if (ret).| +00000350 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 | ret | +00000360 7e 3d 20 63 3b 20 20 20 20 0a 20 20 20 20 7d 20 |~= c; . } | +00000370 20 20 20 0a 20 20 20 20 72 65 74 75 72 6e 20 72 | . return r| +00000380 65 74 3f 72 65 74 3a 6c 69 6e 65 3b 0a 7d 0a 0a |et?ret:line;.}..| +00000390 75 6e 69 74 74 65 73 74 20 7b 0a 20 20 20 20 61 |unittest {. 
a| +000003a0 73 73 65 72 74 28 20 73 74 72 69 70 4e 6f 6e 44 |ssert( stripNonD| +000003b0 69 67 69 74 28 22 61 73 64 66 22 29 20 3d 3d 20 |igit("asdf") == | +000003c0 22 22 20 20 29 3b 0a 20 20 20 20 61 73 73 65 72 |"" );. asser| +000003d0 74 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 69 74 |t( stripNonDigit| +000003e0 28 22 5c 27 31 33 2d 3d 32 20 34 6b 6f 70 22 29 |("\'13-=2 4kop")| +000003f0 20 3d 3d 20 20 22 31 33 32 34 22 20 20 29 3b 0a | == "1324" );.| +00000400 7d 0a 0a 2f 2f 2f 20 43 6f 6e 76 65 72 74 73 20 |}../// Converts | +00000410 61 20 77 6f 72 64 20 69 6e 74 6f 20 61 20 6e 75 |a word into a nu| +00000420 6d 62 65 72 2c 20 69 67 6e 6f 72 69 6e 67 20 61 |mber, ignoring a| +00000430 6c 6c 20 6e 6f 6e 20 61 6c 70 68 61 20 63 68 61 |ll non alpha cha| +00000440 72 61 63 74 65 72 73 20 20 0a 73 74 72 69 6e 67 |racters .string| +00000450 20 77 6f 72 64 54 6f 4e 75 6d 28 20 69 6e 20 73 | wordToNum( in s| +00000460 74 72 69 6e 67 20 77 6f 72 64 20 29 0a 7b 0a 2f |tring word ).{./| +00000470 2f 20 74 72 61 6e 73 6c 61 74 69 6f 6e 20 74 61 |/ translation ta| +00000480 62 6c 65 20 66 6f 72 20 74 68 65 20 74 61 73 6b |ble for the task| +00000490 20 61 74 20 68 61 6e 64 0a 63 6f 6e 73 74 20 63 | at hand.const c| +000004a0 68 61 72 5b 32 35 36 5d 20 54 52 41 4e 53 4c 41 |har[256] TRANSLA| +000004b0 54 45 20 3d 20 20 20 20 0a 20 20 20 20 22 20 20 |TE = . " | +000004c0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +000004d0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 22 20 | " | +000004e0 20 2f 2f 20 30 20 20 20 0a 20 20 20 20 22 20 20 | // 0 . " | +000004f0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 30 31 | 01| +00000500 32 33 34 35 36 37 38 39 20 20 20 20 20 20 22 20 |23456789 " | +00000510 20 2f 2f 20 33 32 20 20 20 20 20 0a 20 20 20 20 | // 32 . | +00000520 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 31 |" 57630499617851| +00000530 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 20 |881234762239 | +00000540 20 22 20 20 2f 2f 20 36 34 20 20 20 0a 20 20 20 | " // 64 . 
| +00000550 20 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 | " 5763049961785| +00000560 31 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 |1881234762239 | +00000570 20 20 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 | ". " | +00000580 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000590 20 20 20 20 20 20 20 20 20 22 0a 20 20 20 20 22 | ". "| +000005a0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +* +000005c0 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 20 20 |". " | +000005d0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +000005e0 20 20 20 20 20 20 20 22 20 20 20 20 0a 20 20 20 | " . | +000005f0 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | " | +00000600 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000610 20 20 22 3b 0a 20 20 20 20 73 74 72 69 6e 67 20 | ";. string | +00000620 72 65 74 3b 0a 20 20 20 20 66 6f 72 65 61 63 68 |ret;. foreach| +00000630 28 63 3b 20 63 61 73 74 28 75 62 79 74 65 5b 5d |(c; cast(ubyte[]| +00000640 29 77 6f 72 64 29 0a 20 20 20 20 20 20 20 20 69 |)word). i| +00000650 66 20 28 54 52 41 4e 53 4c 41 54 45 5b 63 5d 20 |f (TRANSLATE[c] | +00000660 21 3d 20 27 20 27 29 0a 20 20 20 20 20 20 20 20 |!= ' '). | +00000670 20 20 20 20 72 65 74 20 7e 3d 20 54 52 41 4e 53 | ret ~= TRANS| +00000680 4c 41 54 45 5b 63 5d 3b 0a 20 20 20 20 72 65 74 |LATE[c];. ret| +00000690 75 72 6e 20 72 65 74 3b 0a 7d 0a 0a 75 6e 69 74 |urn ret;.}..unit| +000006a0 74 65 73 74 20 7b 0a 20 2f 2f 20 54 65 73 74 20 |test {. // Test | +000006b0 77 6f 72 64 54 6f 4e 75 6d 20 75 73 69 6e 67 20 |wordToNum using | +000006c0 74 68 65 20 74 61 62 6c 65 20 66 72 6f 6d 20 74 |the table from t| +000006d0 68 65 20 74 61 73 6b 20 64 65 73 63 72 69 70 74 |he task descript| +000006e0 69 6f 6e 2e 0a 20 61 73 73 65 72 74 28 20 22 30 |ion.. assert( "0| +000006f0 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 36 |1112223334455666| +00000700 37 37 37 38 38 38 39 39 39 22 20 3d 3d 0a 20 20 |777888999" ==. 
| +00000710 20 77 6f 72 64 54 6f 4e 75 6d 28 22 45 20 7c 20 | wordToNum("E | | +00000720 4a 20 4e 20 51 20 7c 20 52 20 57 20 58 20 7c 20 |J N Q | R W X | | +00000730 44 20 53 20 59 20 7c 20 46 20 54 20 7c 20 41 20 |D S Y | F T | A | +00000740 4d 20 7c 20 43 20 49 20 56 20 7c 20 42 20 4b 20 |M | C I V | B K | +00000750 55 20 7c 20 4c 20 4f 20 50 20 7c 20 47 20 48 20 |U | L O P | G H | +00000760 5a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 20 22 |Z"));. assert( "| +00000770 30 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 |0111222333445566| +00000780 36 37 37 37 38 38 38 39 39 39 22 20 3d 3d 20 0a |6777888999" == .| +00000790 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 65 20 | wordToNum("e | +000007a0 7c 20 6a 20 6e 20 71 20 7c 20 72 20 77 20 78 20 || j n q | r w x | +000007b0 7c 20 64 20 73 20 79 20 7c 20 66 20 74 20 7c 20 || d s y | f t | | +000007c0 61 20 6d 20 7c 20 63 20 69 20 76 20 7c 20 62 20 |a m | c i v | b | +000007d0 6b 20 75 20 7c 20 6c 20 6f 20 70 20 7c 20 67 20 |k u | l o p | g | +000007e0 68 20 7a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 |h z"));. assert(| +000007f0 20 22 30 31 32 33 34 35 36 37 38 39 22 20 3d 3d | "0123456789" ==| +00000800 20 0a 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 | . wordToNum("| +00000810 30 20 7c 20 20 20 31 20 20 20 7c 20 20 20 32 20 |0 | 1 | 2 | +00000820 20 20 7c 20 20 20 33 20 20 20 7c 20 20 34 20 20 | | 3 | 4 | +00000830 7c 20 20 35 20 20 7c 20 20 20 36 20 20 20 7c 20 || 5 | 6 | | +00000840 20 20 37 20 20 20 7c 20 20 20 38 20 20 20 7c 20 | 7 | 8 | | +00000850 20 20 39 22 29 29 3b 0a 7d 0a 0a 76 6f 69 64 20 | 9"));.}..void | +00000860 6d 61 69 6e 28 20 73 74 72 69 6e 67 5b 5d 20 61 |main( string[] a| +00000870 72 67 73 20 29 0a 7b 0a 20 20 20 20 2f 2f 20 54 |rgs ).{. 
// T| +00000880 68 69 73 20 61 73 73 6f 63 69 61 74 69 76 65 20 |his associative | +00000890 61 72 72 61 79 20 6d 61 70 73 20 61 20 6e 75 6d |array maps a num| +000008a0 62 65 72 20 74 6f 20 61 6e 20 61 72 72 61 79 20 |ber to an array | +000008b0 6f 66 20 77 6f 72 64 73 2e 20 20 20 20 0a 20 20 |of words. . | +000008c0 20 20 73 74 72 69 6e 67 61 72 72 61 79 5b 73 74 | stringarray[st| +000008d0 72 69 6e 67 5d 20 20 20 20 6e 75 6d 32 77 6f 72 |ring] num2wor| +000008e0 64 73 3b 0a 0a 20 20 20 20 66 6f 72 65 61 63 68 |ds;.. foreach| +000008f0 28 73 74 72 69 6e 67 20 77 6f 72 64 3b 20 6e 65 |(string word; ne| +00000900 77 20 42 75 66 66 65 72 65 64 46 69 6c 65 28 22 |w BufferedFile("| +00000910 64 69 63 74 69 6f 6e 61 72 79 2e 74 78 74 22 20 |dictionary.txt" | +00000920 29 20 29 0a 20 20 20 20 20 20 20 20 6e 75 6d 32 |) ). num2| +00000930 77 6f 72 64 73 5b 20 77 6f 72 64 54 6f 4e 75 6d |words[ wordToNum| +00000940 28 77 6f 72 64 29 20 5d 20 7e 3d 20 77 6f 72 64 |(word) ] ~= word| +00000950 2e 64 75 70 3b 20 20 20 20 20 20 20 20 2f 2f 20 |.dup; // | +00000960 6d 75 73 74 20 64 75 70 0a 0a 20 20 20 20 2f 2f |must dup.. //| +00000970 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 6c 74 65 |/ Finds all alte| +00000980 72 6e 61 74 69 76 65 73 20 66 6f 72 20 74 68 65 |rnatives for the| +00000990 20 67 69 76 65 6e 20 6e 75 6d 62 65 72 0a 20 20 | given number. | +000009a0 20 20 2f 2f 2f 20 28 73 68 6f 75 6c 64 20 68 61 | /// (should ha| +000009b0 76 65 20 62 65 65 6e 20 73 74 72 69 70 70 65 64 |ve been stripped| +000009c0 20 66 72 6f 6d 20 6e 6f 6e 2d 64 69 67 69 74 20 | from non-digit | +000009d0 63 68 61 72 61 63 74 65 72 73 29 0a 20 20 20 20 |characters). | +000009e0 73 74 72 69 6e 67 61 72 72 61 79 20 5f 46 69 6e |stringarray _Fin| +000009f0 64 57 6f 72 64 73 28 20 73 74 72 69 6e 67 20 6e |dWords( string n| +00000a00 75 6d 62 65 72 73 2c 20 62 6f 6f 6c 20 64 69 67 |umbers, bool dig| +00000a10 69 74 6f 6b 20 29 0a 20 20 20 20 69 6e 20 7b 0a |itok ). 
in {.| +00000a20 20 20 20 20 20 20 20 20 61 73 73 65 72 74 28 6e | assert(n| +00000a30 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 20 3e 20 |umbers.length > | +00000a40 20 30 29 3b 20 20 20 20 0a 20 20 20 20 7d 20 20 | 0); . } | +00000a50 20 20 0a 20 20 20 20 6f 75 74 28 72 65 73 75 6c | . out(resul| +00000a60 74 29 20 7b 0a 20 20 20 20 20 20 20 20 66 6f 72 |t) {. for| +00000a70 65 61 63 68 20 28 61 3b 20 72 65 73 75 6c 74 29 |each (a; result)| +00000a80 0a 20 20 20 20 20 20 20 20 20 20 20 20 61 73 73 |. ass| +00000a90 65 72 74 28 20 77 6f 72 64 54 6f 4e 75 6d 28 61 |ert( wordToNum(a| +00000aa0 29 20 3d 3d 20 6e 75 6d 62 65 72 73 20 29 3b 0a |) == numbers );.| +00000ab0 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 62 6f | } . bo| +00000ac0 64 79 20 7b 0a 20 20 20 20 20 20 20 20 73 74 72 |dy {. str| +00000ad0 69 6e 67 61 72 72 61 79 20 72 65 74 3b 0a 20 20 |ingarray ret;. | +00000ae0 20 20 20 20 20 20 62 6f 6f 6c 20 66 6f 75 6e 64 | bool found| +00000af0 77 6f 72 64 20 3d 20 66 61 6c 73 65 3b 0a 20 20 |word = false;. | +00000b00 20 20 20 20 20 20 66 6f 72 20 28 75 69 6e 74 20 | for (uint | +00000b10 74 3d 31 3b 20 74 3c 3d 6e 75 6d 62 65 72 73 2e |t=1; t<=numbers.| +00000b20 6c 65 6e 67 74 68 3b 20 2b 2b 74 29 20 7b 0a 20 |length; ++t) {. | +00000b30 20 20 20 20 20 20 20 20 20 20 20 61 75 74 6f 20 | auto | +00000b40 61 6c 74 65 72 6e 61 74 69 76 65 73 20 3d 20 6e |alternatives = n| +00000b50 75 6d 62 65 72 73 5b 30 2e 2e 74 5d 20 69 6e 20 |umbers[0..t] in | +00000b60 6e 75 6d 32 77 6f 72 64 73 3b 0a 20 20 20 20 20 |num2words;. | +00000b70 20 20 20 20 20 20 20 69 66 20 28 21 61 6c 74 65 | if (!alte| +00000b80 72 6e 61 74 69 76 65 73 29 0a 20 20 20 20 20 20 |rnatives). | +00000b90 20 20 20 20 20 20 20 20 20 20 63 6f 6e 74 69 6e | contin| +00000ba0 75 65 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 |ue;. | +00000bb0 66 6f 75 6e 64 77 6f 72 64 20 3d 20 74 72 75 65 |foundword = true| +00000bc0 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 69 66 |;. 
if| +00000bd0 20 28 6e 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 | (numbers.length| +00000be0 20 3e 20 20 74 29 20 7b 0a 20 20 20 20 20 20 20 | > t) {. | +00000bf0 20 20 20 20 20 20 20 20 20 2f 2f 20 43 6f 6d 62 | // Comb| +00000c00 69 6e 65 20 61 6c 6c 20 63 75 72 72 65 6e 74 20 |ine all current | +00000c10 61 6c 74 65 72 6e 61 74 69 76 65 73 20 77 69 74 |alternatives wit| +00000c20 68 20 61 6c 6c 20 61 6c 74 65 72 6e 61 74 69 76 |h all alternativ| +00000c30 65 73 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 |es . | +00000c40 20 20 20 20 20 20 20 20 2f 2f 20 6f 66 20 74 68 | // of th| +00000c50 65 20 72 65 73 74 20 28 6e 65 78 74 20 70 69 65 |e rest (next pie| +00000c60 63 65 20 63 61 6e 20 73 74 61 72 74 20 77 69 74 |ce can start wit| +00000c70 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 |h a digit) | +00000c80 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 | . | +00000c90 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 63 68 | foreach| +00000ca0 20 28 61 32 3b 20 5f 46 69 6e 64 57 6f 72 64 73 | (a2; _FindWords| +00000cb0 28 20 6e 75 6d 62 65 72 73 5b 74 2e 2e 24 5d 2c |( numbers[t..$],| +00000cc0 20 74 72 75 65 20 20 20 20 20 29 20 29 0a 20 20 | true ) ). | +00000cd0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000ce0 20 20 66 6f 72 65 61 63 68 28 61 31 3b 20 2a 61 | foreach(a1; *a| +00000cf0 6c 74 65 72 6e 61 74 69 76 65 73 29 0a 20 20 20 |lternatives). | +00000d00 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000d10 20 20 20 20 72 65 74 20 7e 3d 20 61 31 20 7e 20 | ret ~= a1 ~ | +00000d20 22 20 22 20 7e 20 61 32 3b 0a 20 20 20 20 20 20 |" " ~ a2;. | +00000d30 20 20 20 20 20 20 7d 0a 20 20 20 20 20 20 20 20 | }. | +00000d40 20 20 20 20 65 6c 73 65 20 20 20 20 0a 20 20 20 | else . 
| +00000d50 20 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 | ret| +00000d60 20 7e 3d 20 2a 61 6c 74 65 72 6e 61 74 69 76 65 | ~= *alternative| +00000d70 73 3b 20 20 20 20 2f 2f 20 61 70 70 65 6e 64 20 |s; // append | +00000d80 74 68 65 73 65 20 61 6c 74 65 72 6e 61 74 69 76 |these alternativ| +00000d90 65 73 0a 20 20 20 20 20 20 20 20 7d 0a 20 20 20 |es. }. | +00000da0 20 20 20 20 20 2f 2f 20 54 72 79 20 74 6f 20 6b | // Try to k| +00000db0 65 65 70 20 31 20 64 69 67 69 74 2c 20 6f 6e 6c |eep 1 digit, onl| +00000dc0 79 20 69 66 20 77 65 27 72 65 20 61 6c 6c 6f 77 |y if we're allow| +00000dd0 65 64 20 61 6e 64 20 6e 6f 20 6f 74 68 65 72 0a |ed and no other.| +00000de0 20 20 20 20 20 20 20 20 2f 2f 20 61 6c 74 65 72 | // alter| +00000df0 6e 61 74 69 76 65 73 20 77 65 72 65 20 66 6f 75 |natives were fou| +00000e00 6e 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 54 65 |nd. // Te| +00000e10 73 74 69 6e 67 20 22 72 65 74 2e 6c 65 6e 67 74 |sting "ret.lengt| +00000e20 68 22 20 6d 61 6b 65 73 20 6d 6f 72 65 20 73 65 |h" makes more se| +00000e30 6e 73 65 20 74 68 61 6e 20 74 65 73 74 69 6e 67 |nse than testing| +00000e40 20 22 66 6f 75 6e 64 77 6f 72 64 22 2c 0a 20 20 | "foundword",. | +00000e50 20 20 20 20 20 20 2f 2f 20 62 75 74 20 74 68 65 | // but the| +00000e60 20 6f 74 68 65 72 20 69 6d 70 6c 65 6d 65 6e 74 | other implement| +00000e70 61 74 69 6f 6e 73 20 73 65 65 6d 20 74 6f 20 64 |ations seem to d| +00000e80 6f 20 6a 75 73 74 20 74 68 69 73 2e 0a 20 20 20 |o just this.. | +00000e90 20 20 20 20 20 69 66 20 28 64 69 67 69 74 6f 6b | if (digitok| +00000ea0 20 26 26 20 21 66 6f 75 6e 64 77 6f 72 64 29 20 | && !foundword) | +00000eb0 7b 20 2f 2f 72 65 74 2e 6c 65 6e 67 74 68 20 3d |{ //ret.length =| +00000ec0 3d 20 30 20 20 0a 20 20 20 20 20 20 20 20 20 20 |= 0 . | +00000ed0 20 20 69 66 28 6e 75 6d 62 65 72 73 2e 6c 65 6e | if(numbers.len| +00000ee0 67 74 68 20 3e 20 20 31 29 20 7b 0a 20 20 20 20 |gth > 1) {. 
| +00000ef0 20 20 20 20 20 20 20 20 20 20 20 20 2f 2f 20 43 | // C| +00000f00 6f 6d 62 69 6e 65 20 31 20 64 69 67 69 74 20 77 |ombine 1 digit w| +00000f10 69 74 68 20 61 6c 6c 20 61 6c 74 65 6e 61 74 69 |ith all altenati| +00000f20 76 65 73 20 66 72 6f 6d 20 74 68 65 20 72 65 73 |ves from the res| +00000f30 74 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 20 |t . | +00000f40 20 20 20 20 20 20 2f 2f 20 28 6e 65 78 74 20 70 | // (next p| +00000f50 69 65 63 65 20 63 61 6e 20 6e 6f 74 20 73 74 61 |iece can not sta| +00000f60 72 74 20 77 69 74 68 20 61 20 64 69 67 69 74 29 |rt with a digit)| +00000f70 20 20 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 | . | +00000f80 20 20 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 | forea| +00000f90 63 68 20 28 61 3b 20 5f 46 69 6e 64 57 6f 72 64 |ch (a; _FindWord| +00000fa0 73 28 20 6e 75 6d 62 65 72 73 5b 31 2e 2e 24 5d |s( numbers[1..$]| +00000fb0 2c 20 66 61 6c 73 65 20 29 20 29 0a 20 20 20 20 |, false ) ). | +00000fc0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000fd0 72 65 74 20 7e 3d 20 6e 75 6d 62 65 72 73 5b 30 |ret ~= numbers[0| +00000fe0 2e 2e 31 5d 20 7e 20 22 20 22 20 7e 20 61 3b 0a |..1] ~ " " ~ a;.| +00000ff0 20 20 20 20 20 20 20 20 20 20 20 20 7d 20 20 20 | } | +00001000 20 0a 20 20 20 20 20 20 20 20 20 20 20 20 65 6c | . el| +00001010 73 65 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 |se . | +00001020 20 20 20 20 20 20 20 72 65 74 20 7e 3d 20 6e 75 | ret ~= nu| +00001030 6d 62 65 72 73 5b 30 2e 2e 31 5d 3b 20 20 20 20 |mbers[0..1]; | +00001040 2f 2f 20 6a 75 73 74 20 61 70 70 65 6e 64 20 74 |// just append t| +00001050 68 69 73 20 64 69 67 69 74 20 20 20 20 20 20 20 |his digit | +00001060 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 7d | . }| +00001070 20 20 20 20 0a 20 20 20 20 20 20 20 20 72 65 74 | . ret| +00001080 75 72 6e 20 72 65 74 3b 0a 20 20 20 20 7d 0a 0a |urn ret;. 
}..| +00001090 20 20 20 20 2f 2f 2f 20 28 54 68 69 73 20 66 75 | /// (This fu| +000010a0 6e 63 74 69 6f 6e 20 77 61 73 20 69 6e 6c 69 6e |nction was inlin| +000010b0 65 64 20 69 6e 20 74 68 65 20 6f 72 69 67 69 6e |ed in the origin| +000010c0 61 6c 20 70 72 6f 67 72 61 6d 29 20 0a 20 20 20 |al program) . | +000010d0 20 2f 2f 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 | /// Finds all a| +000010e0 6c 74 65 72 6e 61 74 69 76 65 73 20 66 6f 72 20 |lternatives for | +000010f0 74 68 65 20 67 69 76 65 6e 20 70 68 6f 6e 65 20 |the given phone | +00001100 6e 75 6d 62 65 72 20 0a 20 20 20 20 2f 2f 2f 20 |number . /// | +00001110 52 65 74 75 72 6e 73 3a 20 61 72 72 61 79 20 6f |Returns: array o| +00001120 66 20 73 74 72 69 6e 67 73 20 0a 20 20 20 20 73 |f strings . s| +00001130 74 72 69 6e 67 61 72 72 61 79 20 46 69 6e 64 57 |tringarray FindW| +00001140 6f 72 64 73 28 20 73 74 72 69 6e 67 20 70 68 6f |ords( string pho| +00001150 6e 65 5f 6e 75 6d 62 65 72 20 29 0a 20 20 20 20 |ne_number ). | +00001160 7b 0a 20 20 20 20 20 20 20 20 69 66 20 28 21 70 |{. if (!p| +00001170 68 6f 6e 65 5f 6e 75 6d 62 65 72 2e 6c 65 6e 67 |hone_number.leng| +00001180 74 68 29 0a 20 20 20 20 20 20 20 20 20 20 20 20 |th). | +00001190 72 65 74 75 72 6e 20 6e 75 6c 6c 3b 0a 20 20 20 |return null;. | +000011a0 20 20 20 20 20 2f 2f 20 53 74 72 69 70 20 74 68 | // Strip th| +000011b0 65 20 6e 6f 6e 2d 64 69 67 69 74 20 63 68 61 72 |e non-digit char| +000011c0 61 63 74 65 72 73 20 66 72 6f 6d 20 74 68 65 20 |acters from the | +000011d0 70 68 6f 6e 65 20 6e 75 6d 62 65 72 2c 20 61 6e |phone number, an| +000011e0 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 70 61 73 |d. // pas| +000011f0 73 20 69 74 20 74 6f 20 74 68 65 20 72 65 63 75 |s it to the recu| +00001200 72 73 69 76 65 20 66 75 6e 63 74 69 6f 6e 20 28 |rsive function (| +00001210 6c 65 61 64 69 6e 67 20 64 69 67 69 74 20 69 73 |leading digit is| +00001220 20 61 6c 6c 6f 77 65 64 29 0a 20 20 20 20 20 20 | allowed). 
| +00001230 20 20 72 65 74 75 72 6e 20 5f 46 69 6e 64 57 6f | return _FindWo| +00001240 72 64 73 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 |rds( stripNonDig| +00001250 69 74 28 70 68 6f 6e 65 5f 6e 75 6d 62 65 72 29 |it(phone_number)| +00001260 2c 20 74 72 75 65 20 29 3b 20 20 20 20 0a 20 20 |, true ); . | +00001270 20 20 7d 20 20 20 20 0a 20 20 20 20 0a 20 20 20 | } . . | +00001280 20 2f 2f 20 52 65 61 64 20 74 68 65 20 70 68 6f | // Read the pho| +00001290 6e 65 20 6e 75 6d 62 65 72 73 20 20 20 20 20 0a |ne numbers .| +000012a0 20 20 20 20 66 6f 72 65 61 63 68 28 73 74 72 69 | foreach(stri| +000012b0 6e 67 20 70 68 6f 6e 65 3b 20 6e 65 77 20 42 75 |ng phone; new Bu| +000012c0 66 66 65 72 65 64 46 69 6c 65 28 22 69 6e 70 75 |fferedFile("inpu| +000012d0 74 2e 74 78 74 22 20 20 20 29 20 29 0a 20 20 20 |t.txt" ) ). | +000012e0 20 20 20 20 20 66 6f 72 65 61 63 68 28 61 6c 74 | foreach(alt| +000012f0 65 72 6e 61 74 69 76 65 3b 20 46 69 6e 64 57 6f |ernative; FindWo| +00001300 72 64 73 28 20 70 68 6f 6e 65 20 29 20 29 0a 20 |rds( phone ) ). | +00001310 20 20 20 20 20 20 20 20 20 20 20 77 72 69 74 65 | write| +00001320 66 6c 6e 28 70 68 6f 6e 65 2c 20 22 3a 20 22 2c |fln(phone, ": ",| +00001330 20 61 6c 74 65 72 6e 61 74 69 76 65 20 29 3b 0a | alternative );.| +00001340 7d 0a 0a |}..| +00001343 diff --git a/tests/examplefiles/hexdump_hexcat b/tests/examplefiles/hexdump_hexcat new file mode 100644 index 00000000..522074cf --- /dev/null +++ b/tests/examplefiles/hexdump_hexcat @@ -0,0 +1,247 @@ +00000000 2F 2F 20 43 72 65 61 74 65 64 20 62 79 20 4C 69 6F 6E 65 6C //.Created.by.Lionel +00000014 6C 6F 20 4C 75 6E 65 73 75 20 61 6E 64 20 70 6C 61 63 65 64 lo.Lunesu.and.placed +00000028 20 69 6E 20 74 68 65 20 70 75 62 6C 69 63 20 64 6F 6D 61 69 .in.the.public.domai +0000003C 6E 2E 0A 2F 2F 20 54 68 69 73 20 66 69 6C 65 20 68 61 73 20 n..//.This.file.has. 
+00000050 62 65 65 6E 20 6D 6F 64 69 66 69 65 64 20 66 72 6F 6D 20 69 been.modified.from.i +00000064 74 73 20 6F 72 69 67 69 6E 61 6C 20 76 65 72 73 69 6F 6E 2E ts.original.version. +00000078 0A 2F 2F 20 49 74 20 68 61 73 20 62 65 65 6E 20 66 6F 72 6D .//.It.has.been.form +0000008C 61 74 74 65 64 20 74 6F 20 66 69 74 20 79 6F 75 72 20 73 63 atted.to.fit.your.sc +000000A0 72 65 65 6E 2E 0A 6D 6F 64 75 6C 65 20 70 68 6F 6E 65 6E 6F reen..module.phoneno +000000B4 3B 20 20 20 20 20 2F 2F 20 6F 70 74 69 6F 6E 61 6C 0A 69 6D ;.....//.optional.im +000000C8 70 6F 72 74 20 73 74 64 2E 73 74 64 69 6F 3B 20 20 20 2F 2F port.std.stdio;...// +000000DC 20 77 72 69 74 65 66 6C 6E 20 20 20 20 20 0A 69 6D 70 6F 72 .writefln......impor +000000F0 74 20 73 74 64 2E 63 74 79 70 65 3B 20 20 20 2F 2F 20 69 73 t.std.ctype;...//.is +00000104 64 69 67 69 74 20 20 20 20 20 0A 69 6D 70 6F 72 74 20 73 74 digit......import.st +00000118 64 2E 73 74 72 65 61 6D 3B 20 20 2F 2F 20 42 75 66 66 65 72 d.stream;..//.Buffer +0000012C 65 64 46 69 6C 65 0A 0A 2F 2F 20 4A 75 73 74 20 66 6F 72 20 edFile..//.Just.for. +00000140 72 65 61 64 61 62 69 6C 69 74 79 20 28 69 6D 61 67 69 6E 65 readability.(imagine +00000154 20 63 68 61 72 5B 5D 5B 5D 5B 63 68 61 72 5B 5D 5D 29 20 20 .char[][][char[]]).. 
+00000168 20 20 0A 61 6C 69 61 73 20 63 68 61 72 5B 5D 20 73 74 72 69 ...alias.char[].stri +0000017C 6E 67 3B 0A 61 6C 69 61 73 20 73 74 72 69 6E 67 5B 5D 20 73 ng;.alias.string[].s +00000190 74 72 69 6E 67 61 72 72 61 79 3B 0A 0A 2F 2F 2F 20 53 74 72 tringarray;..///.Str +000001A4 69 70 73 20 6E 6F 6E 2D 64 69 67 69 74 20 63 68 61 72 61 63 ips.non-digit.charac +000001B8 74 65 72 73 20 66 72 6F 6D 20 74 68 65 20 73 74 72 69 6E 67 ters.from.the.string +000001CC 20 28 43 4F 57 29 0A 73 74 72 69 6E 67 20 73 74 72 69 70 4E .(COW).string.stripN +000001E0 6F 6E 44 69 67 69 74 28 20 69 6E 20 73 74 72 69 6E 67 20 6C onDigit(.in.string.l +000001F4 69 6E 65 20 29 20 0A 7B 0A 20 20 20 20 73 74 72 69 6E 67 20 ine.)..{.....string. +00000208 72 65 74 3B 0A 20 20 20 20 66 6F 72 65 61 63 68 28 75 69 6E ret;.....foreach(uin +0000021C 74 20 69 2C 20 63 3B 20 6C 69 6E 65 29 20 7B 0A 20 20 20 20 t.i,.c;.line).{..... +00000230 20 20 20 20 2F 2F 20 45 72 72 6F 72 3A 20 73 74 64 2E 63 74 ....//.Error:.std.ct +00000244 79 70 65 2E 69 73 64 69 67 69 74 20 61 74 20 43 3A 5C 64 6D ype.isdigit.at.C:\dm +00000258 64 5C 73 72 63 5C 70 68 6F 62 6F 73 5C 73 74 64 5C 63 74 79 d\src\phobos\std\cty +0000026C 70 65 2E 64 28 33 37 29 20 0A 20 20 20 20 20 20 20 20 2F 2F pe.d(37)..........// +00000280 20 63 6F 6E 66 6C 69 63 74 73 20 77 69 74 68 20 73 74 64 2E .conflicts.with.std. +00000294 73 74 72 65 61 6D 2E 69 73 64 69 67 69 74 20 61 74 20 43 3A stream.isdigit.at.C: +000002A8 5C 64 6D 64 5C 73 72 63 5C 70 68 6F 62 6F 73 5C 73 74 64 5C \dmd\src\phobos\std\ +000002BC 73 74 72 65 61 6D 2E 64 28 32 39 32 34 29 0A 20 20 20 20 20 stream.d(2924)...... +000002D0 20 20 20 69 66 20 28 21 73 74 64 2E 63 74 79 70 65 2E 69 73 ...if.(!std.ctype.is +000002E4 64 69 67 69 74 28 63 29 29 20 7B 0A 20 20 20 20 20 20 20 20 digit(c)).{......... +000002F8 20 20 20 20 69 66 20 28 21 72 65 74 29 0A 20 20 20 20 20 20 ....if.(!ret)....... 
+0000030C 20 20 20 20 20 20 20 20 20 20 72 65 74 20 3D 20 6C 69 6E 65 ..........ret.=.line +00000320 5B 30 2E 2E 69 5D 3B 20 20 20 20 0A 20 20 20 20 20 20 20 20 [0..i];............. +00000334 7D 20 20 20 20 0A 20 20 20 20 20 20 20 20 65 6C 73 65 20 69 }.............else.i +00000348 66 20 28 72 65 74 29 0A 20 20 20 20 20 20 20 20 20 20 20 20 f.(ret)............. +0000035C 72 65 74 20 7E 3D 20 63 3B 20 20 20 20 0A 20 20 20 20 7D 20 ret.~=.c;.........}. +00000370 20 20 20 0A 20 20 20 20 72 65 74 75 72 6E 20 72 65 74 3F 72 ........return.ret?r +00000384 65 74 3A 6C 69 6E 65 3B 0A 7D 0A 0A 75 6E 69 74 74 65 73 74 et:line;.}..unittest +00000398 20 7B 0A 20 20 20 20 61 73 73 65 72 74 28 20 73 74 72 69 70 .{.....assert(.strip +000003AC 4E 6F 6E 44 69 67 69 74 28 22 61 73 64 66 22 29 20 3D 3D 20 NonDigit("asdf").==. +000003C0 22 22 20 20 29 3B 0A 20 20 20 20 61 73 73 65 72 74 28 20 73 ""..);.....assert(.s +000003D4 74 72 69 70 4E 6F 6E 44 69 67 69 74 28 22 5C 27 31 33 2D 3D tripNonDigit("\'13-= +000003E8 32 20 34 6B 6F 70 22 29 20 3D 3D 20 20 22 31 33 32 34 22 20 2.4kop").==.."1324". +000003FC 20 29 3B 0A 7D 0A 0A 2F 2F 2F 20 43 6F 6E 76 65 72 74 73 20 .);.}..///.Converts. +00000410 61 20 77 6F 72 64 20 69 6E 74 6F 20 61 20 6E 75 6D 62 65 72 a.word.into.a.number +00000424 2C 20 69 67 6E 6F 72 69 6E 67 20 61 6C 6C 20 6E 6F 6E 20 61 ,.ignoring.all.non.a +00000438 6C 70 68 61 20 63 68 61 72 61 63 74 65 72 73 20 20 0A 73 74 lpha.characters...st +0000044C 72 69 6E 67 20 77 6F 72 64 54 6F 4E 75 6D 28 20 69 6E 20 73 ring.wordToNum(.in.s +00000460 74 72 69 6E 67 20 77 6F 72 64 20 29 0A 7B 0A 2F 2F 20 74 72 tring.word.).{.//.tr +00000474 61 6E 73 6C 61 74 69 6F 6E 20 74 61 62 6C 65 20 66 6F 72 20 anslation.table.for. 
+00000488 74 68 65 20 74 61 73 6B 20 61 74 20 68 61 6E 64 0A 63 6F 6E the.task.at.hand.con +0000049C 73 74 20 63 68 61 72 5B 32 35 36 5D 20 54 52 41 4E 53 4C 41 st.char[256].TRANSLA +000004B0 54 45 20 3D 20 20 20 20 0A 20 20 20 20 22 20 20 20 20 20 20 TE.=........."...... +000004C4 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .................... +000004D8 20 20 20 20 20 20 22 20 20 2F 2F 20 30 20 20 20 0A 20 20 20 ......"..//.0....... +000004EC 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 30 31 ."................01 +00000500 32 33 34 35 36 37 38 39 20 20 20 20 20 20 22 20 20 2F 2F 20 23456789......"..//. +00000514 33 32 20 20 20 20 20 0A 20 20 20 20 22 20 35 37 36 33 30 34 32..........".576304 +00000528 39 39 36 31 37 38 35 31 38 38 31 32 33 34 37 36 32 32 33 39 99617851881234762239 +0000053C 20 20 20 20 20 22 20 20 2F 2F 20 36 34 20 20 20 0A 20 20 20 ....."..//.64....... +00000550 20 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 31 38 38 31 .".57630499617851881 +00000564 32 33 34 37 36 32 32 33 39 20 20 20 20 20 22 0A 20 20 20 20 234762239....."..... +00000578 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 "................... +0000058C 20 20 20 20 20 20 20 20 20 20 20 20 20 22 0A 20 20 20 20 22 ............."....." +000005A0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .................... +000005B4 20 20 20 20 20 20 20 20 20 20 20 20 22 0A 20 20 20 20 22 20 ............".....". +000005C8 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .................... +000005DC 20 20 20 20 20 20 20 20 20 20 20 22 20 20 20 20 0A 20 20 20 ..........."........ +000005F0 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .".................. +00000604 20 20 20 20 20 20 20 20 20 20 20 20 20 20 22 3B 0A 20 20 20 ..............";.... 
+00000618 20 73 74 72 69 6E 67 20 72 65 74 3B 0A 20 20 20 20 66 6F 72 .string.ret;.....for +0000062C 65 61 63 68 28 63 3B 20 63 61 73 74 28 75 62 79 74 65 5B 5D each(c;.cast(ubyte[] +00000640 29 77 6F 72 64 29 0A 20 20 20 20 20 20 20 20 69 66 20 28 54 )word).........if.(T +00000654 52 41 4E 53 4C 41 54 45 5B 63 5D 20 21 3D 20 27 20 27 29 0A RANSLATE[c].!=.'.'). +00000668 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 7E 3D 20 54 ............ret.~=.T +0000067C 52 41 4E 53 4C 41 54 45 5B 63 5D 3B 0A 20 20 20 20 72 65 74 RANSLATE[c];.....ret +00000690 75 72 6E 20 72 65 74 3B 0A 7D 0A 0A 75 6E 69 74 74 65 73 74 urn.ret;.}..unittest +000006A4 20 7B 0A 20 2F 2F 20 54 65 73 74 20 77 6F 72 64 54 6F 4E 75 .{..//.Test.wordToNu +000006B8 6D 20 75 73 69 6E 67 20 74 68 65 20 74 61 62 6C 65 20 66 72 m.using.the.table.fr +000006CC 6F 6D 20 74 68 65 20 74 61 73 6B 20 64 65 73 63 72 69 70 74 om.the.task.descript +000006E0 69 6F 6E 2E 0A 20 61 73 73 65 72 74 28 20 22 30 31 31 31 32 ion...assert(."01112 +000006F4 32 32 33 33 33 34 34 35 35 36 36 36 37 37 37 38 38 38 39 39 22333445566677788899 +00000708 39 22 20 3D 3D 0A 20 20 20 77 6F 72 64 54 6F 4E 75 6D 28 22 9".==....wordToNum(" +0000071C 45 20 7C 20 4A 20 4E 20 51 20 7C 20 52 20 57 20 58 20 7C 20 E.|.J.N.Q.|.R.W.X.|. +00000730 44 20 53 20 59 20 7C 20 46 20 54 20 7C 20 41 20 4D 20 7C 20 D.S.Y.|.F.T.|.A.M.|. +00000744 43 20 49 20 56 20 7C 20 42 20 4B 20 55 20 7C 20 4C 20 4F 20 C.I.V.|.B.K.U.|.L.O. +00000758 50 20 7C 20 47 20 48 20 5A 22 29 29 3B 0A 20 61 73 73 65 72 P.|.G.H.Z"));..asser +0000076C 74 28 20 22 30 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 t(."0111222333445566 +00000780 36 37 37 37 38 38 38 39 39 39 22 20 3D 3D 20 0A 20 20 20 77 6777888999".==.....w +00000794 6F 72 64 54 6F 4E 75 6D 28 22 65 20 7C 20 6A 20 6E 20 71 20 ordToNum("e.|.j.n.q. +000007A8 7C 20 72 20 77 20 78 20 7C 20 64 20 73 20 79 20 7C 20 66 20 |.r.w.x.|.d.s.y.|.f. 
+000007BC 74 20 7C 20 61 20 6D 20 7C 20 63 20 69 20 76 20 7C 20 62 20 t.|.a.m.|.c.i.v.|.b. +000007D0 6B 20 75 20 7C 20 6C 20 6F 20 70 20 7C 20 67 20 68 20 7A 22 k.u.|.l.o.p.|.g.h.z" +000007E4 29 29 3B 0A 20 61 73 73 65 72 74 28 20 22 30 31 32 33 34 35 ));..assert(."012345 +000007F8 36 37 38 39 22 20 3D 3D 20 0A 20 20 20 77 6F 72 64 54 6F 4E 6789".==.....wordToN +0000080C 75 6D 28 22 30 20 7C 20 20 20 31 20 20 20 7C 20 20 20 32 20 um("0.|...1...|...2. +00000820 20 20 7C 20 20 20 33 20 20 20 7C 20 20 34 20 20 7C 20 20 35 ..|...3...|..4..|..5 +00000834 20 20 7C 20 20 20 36 20 20 20 7C 20 20 20 37 20 20 20 7C 20 ..|...6...|...7...|. +00000848 20 20 38 20 20 20 7C 20 20 20 39 22 29 29 3B 0A 7D 0A 0A 76 ..8...|...9"));.}..v +0000085C 6F 69 64 20 6D 61 69 6E 28 20 73 74 72 69 6E 67 5B 5D 20 61 oid.main(.string[].a +00000870 72 67 73 20 29 0A 7B 0A 20 20 20 20 2F 2F 20 54 68 69 73 20 rgs.).{.....//.This. +00000884 61 73 73 6F 63 69 61 74 69 76 65 20 61 72 72 61 79 20 6D 61 associative.array.ma +00000898 70 73 20 61 20 6E 75 6D 62 65 72 20 74 6F 20 61 6E 20 61 72 ps.a.number.to.an.ar +000008AC 72 61 79 20 6F 66 20 77 6F 72 64 73 2E 20 20 20 20 0A 20 20 ray.of.words........ +000008C0 20 20 73 74 72 69 6E 67 61 72 72 61 79 5B 73 74 72 69 6E 67 ..stringarray[string +000008D4 5D 20 20 20 20 6E 75 6D 32 77 6F 72 64 73 3B 0A 0A 20 20 20 ]....num2words;..... +000008E8 20 66 6F 72 65 61 63 68 28 73 74 72 69 6E 67 20 77 6F 72 64 .foreach(string.word +000008FC 3B 20 6E 65 77 20 42 75 66 66 65 72 65 64 46 69 6C 65 28 22 ;.new.BufferedFile(" +00000910 64 69 63 74 69 6F 6E 61 72 79 2E 74 78 74 22 20 29 20 29 0A dictionary.txt".).). +00000924 20 20 20 20 20 20 20 20 6E 75 6D 32 77 6F 72 64 73 5B 20 77 ........num2words[.w +00000938 6F 72 64 54 6F 4E 75 6D 28 77 6F 72 64 29 20 5D 20 7E 3D 20 ordToNum(word).].~=. +0000094C 77 6F 72 64 2E 64 75 70 3B 20 20 20 20 20 20 20 20 2F 2F 20 word.dup;........//. 
+00000960 6D 75 73 74 20 64 75 70 0A 0A 20 20 20 20 2F 2F 2F 20 46 69 must.dup......///.Fi +00000974 6E 64 73 20 61 6C 6C 20 61 6C 74 65 72 6E 61 74 69 76 65 73 nds.all.alternatives +00000988 20 66 6F 72 20 74 68 65 20 67 69 76 65 6E 20 6E 75 6D 62 65 .for.the.given.numbe +0000099C 72 0A 20 20 20 20 2F 2F 2F 20 28 73 68 6F 75 6C 64 20 68 61 r.....///.(should.ha +000009B0 76 65 20 62 65 65 6E 20 73 74 72 69 70 70 65 64 20 66 72 6F ve.been.stripped.fro +000009C4 6D 20 6E 6F 6E 2D 64 69 67 69 74 20 63 68 61 72 61 63 74 65 m.non-digit.characte +000009D8 72 73 29 0A 20 20 20 20 73 74 72 69 6E 67 61 72 72 61 79 20 rs).....stringarray. +000009EC 5F 46 69 6E 64 57 6F 72 64 73 28 20 73 74 72 69 6E 67 20 6E _FindWords(.string.n +00000A00 75 6D 62 65 72 73 2C 20 62 6F 6F 6C 20 64 69 67 69 74 6F 6B umbers,.bool.digitok +00000A14 20 29 0A 20 20 20 20 69 6E 20 7B 0A 20 20 20 20 20 20 20 20 .).....in.{......... +00000A28 61 73 73 65 72 74 28 6E 75 6D 62 65 72 73 2E 6C 65 6E 67 74 assert(numbers.lengt +00000A3C 68 20 3E 20 20 30 29 3B 20 20 20 20 0A 20 20 20 20 7D 20 20 h.>..0);.........}.. +00000A50 20 20 0A 20 20 20 20 6F 75 74 28 72 65 73 75 6C 74 29 20 7B .......out(result).{ +00000A64 0A 20 20 20 20 20 20 20 20 66 6F 72 65 61 63 68 20 28 61 3B .........foreach.(a; +00000A78 20 72 65 73 75 6C 74 29 0A 20 20 20 20 20 20 20 20 20 20 20 .result)............ +00000A8C 20 61 73 73 65 72 74 28 20 77 6F 72 64 54 6F 4E 75 6D 28 61 .assert(.wordToNum(a +00000AA0 29 20 3D 3D 20 6E 75 6D 62 65 72 73 20 29 3B 0A 20 20 20 20 ).==.numbers.);..... +00000AB4 7D 20 20 20 20 0A 20 20 20 20 62 6F 64 79 20 7B 0A 20 20 20 }.........body.{.... +00000AC8 20 20 20 20 20 73 74 72 69 6E 67 61 72 72 61 79 20 72 65 74 .....stringarray.ret +00000ADC 3B 0A 20 20 20 20 20 20 20 20 62 6F 6F 6C 20 66 6F 75 6E 64 ;.........bool.found +00000AF0 77 6F 72 64 20 3D 20 66 61 6C 73 65 3B 0A 20 20 20 20 20 20 word.=.false;....... 
+00000B04 20 20 66 6F 72 20 28 75 69 6E 74 20 74 3D 31 3B 20 74 3C 3D ..for.(uint.t=1;.t<= +00000B18 6E 75 6D 62 65 72 73 2E 6C 65 6E 67 74 68 3B 20 2B 2B 74 29 numbers.length;.++t) +00000B2C 20 7B 0A 20 20 20 20 20 20 20 20 20 20 20 20 61 75 74 6F 20 .{.............auto. +00000B40 61 6C 74 65 72 6E 61 74 69 76 65 73 20 3D 20 6E 75 6D 62 65 alternatives.=.numbe +00000B54 72 73 5B 30 2E 2E 74 5D 20 69 6E 20 6E 75 6D 32 77 6F 72 64 rs[0..t].in.num2word +00000B68 73 3B 0A 20 20 20 20 20 20 20 20 20 20 20 20 69 66 20 28 21 s;.............if.(! +00000B7C 61 6C 74 65 72 6E 61 74 69 76 65 73 29 0A 20 20 20 20 20 20 alternatives)....... +00000B90 20 20 20 20 20 20 20 20 20 20 63 6F 6E 74 69 6E 75 65 3B 0A ..........continue;. +00000BA4 20 20 20 20 20 20 20 20 20 20 20 20 66 6F 75 6E 64 77 6F 72 ............foundwor +00000BB8 64 20 3D 20 74 72 75 65 3B 0A 20 20 20 20 20 20 20 20 20 20 d.=.true;........... +00000BCC 20 20 69 66 20 28 6E 75 6D 62 65 72 73 2E 6C 65 6E 67 74 68 ..if.(numbers.length +00000BE0 20 3E 20 20 74 29 20 7B 0A 20 20 20 20 20 20 20 20 20 20 20 .>..t).{............ +00000BF4 20 20 20 20 20 2F 2F 20 43 6F 6D 62 69 6E 65 20 61 6C 6C 20 .....//.Combine.all. +00000C08 63 75 72 72 65 6E 74 20 61 6C 74 65 72 6E 61 74 69 76 65 73 current.alternatives +00000C1C 20 77 69 74 68 20 61 6C 6C 20 61 6C 74 65 72 6E 61 74 69 76 .with.all.alternativ +00000C30 65 73 20 20 20 20 20 0A 20 20 20 20 20 20 20 20 20 20 20 20 es.................. +00000C44 20 20 20 20 2F 2F 20 6F 66 20 74 68 65 20 72 65 73 74 20 28 ....//.of.the.rest.( +00000C58 6E 65 78 74 20 70 69 65 63 65 20 63 61 6E 20 73 74 61 72 74 next.piece.can.start +00000C6C 20 77 69 74 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 .with.a.digit)...... +00000C80 20 20 20 20 20 20 20 20 0A 20 20 20 20 20 20 20 20 20 20 20 .................... 
+00000C94 20 20 20 20 20 66 6F 72 65 61 63 68 20 28 61 32 3B 20 5F 46 .....foreach.(a2;._F +00000CA8 69 6E 64 57 6F 72 64 73 28 20 6E 75 6D 62 65 72 73 5B 74 2E indWords(.numbers[t. +00000CBC 2E 24 5D 2C 20 74 72 75 65 20 20 20 20 20 29 20 29 0A 20 20 .$],.true.....).)... +00000CD0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 66 6F ..................fo +00000CE4 72 65 61 63 68 28 61 31 3B 20 2A 61 6C 74 65 72 6E 61 74 69 reach(a1;.*alternati +00000CF8 76 65 73 29 0A 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 ves)................ +00000D0C 20 20 20 20 20 20 20 20 72 65 74 20 7E 3D 20 61 31 20 7E 20 ........ret.~=.a1.~. +00000D20 22 20 22 20 7E 20 61 32 3B 0A 20 20 20 20 20 20 20 20 20 20 ".".~.a2;........... +00000D34 20 20 7D 0A 20 20 20 20 20 20 20 20 20 20 20 20 65 6C 73 65 ..}.............else +00000D48 20 20 20 20 0A 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .................... +00000D5C 20 72 65 74 20 7E 3D 20 2A 61 6C 74 65 72 6E 61 74 69 76 65 .ret.~=.*alternative +00000D70 73 3B 20 20 20 20 2F 2F 20 61 70 70 65 6E 64 20 74 68 65 73 s;....//.append.thes +00000D84 65 20 61 6C 74 65 72 6E 61 74 69 76 65 73 0A 20 20 20 20 20 e.alternatives...... +00000D98 20 20 20 7D 0A 20 20 20 20 20 20 20 20 2F 2F 20 54 72 79 20 ...}.........//.Try. +00000DAC 74 6F 20 6B 65 65 70 20 31 20 64 69 67 69 74 2C 20 6F 6E 6C to.keep.1.digit,.onl +00000DC0 79 20 69 66 20 77 65 27 72 65 20 61 6C 6C 6F 77 65 64 20 61 y.if.we're.allowed.a +00000DD4 6E 64 20 6E 6F 20 6F 74 68 65 72 0A 20 20 20 20 20 20 20 20 nd.no.other......... 
+00000DE8 2F 2F 20 61 6C 74 65 72 6E 61 74 69 76 65 73 20 77 65 72 65 //.alternatives.were +00000DFC 20 66 6F 75 6E 64 0A 20 20 20 20 20 20 20 20 2F 2F 20 54 65 .found.........//.Te +00000E10 73 74 69 6E 67 20 22 72 65 74 2E 6C 65 6E 67 74 68 22 20 6D sting."ret.length".m +00000E24 61 6B 65 73 20 6D 6F 72 65 20 73 65 6E 73 65 20 74 68 61 6E akes.more.sense.than +00000E38 20 74 65 73 74 69 6E 67 20 22 66 6F 75 6E 64 77 6F 72 64 22 .testing."foundword" +00000E4C 2C 0A 20 20 20 20 20 20 20 20 2F 2F 20 62 75 74 20 74 68 65 ,.........//.but.the +00000E60 20 6F 74 68 65 72 20 69 6D 70 6C 65 6D 65 6E 74 61 74 69 6F .other.implementatio +00000E74 6E 73 20 73 65 65 6D 20 74 6F 20 64 6F 20 6A 75 73 74 20 74 ns.seem.to.do.just.t +00000E88 68 69 73 2E 0A 20 20 20 20 20 20 20 20 69 66 20 28 64 69 67 his..........if.(dig +00000E9C 69 74 6F 6B 20 26 26 20 21 66 6F 75 6E 64 77 6F 72 64 29 20 itok.&&.!foundword). +00000EB0 7B 20 2F 2F 72 65 74 2E 6C 65 6E 67 74 68 20 3D 3D 20 30 20 {.//ret.length.==.0. +00000EC4 20 0A 20 20 20 20 20 20 20 20 20 20 20 20 69 66 28 6E 75 6D ..............if(num +00000ED8 62 65 72 73 2E 6C 65 6E 67 74 68 20 3E 20 20 31 29 20 7B 0A bers.length.>..1).{. +00000EEC 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 2F 2F 20 43 ................//.C +00000F00 6F 6D 62 69 6E 65 20 31 20 64 69 67 69 74 20 77 69 74 68 20 ombine.1.digit.with. +00000F14 61 6C 6C 20 61 6C 74 65 6E 61 74 69 76 65 73 20 66 72 6F 6D all.altenatives.from +00000F28 20 74 68 65 20 72 65 73 74 20 20 20 20 0A 20 20 20 20 20 20 .the.rest........... +00000F3C 20 20 20 20 20 20 20 20 20 20 2F 2F 20 28 6E 65 78 74 20 70 ..........//.(next.p +00000F50 69 65 63 65 20 63 61 6E 20 6E 6F 74 20 73 74 61 72 74 20 77 iece.can.not.start.w +00000F64 69 74 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 20 20 ith.a.digit)........ 
+00000F78 20 20 0A 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 66 ...................f +00000F8C 6F 72 65 61 63 68 20 28 61 3B 20 5F 46 69 6E 64 57 6F 72 64 oreach.(a;._FindWord +00000FA0 73 28 20 6E 75 6D 62 65 72 73 5B 31 2E 2E 24 5D 2C 20 66 61 s(.numbers[1..$],.fa +00000FB4 6C 73 65 20 29 20 29 0A 20 20 20 20 20 20 20 20 20 20 20 20 lse.).)............. +00000FC8 20 20 20 20 20 20 20 20 72 65 74 20 7E 3D 20 6E 75 6D 62 65 ........ret.~=.numbe +00000FDC 72 73 5B 30 2E 2E 31 5D 20 7E 20 22 20 22 20 7E 20 61 3B 0A rs[0..1].~.".".~.a;. +00000FF0 20 20 20 20 20 20 20 20 20 20 20 20 7D 20 20 20 20 0A 20 20 ............}....... +00001004 20 20 20 20 20 20 20 20 20 20 65 6C 73 65 20 20 20 20 0A 20 ..........else...... +00001018 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 7E ...............ret.~ +0000102C 3D 20 6E 75 6D 62 65 72 73 5B 30 2E 2E 31 5D 3B 20 20 20 20 =.numbers[0..1];.... +00001040 2F 2F 20 6A 75 73 74 20 61 70 70 65 6E 64 20 74 68 69 73 20 //.just.append.this. +00001054 64 69 67 69 74 20 20 20 20 20 20 20 20 20 20 20 20 20 0A 20 digit............... +00001068 20 20 20 20 20 20 20 7D 20 20 20 20 0A 20 20 20 20 20 20 20 .......}............ +0000107C 20 72 65 74 75 72 6E 20 72 65 74 3B 0A 20 20 20 20 7D 0A 0A .return.ret;.....}.. +00001090 20 20 20 20 2F 2F 2F 20 28 54 68 69 73 20 66 75 6E 63 74 69 ....///.(This.functi +000010A4 6F 6E 20 77 61 73 20 69 6E 6C 69 6E 65 64 20 69 6E 20 74 68 on.was.inlined.in.th +000010B8 65 20 6F 72 69 67 69 6E 61 6C 20 70 72 6F 67 72 61 6D 29 20 e.original.program). +000010CC 0A 20 20 20 20 2F 2F 2F 20 46 69 6E 64 73 20 61 6C 6C 20 61 .....///.Finds.all.a +000010E0 6C 74 65 72 6E 61 74 69 76 65 73 20 66 6F 72 20 74 68 65 20 lternatives.for.the. +000010F4 67 69 76 65 6E 20 70 68 6F 6E 65 20 6E 75 6D 62 65 72 20 0A given.phone.number.. 
+00001108 20 20 20 20 2F 2F 2F 20 52 65 74 75 72 6E 73 3A 20 61 72 72 ....///.Returns:.arr +0000111C 61 79 20 6F 66 20 73 74 72 69 6E 67 73 20 0A 20 20 20 20 73 ay.of.strings......s +00001130 74 72 69 6E 67 61 72 72 61 79 20 46 69 6E 64 57 6F 72 64 73 tringarray.FindWords +00001144 28 20 73 74 72 69 6E 67 20 70 68 6F 6E 65 5F 6E 75 6D 62 65 (.string.phone_numbe +00001158 72 20 29 0A 20 20 20 20 7B 0A 20 20 20 20 20 20 20 20 69 66 r.).....{.........if +0000116C 20 28 21 70 68 6F 6E 65 5F 6E 75 6D 62 65 72 2E 6C 65 6E 67 .(!phone_number.leng +00001180 74 68 29 0A 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 75 th).............retu +00001194 72 6E 20 6E 75 6C 6C 3B 0A 20 20 20 20 20 20 20 20 2F 2F 20 rn.null;.........//. +000011A8 53 74 72 69 70 20 74 68 65 20 6E 6F 6E 2D 64 69 67 69 74 20 Strip.the.non-digit. +000011BC 63 68 61 72 61 63 74 65 72 73 20 66 72 6F 6D 20 74 68 65 20 characters.from.the. +000011D0 70 68 6F 6E 65 20 6E 75 6D 62 65 72 2C 20 61 6E 64 0A 20 20 phone.number,.and... +000011E4 20 20 20 20 20 20 2F 2F 20 70 61 73 73 20 69 74 20 74 6F 20 ......//.pass.it.to. +000011F8 74 68 65 20 72 65 63 75 72 73 69 76 65 20 66 75 6E 63 74 69 the.recursive.functi +0000120C 6F 6E 20 28 6C 65 61 64 69 6E 67 20 64 69 67 69 74 20 69 73 on.(leading.digit.is +00001220 20 61 6C 6C 6F 77 65 64 29 0A 20 20 20 20 20 20 20 20 72 65 .allowed).........re +00001234 74 75 72 6E 20 5F 46 69 6E 64 57 6F 72 64 73 28 20 73 74 72 turn._FindWords(.str +00001248 69 70 4E 6F 6E 44 69 67 69 74 28 70 68 6F 6E 65 5F 6E 75 6D ipNonDigit(phone_num +0000125C 62 65 72 29 2C 20 74 72 75 65 20 29 3B 20 20 20 20 0A 20 20 ber),.true.);....... +00001270 20 20 7D 20 20 20 20 0A 20 20 20 20 0A 20 20 20 20 2F 2F 20 ..}..............//. 
+00001284 52 65 61 64 20 74 68 65 20 70 68 6F 6E 65 20 6E 75 6D 62 65 Read.the.phone.numbe +00001298 72 73 20 20 20 20 20 0A 20 20 20 20 66 6F 72 65 61 63 68 28 rs..........foreach( +000012AC 73 74 72 69 6E 67 20 70 68 6F 6E 65 3B 20 6E 65 77 20 42 75 string.phone;.new.Bu +000012C0 66 66 65 72 65 64 46 69 6C 65 28 22 69 6E 70 75 74 2E 74 78 fferedFile("input.tx +000012D4 74 22 20 20 20 29 20 29 0A 20 20 20 20 20 20 20 20 66 6F 72 t"...).).........for +000012E8 65 61 63 68 28 61 6C 74 65 72 6E 61 74 69 76 65 3B 20 46 69 each(alternative;.Fi +000012FC 6E 64 57 6F 72 64 73 28 20 70 68 6F 6E 65 20 29 20 29 0A 20 ndWords(.phone.).).. +00001310 20 20 20 20 20 20 20 20 20 20 20 77 72 69 74 65 66 6C 6E 28 ...........writefln( +00001324 70 68 6F 6E 65 2C 20 22 3A 20 22 2C 20 61 6C 74 65 72 6E 61 phone,.":.",.alterna +00001338 74 69 76 65 20 29 3B 0A 7D 0A 0A tive.);.}.. diff --git a/tests/examplefiles/hexdump_hexdump b/tests/examplefiles/hexdump_hexdump new file mode 100644 index 00000000..06c2e861 --- /dev/null +++ b/tests/examplefiles/hexdump_hexdump @@ -0,0 +1,310 @@ +0000000 2f2f 4320 6572 7461 6465 6220 2079 694c +0000010 6e6f 6c65 6f6c 4c20 6e75 7365 2075 6e61 +0000020 2064 6c70 6361 6465 6920 206e 6874 2065 +0000030 7570 6c62 6369 6420 6d6f 6961 2e6e 2f0a +0000040 202f 6854 7369 6620 6c69 2065 6168 2073 +0000050 6562 6e65 6d20 646f 6669 6569 2064 7266 +0000060 6d6f 6920 7374 6f20 6972 6967 616e 206c +0000070 6576 7372 6f69 2e6e 2f0a 202f 7449 6820 +0000080 7361 6220 6565 206e 6f66 6d72 7461 6574 +0000090 2064 6f74 6620 7469 7920 756f 2072 6373 +00000a0 6572 6e65 0a2e 6f6d 7564 656c 7020 6f68 +00000b0 656e 6f6e 203b 2020 2020 2f2f 6f20 7470 +00000c0 6f69 616e 0a6c 6d69 6f70 7472 7320 6474 +00000d0 732e 6474 6f69 203b 2020 2f2f 7720 6972 +00000e0 6574 6c66 206e 2020 2020 690a 706d 726f +00000f0 2074 7473 2e64 7463 7079 3b65 2020 2f20 +0000100 202f 7369 6964 6967 2074 2020 2020 690a +0000110 706d 726f 2074 7473 2e64 7473 6572 6d61 +0000120 203b 2f20 202f 7542 6666 7265 
6465 6946 +0000130 656c 0a0a 2f2f 4a20 7375 2074 6f66 2072 +0000140 6572 6461 6261 6c69 7469 2079 6928 616d +0000150 6967 656e 6320 6168 5b72 5b5d 5b5d 6863 +0000160 7261 5d5b 295d 2020 2020 610a 696c 7361 +0000170 6320 6168 5b72 205d 7473 6972 676e 0a3b +0000180 6c61 6169 2073 7473 6972 676e 5d5b 7320 +0000190 7274 6e69 6167 7272 7961 0a3b 2f0a 2f2f +00001a0 5320 7274 7069 2073 6f6e 2d6e 6964 6967 +00001b0 2074 6863 7261 6361 6574 7372 6620 6f72 +00001c0 206d 6874 2065 7473 6972 676e 2820 4f43 +00001d0 2957 730a 7274 6e69 2067 7473 6972 4e70 +00001e0 6e6f 6944 6967 2874 6920 206e 7473 6972 +00001f0 676e 6c20 6e69 2065 2029 7b0a 200a 2020 +0000200 7320 7274 6e69 2067 6572 3b74 200a 2020 +0000210 6620 726f 6165 6863 7528 6e69 2074 2c69 +0000220 6320 203b 696c 656e 2029 0a7b 2020 2020 +0000230 2020 2020 2f2f 4520 7272 726f 203a 7473 +0000240 2e64 7463 7079 2e65 7369 6964 6967 2074 +0000250 7461 4320 5c3a 6d64 5c64 7273 5c63 6870 +0000260 626f 736f 735c 6474 635c 7974 6570 642e +0000270 3328 2937 0a20 2020 2020 2020 2020 2f2f +0000280 6320 6e6f 6c66 6369 7374 7720 7469 2068 +0000290 7473 2e64 7473 6572 6d61 692e 6473 6769 +00002a0 7469 6120 2074 3a43 645c 646d 735c 6372 +00002b0 705c 6f68 6f62 5c73 7473 5c64 7473 6572 +00002c0 6d61 642e 3228 3239 2934 200a 2020 2020 +00002d0 2020 6920 2066 2128 7473 2e64 7463 7079 +00002e0 2e65 7369 6964 6967 2874 2963 2029 0a7b +00002f0 2020 2020 2020 2020 2020 2020 6669 2820 +0000300 7221 7465 0a29 2020 2020 2020 2020 2020 +0000310 2020 2020 2020 6572 2074 203d 696c 656e +0000320 305b 2e2e 5d69 203b 2020 0a20 2020 2020 +0000330 2020 2020 207d 2020 0a20 2020 2020 2020 +0000340 2020 6c65 6573 6920 2066 7228 7465 0a29 +0000350 2020 2020 2020 2020 2020 2020 6572 2074 +0000360 3d7e 6320 203b 2020 0a20 2020 2020 207d +0000370 2020 0a20 2020 2020 6572 7574 6e72 7220 +0000380 7465 723f 7465 6c3a 6e69 3b65 7d0a 0a0a +0000390 6e75 7469 6574 7473 7b20 200a 2020 6120 +00003a0 7373 7265 2874 7320 7274 7069 6f4e 446e +00003b0 6769 7469 2228 7361 
6664 2922 3d20 203d +00003c0 2222 2020 3b29 200a 2020 6120 7373 7265 +00003d0 2874 7320 7274 7069 6f4e 446e 6769 7469 +00003e0 2228 275c 3331 3d2d 2032 6b34 706f 2922 +00003f0 3d20 203d 2220 3331 3432 2022 2920 0a3b +0000400 0a7d 2f0a 2f2f 4320 6e6f 6576 7472 2073 +0000410 2061 6f77 6472 6920 746e 206f 2061 756e +0000420 626d 7265 202c 6769 6f6e 6972 676e 6120 +0000430 6c6c 6e20 6e6f 6120 706c 6168 6320 6168 +0000440 6172 7463 7265 2073 0a20 7473 6972 676e +0000450 7720 726f 5464 4e6f 6d75 2028 6e69 7320 +0000460 7274 6e69 2067 6f77 6472 2920 7b0a 2f0a +0000470 202f 7274 6e61 6c73 7461 6f69 206e 6174 +0000480 6c62 2065 6f66 2072 6874 2065 6174 6b73 +0000490 6120 2074 6168 646e 630a 6e6f 7473 6320 +00004a0 6168 5b72 3532 5d36 5420 4152 534e 414c +00004b0 4554 3d20 2020 2020 200a 2020 2220 2020 +00004c0 2020 2020 2020 2020 2020 2020 2020 2020 +00004d0 2020 2020 2020 2020 2020 2020 2020 2022 +00004e0 2f20 202f 2030 2020 200a 2020 2220 2020 +00004f0 2020 2020 2020 2020 2020 2020 2020 3130 +0000500 3332 3534 3736 3938 2020 2020 2020 2022 +0000510 2f20 202f 3233 2020 2020 0a20 2020 2020 +0000520 2022 3735 3336 3430 3939 3136 3837 3135 +0000530 3838 3231 3433 3637 3232 3933 2020 2020 +0000540 2220 2020 2f2f 3620 2034 2020 200a 2020 +0000550 2220 3520 3637 3033 3934 3639 3731 3538 +0000560 3831 3138 3332 3734 3236 3332 2039 2020 +0000570 2020 0a22 2020 2020 2022 2020 2020 2020 +0000580 2020 2020 2020 2020 2020 2020 2020 2020 +0000590 2020 2020 2020 2020 2220 200a 2020 2220 +00005a0 2020 2020 2020 2020 2020 2020 2020 2020 +* +00005c0 0a22 2020 2020 2022 2020 2020 2020 2020 +00005d0 2020 2020 2020 2020 2020 2020 2020 2020 +00005e0 2020 2020 2020 2220 2020 2020 200a 2020 +00005f0 2220 2020 2020 2020 2020 2020 2020 2020 +0000600 2020 2020 2020 2020 2020 2020 2020 2020 +0000610 2020 3b22 200a 2020 7320 7274 6e69 2067 +0000620 6572 3b74 200a 2020 6620 726f 6165 6863 +0000630 6328 203b 6163 7473 7528 7962 6574 5d5b +0000640 7729 726f 2964 200a 2020 2020 2020 6920 +0000650 2066 
5428 4152 534e 414c 4554 635b 205d +0000660 3d21 2720 2720 0a29 2020 2020 2020 2020 +0000670 2020 2020 6572 2074 3d7e 5420 4152 534e +0000680 414c 4554 635b 3b5d 200a 2020 7220 7465 +0000690 7275 206e 6572 3b74 7d0a 0a0a 6e75 7469 +00006a0 6574 7473 7b20 200a 2f2f 5420 7365 2074 +00006b0 6f77 6472 6f54 754e 206d 7375 6e69 2067 +00006c0 6874 2065 6174 6c62 2065 7266 6d6f 7420 +00006d0 6568 7420 7361 206b 6564 6373 6972 7470 +00006e0 6f69 2e6e 200a 7361 6573 7472 2028 3022 +00006f0 3131 3231 3232 3333 3433 3534 3635 3636 +0000700 3737 3837 3838 3939 2239 3d20 0a3d 2020 +0000710 7720 726f 5464 4e6f 6d75 2228 2045 207c +0000720 204a 204e 2051 207c 2052 2057 2058 207c +0000730 2044 2053 2059 207c 2046 2054 207c 2041 +0000740 204d 207c 2043 2049 2056 207c 2042 204b +0000750 2055 207c 204c 204f 2050 207c 2047 2048 +0000760 225a 2929 0a3b 6120 7373 7265 2874 2220 +0000770 3130 3131 3232 3332 3333 3434 3535 3636 +0000780 3736 3737 3838 3938 3939 2022 3d3d 0a20 +0000790 2020 7720 726f 5464 4e6f 6d75 2228 2065 +00007a0 207c 206a 206e 2071 207c 2072 2077 2078 +00007b0 207c 2064 2073 2079 207c 2066 2074 207c +00007c0 2061 206d 207c 2063 2069 2076 207c 2062 +00007d0 206b 2075 207c 206c 206f 2070 207c 2067 +00007e0 2068 227a 2929 0a3b 6120 7373 7265 2874 +00007f0 2220 3130 3332 3534 3736 3938 2022 3d3d +0000800 0a20 2020 7720 726f 5464 4e6f 6d75 2228 +0000810 2030 207c 2020 2031 2020 207c 2020 2032 +0000820 2020 207c 2020 2033 2020 207c 3420 2020 +0000830 207c 3520 2020 207c 2020 2036 2020 207c +0000840 2020 2037 2020 207c 2020 2038 2020 207c +0000850 2020 2239 2929 0a3b 0a7d 760a 696f 2064 +0000860 616d 6e69 2028 7473 6972 676e 5d5b 6120 +0000870 6772 2073 0a29 0a7b 2020 2020 2f2f 5420 +0000880 6968 2073 7361 6f73 6963 7461 7669 2065 +0000890 7261 6172 2079 616d 7370 6120 6e20 6d75 +00008a0 6562 2072 6f74 6120 206e 7261 6172 2079 +00008b0 666f 7720 726f 7364 202e 2020 0a20 2020 +00008c0 2020 7473 6972 676e 7261 6172 5b79 7473 +00008d0 6972 676e 205d 2020 6e20 6d75 7732 726f 
+00008e0 7364 0a3b 200a 2020 6620 726f 6165 6863 +00008f0 7328 7274 6e69 2067 6f77 6472 203b 656e +0000900 2077 7542 6666 7265 6465 6946 656c 2228 +0000910 6964 7463 6f69 616e 7972 742e 7478 2022 +0000920 2029 0a29 2020 2020 2020 2020 756e 326d +0000930 6f77 6472 5b73 7720 726f 5464 4e6f 6d75 +0000940 7728 726f 2964 5d20 7e20 203d 6f77 6472 +0000950 642e 7075 203b 2020 2020 2020 2f20 202f +0000960 756d 7473 6420 7075 0a0a 2020 2020 2f2f +0000970 202f 6946 646e 2073 6c61 206c 6c61 6574 +0000980 6e72 7461 7669 7365 6620 726f 7420 6568 +0000990 6720 7669 6e65 6e20 6d75 6562 0a72 2020 +00009a0 2020 2f2f 202f 7328 6f68 6c75 2064 6168 +00009b0 6576 6220 6565 206e 7473 6972 7070 6465 +00009c0 6620 6f72 206d 6f6e 2d6e 6964 6967 2074 +00009d0 6863 7261 6361 6574 7372 0a29 2020 2020 +00009e0 7473 6972 676e 7261 6172 2079 465f 6e69 +00009f0 5764 726f 7364 2028 7473 6972 676e 6e20 +0000a00 6d75 6562 7372 202c 6f62 6c6f 6420 6769 +0000a10 7469 6b6f 2920 200a 2020 6920 206e 0a7b +0000a20 2020 2020 2020 2020 7361 6573 7472 6e28 +0000a30 6d75 6562 7372 6c2e 6e65 7467 2068 203e +0000a40 3020 3b29 2020 2020 200a 2020 7d20 2020 +0000a50 2020 200a 2020 6f20 7475 7228 7365 6c75 +0000a60 2974 7b20 200a 2020 2020 2020 6620 726f +0000a70 6165 6863 2820 3b61 7220 7365 6c75 2974 +0000a80 200a 2020 2020 2020 2020 2020 6120 7373 +0000a90 7265 2874 7720 726f 5464 4e6f 6d75 6128 +0000aa0 2029 3d3d 6e20 6d75 6562 7372 2920 0a3b +0000ab0 2020 2020 207d 2020 0a20 2020 2020 6f62 +0000ac0 7964 7b20 200a 2020 2020 2020 7320 7274 +0000ad0 6e69 6167 7272 7961 7220 7465 0a3b 2020 +0000ae0 2020 2020 2020 6f62 6c6f 6620 756f 646e +0000af0 6f77 6472 3d20 6620 6c61 6573 0a3b 2020 +0000b00 2020 2020 2020 6f66 2072 7528 6e69 2074 +0000b10 3d74 3b31 7420 3d3c 756e 626d 7265 2e73 +0000b20 656c 676e 6874 203b 2b2b 2974 7b20 200a +0000b30 2020 2020 2020 2020 2020 6120 7475 206f +0000b40 6c61 6574 6e72 7461 7669 7365 3d20 6e20 +0000b50 6d75 6562 7372 305b 2e2e 5d74 6920 206e +0000b60 756e 326d 6f77 6472 3b73 200a 
2020 2020 +0000b70 2020 2020 2020 6920 2066 2128 6c61 6574 +0000b80 6e72 7461 7669 7365 0a29 2020 2020 2020 +0000b90 2020 2020 2020 2020 2020 6f63 746e 6e69 +0000ba0 6575 0a3b 2020 2020 2020 2020 2020 2020 +0000bb0 6f66 6e75 7764 726f 2064 203d 7274 6575 +0000bc0 0a3b 2020 2020 2020 2020 2020 2020 6669 +0000bd0 2820 756e 626d 7265 2e73 656c 676e 6874 +0000be0 3e20 2020 2974 7b20 200a 2020 2020 2020 +0000bf0 2020 2020 2020 2020 2f20 202f 6f43 626d +0000c00 6e69 2065 6c61 206c 7563 7272 6e65 2074 +0000c10 6c61 6574 6e72 7461 7669 7365 7720 7469 +0000c20 2068 6c61 206c 6c61 6574 6e72 7461 7669 +0000c30 7365 2020 2020 0a20 2020 2020 2020 2020 +0000c40 2020 2020 2020 2020 2f2f 6f20 2066 6874 +0000c50 2065 6572 7473 2820 656e 7478 7020 6569 +0000c60 6563 6320 6e61 7320 6174 7472 7720 7469 +0000c70 2068 2061 6964 6967 2974 2020 2020 2020 +0000c80 2020 2020 2020 2020 200a 2020 2020 2020 +0000c90 2020 2020 2020 2020 6620 726f 6165 6863 +0000ca0 2820 3261 203b 465f 6e69 5764 726f 7364 +0000cb0 2028 756e 626d 7265 5b73 2e74 242e 2c5d +0000cc0 7420 7572 2065 2020 2020 2029 0a29 2020 +0000cd0 2020 2020 2020 2020 2020 2020 2020 2020 +0000ce0 2020 6f66 6572 6361 2868 3161 203b 612a +0000cf0 746c 7265 616e 6974 6576 2973 200a 2020 +0000d00 2020 2020 2020 2020 2020 2020 2020 2020 +0000d10 2020 2020 6572 2074 3d7e 6120 2031 207e +0000d20 2022 2022 207e 3261 0a3b 2020 2020 2020 +0000d30 2020 2020 2020 0a7d 2020 2020 2020 2020 +0000d40 2020 2020 6c65 6573 2020 2020 200a 2020 +0000d50 2020 2020 2020 2020 2020 2020 7220 7465 +0000d60 7e20 203d 612a 746c 7265 616e 6974 6576 +0000d70 3b73 2020 2020 2f2f 6120 7070 6e65 2064 +0000d80 6874 7365 2065 6c61 6574 6e72 7461 7669 +0000d90 7365 200a 2020 2020 2020 7d20 200a 2020 +0000da0 2020 2020 2f20 202f 7254 2079 6f74 6b20 +0000db0 6565 2070 2031 6964 6967 2c74 6f20 6c6e +0000dc0 2079 6669 7720 2765 6572 6120 6c6c 776f +0000dd0 6465 6120 646e 6e20 206f 746f 6568 0a72 +0000de0 2020 2020 2020 2020 2f2f 6120 746c 7265 +0000df0 616e 6974 6576 2073 
6577 6572 6620 756f +0000e00 646e 200a 2020 2020 2020 2f20 202f 6554 +0000e10 7473 6e69 2067 7222 7465 6c2e 6e65 7467 +0000e20 2268 6d20 6b61 7365 6d20 726f 2065 6573 +0000e30 736e 2065 6874 6e61 7420 7365 6974 676e +0000e40 2220 6f66 6e75 7764 726f 2264 0a2c 2020 +0000e50 2020 2020 2020 2f2f 6220 7475 7420 6568 +0000e60 6f20 6874 7265 6920 706d 656c 656d 746e +0000e70 7461 6f69 736e 7320 6565 206d 6f74 6420 +0000e80 206f 756a 7473 7420 6968 2e73 200a 2020 +0000e90 2020 2020 6920 2066 6428 6769 7469 6b6f +0000ea0 2620 2026 6621 756f 646e 6f77 6472 2029 +0000eb0 207b 2f2f 6572 2e74 656c 676e 6874 3d20 +0000ec0 203d 2030 0a20 2020 2020 2020 2020 2020 +0000ed0 2020 6669 6e28 6d75 6562 7372 6c2e 6e65 +0000ee0 7467 2068 203e 3120 2029 0a7b 2020 2020 +0000ef0 2020 2020 2020 2020 2020 2020 2f2f 4320 +0000f00 6d6f 6962 656e 3120 6420 6769 7469 7720 +0000f10 7469 2068 6c61 206c 6c61 6574 616e 6974 +0000f20 6576 2073 7266 6d6f 7420 6568 7220 7365 +0000f30 2074 2020 0a20 2020 2020 2020 2020 2020 +0000f40 2020 2020 2020 2f2f 2820 656e 7478 7020 +0000f50 6569 6563 6320 6e61 6e20 746f 7320 6174 +0000f60 7472 7720 7469 2068 2061 6964 6967 2974 +0000f70 2020 2020 2020 2020 2020 200a 2020 2020 +0000f80 2020 2020 2020 2020 2020 6620 726f 6165 +0000f90 6863 2820 3b61 5f20 6946 646e 6f57 6472 +0000fa0 2873 6e20 6d75 6562 7372 315b 2e2e 5d24 +0000fb0 202c 6166 736c 2065 2029 0a29 2020 2020 +0000fc0 2020 2020 2020 2020 2020 2020 2020 2020 +0000fd0 6572 2074 3d7e 6e20 6d75 6562 7372 305b +0000fe0 2e2e 5d31 7e20 2220 2220 7e20 6120 0a3b +0000ff0 2020 2020 2020 2020 2020 2020 207d 2020 +0001000 0a20 2020 2020 2020 2020 2020 2020 6c65 +0001010 6573 2020 2020 200a 2020 2020 2020 2020 +0001020 2020 2020 2020 7220 7465 7e20 203d 756e +0001030 626d 7265 5b73 2e30 312e 3b5d 2020 2020 +0001040 2f2f 6a20 7375 2074 7061 6570 646e 7420 +0001050 6968 2073 6964 6967 2074 2020 2020 2020 +0001060 2020 2020 2020 200a 2020 2020 2020 7d20 +0001070 2020 2020 200a 2020 2020 2020 7220 7465 +0001080 7275 206e 
6572 3b74 200a 2020 7d20 0a0a +0001090 2020 2020 2f2f 202f 5428 6968 2073 7566 +00010a0 636e 6974 6e6f 7720 7361 6920 6c6e 6e69 +00010b0 6465 6920 206e 6874 2065 726f 6769 6e69 +00010c0 6c61 7020 6f72 7267 6d61 2029 200a 2020 +00010d0 2f20 2f2f 4620 6e69 7364 6120 6c6c 6120 +00010e0 746c 7265 616e 6974 6576 2073 6f66 2072 +00010f0 6874 2065 6967 6576 206e 6870 6e6f 2065 +0001100 756e 626d 7265 0a20 2020 2020 2f2f 202f +0001110 6552 7574 6e72 3a73 6120 7272 7961 6f20 +0001120 2066 7473 6972 676e 2073 200a 2020 7320 +0001130 7274 6e69 6167 7272 7961 4620 6e69 5764 +0001140 726f 7364 2028 7473 6972 676e 7020 6f68 +0001150 656e 6e5f 6d75 6562 2072 0a29 2020 2020 +0001160 0a7b 2020 2020 2020 2020 6669 2820 7021 +0001170 6f68 656e 6e5f 6d75 6562 2e72 656c 676e +0001180 6874 0a29 2020 2020 2020 2020 2020 2020 +0001190 6572 7574 6e72 6e20 6c75 3b6c 200a 2020 +00011a0 2020 2020 2f20 202f 7453 6972 2070 6874 +00011b0 2065 6f6e 2d6e 6964 6967 2074 6863 7261 +00011c0 6361 6574 7372 6620 6f72 206d 6874 2065 +00011d0 6870 6e6f 2065 756e 626d 7265 202c 6e61 +00011e0 0a64 2020 2020 2020 2020 2f2f 7020 7361 +00011f0 2073 7469 7420 206f 6874 2065 6572 7563 +0001200 7372 7669 2065 7566 636e 6974 6e6f 2820 +0001210 656c 6461 6e69 2067 6964 6967 2074 7369 +0001220 6120 6c6c 776f 6465 0a29 2020 2020 2020 +0001230 2020 6572 7574 6e72 5f20 6946 646e 6f57 +0001240 6472 2873 7320 7274 7069 6f4e 446e 6769 +0001250 7469 7028 6f68 656e 6e5f 6d75 6562 2972 +0001260 202c 7274 6575 2920 203b 2020 0a20 2020 +0001270 2020 207d 2020 0a20 2020 2020 200a 2020 +0001280 2f20 202f 6552 6461 7420 6568 7020 6f68 +0001290 656e 6e20 6d75 6562 7372 2020 2020 0a20 +00012a0 2020 2020 6f66 6572 6361 2868 7473 6972 +00012b0 676e 7020 6f68 656e 203b 656e 2077 7542 +00012c0 6666 7265 6465 6946 656c 2228 6e69 7570 +00012d0 2e74 7874 2274 2020 2920 2920 200a 2020 +00012e0 2020 2020 6620 726f 6165 6863 6128 746c +00012f0 7265 616e 6974 6576 203b 6946 646e 6f57 +0001300 6472 2873 7020 6f68 656e 2920 2920 200a +0001310 
2020 2020 2020 2020 2020 7720 6972 6574 +0001320 6c66 286e 6870 6e6f 2c65 2220 203a 2c22 +0001330 6120 746c 7265 616e 6974 6576 2920 0a3b +0001340 0a7d 000a +0001343 diff --git a/tests/examplefiles/hexdump_od b/tests/examplefiles/hexdump_od new file mode 100644 index 00000000..a407aef0 --- /dev/null +++ b/tests/examplefiles/hexdump_od @@ -0,0 +1,310 @@ +0000000 2f 2f 20 43 72 65 61 74 65 64 20 62 79 20 4c 69 >// Created by Li< +0000020 6f 6e 65 6c 6c 6f 20 4c 75 6e 65 73 75 20 61 6e >onello Lunesu an< +0000040 64 20 70 6c 61 63 65 64 20 69 6e 20 74 68 65 20 >d placed in the < +0000060 70 75 62 6c 69 63 20 64 6f 6d 61 69 6e 2e 0a 2f >public domain../< +0000100 2f 20 54 68 69 73 20 66 69 6c 65 20 68 61 73 20 >/ This file has < +0000120 62 65 65 6e 20 6d 6f 64 69 66 69 65 64 20 66 72 >been modified fr< +0000140 6f 6d 20 69 74 73 20 6f 72 69 67 69 6e 61 6c 20 >om its original < +0000160 76 65 72 73 69 6f 6e 2e 0a 2f 2f 20 49 74 20 68 >version..// It h< +0000200 61 73 20 62 65 65 6e 20 66 6f 72 6d 61 74 74 65 >as been formatte< +0000220 64 20 74 6f 20 66 69 74 20 79 6f 75 72 20 73 63 >d to fit your sc< +0000240 72 65 65 6e 2e 0a 6d 6f 64 75 6c 65 20 70 68 6f >reen..module pho< +0000260 6e 65 6e 6f 3b 20 20 20 20 20 2f 2f 20 6f 70 74 >neno; // opt< +0000300 69 6f 6e 61 6c 0a 69 6d 70 6f 72 74 20 73 74 64 >ional.import std< +0000320 2e 73 74 64 69 6f 3b 20 20 20 2f 2f 20 77 72 69 >.stdio; // wri< +0000340 74 65 66 6c 6e 20 20 20 20 20 0a 69 6d 70 6f 72 >tefln .impor< +0000360 74 20 73 74 64 2e 63 74 79 70 65 3b 20 20 20 2f >t std.ctype; /< +0000400 2f 20 69 73 64 69 67 69 74 20 20 20 20 20 0a 69 >/ isdigit .i< +0000420 6d 70 6f 72 74 20 73 74 64 2e 73 74 72 65 61 6d >mport std.stream< +0000440 3b 20 20 2f 2f 20 42 75 66 66 65 72 65 64 46 69 >; // BufferedFi< +0000460 6c 65 0a 0a 2f 2f 20 4a 75 73 74 20 66 6f 72 20 >le..// Just for < +0000500 72 65 61 64 61 62 69 6c 69 74 79 20 28 69 6d 61 >readability (ima< +0000520 67 69 6e 65 20 63 68 61 72 5b 5d 5b 5d 5b 63 68 >gine 
char[][][ch< +0000540 61 72 5b 5d 5d 29 20 20 20 20 0a 61 6c 69 61 73 >ar[]]) .alias< +0000560 20 63 68 61 72 5b 5d 20 73 74 72 69 6e 67 3b 0a > char[] string;.< +0000600 61 6c 69 61 73 20 73 74 72 69 6e 67 5b 5d 20 73 >alias string[] s< +0000620 74 72 69 6e 67 61 72 72 61 79 3b 0a 0a 2f 2f 2f >tringarray;..///< +0000640 20 53 74 72 69 70 73 20 6e 6f 6e 2d 64 69 67 69 > Strips non-digi< +0000660 74 20 63 68 61 72 61 63 74 65 72 73 20 66 72 6f >t characters fro< +0000700 6d 20 74 68 65 20 73 74 72 69 6e 67 20 28 43 4f >m the string (CO< +0000720 57 29 0a 73 74 72 69 6e 67 20 73 74 72 69 70 4e >W).string stripN< +0000740 6f 6e 44 69 67 69 74 28 20 69 6e 20 73 74 72 69 >onDigit( in stri< +0000760 6e 67 20 6c 69 6e 65 20 29 20 0a 7b 0a 20 20 20 >ng line ) .{. < +0001000 20 73 74 72 69 6e 67 20 72 65 74 3b 0a 20 20 20 > string ret;. < +0001020 20 66 6f 72 65 61 63 68 28 75 69 6e 74 20 69 2c > foreach(uint i,< +0001040 20 63 3b 20 6c 69 6e 65 29 20 7b 0a 20 20 20 20 > c; line) {. < +0001060 20 20 20 20 2f 2f 20 45 72 72 6f 72 3a 20 73 74 > // Error: st< +0001100 64 2e 63 74 79 70 65 2e 69 73 64 69 67 69 74 20 >d.ctype.isdigit < +0001120 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 5c 70 68 >at C:\dmd\src\ph< +0001140 6f 62 6f 73 5c 73 74 64 5c 63 74 79 70 65 2e 64 >obos\std\ctype.d< +0001160 28 33 37 29 20 0a 20 20 20 20 20 20 20 20 2f 2f >(37) . //< +0001200 20 63 6f 6e 66 6c 69 63 74 73 20 77 69 74 68 20 > conflicts with < +0001220 73 74 64 2e 73 74 72 65 61 6d 2e 69 73 64 69 67 >std.stream.isdig< +0001240 69 74 20 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 >it at C:\dmd\src< +0001260 5c 70 68 6f 62 6f 73 5c 73 74 64 5c 73 74 72 65 >\phobos\std\stre< +0001300 61 6d 2e 64 28 32 39 32 34 29 0a 20 20 20 20 20 >am.d(2924). 
< +0001320 20 20 20 69 66 20 28 21 73 74 64 2e 63 74 79 70 > if (!std.ctyp< +0001340 65 2e 69 73 64 69 67 69 74 28 63 29 29 20 7b 0a >e.isdigit(c)) {.< +0001360 20 20 20 20 20 20 20 20 20 20 20 20 69 66 20 28 > if (< +0001400 21 72 65 74 29 0a 20 20 20 20 20 20 20 20 20 20 >!ret). < +0001420 20 20 20 20 20 20 72 65 74 20 3d 20 6c 69 6e 65 > ret = line< +0001440 5b 30 2e 2e 69 5d 3b 20 20 20 20 0a 20 20 20 20 >[0..i]; . < +0001460 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 20 20 > } . < +0001500 20 20 65 6c 73 65 20 69 66 20 28 72 65 74 29 0a > else if (ret).< +0001520 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 > ret < +0001540 7e 3d 20 63 3b 20 20 20 20 0a 20 20 20 20 7d 20 >~= c; . } < +0001560 20 20 20 0a 20 20 20 20 72 65 74 75 72 6e 20 72 > . return r< +0001600 65 74 3f 72 65 74 3a 6c 69 6e 65 3b 0a 7d 0a 0a >et?ret:line;.}..< +0001620 75 6e 69 74 74 65 73 74 20 7b 0a 20 20 20 20 61 >unittest {. a< +0001640 73 73 65 72 74 28 20 73 74 72 69 70 4e 6f 6e 44 >ssert( stripNonD< +0001660 69 67 69 74 28 22 61 73 64 66 22 29 20 3d 3d 20 >igit("asdf") == < +0001700 22 22 20 20 29 3b 0a 20 20 20 20 61 73 73 65 72 >"" );. 
asser< +0001720 74 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 69 74 >t( stripNonDigit< +0001740 28 22 5c 27 31 33 2d 3d 32 20 34 6b 6f 70 22 29 >("\'13-=2 4kop")< +0001760 20 3d 3d 20 20 22 31 33 32 34 22 20 20 29 3b 0a > == "1324" );.< +0002000 7d 0a 0a 2f 2f 2f 20 43 6f 6e 76 65 72 74 73 20 >}../// Converts < +0002020 61 20 77 6f 72 64 20 69 6e 74 6f 20 61 20 6e 75 >a word into a nu< +0002040 6d 62 65 72 2c 20 69 67 6e 6f 72 69 6e 67 20 61 >mber, ignoring a< +0002060 6c 6c 20 6e 6f 6e 20 61 6c 70 68 61 20 63 68 61 >ll non alpha cha< +0002100 72 61 63 74 65 72 73 20 20 0a 73 74 72 69 6e 67 >racters .string< +0002120 20 77 6f 72 64 54 6f 4e 75 6d 28 20 69 6e 20 73 > wordToNum( in s< +0002140 74 72 69 6e 67 20 77 6f 72 64 20 29 0a 7b 0a 2f >tring word ).{./< +0002160 2f 20 74 72 61 6e 73 6c 61 74 69 6f 6e 20 74 61 >/ translation ta< +0002200 62 6c 65 20 66 6f 72 20 74 68 65 20 74 61 73 6b >ble for the task< +0002220 20 61 74 20 68 61 6e 64 0a 63 6f 6e 73 74 20 63 > at hand.const c< +0002240 68 61 72 5b 32 35 36 5d 20 54 52 41 4e 53 4c 41 >har[256] TRANSLA< +0002260 54 45 20 3d 20 20 20 20 0a 20 20 20 20 22 20 20 >TE = . " < +0002300 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0002320 20 20 20 20 20 20 20 20 20 20 20 20 20 20 22 20 > " < +0002340 20 2f 2f 20 30 20 20 20 0a 20 20 20 20 22 20 20 > // 0 . " < +0002360 20 20 20 20 20 20 20 20 20 20 20 20 20 20 30 31 > 01< +0002400 32 33 34 35 36 37 38 39 20 20 20 20 20 20 22 20 >23456789 " < +0002420 20 2f 2f 20 33 32 20 20 20 20 20 0a 20 20 20 20 > // 32 . < +0002440 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 31 >" 57630499617851< +0002460 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 20 >881234762239 < +0002500 20 22 20 20 2f 2f 20 36 34 20 20 20 0a 20 20 20 > " // 64 . < +0002520 20 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 > " 5763049961785< +0002540 31 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 >1881234762239 < +0002560 20 20 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 > ". 
" < +0002600 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0002620 20 20 20 20 20 20 20 20 20 22 0a 20 20 20 20 22 > ". "< +0002640 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +* +0002700 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 20 20 >". " < +0002720 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0002740 20 20 20 20 20 20 20 22 20 20 20 20 0a 20 20 20 > " . < +0002760 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > " < +0003000 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0003020 20 20 22 3b 0a 20 20 20 20 73 74 72 69 6e 67 20 > ";. string < +0003040 72 65 74 3b 0a 20 20 20 20 66 6f 72 65 61 63 68 >ret;. foreach< +0003060 28 63 3b 20 63 61 73 74 28 75 62 79 74 65 5b 5d >(c; cast(ubyte[]< +0003100 29 77 6f 72 64 29 0a 20 20 20 20 20 20 20 20 69 >)word). i< +0003120 66 20 28 54 52 41 4e 53 4c 41 54 45 5b 63 5d 20 >f (TRANSLATE[c] < +0003140 21 3d 20 27 20 27 29 0a 20 20 20 20 20 20 20 20 >!= ' '). < +0003160 20 20 20 20 72 65 74 20 7e 3d 20 54 52 41 4e 53 > ret ~= TRANS< +0003200 4c 41 54 45 5b 63 5d 3b 0a 20 20 20 20 72 65 74 >LATE[c];. ret< +0003220 75 72 6e 20 72 65 74 3b 0a 7d 0a 0a 75 6e 69 74 >urn ret;.}..unit< +0003240 74 65 73 74 20 7b 0a 20 2f 2f 20 54 65 73 74 20 >test {. // Test < +0003260 77 6f 72 64 54 6f 4e 75 6d 20 75 73 69 6e 67 20 >wordToNum using < +0003300 74 68 65 20 74 61 62 6c 65 20 66 72 6f 6d 20 74 >the table from t< +0003320 68 65 20 74 61 73 6b 20 64 65 73 63 72 69 70 74 >he task descript< +0003340 69 6f 6e 2e 0a 20 61 73 73 65 72 74 28 20 22 30 >ion.. assert( "0< +0003360 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 36 >1112223334455666< +0003400 37 37 37 38 38 38 39 39 39 22 20 3d 3d 0a 20 20 >777888999" ==. 
< +0003420 20 77 6f 72 64 54 6f 4e 75 6d 28 22 45 20 7c 20 > wordToNum("E | < +0003440 4a 20 4e 20 51 20 7c 20 52 20 57 20 58 20 7c 20 >J N Q | R W X | < +0003460 44 20 53 20 59 20 7c 20 46 20 54 20 7c 20 41 20 >D S Y | F T | A < +0003500 4d 20 7c 20 43 20 49 20 56 20 7c 20 42 20 4b 20 >M | C I V | B K < +0003520 55 20 7c 20 4c 20 4f 20 50 20 7c 20 47 20 48 20 >U | L O P | G H < +0003540 5a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 20 22 >Z"));. assert( "< +0003560 30 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 >0111222333445566< +0003600 36 37 37 37 38 38 38 39 39 39 22 20 3d 3d 20 0a >6777888999" == .< +0003620 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 65 20 > wordToNum("e < +0003640 7c 20 6a 20 6e 20 71 20 7c 20 72 20 77 20 78 20 >| j n q | r w x < +0003660 7c 20 64 20 73 20 79 20 7c 20 66 20 74 20 7c 20 >| d s y | f t | < +0003700 61 20 6d 20 7c 20 63 20 69 20 76 20 7c 20 62 20 >a m | c i v | b < +0003720 6b 20 75 20 7c 20 6c 20 6f 20 70 20 7c 20 67 20 >k u | l o p | g < +0003740 68 20 7a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 >h z"));. assert(< +0003760 20 22 30 31 32 33 34 35 36 37 38 39 22 20 3d 3d > "0123456789" ==< +0004000 20 0a 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 > . wordToNum("< +0004020 30 20 7c 20 20 20 31 20 20 20 7c 20 20 20 32 20 >0 | 1 | 2 < +0004040 20 20 7c 20 20 20 33 20 20 20 7c 20 20 34 20 20 > | 3 | 4 < +0004060 7c 20 20 35 20 20 7c 20 20 20 36 20 20 20 7c 20 >| 5 | 6 | < +0004100 20 20 37 20 20 20 7c 20 20 20 38 20 20 20 7c 20 > 7 | 8 | < +0004120 20 20 39 22 29 29 3b 0a 7d 0a 0a 76 6f 69 64 20 > 9"));.}..void < +0004140 6d 61 69 6e 28 20 73 74 72 69 6e 67 5b 5d 20 61 >main( string[] a< +0004160 72 67 73 20 29 0a 7b 0a 20 20 20 20 2f 2f 20 54 >rgs ).{. 
// T< +0004200 68 69 73 20 61 73 73 6f 63 69 61 74 69 76 65 20 >his associative < +0004220 61 72 72 61 79 20 6d 61 70 73 20 61 20 6e 75 6d >array maps a num< +0004240 62 65 72 20 74 6f 20 61 6e 20 61 72 72 61 79 20 >ber to an array < +0004260 6f 66 20 77 6f 72 64 73 2e 20 20 20 20 0a 20 20 >of words. . < +0004300 20 20 73 74 72 69 6e 67 61 72 72 61 79 5b 73 74 > stringarray[st< +0004320 72 69 6e 67 5d 20 20 20 20 6e 75 6d 32 77 6f 72 >ring] num2wor< +0004340 64 73 3b 0a 0a 20 20 20 20 66 6f 72 65 61 63 68 >ds;.. foreach< +0004360 28 73 74 72 69 6e 67 20 77 6f 72 64 3b 20 6e 65 >(string word; ne< +0004400 77 20 42 75 66 66 65 72 65 64 46 69 6c 65 28 22 >w BufferedFile("< +0004420 64 69 63 74 69 6f 6e 61 72 79 2e 74 78 74 22 20 >dictionary.txt" < +0004440 29 20 29 0a 20 20 20 20 20 20 20 20 6e 75 6d 32 >) ). num2< +0004460 77 6f 72 64 73 5b 20 77 6f 72 64 54 6f 4e 75 6d >words[ wordToNum< +0004500 28 77 6f 72 64 29 20 5d 20 7e 3d 20 77 6f 72 64 >(word) ] ~= word< +0004520 2e 64 75 70 3b 20 20 20 20 20 20 20 20 2f 2f 20 >.dup; // < +0004540 6d 75 73 74 20 64 75 70 0a 0a 20 20 20 20 2f 2f >must dup.. //< +0004560 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 6c 74 65 >/ Finds all alte< +0004600 72 6e 61 74 69 76 65 73 20 66 6f 72 20 74 68 65 >rnatives for the< +0004620 20 67 69 76 65 6e 20 6e 75 6d 62 65 72 0a 20 20 > given number. < +0004640 20 20 2f 2f 2f 20 28 73 68 6f 75 6c 64 20 68 61 > /// (should ha< +0004660 76 65 20 62 65 65 6e 20 73 74 72 69 70 70 65 64 >ve been stripped< +0004700 20 66 72 6f 6d 20 6e 6f 6e 2d 64 69 67 69 74 20 > from non-digit < +0004720 63 68 61 72 61 63 74 65 72 73 29 0a 20 20 20 20 >characters). < +0004740 73 74 72 69 6e 67 61 72 72 61 79 20 5f 46 69 6e >stringarray _Fin< +0004760 64 57 6f 72 64 73 28 20 73 74 72 69 6e 67 20 6e >dWords( string n< +0005000 75 6d 62 65 72 73 2c 20 62 6f 6f 6c 20 64 69 67 >umbers, bool dig< +0005020 69 74 6f 6b 20 29 0a 20 20 20 20 69 6e 20 7b 0a >itok ). 
in {.< +0005040 20 20 20 20 20 20 20 20 61 73 73 65 72 74 28 6e > assert(n< +0005060 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 20 3e 20 >umbers.length > < +0005100 20 30 29 3b 20 20 20 20 0a 20 20 20 20 7d 20 20 > 0); . } < +0005120 20 20 0a 20 20 20 20 6f 75 74 28 72 65 73 75 6c > . out(resul< +0005140 74 29 20 7b 0a 20 20 20 20 20 20 20 20 66 6f 72 >t) {. for< +0005160 65 61 63 68 20 28 61 3b 20 72 65 73 75 6c 74 29 >each (a; result)< +0005200 0a 20 20 20 20 20 20 20 20 20 20 20 20 61 73 73 >. ass< +0005220 65 72 74 28 20 77 6f 72 64 54 6f 4e 75 6d 28 61 >ert( wordToNum(a< +0005240 29 20 3d 3d 20 6e 75 6d 62 65 72 73 20 29 3b 0a >) == numbers );.< +0005260 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 62 6f > } . bo< +0005300 64 79 20 7b 0a 20 20 20 20 20 20 20 20 73 74 72 >dy {. str< +0005320 69 6e 67 61 72 72 61 79 20 72 65 74 3b 0a 20 20 >ingarray ret;. < +0005340 20 20 20 20 20 20 62 6f 6f 6c 20 66 6f 75 6e 64 > bool found< +0005360 77 6f 72 64 20 3d 20 66 61 6c 73 65 3b 0a 20 20 >word = false;. < +0005400 20 20 20 20 20 20 66 6f 72 20 28 75 69 6e 74 20 > for (uint < +0005420 74 3d 31 3b 20 74 3c 3d 6e 75 6d 62 65 72 73 2e >t=1; t<=numbers.< +0005440 6c 65 6e 67 74 68 3b 20 2b 2b 74 29 20 7b 0a 20 >length; ++t) {. < +0005460 20 20 20 20 20 20 20 20 20 20 20 61 75 74 6f 20 > auto < +0005500 61 6c 74 65 72 6e 61 74 69 76 65 73 20 3d 20 6e >alternatives = n< +0005520 75 6d 62 65 72 73 5b 30 2e 2e 74 5d 20 69 6e 20 >umbers[0..t] in < +0005540 6e 75 6d 32 77 6f 72 64 73 3b 0a 20 20 20 20 20 >num2words;. < +0005560 20 20 20 20 20 20 20 69 66 20 28 21 61 6c 74 65 > if (!alte< +0005600 72 6e 61 74 69 76 65 73 29 0a 20 20 20 20 20 20 >rnatives). < +0005620 20 20 20 20 20 20 20 20 20 20 63 6f 6e 74 69 6e > contin< +0005640 75 65 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 >ue;. < +0005660 66 6f 75 6e 64 77 6f 72 64 20 3d 20 74 72 75 65 >foundword = true< +0005700 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 69 66 >;. 
if< +0005720 20 28 6e 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 > (numbers.length< +0005740 20 3e 20 20 74 29 20 7b 0a 20 20 20 20 20 20 20 > > t) {. < +0005760 20 20 20 20 20 20 20 20 20 2f 2f 20 43 6f 6d 62 > // Comb< +0006000 69 6e 65 20 61 6c 6c 20 63 75 72 72 65 6e 74 20 >ine all current < +0006020 61 6c 74 65 72 6e 61 74 69 76 65 73 20 77 69 74 >alternatives wit< +0006040 68 20 61 6c 6c 20 61 6c 74 65 72 6e 61 74 69 76 >h all alternativ< +0006060 65 73 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 >es . < +0006100 20 20 20 20 20 20 20 20 2f 2f 20 6f 66 20 74 68 > // of th< +0006120 65 20 72 65 73 74 20 28 6e 65 78 74 20 70 69 65 >e rest (next pie< +0006140 63 65 20 63 61 6e 20 73 74 61 72 74 20 77 69 74 >ce can start wit< +0006160 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 >h a digit) < +0006200 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 > . < +0006220 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 63 68 > foreach< +0006240 20 28 61 32 3b 20 5f 46 69 6e 64 57 6f 72 64 73 > (a2; _FindWords< +0006260 28 20 6e 75 6d 62 65 72 73 5b 74 2e 2e 24 5d 2c >( numbers[t..$],< +0006300 20 74 72 75 65 20 20 20 20 20 29 20 29 0a 20 20 > true ) ). < +0006320 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0006340 20 20 66 6f 72 65 61 63 68 28 61 31 3b 20 2a 61 > foreach(a1; *a< +0006360 6c 74 65 72 6e 61 74 69 76 65 73 29 0a 20 20 20 >lternatives). < +0006400 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0006420 20 20 20 20 72 65 74 20 7e 3d 20 61 31 20 7e 20 > ret ~= a1 ~ < +0006440 22 20 22 20 7e 20 61 32 3b 0a 20 20 20 20 20 20 >" " ~ a2;. < +0006460 20 20 20 20 20 20 7d 0a 20 20 20 20 20 20 20 20 > }. < +0006500 20 20 20 20 65 6c 73 65 20 20 20 20 0a 20 20 20 > else . 
< +0006520 20 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 > ret< +0006540 20 7e 3d 20 2a 61 6c 74 65 72 6e 61 74 69 76 65 > ~= *alternative< +0006560 73 3b 20 20 20 20 2f 2f 20 61 70 70 65 6e 64 20 >s; // append < +0006600 74 68 65 73 65 20 61 6c 74 65 72 6e 61 74 69 76 >these alternativ< +0006620 65 73 0a 20 20 20 20 20 20 20 20 7d 0a 20 20 20 >es. }. < +0006640 20 20 20 20 20 2f 2f 20 54 72 79 20 74 6f 20 6b > // Try to k< +0006660 65 65 70 20 31 20 64 69 67 69 74 2c 20 6f 6e 6c >eep 1 digit, onl< +0006700 79 20 69 66 20 77 65 27 72 65 20 61 6c 6c 6f 77 >y if we're allow< +0006720 65 64 20 61 6e 64 20 6e 6f 20 6f 74 68 65 72 0a >ed and no other.< +0006740 20 20 20 20 20 20 20 20 2f 2f 20 61 6c 74 65 72 > // alter< +0006760 6e 61 74 69 76 65 73 20 77 65 72 65 20 66 6f 75 >natives were fou< +0007000 6e 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 54 65 >nd. // Te< +0007020 73 74 69 6e 67 20 22 72 65 74 2e 6c 65 6e 67 74 >sting "ret.lengt< +0007040 68 22 20 6d 61 6b 65 73 20 6d 6f 72 65 20 73 65 >h" makes more se< +0007060 6e 73 65 20 74 68 61 6e 20 74 65 73 74 69 6e 67 >nse than testing< +0007100 20 22 66 6f 75 6e 64 77 6f 72 64 22 2c 0a 20 20 > "foundword",. < +0007120 20 20 20 20 20 20 2f 2f 20 62 75 74 20 74 68 65 > // but the< +0007140 20 6f 74 68 65 72 20 69 6d 70 6c 65 6d 65 6e 74 > other implement< +0007160 61 74 69 6f 6e 73 20 73 65 65 6d 20 74 6f 20 64 >ations seem to d< +0007200 6f 20 6a 75 73 74 20 74 68 69 73 2e 0a 20 20 20 >o just this.. < +0007220 20 20 20 20 20 69 66 20 28 64 69 67 69 74 6f 6b > if (digitok< +0007240 20 26 26 20 21 66 6f 75 6e 64 77 6f 72 64 29 20 > && !foundword) < +0007260 7b 20 2f 2f 72 65 74 2e 6c 65 6e 67 74 68 20 3d >{ //ret.length =< +0007300 3d 20 30 20 20 0a 20 20 20 20 20 20 20 20 20 20 >= 0 . < +0007320 20 20 69 66 28 6e 75 6d 62 65 72 73 2e 6c 65 6e > if(numbers.len< +0007340 67 74 68 20 3e 20 20 31 29 20 7b 0a 20 20 20 20 >gth > 1) {. 
< +0007360 20 20 20 20 20 20 20 20 20 20 20 20 2f 2f 20 43 > // C< +0007400 6f 6d 62 69 6e 65 20 31 20 64 69 67 69 74 20 77 >ombine 1 digit w< +0007420 69 74 68 20 61 6c 6c 20 61 6c 74 65 6e 61 74 69 >ith all altenati< +0007440 76 65 73 20 66 72 6f 6d 20 74 68 65 20 72 65 73 >ves from the res< +0007460 74 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 20 >t . < +0007500 20 20 20 20 20 20 2f 2f 20 28 6e 65 78 74 20 70 > // (next p< +0007520 69 65 63 65 20 63 61 6e 20 6e 6f 74 20 73 74 61 >iece can not sta< +0007540 72 74 20 77 69 74 68 20 61 20 64 69 67 69 74 29 >rt with a digit)< +0007560 20 20 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 > . < +0007600 20 20 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 > forea< +0007620 63 68 20 28 61 3b 20 5f 46 69 6e 64 57 6f 72 64 >ch (a; _FindWord< +0007640 73 28 20 6e 75 6d 62 65 72 73 5b 31 2e 2e 24 5d >s( numbers[1..$]< +0007660 2c 20 66 61 6c 73 65 20 29 20 29 0a 20 20 20 20 >, false ) ). < +0007700 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0007720 72 65 74 20 7e 3d 20 6e 75 6d 62 65 72 73 5b 30 >ret ~= numbers[0< +0007740 2e 2e 31 5d 20 7e 20 22 20 22 20 7e 20 61 3b 0a >..1] ~ " " ~ a;.< +0007760 20 20 20 20 20 20 20 20 20 20 20 20 7d 20 20 20 > } < +0010000 20 0a 20 20 20 20 20 20 20 20 20 20 20 20 65 6c > . el< +0010020 73 65 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 >se . < +0010040 20 20 20 20 20 20 20 72 65 74 20 7e 3d 20 6e 75 > ret ~= nu< +0010060 6d 62 65 72 73 5b 30 2e 2e 31 5d 3b 20 20 20 20 >mbers[0..1]; < +0010100 2f 2f 20 6a 75 73 74 20 61 70 70 65 6e 64 20 74 >// just append t< +0010120 68 69 73 20 64 69 67 69 74 20 20 20 20 20 20 20 >his digit < +0010140 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 7d > . }< +0010160 20 20 20 20 0a 20 20 20 20 20 20 20 20 72 65 74 > . ret< +0010200 75 72 6e 20 72 65 74 3b 0a 20 20 20 20 7d 0a 0a >urn ret;. 
}..< +0010220 20 20 20 20 2f 2f 2f 20 28 54 68 69 73 20 66 75 > /// (This fu< +0010240 6e 63 74 69 6f 6e 20 77 61 73 20 69 6e 6c 69 6e >nction was inlin< +0010260 65 64 20 69 6e 20 74 68 65 20 6f 72 69 67 69 6e >ed in the origin< +0010300 61 6c 20 70 72 6f 67 72 61 6d 29 20 0a 20 20 20 >al program) . < +0010320 20 2f 2f 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 > /// Finds all a< +0010340 6c 74 65 72 6e 61 74 69 76 65 73 20 66 6f 72 20 >lternatives for < +0010360 74 68 65 20 67 69 76 65 6e 20 70 68 6f 6e 65 20 >the given phone < +0010400 6e 75 6d 62 65 72 20 0a 20 20 20 20 2f 2f 2f 20 >number . /// < +0010420 52 65 74 75 72 6e 73 3a 20 61 72 72 61 79 20 6f >Returns: array o< +0010440 66 20 73 74 72 69 6e 67 73 20 0a 20 20 20 20 73 >f strings . s< +0010460 74 72 69 6e 67 61 72 72 61 79 20 46 69 6e 64 57 >tringarray FindW< +0010500 6f 72 64 73 28 20 73 74 72 69 6e 67 20 70 68 6f >ords( string pho< +0010520 6e 65 5f 6e 75 6d 62 65 72 20 29 0a 20 20 20 20 >ne_number ). < +0010540 7b 0a 20 20 20 20 20 20 20 20 69 66 20 28 21 70 >{. if (!p< +0010560 68 6f 6e 65 5f 6e 75 6d 62 65 72 2e 6c 65 6e 67 >hone_number.leng< +0010600 74 68 29 0a 20 20 20 20 20 20 20 20 20 20 20 20 >th). < +0010620 72 65 74 75 72 6e 20 6e 75 6c 6c 3b 0a 20 20 20 >return null;. < +0010640 20 20 20 20 20 2f 2f 20 53 74 72 69 70 20 74 68 > // Strip th< +0010660 65 20 6e 6f 6e 2d 64 69 67 69 74 20 63 68 61 72 >e non-digit char< +0010700 61 63 74 65 72 73 20 66 72 6f 6d 20 74 68 65 20 >acters from the < +0010720 70 68 6f 6e 65 20 6e 75 6d 62 65 72 2c 20 61 6e >phone number, an< +0010740 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 70 61 73 >d. // pas< +0010760 73 20 69 74 20 74 6f 20 74 68 65 20 72 65 63 75 >s it to the recu< +0011000 72 73 69 76 65 20 66 75 6e 63 74 69 6f 6e 20 28 >rsive function (< +0011020 6c 65 61 64 69 6e 67 20 64 69 67 69 74 20 69 73 >leading digit is< +0011040 20 61 6c 6c 6f 77 65 64 29 0a 20 20 20 20 20 20 > allowed). 
< +0011060 20 20 72 65 74 75 72 6e 20 5f 46 69 6e 64 57 6f > return _FindWo< +0011100 72 64 73 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 >rds( stripNonDig< +0011120 69 74 28 70 68 6f 6e 65 5f 6e 75 6d 62 65 72 29 >it(phone_number)< +0011140 2c 20 74 72 75 65 20 29 3b 20 20 20 20 0a 20 20 >, true ); . < +0011160 20 20 7d 20 20 20 20 0a 20 20 20 20 0a 20 20 20 > } . . < +0011200 20 2f 2f 20 52 65 61 64 20 74 68 65 20 70 68 6f > // Read the pho< +0011220 6e 65 20 6e 75 6d 62 65 72 73 20 20 20 20 20 0a >ne numbers .< +0011240 20 20 20 20 66 6f 72 65 61 63 68 28 73 74 72 69 > foreach(stri< +0011260 6e 67 20 70 68 6f 6e 65 3b 20 6e 65 77 20 42 75 >ng phone; new Bu< +0011300 66 66 65 72 65 64 46 69 6c 65 28 22 69 6e 70 75 >fferedFile("inpu< +0011320 74 2e 74 78 74 22 20 20 20 29 20 29 0a 20 20 20 >t.txt" ) ). < +0011340 20 20 20 20 20 66 6f 72 65 61 63 68 28 61 6c 74 > foreach(alt< +0011360 65 72 6e 61 74 69 76 65 3b 20 46 69 6e 64 57 6f >ernative; FindWo< +0011400 72 64 73 28 20 70 68 6f 6e 65 20 29 20 29 0a 20 >rds( phone ) ). 
< +0011420 20 20 20 20 20 20 20 20 20 20 20 77 72 69 74 65 > write< +0011440 66 6c 6e 28 70 68 6f 6e 65 2c 20 22 3a 20 22 2c >fln(phone, ": ",< +0011460 20 61 6c 74 65 72 6e 61 74 69 76 65 20 29 3b 0a > alternative );.< +0011500 7d 0a 0a >}..< +0011503 diff --git a/tests/examplefiles/hexdump_test b/tests/examplefiles/hexdump_test deleted file mode 100644 index 4af46fcb..00000000 --- a/tests/examplefiles/hexdump_test +++ /dev/null @@ -1,310 +0,0 @@ -00000000 2f 2f 20 43 72 65 61 74 65 64 20 62 79 20 4c 69 |// Created by Li| -00000010 6f 6e 65 6c 6c 6f 20 4c 75 6e 65 73 75 20 61 6e |onello Lunesu an| -00000020 64 20 70 6c 61 63 65 64 20 69 6e 20 74 68 65 20 |d placed in the | -00000030 70 75 62 6c 69 63 20 64 6f 6d 61 69 6e 2e 0a 2f |public domain../| -00000040 2f 20 54 68 69 73 20 66 69 6c 65 20 68 61 73 20 |/ This file has | -00000050 62 65 65 6e 20 6d 6f 64 69 66 69 65 64 20 66 72 |been modified fr| -00000060 6f 6d 20 69 74 73 20 6f 72 69 67 69 6e 61 6c 20 |om its original | -00000070 76 65 72 73 69 6f 6e 2e 0a 2f 2f 20 49 74 20 68 |version..// It h| -00000080 61 73 20 62 65 65 6e 20 66 6f 72 6d 61 74 74 65 |as been formatte| -00000090 64 20 74 6f 20 66 69 74 20 79 6f 75 72 20 73 63 |d to fit your sc| -000000a0 72 65 65 6e 2e 0a 6d 6f 64 75 6c 65 20 70 68 6f |reen..module pho| -000000b0 6e 65 6e 6f 3b 20 20 20 20 20 2f 2f 20 6f 70 74 |neno; // opt| -000000c0 69 6f 6e 61 6c 0a 69 6d 70 6f 72 74 20 73 74 64 |ional.import std| -000000d0 2e 73 74 64 69 6f 3b 20 20 20 2f 2f 20 77 72 69 |.stdio; // wri| -000000e0 74 65 66 6c 6e 20 20 20 20 20 0a 69 6d 70 6f 72 |tefln .impor| -000000f0 74 20 73 74 64 2e 63 74 79 70 65 3b 20 20 20 2f |t std.ctype; /| -00000100 2f 20 69 73 64 69 67 69 74 20 20 20 20 20 0a 69 |/ isdigit .i| -00000110 6d 70 6f 72 74 20 73 74 64 2e 73 74 72 65 61 6d |mport std.stream| -00000120 3b 20 20 2f 2f 20 42 75 66 66 65 72 65 64 46 69 |; // BufferedFi| -00000130 6c 65 0a 0a 2f 2f 20 4a 75 73 74 20 66 6f 72 20 |le..// Just for | -00000140 72 65 61 64 61 
62 69 6c 69 74 79 20 28 69 6d 61 |readability (ima| -00000150 67 69 6e 65 20 63 68 61 72 5b 5d 5b 5d 5b 63 68 |gine char[][][ch| -00000160 61 72 5b 5d 5d 29 20 20 20 20 0a 61 6c 69 61 73 |ar[]]) .alias| -00000170 20 63 68 61 72 5b 5d 20 73 74 72 69 6e 67 3b 0a | char[] string;.| -00000180 61 6c 69 61 73 20 73 74 72 69 6e 67 5b 5d 20 73 |alias string[] s| -00000190 74 72 69 6e 67 61 72 72 61 79 3b 0a 0a 2f 2f 2f |tringarray;..///| -000001a0 20 53 74 72 69 70 73 20 6e 6f 6e 2d 64 69 67 69 | Strips non-digi| -000001b0 74 20 63 68 61 72 61 63 74 65 72 73 20 66 72 6f |t characters fro| -000001c0 6d 20 74 68 65 20 73 74 72 69 6e 67 20 28 43 4f |m the string (CO| -000001d0 57 29 0a 73 74 72 69 6e 67 20 73 74 72 69 70 4e |W).string stripN| -000001e0 6f 6e 44 69 67 69 74 28 20 69 6e 20 73 74 72 69 |onDigit( in stri| -000001f0 6e 67 20 6c 69 6e 65 20 29 20 0a 7b 0a 20 20 20 |ng line ) .{. | -00000200 20 73 74 72 69 6e 67 20 72 65 74 3b 0a 20 20 20 | string ret;. | -00000210 20 66 6f 72 65 61 63 68 28 75 69 6e 74 20 69 2c | foreach(uint i,| -00000220 20 63 3b 20 6c 69 6e 65 29 20 7b 0a 20 20 20 20 | c; line) {. | -00000230 20 20 20 20 2f 2f 20 45 72 72 6f 72 3a 20 73 74 | // Error: st| -00000240 64 2e 63 74 79 70 65 2e 69 73 64 69 67 69 74 20 |d.ctype.isdigit | -00000250 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 5c 70 68 |at C:\dmd\src\ph| -00000260 6f 62 6f 73 5c 73 74 64 5c 63 74 79 70 65 2e 64 |obos\std\ctype.d| -00000270 28 33 37 29 20 0a 20 20 20 20 20 20 20 20 2f 2f |(37) . //| -00000280 20 63 6f 6e 66 6c 69 63 74 73 20 77 69 74 68 20 | conflicts with | -00000290 73 74 64 2e 73 74 72 65 61 6d 2e 69 73 64 69 67 |std.stream.isdig| -000002a0 69 74 20 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 |it at C:\dmd\src| -000002b0 5c 70 68 6f 62 6f 73 5c 73 74 64 5c 73 74 72 65 |\phobos\std\stre| -000002c0 61 6d 2e 64 28 32 39 32 34 29 0a 20 20 20 20 20 |am.d(2924). 
| -000002d0 20 20 20 69 66 20 28 21 73 74 64 2e 63 74 79 70 | if (!std.ctyp| -000002e0 65 2e 69 73 64 69 67 69 74 28 63 29 29 20 7b 0a |e.isdigit(c)) {.| -000002f0 20 20 20 20 20 20 20 20 20 20 20 20 69 66 20 28 | if (| -00000300 21 72 65 74 29 0a 20 20 20 20 20 20 20 20 20 20 |!ret). | -00000310 20 20 20 20 20 20 72 65 74 20 3d 20 6c 69 6e 65 | ret = line| -00000320 5b 30 2e 2e 69 5d 3b 20 20 20 20 0a 20 20 20 20 |[0..i]; . | -00000330 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 20 20 | } . | -00000340 20 20 65 6c 73 65 20 69 66 20 28 72 65 74 29 0a | else if (ret).| -00000350 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 | ret | -00000360 7e 3d 20 63 3b 20 20 20 20 0a 20 20 20 20 7d 20 |~= c; . } | -00000370 20 20 20 0a 20 20 20 20 72 65 74 75 72 6e 20 72 | . return r| -00000380 65 74 3f 72 65 74 3a 6c 69 6e 65 3b 0a 7d 0a 0a |et?ret:line;.}..| -00000390 75 6e 69 74 74 65 73 74 20 7b 0a 20 20 20 20 61 |unittest {. a| -000003a0 73 73 65 72 74 28 20 73 74 72 69 70 4e 6f 6e 44 |ssert( stripNonD| -000003b0 69 67 69 74 28 22 61 73 64 66 22 29 20 3d 3d 20 |igit("asdf") == | -000003c0 22 22 20 20 29 3b 0a 20 20 20 20 61 73 73 65 72 |"" );. 
asser| -000003d0 74 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 69 74 |t( stripNonDigit| -000003e0 28 22 5c 27 31 33 2d 3d 32 20 34 6b 6f 70 22 29 |("\'13-=2 4kop")| -000003f0 20 3d 3d 20 20 22 31 33 32 34 22 20 20 29 3b 0a | == "1324" );.| -00000400 7d 0a 0a 2f 2f 2f 20 43 6f 6e 76 65 72 74 73 20 |}../// Converts | -00000410 61 20 77 6f 72 64 20 69 6e 74 6f 20 61 20 6e 75 |a word into a nu| -00000420 6d 62 65 72 2c 20 69 67 6e 6f 72 69 6e 67 20 61 |mber, ignoring a| -00000430 6c 6c 20 6e 6f 6e 20 61 6c 70 68 61 20 63 68 61 |ll non alpha cha| -00000440 72 61 63 74 65 72 73 20 20 0a 73 74 72 69 6e 67 |racters .string| -00000450 20 77 6f 72 64 54 6f 4e 75 6d 28 20 69 6e 20 73 | wordToNum( in s| -00000460 74 72 69 6e 67 20 77 6f 72 64 20 29 0a 7b 0a 2f |tring word ).{./| -00000470 2f 20 74 72 61 6e 73 6c 61 74 69 6f 6e 20 74 61 |/ translation ta| -00000480 62 6c 65 20 66 6f 72 20 74 68 65 20 74 61 73 6b |ble for the task| -00000490 20 61 74 20 68 61 6e 64 0a 63 6f 6e 73 74 20 63 | at hand.const c| -000004a0 68 61 72 5b 32 35 36 5d 20 54 52 41 4e 53 4c 41 |har[256] TRANSLA| -000004b0 54 45 20 3d 20 20 20 20 0a 20 20 20 20 22 20 20 |TE = . " | -000004c0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -000004d0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 22 20 | " | -000004e0 20 2f 2f 20 30 20 20 20 0a 20 20 20 20 22 20 20 | // 0 . " | -000004f0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 30 31 | 01| -00000500 32 33 34 35 36 37 38 39 20 20 20 20 20 20 22 20 |23456789 " | -00000510 20 2f 2f 20 33 32 20 20 20 20 20 0a 20 20 20 20 | // 32 . | -00000520 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 31 |" 57630499617851| -00000530 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 20 |881234762239 | -00000540 20 22 20 20 2f 2f 20 36 34 20 20 20 0a 20 20 20 | " // 64 . | -00000550 20 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 | " 5763049961785| -00000560 31 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 |1881234762239 | -00000570 20 20 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 | ". 
" | -00000580 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -00000590 20 20 20 20 20 20 20 20 20 22 0a 20 20 20 20 22 | ". "| -000005a0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -* -000005c0 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 20 20 |". " | -000005d0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -000005e0 20 20 20 20 20 20 20 22 20 20 20 20 0a 20 20 20 | " . | -000005f0 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | " | -00000600 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -00000610 20 20 22 3b 0a 20 20 20 20 73 74 72 69 6e 67 20 | ";. string | -00000620 72 65 74 3b 0a 20 20 20 20 66 6f 72 65 61 63 68 |ret;. foreach| -00000630 28 63 3b 20 63 61 73 74 28 75 62 79 74 65 5b 5d |(c; cast(ubyte[]| -00000640 29 77 6f 72 64 29 0a 20 20 20 20 20 20 20 20 69 |)word). i| -00000650 66 20 28 54 52 41 4e 53 4c 41 54 45 5b 63 5d 20 |f (TRANSLATE[c] | -00000660 21 3d 20 27 20 27 29 0a 20 20 20 20 20 20 20 20 |!= ' '). | -00000670 20 20 20 20 72 65 74 20 7e 3d 20 54 52 41 4e 53 | ret ~= TRANS| -00000680 4c 41 54 45 5b 63 5d 3b 0a 20 20 20 20 72 65 74 |LATE[c];. ret| -00000690 75 72 6e 20 72 65 74 3b 0a 7d 0a 0a 75 6e 69 74 |urn ret;.}..unit| -000006a0 74 65 73 74 20 7b 0a 20 2f 2f 20 54 65 73 74 20 |test {. // Test | -000006b0 77 6f 72 64 54 6f 4e 75 6d 20 75 73 69 6e 67 20 |wordToNum using | -000006c0 74 68 65 20 74 61 62 6c 65 20 66 72 6f 6d 20 74 |the table from t| -000006d0 68 65 20 74 61 73 6b 20 64 65 73 63 72 69 70 74 |he task descript| -000006e0 69 6f 6e 2e 0a 20 61 73 73 65 72 74 28 20 22 30 |ion.. assert( "0| -000006f0 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 36 |1112223334455666| -00000700 37 37 37 38 38 38 39 39 39 22 20 3d 3d 0a 20 20 |777888999" ==. 
| -00000710 20 77 6f 72 64 54 6f 4e 75 6d 28 22 45 20 7c 20 | wordToNum("E | | -00000720 4a 20 4e 20 51 20 7c 20 52 20 57 20 58 20 7c 20 |J N Q | R W X | | -00000730 44 20 53 20 59 20 7c 20 46 20 54 20 7c 20 41 20 |D S Y | F T | A | -00000740 4d 20 7c 20 43 20 49 20 56 20 7c 20 42 20 4b 20 |M | C I V | B K | -00000750 55 20 7c 20 4c 20 4f 20 50 20 7c 20 47 20 48 20 |U | L O P | G H | -00000760 5a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 20 22 |Z"));. assert( "| -00000770 30 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 |0111222333445566| -00000780 36 37 37 37 38 38 38 39 39 39 22 20 3d 3d 20 0a |6777888999" == .| -00000790 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 65 20 | wordToNum("e | -000007a0 7c 20 6a 20 6e 20 71 20 7c 20 72 20 77 20 78 20 || j n q | r w x | -000007b0 7c 20 64 20 73 20 79 20 7c 20 66 20 74 20 7c 20 || d s y | f t | | -000007c0 61 20 6d 20 7c 20 63 20 69 20 76 20 7c 20 62 20 |a m | c i v | b | -000007d0 6b 20 75 20 7c 20 6c 20 6f 20 70 20 7c 20 67 20 |k u | l o p | g | -000007e0 68 20 7a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 |h z"));. assert(| -000007f0 20 22 30 31 32 33 34 35 36 37 38 39 22 20 3d 3d | "0123456789" ==| -00000800 20 0a 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 | . wordToNum("| -00000810 30 20 7c 20 20 20 31 20 20 20 7c 20 20 20 32 20 |0 | 1 | 2 | -00000820 20 20 7c 20 20 20 33 20 20 20 7c 20 20 34 20 20 | | 3 | 4 | -00000830 7c 20 20 35 20 20 7c 20 20 20 36 20 20 20 7c 20 || 5 | 6 | | -00000840 20 20 37 20 20 20 7c 20 20 20 38 20 20 20 7c 20 | 7 | 8 | | -00000850 20 20 39 22 29 29 3b 0a 7d 0a 0a 76 6f 69 64 20 | 9"));.}..void | -00000860 6d 61 69 6e 28 20 73 74 72 69 6e 67 5b 5d 20 61 |main( string[] a| -00000870 72 67 73 20 29 0a 7b 0a 20 20 20 20 2f 2f 20 54 |rgs ).{. 
// T| -00000880 68 69 73 20 61 73 73 6f 63 69 61 74 69 76 65 20 |his associative | -00000890 61 72 72 61 79 20 6d 61 70 73 20 61 20 6e 75 6d |array maps a num| -000008a0 62 65 72 20 74 6f 20 61 6e 20 61 72 72 61 79 20 |ber to an array | -000008b0 6f 66 20 77 6f 72 64 73 2e 20 20 20 20 0a 20 20 |of words. . | -000008c0 20 20 73 74 72 69 6e 67 61 72 72 61 79 5b 73 74 | stringarray[st| -000008d0 72 69 6e 67 5d 20 20 20 20 6e 75 6d 32 77 6f 72 |ring] num2wor| -000008e0 64 73 3b 0a 0a 20 20 20 20 66 6f 72 65 61 63 68 |ds;.. foreach| -000008f0 28 73 74 72 69 6e 67 20 77 6f 72 64 3b 20 6e 65 |(string word; ne| -00000900 77 20 42 75 66 66 65 72 65 64 46 69 6c 65 28 22 |w BufferedFile("| -00000910 64 69 63 74 69 6f 6e 61 72 79 2e 74 78 74 22 20 |dictionary.txt" | -00000920 29 20 29 0a 20 20 20 20 20 20 20 20 6e 75 6d 32 |) ). num2| -00000930 77 6f 72 64 73 5b 20 77 6f 72 64 54 6f 4e 75 6d |words[ wordToNum| -00000940 28 77 6f 72 64 29 20 5d 20 7e 3d 20 77 6f 72 64 |(word) ] ~= word| -00000950 2e 64 75 70 3b 20 20 20 20 20 20 20 20 2f 2f 20 |.dup; // | -00000960 6d 75 73 74 20 64 75 70 0a 0a 20 20 20 20 2f 2f |must dup.. //| -00000970 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 6c 74 65 |/ Finds all alte| -00000980 72 6e 61 74 69 76 65 73 20 66 6f 72 20 74 68 65 |rnatives for the| -00000990 20 67 69 76 65 6e 20 6e 75 6d 62 65 72 0a 20 20 | given number. | -000009a0 20 20 2f 2f 2f 20 28 73 68 6f 75 6c 64 20 68 61 | /// (should ha| -000009b0 76 65 20 62 65 65 6e 20 73 74 72 69 70 70 65 64 |ve been stripped| -000009c0 20 66 72 6f 6d 20 6e 6f 6e 2d 64 69 67 69 74 20 | from non-digit | -000009d0 63 68 61 72 61 63 74 65 72 73 29 0a 20 20 20 20 |characters). | -000009e0 73 74 72 69 6e 67 61 72 72 61 79 20 5f 46 69 6e |stringarray _Fin| -000009f0 64 57 6f 72 64 73 28 20 73 74 72 69 6e 67 20 6e |dWords( string n| -00000a00 75 6d 62 65 72 73 2c 20 62 6f 6f 6c 20 64 69 67 |umbers, bool dig| -00000a10 69 74 6f 6b 20 29 0a 20 20 20 20 69 6e 20 7b 0a |itok ). 
in {.| -00000a20 20 20 20 20 20 20 20 20 61 73 73 65 72 74 28 6e | assert(n| -00000a30 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 20 3e 20 |umbers.length > | -00000a40 20 30 29 3b 20 20 20 20 0a 20 20 20 20 7d 20 20 | 0); . } | -00000a50 20 20 0a 20 20 20 20 6f 75 74 28 72 65 73 75 6c | . out(resul| -00000a60 74 29 20 7b 0a 20 20 20 20 20 20 20 20 66 6f 72 |t) {. for| -00000a70 65 61 63 68 20 28 61 3b 20 72 65 73 75 6c 74 29 |each (a; result)| -00000a80 0a 20 20 20 20 20 20 20 20 20 20 20 20 61 73 73 |. ass| -00000a90 65 72 74 28 20 77 6f 72 64 54 6f 4e 75 6d 28 61 |ert( wordToNum(a| -00000aa0 29 20 3d 3d 20 6e 75 6d 62 65 72 73 20 29 3b 0a |) == numbers );.| -00000ab0 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 62 6f | } . bo| -00000ac0 64 79 20 7b 0a 20 20 20 20 20 20 20 20 73 74 72 |dy {. str| -00000ad0 69 6e 67 61 72 72 61 79 20 72 65 74 3b 0a 20 20 |ingarray ret;. | -00000ae0 20 20 20 20 20 20 62 6f 6f 6c 20 66 6f 75 6e 64 | bool found| -00000af0 77 6f 72 64 20 3d 20 66 61 6c 73 65 3b 0a 20 20 |word = false;. | -00000b00 20 20 20 20 20 20 66 6f 72 20 28 75 69 6e 74 20 | for (uint | -00000b10 74 3d 31 3b 20 74 3c 3d 6e 75 6d 62 65 72 73 2e |t=1; t<=numbers.| -00000b20 6c 65 6e 67 74 68 3b 20 2b 2b 74 29 20 7b 0a 20 |length; ++t) {. | -00000b30 20 20 20 20 20 20 20 20 20 20 20 61 75 74 6f 20 | auto | -00000b40 61 6c 74 65 72 6e 61 74 69 76 65 73 20 3d 20 6e |alternatives = n| -00000b50 75 6d 62 65 72 73 5b 30 2e 2e 74 5d 20 69 6e 20 |umbers[0..t] in | -00000b60 6e 75 6d 32 77 6f 72 64 73 3b 0a 20 20 20 20 20 |num2words;. | -00000b70 20 20 20 20 20 20 20 69 66 20 28 21 61 6c 74 65 | if (!alte| -00000b80 72 6e 61 74 69 76 65 73 29 0a 20 20 20 20 20 20 |rnatives). | -00000b90 20 20 20 20 20 20 20 20 20 20 63 6f 6e 74 69 6e | contin| -00000ba0 75 65 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 |ue;. | -00000bb0 66 6f 75 6e 64 77 6f 72 64 20 3d 20 74 72 75 65 |foundword = true| -00000bc0 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 69 66 |;. 
if| -00000bd0 20 28 6e 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 | (numbers.length| -00000be0 20 3e 20 20 74 29 20 7b 0a 20 20 20 20 20 20 20 | > t) {. | -00000bf0 20 20 20 20 20 20 20 20 20 2f 2f 20 43 6f 6d 62 | // Comb| -00000c00 69 6e 65 20 61 6c 6c 20 63 75 72 72 65 6e 74 20 |ine all current | -00000c10 61 6c 74 65 72 6e 61 74 69 76 65 73 20 77 69 74 |alternatives wit| -00000c20 68 20 61 6c 6c 20 61 6c 74 65 72 6e 61 74 69 76 |h all alternativ| -00000c30 65 73 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 |es . | -00000c40 20 20 20 20 20 20 20 20 2f 2f 20 6f 66 20 74 68 | // of th| -00000c50 65 20 72 65 73 74 20 28 6e 65 78 74 20 70 69 65 |e rest (next pie| -00000c60 63 65 20 63 61 6e 20 73 74 61 72 74 20 77 69 74 |ce can start wit| -00000c70 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 |h a digit) | -00000c80 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 | . | -00000c90 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 63 68 | foreach| -00000ca0 20 28 61 32 3b 20 5f 46 69 6e 64 57 6f 72 64 73 | (a2; _FindWords| -00000cb0 28 20 6e 75 6d 62 65 72 73 5b 74 2e 2e 24 5d 2c |( numbers[t..$],| -00000cc0 20 74 72 75 65 20 20 20 20 20 29 20 29 0a 20 20 | true ) ). | -00000cd0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -00000ce0 20 20 66 6f 72 65 61 63 68 28 61 31 3b 20 2a 61 | foreach(a1; *a| -00000cf0 6c 74 65 72 6e 61 74 69 76 65 73 29 0a 20 20 20 |lternatives). | -00000d00 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -00000d10 20 20 20 20 72 65 74 20 7e 3d 20 61 31 20 7e 20 | ret ~= a1 ~ | -00000d20 22 20 22 20 7e 20 61 32 3b 0a 20 20 20 20 20 20 |" " ~ a2;. | -00000d30 20 20 20 20 20 20 7d 0a 20 20 20 20 20 20 20 20 | }. | -00000d40 20 20 20 20 65 6c 73 65 20 20 20 20 0a 20 20 20 | else . 
| -00000d50 20 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 | ret| -00000d60 20 7e 3d 20 2a 61 6c 74 65 72 6e 61 74 69 76 65 | ~= *alternative| -00000d70 73 3b 20 20 20 20 2f 2f 20 61 70 70 65 6e 64 20 |s; // append | -00000d80 74 68 65 73 65 20 61 6c 74 65 72 6e 61 74 69 76 |these alternativ| -00000d90 65 73 0a 20 20 20 20 20 20 20 20 7d 0a 20 20 20 |es. }. | -00000da0 20 20 20 20 20 2f 2f 20 54 72 79 20 74 6f 20 6b | // Try to k| -00000db0 65 65 70 20 31 20 64 69 67 69 74 2c 20 6f 6e 6c |eep 1 digit, onl| -00000dc0 79 20 69 66 20 77 65 27 72 65 20 61 6c 6c 6f 77 |y if we're allow| -00000dd0 65 64 20 61 6e 64 20 6e 6f 20 6f 74 68 65 72 0a |ed and no other.| -00000de0 20 20 20 20 20 20 20 20 2f 2f 20 61 6c 74 65 72 | // alter| -00000df0 6e 61 74 69 76 65 73 20 77 65 72 65 20 66 6f 75 |natives were fou| -00000e00 6e 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 54 65 |nd. // Te| -00000e10 73 74 69 6e 67 20 22 72 65 74 2e 6c 65 6e 67 74 |sting "ret.lengt| -00000e20 68 22 20 6d 61 6b 65 73 20 6d 6f 72 65 20 73 65 |h" makes more se| -00000e30 6e 73 65 20 74 68 61 6e 20 74 65 73 74 69 6e 67 |nse than testing| -00000e40 20 22 66 6f 75 6e 64 77 6f 72 64 22 2c 0a 20 20 | "foundword",. | -00000e50 20 20 20 20 20 20 2f 2f 20 62 75 74 20 74 68 65 | // but the| -00000e60 20 6f 74 68 65 72 20 69 6d 70 6c 65 6d 65 6e 74 | other implement| -00000e70 61 74 69 6f 6e 73 20 73 65 65 6d 20 74 6f 20 64 |ations seem to d| -00000e80 6f 20 6a 75 73 74 20 74 68 69 73 2e 0a 20 20 20 |o just this.. | -00000e90 20 20 20 20 20 69 66 20 28 64 69 67 69 74 6f 6b | if (digitok| -00000ea0 20 26 26 20 21 66 6f 75 6e 64 77 6f 72 64 29 20 | && !foundword) | -00000eb0 7b 20 2f 2f 72 65 74 2e 6c 65 6e 67 74 68 20 3d |{ //ret.length =| -00000ec0 3d 20 30 20 20 0a 20 20 20 20 20 20 20 20 20 20 |= 0 . | -00000ed0 20 20 69 66 28 6e 75 6d 62 65 72 73 2e 6c 65 6e | if(numbers.len| -00000ee0 67 74 68 20 3e 20 20 31 29 20 7b 0a 20 20 20 20 |gth > 1) {. 
| -00000ef0 20 20 20 20 20 20 20 20 20 20 20 20 2f 2f 20 43 | // C| -00000f00 6f 6d 62 69 6e 65 20 31 20 64 69 67 69 74 20 77 |ombine 1 digit w| -00000f10 69 74 68 20 61 6c 6c 20 61 6c 74 65 6e 61 74 69 |ith all altenati| -00000f20 76 65 73 20 66 72 6f 6d 20 74 68 65 20 72 65 73 |ves from the res| -00000f30 74 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 20 |t . | -00000f40 20 20 20 20 20 20 2f 2f 20 28 6e 65 78 74 20 70 | // (next p| -00000f50 69 65 63 65 20 63 61 6e 20 6e 6f 74 20 73 74 61 |iece can not sta| -00000f60 72 74 20 77 69 74 68 20 61 20 64 69 67 69 74 29 |rt with a digit)| -00000f70 20 20 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 | . | -00000f80 20 20 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 | forea| -00000f90 63 68 20 28 61 3b 20 5f 46 69 6e 64 57 6f 72 64 |ch (a; _FindWord| -00000fa0 73 28 20 6e 75 6d 62 65 72 73 5b 31 2e 2e 24 5d |s( numbers[1..$]| -00000fb0 2c 20 66 61 6c 73 65 20 29 20 29 0a 20 20 20 20 |, false ) ). | -00000fc0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -00000fd0 72 65 74 20 7e 3d 20 6e 75 6d 62 65 72 73 5b 30 |ret ~= numbers[0| -00000fe0 2e 2e 31 5d 20 7e 20 22 20 22 20 7e 20 61 3b 0a |..1] ~ " " ~ a;.| -00000ff0 20 20 20 20 20 20 20 20 20 20 20 20 7d 20 20 20 | } | -00001000 20 0a 20 20 20 20 20 20 20 20 20 20 20 20 65 6c | . el| -00001010 73 65 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 |se . | -00001020 20 20 20 20 20 20 20 72 65 74 20 7e 3d 20 6e 75 | ret ~= nu| -00001030 6d 62 65 72 73 5b 30 2e 2e 31 5d 3b 20 20 20 20 |mbers[0..1]; | -00001040 2f 2f 20 6a 75 73 74 20 61 70 70 65 6e 64 20 74 |// just append t| -00001050 68 69 73 20 64 69 67 69 74 20 20 20 20 20 20 20 |his digit | -00001060 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 7d | . }| -00001070 20 20 20 20 0a 20 20 20 20 20 20 20 20 72 65 74 | . ret| -00001080 75 72 6e 20 72 65 74 3b 0a 20 20 20 20 7d 0a 0a |urn ret;. 
}..| -00001090 20 20 20 20 2f 2f 2f 20 28 54 68 69 73 20 66 75 | /// (This fu| -000010a0 6e 63 74 69 6f 6e 20 77 61 73 20 69 6e 6c 69 6e |nction was inlin| -000010b0 65 64 20 69 6e 20 74 68 65 20 6f 72 69 67 69 6e |ed in the origin| -000010c0 61 6c 20 70 72 6f 67 72 61 6d 29 20 0a 20 20 20 |al program) . | -000010d0 20 2f 2f 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 | /// Finds all a| -000010e0 6c 74 65 72 6e 61 74 69 76 65 73 20 66 6f 72 20 |lternatives for | -000010f0 74 68 65 20 67 69 76 65 6e 20 70 68 6f 6e 65 20 |the given phone | -00001100 6e 75 6d 62 65 72 20 0a 20 20 20 20 2f 2f 2f 20 |number . /// | -00001110 52 65 74 75 72 6e 73 3a 20 61 72 72 61 79 20 6f |Returns: array o| -00001120 66 20 73 74 72 69 6e 67 73 20 0a 20 20 20 20 73 |f strings . s| -00001130 74 72 69 6e 67 61 72 72 61 79 20 46 69 6e 64 57 |tringarray FindW| -00001140 6f 72 64 73 28 20 73 74 72 69 6e 67 20 70 68 6f |ords( string pho| -00001150 6e 65 5f 6e 75 6d 62 65 72 20 29 0a 20 20 20 20 |ne_number ). | -00001160 7b 0a 20 20 20 20 20 20 20 20 69 66 20 28 21 70 |{. if (!p| -00001170 68 6f 6e 65 5f 6e 75 6d 62 65 72 2e 6c 65 6e 67 |hone_number.leng| -00001180 74 68 29 0a 20 20 20 20 20 20 20 20 20 20 20 20 |th). | -00001190 72 65 74 75 72 6e 20 6e 75 6c 6c 3b 0a 20 20 20 |return null;. | -000011a0 20 20 20 20 20 2f 2f 20 53 74 72 69 70 20 74 68 | // Strip th| -000011b0 65 20 6e 6f 6e 2d 64 69 67 69 74 20 63 68 61 72 |e non-digit char| -000011c0 61 63 74 65 72 73 20 66 72 6f 6d 20 74 68 65 20 |acters from the | -000011d0 70 68 6f 6e 65 20 6e 75 6d 62 65 72 2c 20 61 6e |phone number, an| -000011e0 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 70 61 73 |d. // pas| -000011f0 73 20 69 74 20 74 6f 20 74 68 65 20 72 65 63 75 |s it to the recu| -00001200 72 73 69 76 65 20 66 75 6e 63 74 69 6f 6e 20 28 |rsive function (| -00001210 6c 65 61 64 69 6e 67 20 64 69 67 69 74 20 69 73 |leading digit is| -00001220 20 61 6c 6c 6f 77 65 64 29 0a 20 20 20 20 20 20 | allowed). 
| -00001230 20 20 72 65 74 75 72 6e 20 5f 46 69 6e 64 57 6f | return _FindWo| -00001240 72 64 73 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 |rds( stripNonDig| -00001250 69 74 28 70 68 6f 6e 65 5f 6e 75 6d 62 65 72 29 |it(phone_number)| -00001260 2c 20 74 72 75 65 20 29 3b 20 20 20 20 0a 20 20 |, true ); . | -00001270 20 20 7d 20 20 20 20 0a 20 20 20 20 0a 20 20 20 | } . . | -00001280 20 2f 2f 20 52 65 61 64 20 74 68 65 20 70 68 6f | // Read the pho| -00001290 6e 65 20 6e 75 6d 62 65 72 73 20 20 20 20 20 0a |ne numbers .| -000012a0 20 20 20 20 66 6f 72 65 61 63 68 28 73 74 72 69 | foreach(stri| -000012b0 6e 67 20 70 68 6f 6e 65 3b 20 6e 65 77 20 42 75 |ng phone; new Bu| -000012c0 66 66 65 72 65 64 46 69 6c 65 28 22 69 6e 70 75 |fferedFile("inpu| -000012d0 74 2e 74 78 74 22 20 20 20 29 20 29 0a 20 20 20 |t.txt" ) ). | -000012e0 20 20 20 20 20 66 6f 72 65 61 63 68 28 61 6c 74 | foreach(alt| -000012f0 65 72 6e 61 74 69 76 65 3b 20 46 69 6e 64 57 6f |ernative; FindWo| -00001300 72 64 73 28 20 70 68 6f 6e 65 20 29 20 29 0a 20 |rds( phone ) ). 
| -00001310 20 20 20 20 20 20 20 20 20 20 20 77 72 69 74 65 | write| -00001320 66 6c 6e 28 70 68 6f 6e 65 2c 20 22 3a 20 22 2c |fln(phone, ": ",| -00001330 20 61 6c 74 65 72 6e 61 74 69 76 65 20 29 3b 0a | alternative );.| -00001340 7d 0a 0a |}..| -00001343 diff --git a/tests/examplefiles/hexdump_xxd b/tests/examplefiles/hexdump_xxd new file mode 100644 index 00000000..33a8a6e1 --- /dev/null +++ b/tests/examplefiles/hexdump_xxd @@ -0,0 +1,309 @@ +0000000: 2f2f 2043 7265 6174 6564 2062 7920 4c69 // Created by Li +0000010: 6f6e 656c 6c6f 204c 756e 6573 7520 616e onello Lunesu an +0000020: 6420 706c 6163 6564 2069 6e20 7468 6520 d placed in the +0000030: 7075 626c 6963 2064 6f6d 6169 6e2e 0a2f public domain../ +0000040: 2f20 5468 6973 2066 696c 6520 6861 7320 / This file has +0000050: 6265 656e 206d 6f64 6966 6965 6420 6672 been modified fr +0000060: 6f6d 2069 7473 206f 7269 6769 6e61 6c20 om its original +0000070: 7665 7273 696f 6e2e 0a2f 2f20 4974 2068 version..// It h +0000080: 6173 2062 6565 6e20 666f 726d 6174 7465 as been formatte +0000090: 6420 746f 2066 6974 2079 6f75 7220 7363 d to fit your sc +00000a0: 7265 656e 2e0a 6d6f 6475 6c65 2070 686f reen..module pho +00000b0: 6e65 6e6f 3b20 2020 2020 2f2f 206f 7074 neno; // opt +00000c0: 696f 6e61 6c0a 696d 706f 7274 2073 7464 ional.import std +00000d0: 2e73 7464 696f 3b20 2020 2f2f 2077 7269 .stdio; // wri +00000e0: 7465 666c 6e20 2020 2020 0a69 6d70 6f72 tefln .impor +00000f0: 7420 7374 642e 6374 7970 653b 2020 202f t std.ctype; / +0000100: 2f20 6973 6469 6769 7420 2020 2020 0a69 / isdigit .i +0000110: 6d70 6f72 7420 7374 642e 7374 7265 616d mport std.stream +0000120: 3b20 202f 2f20 4275 6666 6572 6564 4669 ; // BufferedFi +0000130: 6c65 0a0a 2f2f 204a 7573 7420 666f 7220 le..// Just for +0000140: 7265 6164 6162 696c 6974 7920 2869 6d61 readability (ima +0000150: 6769 6e65 2063 6861 725b 5d5b 5d5b 6368 gine char[][][ch +0000160: 6172 5b5d 5d29 2020 2020 0a61 6c69 6173 ar[]]) .alias +0000170: 2063 6861 725b 5d20 
7374 7269 6e67 3b0a char[] string;. +0000180: 616c 6961 7320 7374 7269 6e67 5b5d 2073 alias string[] s +0000190: 7472 696e 6761 7272 6179 3b0a 0a2f 2f2f tringarray;../// +00001a0: 2053 7472 6970 7320 6e6f 6e2d 6469 6769 Strips non-digi +00001b0: 7420 6368 6172 6163 7465 7273 2066 726f t characters fro +00001c0: 6d20 7468 6520 7374 7269 6e67 2028 434f m the string (CO +00001d0: 5729 0a73 7472 696e 6720 7374 7269 704e W).string stripN +00001e0: 6f6e 4469 6769 7428 2069 6e20 7374 7269 onDigit( in stri +00001f0: 6e67 206c 696e 6520 2920 0a7b 0a20 2020 ng line ) .{. +0000200: 2073 7472 696e 6720 7265 743b 0a20 2020 string ret;. +0000210: 2066 6f72 6561 6368 2875 696e 7420 692c foreach(uint i, +0000220: 2063 3b20 6c69 6e65 2920 7b0a 2020 2020 c; line) {. +0000230: 2020 2020 2f2f 2045 7272 6f72 3a20 7374 // Error: st +0000240: 642e 6374 7970 652e 6973 6469 6769 7420 d.ctype.isdigit +0000250: 6174 2043 3a5c 646d 645c 7372 635c 7068 at C:\dmd\src\ph +0000260: 6f62 6f73 5c73 7464 5c63 7479 7065 2e64 obos\std\ctype.d +0000270: 2833 3729 200a 2020 2020 2020 2020 2f2f (37) . // +0000280: 2063 6f6e 666c 6963 7473 2077 6974 6820 conflicts with +0000290: 7374 642e 7374 7265 616d 2e69 7364 6967 std.stream.isdig +00002a0: 6974 2061 7420 433a 5c64 6d64 5c73 7263 it at C:\dmd\src +00002b0: 5c70 686f 626f 735c 7374 645c 7374 7265 \phobos\std\stre +00002c0: 616d 2e64 2832 3932 3429 0a20 2020 2020 am.d(2924). +00002d0: 2020 2069 6620 2821 7374 642e 6374 7970 if (!std.ctyp +00002e0: 652e 6973 6469 6769 7428 6329 2920 7b0a e.isdigit(c)) {. +00002f0: 2020 2020 2020 2020 2020 2020 6966 2028 if ( +0000300: 2172 6574 290a 2020 2020 2020 2020 2020 !ret). +0000310: 2020 2020 2020 7265 7420 3d20 6c69 6e65 ret = line +0000320: 5b30 2e2e 695d 3b20 2020 200a 2020 2020 [0..i]; . +0000330: 2020 2020 7d20 2020 200a 2020 2020 2020 } . +0000340: 2020 656c 7365 2069 6620 2872 6574 290a else if (ret). 
+0000350: 2020 2020 2020 2020 2020 2020 7265 7420 ret +0000360: 7e3d 2063 3b20 2020 200a 2020 2020 7d20 ~= c; . } +0000370: 2020 200a 2020 2020 7265 7475 726e 2072 . return r +0000380: 6574 3f72 6574 3a6c 696e 653b 0a7d 0a0a et?ret:line;.}.. +0000390: 756e 6974 7465 7374 207b 0a20 2020 2061 unittest {. a +00003a0: 7373 6572 7428 2073 7472 6970 4e6f 6e44 ssert( stripNonD +00003b0: 6967 6974 2822 6173 6466 2229 203d 3d20 igit("asdf") == +00003c0: 2222 2020 293b 0a20 2020 2061 7373 6572 "" );. asser +00003d0: 7428 2073 7472 6970 4e6f 6e44 6967 6974 t( stripNonDigit +00003e0: 2822 5c27 3133 2d3d 3220 346b 6f70 2229 ("\'13-=2 4kop") +00003f0: 203d 3d20 2022 3133 3234 2220 2029 3b0a == "1324" );. +0000400: 7d0a 0a2f 2f2f 2043 6f6e 7665 7274 7320 }../// Converts +0000410: 6120 776f 7264 2069 6e74 6f20 6120 6e75 a word into a nu +0000420: 6d62 6572 2c20 6967 6e6f 7269 6e67 2061 mber, ignoring a +0000430: 6c6c 206e 6f6e 2061 6c70 6861 2063 6861 ll non alpha cha +0000440: 7261 6374 6572 7320 200a 7374 7269 6e67 racters .string +0000450: 2077 6f72 6454 6f4e 756d 2820 696e 2073 wordToNum( in s +0000460: 7472 696e 6720 776f 7264 2029 0a7b 0a2f tring word ).{./ +0000470: 2f20 7472 616e 736c 6174 696f 6e20 7461 / translation ta +0000480: 626c 6520 666f 7220 7468 6520 7461 736b ble for the task +0000490: 2061 7420 6861 6e64 0a63 6f6e 7374 2063 at hand.const c +00004a0: 6861 725b 3235 365d 2054 5241 4e53 4c41 har[256] TRANSLA +00004b0: 5445 203d 2020 2020 0a20 2020 2022 2020 TE = . " +00004c0: 2020 2020 2020 2020 2020 2020 2020 2020 +00004d0: 2020 2020 2020 2020 2020 2020 2020 2220 " +00004e0: 202f 2f20 3020 2020 0a20 2020 2022 2020 // 0 . " +00004f0: 2020 2020 2020 2020 2020 2020 2020 3031 01 +0000500: 3233 3435 3637 3839 2020 2020 2020 2220 23456789 " +0000510: 202f 2f20 3332 2020 2020 200a 2020 2020 // 32 . 
+0000520: 2220 3537 3633 3034 3939 3631 3738 3531 " 57630499617851 +0000530: 3838 3132 3334 3736 3232 3339 2020 2020 881234762239 +0000540: 2022 2020 2f2f 2036 3420 2020 0a20 2020 " // 64 . +0000550: 2022 2035 3736 3330 3439 3936 3137 3835 " 5763049961785 +0000560: 3138 3831 3233 3437 3632 3233 3920 2020 1881234762239 +0000570: 2020 220a 2020 2020 2220 2020 2020 2020 ". " +0000580: 2020 2020 2020 2020 2020 2020 2020 2020 +0000590: 2020 2020 2020 2020 2022 0a20 2020 2022 ". " +00005a0: 2020 2020 2020 2020 2020 2020 2020 2020 +00005b0: 2020 2020 2020 2020 2020 2020 2020 2020 +00005c0: 220a 2020 2020 2220 2020 2020 2020 2020 ". " +00005d0: 2020 2020 2020 2020 2020 2020 2020 2020 +00005e0: 2020 2020 2020 2022 2020 2020 0a20 2020 " . +00005f0: 2022 2020 2020 2020 2020 2020 2020 2020 " +0000600: 2020 2020 2020 2020 2020 2020 2020 2020 +0000610: 2020 223b 0a20 2020 2073 7472 696e 6720 ";. string +0000620: 7265 743b 0a20 2020 2066 6f72 6561 6368 ret;. foreach +0000630: 2863 3b20 6361 7374 2875 6279 7465 5b5d (c; cast(ubyte[] +0000640: 2977 6f72 6429 0a20 2020 2020 2020 2069 )word). i +0000650: 6620 2854 5241 4e53 4c41 5445 5b63 5d20 f (TRANSLATE[c] +0000660: 213d 2027 2027 290a 2020 2020 2020 2020 != ' '). +0000670: 2020 2020 7265 7420 7e3d 2054 5241 4e53 ret ~= TRANS +0000680: 4c41 5445 5b63 5d3b 0a20 2020 2072 6574 LATE[c];. ret +0000690: 7572 6e20 7265 743b 0a7d 0a0a 756e 6974 urn ret;.}..unit +00006a0: 7465 7374 207b 0a20 2f2f 2054 6573 7420 test {. // Test +00006b0: 776f 7264 546f 4e75 6d20 7573 696e 6720 wordToNum using +00006c0: 7468 6520 7461 626c 6520 6672 6f6d 2074 the table from t +00006d0: 6865 2074 6173 6b20 6465 7363 7269 7074 he task descript +00006e0: 696f 6e2e 0a20 6173 7365 7274 2820 2230 ion.. assert( "0 +00006f0: 3131 3132 3232 3333 3334 3435 3536 3636 1112223334455666 +0000700: 3737 3738 3838 3939 3922 203d 3d0a 2020 777888999" ==. 
+0000710: 2077 6f72 6454 6f4e 756d 2822 4520 7c20 wordToNum("E | +0000720: 4a20 4e20 5120 7c20 5220 5720 5820 7c20 J N Q | R W X | +0000730: 4420 5320 5920 7c20 4620 5420 7c20 4120 D S Y | F T | A +0000740: 4d20 7c20 4320 4920 5620 7c20 4220 4b20 M | C I V | B K +0000750: 5520 7c20 4c20 4f20 5020 7c20 4720 4820 U | L O P | G H +0000760: 5a22 2929 3b0a 2061 7373 6572 7428 2022 Z"));. assert( " +0000770: 3031 3131 3232 3233 3333 3434 3535 3636 0111222333445566 +0000780: 3637 3737 3838 3839 3939 2220 3d3d 200a 6777888999" == . +0000790: 2020 2077 6f72 6454 6f4e 756d 2822 6520 wordToNum("e +00007a0: 7c20 6a20 6e20 7120 7c20 7220 7720 7820 | j n q | r w x +00007b0: 7c20 6420 7320 7920 7c20 6620 7420 7c20 | d s y | f t | +00007c0: 6120 6d20 7c20 6320 6920 7620 7c20 6220 a m | c i v | b +00007d0: 6b20 7520 7c20 6c20 6f20 7020 7c20 6720 k u | l o p | g +00007e0: 6820 7a22 2929 3b0a 2061 7373 6572 7428 h z"));. assert( +00007f0: 2022 3031 3233 3435 3637 3839 2220 3d3d "0123456789" == +0000800: 200a 2020 2077 6f72 6454 6f4e 756d 2822 . wordToNum(" +0000810: 3020 7c20 2020 3120 2020 7c20 2020 3220 0 | 1 | 2 +0000820: 2020 7c20 2020 3320 2020 7c20 2034 2020 | 3 | 4 +0000830: 7c20 2035 2020 7c20 2020 3620 2020 7c20 | 5 | 6 | +0000840: 2020 3720 2020 7c20 2020 3820 2020 7c20 7 | 8 | +0000850: 2020 3922 2929 3b0a 7d0a 0a76 6f69 6420 9"));.}..void +0000860: 6d61 696e 2820 7374 7269 6e67 5b5d 2061 main( string[] a +0000870: 7267 7320 290a 7b0a 2020 2020 2f2f 2054 rgs ).{. // T +0000880: 6869 7320 6173 736f 6369 6174 6976 6520 his associative +0000890: 6172 7261 7920 6d61 7073 2061 206e 756d array maps a num +00008a0: 6265 7220 746f 2061 6e20 6172 7261 7920 ber to an array +00008b0: 6f66 2077 6f72 6473 2e20 2020 200a 2020 of words. . +00008c0: 2020 7374 7269 6e67 6172 7261 795b 7374 stringarray[st +00008d0: 7269 6e67 5d20 2020 206e 756d 3277 6f72 ring] num2wor +00008e0: 6473 3b0a 0a20 2020 2066 6f72 6561 6368 ds;.. 
foreach +00008f0: 2873 7472 696e 6720 776f 7264 3b20 6e65 (string word; ne +0000900: 7720 4275 6666 6572 6564 4669 6c65 2822 w BufferedFile(" +0000910: 6469 6374 696f 6e61 7279 2e74 7874 2220 dictionary.txt" +0000920: 2920 290a 2020 2020 2020 2020 6e75 6d32 ) ). num2 +0000930: 776f 7264 735b 2077 6f72 6454 6f4e 756d words[ wordToNum +0000940: 2877 6f72 6429 205d 207e 3d20 776f 7264 (word) ] ~= word +0000950: 2e64 7570 3b20 2020 2020 2020 202f 2f20 .dup; // +0000960: 6d75 7374 2064 7570 0a0a 2020 2020 2f2f must dup.. // +0000970: 2f20 4669 6e64 7320 616c 6c20 616c 7465 / Finds all alte +0000980: 726e 6174 6976 6573 2066 6f72 2074 6865 rnatives for the +0000990: 2067 6976 656e 206e 756d 6265 720a 2020 given number. +00009a0: 2020 2f2f 2f20 2873 686f 756c 6420 6861 /// (should ha +00009b0: 7665 2062 6565 6e20 7374 7269 7070 6564 ve been stripped +00009c0: 2066 726f 6d20 6e6f 6e2d 6469 6769 7420 from non-digit +00009d0: 6368 6172 6163 7465 7273 290a 2020 2020 characters). +00009e0: 7374 7269 6e67 6172 7261 7920 5f46 696e stringarray _Fin +00009f0: 6457 6f72 6473 2820 7374 7269 6e67 206e dWords( string n +0000a00: 756d 6265 7273 2c20 626f 6f6c 2064 6967 umbers, bool dig +0000a10: 6974 6f6b 2029 0a20 2020 2069 6e20 7b0a itok ). in {. +0000a20: 2020 2020 2020 2020 6173 7365 7274 286e assert(n +0000a30: 756d 6265 7273 2e6c 656e 6774 6820 3e20 umbers.length > +0000a40: 2030 293b 2020 2020 0a20 2020 207d 2020 0); . } +0000a50: 2020 0a20 2020 206f 7574 2872 6573 756c . out(resul +0000a60: 7429 207b 0a20 2020 2020 2020 2066 6f72 t) {. for +0000a70: 6561 6368 2028 613b 2072 6573 756c 7429 each (a; result) +0000a80: 0a20 2020 2020 2020 2020 2020 2061 7373 . ass +0000a90: 6572 7428 2077 6f72 6454 6f4e 756d 2861 ert( wordToNum(a +0000aa0: 2920 3d3d 206e 756d 6265 7273 2029 3b0a ) == numbers );. +0000ab0: 2020 2020 7d20 2020 200a 2020 2020 626f } . bo +0000ac0: 6479 207b 0a20 2020 2020 2020 2073 7472 dy {. str +0000ad0: 696e 6761 7272 6179 2072 6574 3b0a 2020 ingarray ret;. 
+0000ae0: 2020 2020 2020 626f 6f6c 2066 6f75 6e64 bool found +0000af0: 776f 7264 203d 2066 616c 7365 3b0a 2020 word = false;. +0000b00: 2020 2020 2020 666f 7220 2875 696e 7420 for (uint +0000b10: 743d 313b 2074 3c3d 6e75 6d62 6572 732e t=1; t<=numbers. +0000b20: 6c65 6e67 7468 3b20 2b2b 7429 207b 0a20 length; ++t) {. +0000b30: 2020 2020 2020 2020 2020 2061 7574 6f20 auto +0000b40: 616c 7465 726e 6174 6976 6573 203d 206e alternatives = n +0000b50: 756d 6265 7273 5b30 2e2e 745d 2069 6e20 umbers[0..t] in +0000b60: 6e75 6d32 776f 7264 733b 0a20 2020 2020 num2words;. +0000b70: 2020 2020 2020 2069 6620 2821 616c 7465 if (!alte +0000b80: 726e 6174 6976 6573 290a 2020 2020 2020 rnatives). +0000b90: 2020 2020 2020 2020 2020 636f 6e74 696e contin +0000ba0: 7565 3b0a 2020 2020 2020 2020 2020 2020 ue;. +0000bb0: 666f 756e 6477 6f72 6420 3d20 7472 7565 foundword = true +0000bc0: 3b0a 2020 2020 2020 2020 2020 2020 6966 ;. if +0000bd0: 2028 6e75 6d62 6572 732e 6c65 6e67 7468 (numbers.length +0000be0: 203e 2020 7429 207b 0a20 2020 2020 2020 > t) {. +0000bf0: 2020 2020 2020 2020 202f 2f20 436f 6d62 // Comb +0000c00: 696e 6520 616c 6c20 6375 7272 656e 7420 ine all current +0000c10: 616c 7465 726e 6174 6976 6573 2077 6974 alternatives wit +0000c20: 6820 616c 6c20 616c 7465 726e 6174 6976 h all alternativ +0000c30: 6573 2020 2020 200a 2020 2020 2020 2020 es . +0000c40: 2020 2020 2020 2020 2f2f 206f 6620 7468 // of th +0000c50: 6520 7265 7374 2028 6e65 7874 2070 6965 e rest (next pie +0000c60: 6365 2063 616e 2073 7461 7274 2077 6974 ce can start wit +0000c70: 6820 6120 6469 6769 7429 2020 2020 2020 h a digit) +0000c80: 2020 2020 2020 2020 0a20 2020 2020 2020 . +0000c90: 2020 2020 2020 2020 2066 6f72 6561 6368 foreach +0000ca0: 2028 6132 3b20 5f46 696e 6457 6f72 6473 (a2; _FindWords +0000cb0: 2820 6e75 6d62 6572 735b 742e 2e24 5d2c ( numbers[t..$], +0000cc0: 2074 7275 6520 2020 2020 2920 290a 2020 true ) ). 
+0000cd0: 2020 2020 2020 2020 2020 2020 2020 2020 +0000ce0: 2020 666f 7265 6163 6828 6131 3b20 2a61 foreach(a1; *a +0000cf0: 6c74 6572 6e61 7469 7665 7329 0a20 2020 lternatives). +0000d00: 2020 2020 2020 2020 2020 2020 2020 2020 +0000d10: 2020 2020 7265 7420 7e3d 2061 3120 7e20 ret ~= a1 ~ +0000d20: 2220 2220 7e20 6132 3b0a 2020 2020 2020 " " ~ a2;. +0000d30: 2020 2020 2020 7d0a 2020 2020 2020 2020 }. +0000d40: 2020 2020 656c 7365 2020 2020 0a20 2020 else . +0000d50: 2020 2020 2020 2020 2020 2020 2072 6574 ret +0000d60: 207e 3d20 2a61 6c74 6572 6e61 7469 7665 ~= *alternative +0000d70: 733b 2020 2020 2f2f 2061 7070 656e 6420 s; // append +0000d80: 7468 6573 6520 616c 7465 726e 6174 6976 these alternativ +0000d90: 6573 0a20 2020 2020 2020 207d 0a20 2020 es. }. +0000da0: 2020 2020 202f 2f20 5472 7920 746f 206b // Try to k +0000db0: 6565 7020 3120 6469 6769 742c 206f 6e6c eep 1 digit, onl +0000dc0: 7920 6966 2077 6527 7265 2061 6c6c 6f77 y if we're allow +0000dd0: 6564 2061 6e64 206e 6f20 6f74 6865 720a ed and no other. +0000de0: 2020 2020 2020 2020 2f2f 2061 6c74 6572 // alter +0000df0: 6e61 7469 7665 7320 7765 7265 2066 6f75 natives were fou +0000e00: 6e64 0a20 2020 2020 2020 202f 2f20 5465 nd. // Te +0000e10: 7374 696e 6720 2272 6574 2e6c 656e 6774 sting "ret.lengt +0000e20: 6822 206d 616b 6573 206d 6f72 6520 7365 h" makes more se +0000e30: 6e73 6520 7468 616e 2074 6573 7469 6e67 nse than testing +0000e40: 2022 666f 756e 6477 6f72 6422 2c0a 2020 "foundword",. +0000e50: 2020 2020 2020 2f2f 2062 7574 2074 6865 // but the +0000e60: 206f 7468 6572 2069 6d70 6c65 6d65 6e74 other implement +0000e70: 6174 696f 6e73 2073 6565 6d20 746f 2064 ations seem to d +0000e80: 6f20 6a75 7374 2074 6869 732e 0a20 2020 o just this.. +0000e90: 2020 2020 2069 6620 2864 6967 6974 6f6b if (digitok +0000ea0: 2026 2620 2166 6f75 6e64 776f 7264 2920 && !foundword) +0000eb0: 7b20 2f2f 7265 742e 6c65 6e67 7468 203d { //ret.length = +0000ec0: 3d20 3020 200a 2020 2020 2020 2020 2020 = 0 . 
+0000ed0: 2020 6966 286e 756d 6265 7273 2e6c 656e if(numbers.len +0000ee0: 6774 6820 3e20 2031 2920 7b0a 2020 2020 gth > 1) {. +0000ef0: 2020 2020 2020 2020 2020 2020 2f2f 2043 // C +0000f00: 6f6d 6269 6e65 2031 2064 6967 6974 2077 ombine 1 digit w +0000f10: 6974 6820 616c 6c20 616c 7465 6e61 7469 ith all altenati +0000f20: 7665 7320 6672 6f6d 2074 6865 2072 6573 ves from the res +0000f30: 7420 2020 200a 2020 2020 2020 2020 2020 t . +0000f40: 2020 2020 2020 2f2f 2028 6e65 7874 2070 // (next p +0000f50: 6965 6365 2063 616e 206e 6f74 2073 7461 iece can not sta +0000f60: 7274 2077 6974 6820 6120 6469 6769 7429 rt with a digit) +0000f70: 2020 2020 2020 2020 2020 0a20 2020 2020 . +0000f80: 2020 2020 2020 2020 2020 2066 6f72 6561 forea +0000f90: 6368 2028 613b 205f 4669 6e64 576f 7264 ch (a; _FindWord +0000fa0: 7328 206e 756d 6265 7273 5b31 2e2e 245d s( numbers[1..$] +0000fb0: 2c20 6661 6c73 6520 2920 290a 2020 2020 , false ) ). +0000fc0: 2020 2020 2020 2020 2020 2020 2020 2020 +0000fd0: 7265 7420 7e3d 206e 756d 6265 7273 5b30 ret ~= numbers[0 +0000fe0: 2e2e 315d 207e 2022 2022 207e 2061 3b0a ..1] ~ " " ~ a;. +0000ff0: 2020 2020 2020 2020 2020 2020 7d20 2020 } +0001000: 200a 2020 2020 2020 2020 2020 2020 656c . el +0001010: 7365 2020 2020 0a20 2020 2020 2020 2020 se . +0001020: 2020 2020 2020 2072 6574 207e 3d20 6e75 ret ~= nu +0001030: 6d62 6572 735b 302e 2e31 5d3b 2020 2020 mbers[0..1]; +0001040: 2f2f 206a 7573 7420 6170 7065 6e64 2074 // just append t +0001050: 6869 7320 6469 6769 7420 2020 2020 2020 his digit +0001060: 2020 2020 2020 0a20 2020 2020 2020 207d . } +0001070: 2020 2020 0a20 2020 2020 2020 2072 6574 . ret +0001080: 7572 6e20 7265 743b 0a20 2020 207d 0a0a urn ret;. }.. +0001090: 2020 2020 2f2f 2f20 2854 6869 7320 6675 /// (This fu +00010a0: 6e63 7469 6f6e 2077 6173 2069 6e6c 696e nction was inlin +00010b0: 6564 2069 6e20 7468 6520 6f72 6967 696e ed in the origin +00010c0: 616c 2070 726f 6772 616d 2920 0a20 2020 al program) . 
+00010d0: 202f 2f2f 2046 696e 6473 2061 6c6c 2061 /// Finds all a +00010e0: 6c74 6572 6e61 7469 7665 7320 666f 7220 lternatives for +00010f0: 7468 6520 6769 7665 6e20 7068 6f6e 6520 the given phone +0001100: 6e75 6d62 6572 200a 2020 2020 2f2f 2f20 number . /// +0001110: 5265 7475 726e 733a 2061 7272 6179 206f Returns: array o +0001120: 6620 7374 7269 6e67 7320 0a20 2020 2073 f strings . s +0001130: 7472 696e 6761 7272 6179 2046 696e 6457 tringarray FindW +0001140: 6f72 6473 2820 7374 7269 6e67 2070 686f ords( string pho +0001150: 6e65 5f6e 756d 6265 7220 290a 2020 2020 ne_number ). +0001160: 7b0a 2020 2020 2020 2020 6966 2028 2170 {. if (!p +0001170: 686f 6e65 5f6e 756d 6265 722e 6c65 6e67 hone_number.leng +0001180: 7468 290a 2020 2020 2020 2020 2020 2020 th). +0001190: 7265 7475 726e 206e 756c 6c3b 0a20 2020 return null;. +00011a0: 2020 2020 202f 2f20 5374 7269 7020 7468 // Strip th +00011b0: 6520 6e6f 6e2d 6469 6769 7420 6368 6172 e non-digit char +00011c0: 6163 7465 7273 2066 726f 6d20 7468 6520 acters from the +00011d0: 7068 6f6e 6520 6e75 6d62 6572 2c20 616e phone number, an +00011e0: 640a 2020 2020 2020 2020 2f2f 2070 6173 d. // pas +00011f0: 7320 6974 2074 6f20 7468 6520 7265 6375 s it to the recu +0001200: 7273 6976 6520 6675 6e63 7469 6f6e 2028 rsive function ( +0001210: 6c65 6164 696e 6720 6469 6769 7420 6973 leading digit is +0001220: 2061 6c6c 6f77 6564 290a 2020 2020 2020 allowed). +0001230: 2020 7265 7475 726e 205f 4669 6e64 576f return _FindWo +0001240: 7264 7328 2073 7472 6970 4e6f 6e44 6967 rds( stripNonDig +0001250: 6974 2870 686f 6e65 5f6e 756d 6265 7229 it(phone_number) +0001260: 2c20 7472 7565 2029 3b20 2020 200a 2020 , true ); . +0001270: 2020 7d20 2020 200a 2020 2020 0a20 2020 } . . +0001280: 202f 2f20 5265 6164 2074 6865 2070 686f // Read the pho +0001290: 6e65 206e 756d 6265 7273 2020 2020 200a ne numbers . 
+00012a0: 2020 2020 666f 7265 6163 6828 7374 7269 foreach(stri +00012b0: 6e67 2070 686f 6e65 3b20 6e65 7720 4275 ng phone; new Bu +00012c0: 6666 6572 6564 4669 6c65 2822 696e 7075 fferedFile("inpu +00012d0: 742e 7478 7422 2020 2029 2029 0a20 2020 t.txt" ) ). +00012e0: 2020 2020 2066 6f72 6561 6368 2861 6c74 foreach(alt +00012f0: 6572 6e61 7469 7665 3b20 4669 6e64 576f ernative; FindWo +0001300: 7264 7328 2070 686f 6e65 2029 2029 0a20 rds( phone ) ). +0001310: 2020 2020 2020 2020 2020 2077 7269 7465 write +0001320: 666c 6e28 7068 6f6e 652c 2022 3a20 222c fln(phone, ": ", +0001330: 2061 6c74 6572 6e61 7469 7665 2029 3b0a alternative );. +0001340: 7d0a 0a }.. -- cgit v1.2.1 From b6cddf8c258f96d14543240d6bc4a3cc1019422e Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Wed, 28 Oct 2015 20:04:19 -0700 Subject: Remove AGPL example file. --- tests/examplefiles/test.ecl | 58 --------------------------------------------- 1 file changed, 58 deletions(-) delete mode 100644 tests/examplefiles/test.ecl diff --git a/tests/examplefiles/test.ecl b/tests/examplefiles/test.ecl deleted file mode 100644 index b686492a..00000000 --- a/tests/examplefiles/test.ecl +++ /dev/null @@ -1,58 +0,0 @@ -/*############################################################################## - - Copyright (C) 2011 HPCC Systems. - - All rights reserved. This program is free software: you can redistribute it and/or modify - it under the terms of the GNU Affero General Public License as - published by the Free Software Foundation, either version 3 of the - License, or (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU Affero General Public License for more details. - - You should have received a copy of the GNU Affero General Public License - along with this program. If not, see . 
-############################################################################## */ - -#option ('slidingJoins', true); - -namesRecord := - RECORD -string20 surname; -string10 forename; -integer2 age; -integer2 dadAge; -integer2 mumAge; - END; - -namesRecord2 := - record -string10 extra; -namesRecord; - end; - -namesTable := dataset('x',namesRecord,FLAT); -namesTable2 := dataset('y',namesRecord2,FLAT); - -integer2 aveAgeL(namesRecord l) := (l.dadAge+l.mumAge)/2; -integer2 aveAgeR(namesRecord2 r) := (r.dadAge+r.mumAge)/2; - -// Standard join on a function of left and right -output(join(namesTable, namesTable2, aveAgeL(left) = aveAgeR(right))); - -//Several simple examples of sliding join syntax -output(join(namesTable, namesTable2, left.age >= right.age - 10 and left.age <= right.age +10)); -output(join(namesTable, namesTable2, left.age between right.age - 10 and right.age +10)); -output(join(namesTable, namesTable2, left.age between right.age + 10 and right.age +30)); -output(join(namesTable, namesTable2, left.age between (right.age + 20) - 10 and (right.age +20) + 10)); -output(join(namesTable, namesTable2, aveAgeL(left) between aveAgeR(right)+10 and aveAgeR(right)+40)); - -//Same, but on strings. Also includes age to ensure sort is done by non-sliding before sliding. 
-output(join(namesTable, namesTable2, left.surname between right.surname[1..10]+'AAAAAAAAAA' and right.surname[1..10]+'ZZZZZZZZZZ' and left.age=right.age)); -output(join(namesTable, namesTable2, left.surname between right.surname[1..10]+'AAAAAAAAAA' and right.surname[1..10]+'ZZZZZZZZZZ' and left.age=right.age,all)); - -//This should not generate a self join -output(join(namesTable, namesTable, left.age between right.age - 10 and right.age +10)); - -- cgit v1.2.1 From c14f2a8cd4cf44d2cd405b2fd88ca190929e39af Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 31 Oct 2015 12:31:12 -0700 Subject: Add test file for Python tracebacks changes; normalize AUTHORS --- AUTHORS | 11 +-- tests/examplefiles/pycon_ctrlc_traceback | 118 +++++++++++++++++++++++++++++++ 2 files changed, 124 insertions(+), 5 deletions(-) create mode 100644 tests/examplefiles/pycon_ctrlc_traceback diff --git a/AUTHORS b/AUTHORS index 30b88082..3434c20c 100644 --- a/AUTHORS +++ b/AUTHORS @@ -31,12 +31,13 @@ Other contributors, listed alphabetically, are: * Adam Blinkinsop -- Haskell, Redcode lexers * Frits van Bommel -- assembler lexers * Pierre Bourdon -- bugfixes +* chebee7i -- Python traceback lexer improvements * Hiram Chirino -- Scaml and Jade lexers * Ian Cooper -- VGL lexer * David Corbett -- Inform, Jasmin, and TADS 3 lexers * Leaf Corcoran -- MoonScript lexer * Christopher Creutzig -- MuPAD lexer -* Daniël W. Crompton - Pike lexer +* Daniël W. Crompton -- Pike lexer * Pete Curry -- bugfixes * Bryan Davis -- EBNF lexer * Bruno Deferrari -- Shen lexer @@ -80,15 +81,15 @@ Other contributors, listed alphabetically, are: * Doug Hogan -- Mscgen lexer * Ben Hollis -- Mason lexer * Max Horn -- GAP lexer -* Dustin Howett -- Logos lexer * Alastair Houghton -- Lexer inheritance facility * Tim Howard -- BlitzMax lexer +* Dustin Howett -- Logos lexer * Ivan Inozemtsev -- Fantom lexer * Hiroaki Itoh -- Shell console rewrite * Brian R. 
Jackson -- Tea lexer * Christian Jann -- ShellSession lexer * Dennis Kaarsemaker -- sources.list lexer -* Dmitri Kabak - Inferno Limbo lexer +* Dmitri Kabak -- Inferno Limbo lexer * Igor Kalnitsky -- vhdl lexer * Alexander Kit -- MaskJS lexer * Pekka Klärck -- Robot Framework lexer @@ -138,8 +139,8 @@ Other contributors, listed alphabetically, are: * David Oliva -- Rebol lexer * Pat Pannuto -- nesC lexer * Jon Parise -- Protocol buffers lexer -* Ronny Pfannschmidt -- BBCode lexer * Benjamin Peterson -- Test suite refactoring +* Ronny Pfannschmidt -- BBCode lexer * Dominik Picheta -- Nimrod lexer * Andrew Pinkham -- RTF Formatter Refactoring * Clément Prévost -- UrbiScript lexer @@ -180,7 +181,6 @@ Other contributors, listed alphabetically, are: * Daniele Varrazzo -- PostgreSQL lexers * Abe Voelker -- OpenEdge ABL lexer * Pepijn de Vos -- HTML formatter CTags support -* Whitney Young -- ObjectiveC lexer * Matthias Vallentin -- Bro lexer * Linh Vu Hong -- RSL lexer * Nathan Weizenbaum -- Haml and Sass lexers @@ -188,6 +188,7 @@ Other contributors, listed alphabetically, are: * Dietmar Winkler -- Modelica lexer * Nils Winter -- Smalltalk lexer * Davy Wybiral -- Clojure lexer +* Whitney Young -- ObjectiveC lexer * Diego Zamboni -- CFengine3 lexer * Enrique Zamudio -- Ceylon lexer * Alex Zimin -- Nemerle lexer diff --git a/tests/examplefiles/pycon_ctrlc_traceback b/tests/examplefiles/pycon_ctrlc_traceback new file mode 100644 index 00000000..4998fd9c --- /dev/null +++ b/tests/examplefiles/pycon_ctrlc_traceback @@ -0,0 +1,118 @@ +x = r""" +>>> import os +>>> print os + +>>> for x in range(10): +... y = x + 2 +... print(x) +... if x > 5: +... raise Exception +... +0 +1 +2 +3 +4 +5 +6 +Traceback (most recent call last): + File "", line 5, in +Exception +>>> +>>> while True: +... pass +... +^CTraceback (most recent call last): + File "", line 1, in +KeyboardInterrupt + +>>> class A(Exception):pass +... +>>> class B(Exception):pass +... +>>> try: +... try: +... 
raise A('first') +... finally: +... raise B('second') +... except A as c: +... print(c) +... +Traceback (most recent call last): + File "", line 3, in +__main__.A: first + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "", line 5, in +__main__.B: second + +>>> x = + File "", line 1 + x = + ^ +SyntaxError: invalid syntax +>>> + +>>> x = 3 +>>> with 5 as y: +... print(x + y) +... +8 + +# TODO +#>>> raise ValueError('multi\n line\ndetail') +#Traceback (most recent call last): +#........ +#ValueError: multi +# line +#detail + +>>> raise ValueError('multi\n line\ndetail') +Traceback (most recent call last): + .123 +ValueError: multi + line +detail + +>>> raise ValueError('multi\n line\ndetail') +Traceback (most recent call last): + ... +ValueError: multi + line +detail + +>>> raise ValueError('multi\n line\ndetail') +Traceback (most recent call last): + .... +ValueError: multi + line +detail + +>>> raise ValueError('multi\n line\ndetail') +Traceback (most recent call last): + .... +ValueError: multi + line +detail + +>>> raise ValueError('multi\n line\ndetail') +Traceback (most recent call last): + ... +ValueError: multi + line +detail + +>>> raise Exception +Traceback (most recent call last): + File "", line 1, in +Exception +>>> import somemodule +>>> somemodule.blah() +Traceback (most recent call last): + File "", line 1, in + File "/path/to/stuff/somemodule/blah.py", line 658, in blah + raise Exception('Hi.') +Exception: Hi. 
+ -- cgit v1.2.1 From f15aaf9bc0d5af296e31246a0a155ec448151b36 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 31 Oct 2015 12:39:18 -0700 Subject: Minor changes to Ezhil lexer --- AUTHORS | 1 + pygments/lexers/ezhil.py | 31 +++++++++++++++++-------------- tests/examplefiles/ezhil_primefactors.n | 2 ++ 3 files changed, 20 insertions(+), 14 deletions(-) diff --git a/AUTHORS b/AUTHORS index 06119231..3f785574 100644 --- a/AUTHORS +++ b/AUTHORS @@ -8,6 +8,7 @@ Other contributors, listed alphabetically, are: * Sam Aaron -- Ioke lexer * Ali Afshar -- image formatter * Thomas Aglassinger -- Rexx lexer +* Muthiah Annamalai -- Ezhil lexer * Kumar Appaiah -- Debian control lexer * Andreas Amann -- AppleScript lexer * Timothy Armstrong -- Dart lexer fixes diff --git a/pygments/lexers/ezhil.py b/pygments/lexers/ezhil.py index f1d739ce..713541ee 100644 --- a/pygments/lexers/ezhil.py +++ b/pygments/lexers/ezhil.py @@ -5,7 +5,7 @@ Pygments lexers for Ezhil language. - :copyright: Copyright 2015 Muthiah Annamalai + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -18,22 +18,24 @@ __all__ = ['EzhilLexer'] class EzhilLexer(RegexLexer): """ - *New in Pygments. Lexical analyzer for Tamil script based programming language, Ezhil* - *See: http://ezhillang.org/ + Lexer for `Ezhil, a Tamil script-based programming language `_ + + .. 
versionadded:: 2.1 """ name = 'Ezhil' aliases = ['ezhil'] filenames = ['*.n'] mimetypes = ['text/x-ezhil'] flags = re.MULTILINE | re.UNICODE - # see constant from open-tamil package: tamil.utf8.tamil_letters - _TALETTERS = u'a-zA-Z_|அ|ஆ|இ|ஈ|உ|ஊ|எ|ஏ|ஐ|ஒ|ஓ|ஔ|ஃ|க்|ச்|ட்|த்|ப்|ற்|ங்|ஞ்|ண்|ந்|ம்|ன்|ய்|ர்|ல்|வ்|ழ்|ள்|க|ச|ட|த|ப|ற|ஞ|ங|ண|ந|ம|ன|ய|ர|ல|வ|ழ|ள|ஜ|ஷ|ஸ|ஹ|க|கா|கி|கீ|கு|கூ|கெ|கே|கை|கொ|கோ|கௌ|ச|சா|சி|சீ|சு|சூ|செ|சே|சை|சொ|சோ|சௌ|ட|டா|டி|டீ|டு|டூ|டெ|டே|டை|டொ|டோ|டௌ|த|தா|தி|தீ|து|தூ|தெ|தே|தை|தொ|தோ|தௌ|ப|பா|பி|பீ|பு|பூ|பெ|பே|பை|பொ|போ|பௌ|ற|றா|றி|றீ|று|றூ|றெ|றே|றை|றொ|றோ|றௌ|ஞ|ஞா|ஞி|ஞீ|ஞு|ஞூ|ஞெ|ஞே|ஞை|ஞொ|ஞோ|ஞௌ|ங|ஙா|ஙி|ஙீ|ஙு|ஙூ|ஙெ|ஙே|ஙை|ஙொ|ஙோ|ஙௌ|ண|ணா|ணி|ணீ|ணு|ணூ|ணெ|ணே|ணை|ணொ|ணோ|ணௌ|ந|நா|நி|நீ|நு|நூ|நெ|நே|நை|நொ|நோ|நௌ|ம|மா|மி|மீ|மு|மூ|மெ|மே|மை|மொ|மோ|மௌ|ன|னா|னி|னீ|னு|னூ|னெ|னே|னை|னொ|னோ|னௌ|ய|யா|யி|யீ|யு|யூ|யெ|யே|யை|யொ|யோ|யௌ|ர|ரா|ரி|ரீ|ரு|ரூ|ரெ|ரே|ரை|ரொ|ரோ|ரௌ|ல|லா|லி|லீ|லு|லூ|லெ|லே|லை|லொ|லோ|லௌ|வ|வா|வி|வீ|வு|வூ|வெ|வே|வை|வொ|வோ|வௌ|ழ|ழா|ழி|ழீ|ழு|ழூ|ழெ|ழே|ழை|ழொ|ழோ|ழௌ|ள|ளா|ளி|ளீ|ளு|ளூ|ளெ|ளே|ளை|ளொ|ளோ|ளௌ|ௐ|ஜ|ஜா|ஜி|ஜீ|ஜு|ஜூ|ஜெ|ஜே|ஜை|ஜொ|ஜோ|ஜௌ|ஷ|ஷா|ஷி|ஷீ|ஷு|ஷூ|ஷெ|ஷே|ஷை|ஷொ|ஷோ|ஷௌ|ஸ|ஸா|ஸி|ஸீ|ஸு|ஸூ|ஸெ|ஸே|ஸை|ஸொ|ஸோ|ஸௌ|ஹ|ஹா|ஹி|ஹீ|ஹு|ஹூ|ஹெ|ஹே|ஹை|ஹொ|ஹோ|ஹௌ' + # Refer to tamil.utf8.tamil_letters from open-tamil for a stricter version of this. + # This much simpler version is close enough, and includes combining marks. 
+ _TALETTERS = u'[a-zA-Z_]|[\u0b80-\u0bff]' tokens = { 'root': [ include('keywords'), (r'#.*\n', Comment.Single), - (r'[@+/*,^\-%]|[!<>=]=?|&&?|\|\|?',Operator), - (u'இல்',Operator.Word), + (r'[@+/*,^\-%]|[!<>=]=?|&&?|\|\|?', Operator), + (u'இல்', Operator.Word), (words(('assert', 'max', 'min', 'நீளம்','சரம்_இடமாற்று','சரம்_கண்டுபிடி', 'பட்டியல்','பின்இணை','வரிசைப்படுத்து', @@ -41,24 +43,25 @@ class EzhilLexer(RegexLexer): 'கோப்பை_திற','கோப்பை_எழுது','கோப்பை_மூடு', 'pi','sin','cos','tan','sqrt','hypot','pow','exp','log','log10' 'min','max','exit', - ), suffix=r'\b'),Name.Builtin), + ), suffix=r'\b'), Name.Builtin), (r'(True|False)\b', Keyword.Constant), (r'[^\S\n]+', Text), include('identifier'), include('literal'), (r'[(){}\[\]:;.]', Punctuation), - ], + ], 'keywords': [ (u'பதிப்பி|தேர்ந்தெடு|தேர்வு|ஏதேனில்|ஆனால்|இல்லைஆனால்|இல்லை|ஆக|ஒவ்வொன்றாக|இல்|வரை|செய்|முடியேனில்|பின்கொடு|முடி|நிரல்பாகம்|தொடர்|நிறுத்து|நிரல்பாகம்', Keyword), - ], + ], 'identifier': [ - (u'['+_TALETTERS+']['+u'0-9'+_TALETTERS+u']*',Name), - ], + (u'(?:'+_TALETTERS+u')(?:[0-9]|'+_TALETTERS+u')*', Name), + ], 'literal': [ (r'".*?"', String), (r'(?u)\d+((\.\d*)?[eE][+-]?\d+|\.\d*)', Number.Float), - (r'(?u)\d+',Number.Integer), - ]} + (r'(?u)\d+', Number.Integer), + ] + } def __init__(self, **options): super(EzhilLexer, self).__init__(**options) diff --git a/tests/examplefiles/ezhil_primefactors.n b/tests/examplefiles/ezhil_primefactors.n index 96c82b4c..13390611 100644 --- a/tests/examplefiles/ezhil_primefactors.n +++ b/tests/examplefiles/ezhil_primefactors.n @@ -1,6 +1,8 @@ # (C) முத்தையா அண்ணாமலை 2013 # (A) என். 
சொக்கன் # எழில் தமிழ் நிரலாக்க மொழி உதாரணம் +# Muthu A granted permission for this to be included under the BSD license +# https://bitbucket.org/birkenfeld/pygments-main/pull-requests/443/ezhil-language-lexer-for-pygments/diff ## Prime Factors Example ## பகா எண் கூறுகளைக் கண்டறியும் உதாரணம் -- cgit v1.2.1 From 99969ec88af8af4a0cae9a9b24e0c3140b0196f9 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 31 Oct 2015 12:41:26 -0700 Subject: Update changelog --- CHANGES | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGES b/CHANGES index 7056d178..f3e55b25 100644 --- a/CHANGES +++ b/CHANGES @@ -34,6 +34,7 @@ Version 2.1 * Crmsh (PR#440) * Praat (PR#492) * CSound (PR#494) + * Ezhil (PR#443) - Added styles: @@ -69,6 +70,8 @@ Version 2.1 - Fixed incomplete output on Windows and Python 3 (e.g. when using iPython Notebook). (#1153) +- Allowed more traceback styles in Python console lexer. (PR#253) + Version 2.0.3 ------------- -- cgit v1.2.1 From 470742e01fda77c990872ae52672c39522c5b5a1 Mon Sep 17 00:00:00 2001 From: Nathan Whetsell Date: Sat, 31 Oct 2015 15:52:10 -0400 Subject: Add example CSD file --- tests/examplefiles/test.csd | 260 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 260 insertions(+) create mode 100644 tests/examplefiles/test.csd diff --git a/tests/examplefiles/test.csd b/tests/examplefiles/test.csd new file mode 100644 index 00000000..9122309b --- /dev/null +++ b/tests/examplefiles/test.csd @@ -0,0 +1,260 @@ + + +// This is a Csound orchestra file for testing a Pygments +// lexer. Csound single-line comments can be preceded by a pair of forward +// slashes... +; ...or a semicolon. + +/* Block comments begin with /* and end with */ + +// Orchestras begin with a header of audio parameters. +nchnls = 1 +nchnls_i = 1 +sr = 44100 +0dbfs = 1 +ksmps = 10 + +// The control rate kr = sr / ksmps can be omitted when the number of audio +// samples in a control period (ksmps) is set, but kr may appear in older +// orchestras. 
+kr = 4410 + +// Orchestras contain instruments. These begin with the keyword instr followed +// by a comma-separated list of numbers or names of the instrument. Instruments +// end at the endin keyword and cannot be nested. +instr 1, N_a_M_e_, +Name + // Instruments contain statements. Here is a typical statement: + aSignal oscil 0dbfs, 440, 1 + // Statements are terminated with a newline (possibly preceded by a comment). + // To write a statement on several lines, precede the newline with a + // backslash. + prints \ + "hello, world\n";comment + + // Csound 6 introduced function syntax for opcodes with one or zero outputs. + // The oscil statement above is the same as + aSignal = oscil(0dbfs, 440, 1) + + // Instruments can contain control structures. + kNote = p3 + if (kNote == 0) then + kFrequency = 220 + elseif kNote == 1 then // Parentheses around binary expressions are optional. + kFrequency = 440 + endif + + // Csound 6 introduced looping structures. + iIndex = 0 + while iIndex < 5 do + print iIndex + iIndex += 1 + od + iIndex = 0 + until iIndex >= 5 do + print iIndex + iIndex += 1 + enduntil + // Both kinds of loops can be terminated by either od or enduntil. + + // Single-line strings are enclosed in double-quotes. + prints "string\\\r\n\t\"" + // Multi-line strings are enclosed in pairs of curly braces. + prints {{ + hello, + + world + }} + + // Instruments often end with a statement containing an output opcode. + outc aSignal +endin + +// Orchestras can also contain user-defined opcodes (UDOs). Here is an +// oscillator with one audio-rate output and two control-rate inputs: +opcode anOscillator, a, kk + kAmplitude, kFrequency xin + aSignal vco2 kAmplitude, kFrequency + xout aSignal +endop +instr TestOscillator + outc(anOscillator(0dbfs, 110)) +endin + +// Python can be executed in Csound +// . So can Lua +// . 
+pyruni {{ +import random + +pool = [(1 + i / 10.0) ** 1.2 for i in range(100)] + +def get_number_from_pool(n, p): + if random.random() < p: + i = int(random.random() * len(pool)) + pool[i] = n + return random.choice(pool) +}} + +// The Csound preprocessor supports conditional compilation and including files. +#ifdef DEBUG +#undef DEBUG +#include "filename.orc" +#endif + +// The preprocessor also supports object- and function-like macros. This is an +// object-like macro that defines a number: +#define A_HZ #440# + +// This is a function-like macro: +#define OSCIL_MACRO(VOLUME'FREQUENCY'TABLE) #oscil $VOLUME, $FREQUENCY, $TABLE# + +// Bodies of macros are enclosed in # and can contain newlines. The arguments of +// function-like macros are separated by single-quotes. Uses of macros are +// prefixed with a dollar sign. +instr TestMacro + aSignal $OSCIL_MACRO(1'$A_HZ'1) + // Not unlike PHP, macros expand in double-quoted strings. + prints "The frequency of the oscillator is $A_HZ Hz.\n" + out aSignal +endin + +// Here are other things to note about Csound. + +// There are two bitwise NOT operators, ~ and ¬ (U+00AC). The latter is common +// on keyboards in the United Kingdom +// . +instr TestBitwiseNOT + print ~42 + print ¬42 +endin + +// Csound uses # for bitwise XOR, which the Csound manual calls bitwise +// non-equivalence . +instr TestBitwiseXOR + print 0 # 0 + print 0 # 1 + print 1 # 0 + print 1 # 1 +endin + +// Loops and if-then statements are relatively recent additions to Csound. There +// are many flow-control opcodes that involve goto and labels. +instr TestGoto + // This... + if p3 > 0 goto if_label + goto else_label +if_label: + prints "if branch\n" + goto endif_label +else_label: + prints "else branch\n" +endif_label: + + // ...is the same as this. + if p3 > 0 then + prints "if branch\n" + else + prints "else branch\n" + endif + + // This... 
+ iIndex = 0 +loop_label: + print iIndex + iIndex += 1 + if iIndex < 10 goto loop_label + + // ...is the same as this... + iIndex = 0 +loop_lt_label: + print iIndex + loop_lt iIndex, 1, 10, loop_lt_label + + // ...and this. + iIndex = 0 + while iIndex < 10 do + print iIndex + iIndex += 1 + od +endin + +// The prints and printks opcodes +// , arguably +// the primary methods of logging output, treat certain sequences of characters +// different from printf in C. +instr TestPrints + // ^ prints an ESCAPE character (U+001B), not a CIRCUMFLEX ACCENT character + // (U+005E). ^^ prints a CIRCUMFLEX ACCENT. + prints "^^\n" + // ~ prints an ESCAPE character (U+001B) followed by a [, not a TILDE + // character (U+007E). ~~ prints a TILDE. + prints "~~\n" + // \A, \B, \N, \R, and \T correspond to the escaped lowercase characters (that + // is, BELL (U+0007), BACKSPACE (U+0008), new line (U+000A), CARRIAGE RETURN + // (U+000D), and tab (U+0009)). + prints "\T\R\N" + // %n, %r, and %t are the same as \n, \r, and \t, as are %N, %R, and %T. + prints "%t%r%n" + // %! prints a semicolon. This is a hold-over from old versions of Csound that + // allowed comments to begin in strings. + prints "; %!\n" +endin + +// The arguments of function-like macros can be separated by # instead of '. +// These two lines define the same macro. +#define OSCIL_MACRO(VOLUME'FREQUENCY'TABLE) #oscil $VOLUME, $FREQUENCY, $TABLE# +#define OSCIL_MACRO(VOLUME#FREQUENCY#TABLE) #oscil $VOLUME, $FREQUENCY, $TABLE# + +// Uses of macros can optionally be suffixed with a period. 
+instr TestMacroPeriodSuffix + aSignal $OSCIL_MACRO.(1'$A_HZ'1) + prints "The frequency of the oscillator is $A_HZ.Hz.\n" + out aSignal +endin + +// Csound has @ and @@ operator-like macros that, when followed by a literal +// non-negative integer, expand to the next power of 2 and the next power of 2 +// plus 1: +// @x = 2^(ceil(log2(x + 1))), x >= 0 +// @@0 = 2 +// @@x = 2^(ceil(log2(x))) + 1, x > 0 +// These macros are in +// (and +// ) +// and are described at . +instr TestAt + prints "%d %2d %2d\n", 0, @0, @@0 + prints "%d %2d %2d\n", 1, @1, @@1 + prints "%d %2d %2d\n", 2, @2, @@2 + prints "%d %2d %2d\n", 3, @3, @@3 + prints "%d %2d %2d\n", 4, @4, @@4 + prints "%d %2d %2d\n", 5, @5, @@5 + prints "%d %2d %2d\n", 6, @6, @@6 + prints "%d %2d %2d\n", 7, @7, @@7 + prints "%d %2d %2d\n", 8, @8, @@8 + prints "%d %2d %2d\n", 9, @9, @@9 +endin + +// Including newlines in macros can lead to confusing code, but it tests the +// lexer. +instr MacroAbuse + if 1 == 1 then + prints "on\n" +#define FOO# +BAR +#endif // This ends the if statement. It is not a preprocessor directive. 
+endin + + +f 1 0 16384 10 1 +i "N_a_M_e_" 0 2 +i "TestOscillator" 2 2 +i "TestBitwiseNOT" 0 1 +i "TestBitwiseXOR" 0 1 +i "TestGoto" 0 1 +i "TestMacroPeriodSuffix" 4 1 +i "TestAt" 0 1 +i "MacroAbuse" 0 1 +e + + -- cgit v1.2.1 From 981e86cd167c9e1c2878805116fc342106121459 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 31 Oct 2015 15:12:52 -0700 Subject: Add tests for c/cpp includes --- tests/test_clexer.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/tests/test_clexer.py b/tests/test_clexer.py index 6a3dcbce..fd7f58fc 100644 --- a/tests/test_clexer.py +++ b/tests/test_clexer.py @@ -234,3 +234,26 @@ class CLexerTest(unittest.TestCase): (Token.Text, u'\n'), ] self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment)))) + + def testPreprocFile(self): + fragment = u'#include \n' + tokens = [ + (Token.Comment.Preproc, u'#'), + (Token.Comment.Preproc, u'include'), + (Token.Text, u' '), + (Token.Comment.PreprocFile, u''), + (Token.Comment.Preproc, u'\n'), + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + + def testPreprocFile2(self): + fragment = u'#include "foo.h"\n' + tokens = [ + (Token.Comment.Preproc, u'#'), + (Token.Comment.Preproc, u'include'), + (Token.Text, u' '), + (Token.Comment.PreprocFile, u'"foo.h"'), + (Token.Comment.Preproc, u'\n'), + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + -- cgit v1.2.1 From 0b6221d5a7f59338e2a61b1f6a2ab8821a3efbf8 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 31 Oct 2015 15:24:39 -0700 Subject: Add demo file for Thrift --- tests/examplefiles/demo.thrift | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 tests/examplefiles/demo.thrift diff --git a/tests/examplefiles/demo.thrift b/tests/examplefiles/demo.thrift new file mode 100644 index 00000000..e50544d5 --- /dev/null +++ b/tests/examplefiles/demo.thrift @@ -0,0 +1,14 @@ +/* comment */ +/** doc comment */ + +namespace cpp shared // inline comment + +struct 
Foo1 { + 1: i32 key + 2: string value +} + +service Foo2 { + Foo1 bar(1: i32 key) +} + -- cgit v1.2.1 From 0192f12b6f3e7a3cd4d164a1825a3961d3cb9d21 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 31 Oct 2015 15:37:33 -0700 Subject: Add Thrift to changelog --- CHANGES | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGES b/CHANGES index f3e55b25..e1ebba70 100644 --- a/CHANGES +++ b/CHANGES @@ -35,6 +35,7 @@ Version 2.1 * Praat (PR#492) * CSound (PR#494) * Ezhil (PR#443) + * Thrift (PR#469) - Added styles: -- cgit v1.2.1 From 05375e980cac24ef64ca4290e18415c1971aece2 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 31 Oct 2015 15:44:11 -0700 Subject: Note TypeScript merge. --- CHANGES | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGES b/CHANGES index e1ebba70..eb49b761 100644 --- a/CHANGES +++ b/CHANGES @@ -73,6 +73,8 @@ Version 2.1 - Allowed more traceback styles in Python console lexer. (PR#253) +- Added decorators to TypeScript. (PR#509) + Version 2.0.3 ------------- -- cgit v1.2.1 From 52a893ce593d616fd0fd4e7dfeddf6dc1c45b4f2 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 31 Oct 2015 15:53:09 -0700 Subject: Fix missing imports after move, add example file. 
--- pygments/lexers/_mapping.py | 20 +++----------------- pygments/lexers/qvt.py | 7 +++++++ tests/examplefiles/sample.qvto | 4 ++++ 3 files changed, 14 insertions(+), 17 deletions(-) create mode 100644 tests/examplefiles/sample.qvto diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index db133904..6f565999 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -129,10 +129,10 @@ LEXERS = { 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), 'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)), 'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)), - 'EzhilLexer': ('pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)), 'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)), 'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)), 'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)), + 'EzhilLexer': ('pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)), 'FSharpLexer': ('pygments.lexers.dotnet', 'FSharp', ('fsharp',), ('*.fs', '*.fsi'), ('text/x-fsharp',)), 'FactorLexer': ('pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)), 'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), @@ -307,25 +307,11 @@ LEXERS = { 'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')), 'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb',), ('*.pytb',), 
('text/x-python-traceback',)), 'QBasicLexer': ('pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), + 'QVToLexer': ('pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()), 'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')), 'RConsoleLexer': ('pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()), 'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)), 'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')), - 'PrologLexer': ('pygments.lexers.compiled', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)), - 'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)), - 'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()), - 'PuppetLexer': ('pygments.lexers.other', 'Puppet', ('puppet',), ('*.pp',), ()), - 'PyPyLogLexer': ('pygments.lexers.text', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)), - 'Python3Lexer': ('pygments.lexers.agile', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')), - 'Python3TracebackLexer': ('pygments.lexers.agile', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)), - 'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), - 'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')), - 'PythonTracebackLexer': ('pygments.lexers.agile', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)), - 'QVToLexer': 
('pygments.lexers.other', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()), - 'QmlLexer': ('pygments.lexers.web', 'QML', ('qml', 'Qt Meta Language', 'Qt modeling Language'), ('*.qml',), ('application/x-qml',)), - 'RConsoleLexer': ('pygments.lexers.math', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()), - 'RPMSpecLexer': ('pygments.lexers.other', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)), - 'RacketLexer': ('pygments.lexers.functional', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktl'), ('text/x-racket', 'application/x-racket')), 'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()), 'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()), 'RagelDLexer': ('pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()), @@ -388,7 +374,7 @@ LEXERS = { 'TerraformLexer': ('pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')), 'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), 'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), - 'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ()), + 'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)), 'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), 'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), 'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')), diff --git a/pygments/lexers/qvt.py b/pygments/lexers/qvt.py index 2dfec3e6..5bc61310 100644 --- a/pygments/lexers/qvt.py +++ b/pygments/lexers/qvt.py @@ -9,6 +9,13 @@ :license: BSD, see LICENSE for details. 
""" +from pygments.lexer import RegexLexer, bygroups, include, combined +from pygments.token import Text, Comment, Operator, Keyword, Punctuation, \ + Name, String, Number + +__all__ = ['QVToLexer'] + + class QVToLexer(RegexLexer): """ For the `QVT Operational Mapping language `_. diff --git a/tests/examplefiles/sample.qvto b/tests/examplefiles/sample.qvto new file mode 100644 index 00000000..6241ee23 --- /dev/null +++ b/tests/examplefiles/sample.qvto @@ -0,0 +1,4 @@ +transformation Foo(uml: SimpleUML, + rdbms : SimpleRDBMS) { +} +/* comment */ -- cgit v1.2.1 From ad10e9431265105575a1c250b682757d73d077a9 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 31 Oct 2015 15:53:54 -0700 Subject: Update changelog --- CHANGES | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGES b/CHANGES index eb49b761..511a748e 100644 --- a/CHANGES +++ b/CHANGES @@ -36,6 +36,7 @@ Version 2.1 * CSound (PR#494) * Ezhil (PR#443) * Thrift (PR#469) + * QVT Operational (PR#204) - Added styles: -- cgit v1.2.1 From ebaff7b49b924cc1e804599c877e1f3f35e6ae26 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 31 Oct 2015 16:23:13 -0700 Subject: Update changelog --- AUTHORS | 2 +- CHANGES | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index dc6a91fa..d7ba35d4 100644 --- a/AUTHORS +++ b/AUTHORS @@ -155,7 +155,7 @@ Other contributors, listed alphabetically, are: * Andre Roberge -- Tango style * Konrad Rudolph -- LaTeX formatter enhancements * Mario Ruggier -- Evoque lexers -* Miikka Salminen -- Lovelace style, lexer enhancements +* Miikka Salminen -- Lovelace style, Hexdump lexer, lexer enhancements * Stou Sandalski -- NumPy, FORTRAN, tcsh and XSLT lexers * Matteo Sasso -- Common Lisp lexer * Joe Schafer -- Ada lexer diff --git a/CHANGES b/CHANGES index e1ebba70..6bb6b424 100644 --- a/CHANGES +++ b/CHANGES @@ -36,6 +36,7 @@ Version 2.1 * CSound (PR#494) * Ezhil (PR#443) * Thrift (PR#469) + * Hexdump (PR#508) - Added styles: -- cgit v1.2.1 -- cgit v1.2.1 From 
d8e98405b98721399c457e820e2220076f26178c Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 31 Oct 2015 16:36:40 -0700 Subject: Update changelog --- AUTHORS | 1 + CHANGES | 1 + 2 files changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index e289d1b4..8e825848 100644 --- a/AUTHORS +++ b/AUTHORS @@ -50,6 +50,7 @@ Other contributors, listed alphabetically, are: * Nick Efford -- Python 3 lexer * Sven Efftinge -- Xtend lexer * Artem Egorkine -- terminal256 formatter +* Matthew Fernandez -- CAmkES lexer * Michael Ficarra -- CPSA lexer * James H. Fisher -- PostScript lexer * William S. Fulton -- SWIG lexer diff --git a/CHANGES b/CHANGES index ba3557fd..7d648bce 100644 --- a/CHANGES +++ b/CHANGES @@ -38,6 +38,7 @@ Version 2.1 * Thrift (PR#469) * QVT Operational (PR#204) * Hexdump (PR#508) + * CAmkES Configuration (PR#462) - Added styles: -- cgit v1.2.1 From 07806868236f553d98e7428f9dad55b93d4d3e31 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 31 Oct 2015 17:28:02 -0700 Subject: Add tests for function keywords --- AUTHORS | 1 + tests/examplefiles/noexcept.cpp | 8 ++++++++ 2 files changed, 9 insertions(+) create mode 100644 tests/examplefiles/noexcept.cpp diff --git a/AUTHORS b/AUTHORS index 4fa7e0da..4ce9dde7 100644 --- a/AUTHORS +++ b/AUTHORS @@ -94,6 +94,7 @@ Other contributors, listed alphabetically, are: * Olov Lassus -- Dart lexer * Sylvestre Ledru -- Scilab lexer * Mark Lee -- Vala lexer +* Valentin Lorentz -- C++ lexer improvements * Ben Mabey -- Gherkin lexer * Angus MacArthur -- QML lexer * Louis Marchand -- Eiffel lexer diff --git a/tests/examplefiles/noexcept.cpp b/tests/examplefiles/noexcept.cpp new file mode 100644 index 00000000..f83e50db --- /dev/null +++ b/tests/examplefiles/noexcept.cpp @@ -0,0 +1,8 @@ +void* operator new (std::size_t size); +void* operator new (std::size_t size, const std::nothrow_t& nothrow_value) noexcept; +void* operator new (std::size_t size, const std::nothrow_t& nothrow_value)noexcept; +void* operator new (std::size_t 
size, const std::nothrow_t& nothrow_value); +void* operator new (std::size_t size); +void* operator new (std::size_t size) noexcept; +void* operator new (std::size_t size)noexcept; + -- cgit v1.2.1 From c9b2e19f2e787e88ed95de941dc6c60dc6a8a974 Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Sat, 31 Oct 2015 17:49:48 -0700 Subject: Unbreak NesC after PR#487 merge. --- pygments/lexers/c_cpp.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/c_cpp.py b/pygments/lexers/c_cpp.py index 4489298a..99f73a57 100644 --- a/pygments/lexers/c_cpp.py +++ b/pygments/lexers/c_cpp.py @@ -88,7 +88,7 @@ class CFamilyLexer(RegexLexer): (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments r'([a-zA-Z_]\w*)' # method name r'(\s*\([^;]*?\))' # signature - r'([^{]*)?(\{)', + r'([^;{]*)?(\{)', bygroups(using(this), Name.Function, using(this), using(this), Punctuation), 'function'), -- cgit v1.2.1 From d7ebb3aac453a57ee12ce0f7020c5ef645c87362 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Sun, 1 Nov 2015 11:28:05 +0900 Subject: Fix regarding to Tim's review. 1. move to my contribution notice to AUTHORS 2. some styles. 3. remove unnecessary nested comment support (BC doesn't support it.) 4. use `word` for Keyword Thanks! --- AUTHORS | 2 +- pygments/lexers/algebra.py | 17 ++++++++--------- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/AUTHORS b/AUTHORS index d8d58e8a..8a5a9ed7 100644 --- a/AUTHORS +++ b/AUTHORS @@ -88,7 +88,7 @@ Other contributors, listed alphabetically, are: * Tim Howard -- BlitzMax lexer * Dustin Howett -- Logos lexer * Ivan Inozemtsev -- Fantom lexer -* Hiroaki Itoh -- Shell console rewrite +* Hiroaki Itoh -- Shell console rewrite, BC Lexer * Brian R. 
Jackson -- Tea lexer * Christian Jann -- ShellSession lexer * Dennis Kaarsemaker -- sources.list lexer diff --git a/pygments/lexers/algebra.py b/pygments/lexers/algebra.py index a0d6b4ce..88683e82 100644 --- a/pygments/lexers/algebra.py +++ b/pygments/lexers/algebra.py @@ -65,7 +65,7 @@ class GAPLexer(RegexLexer): (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number), (r'\.[0-9]+(?:e[0-9]+)?', Number), (r'.', Text) - ] + ], } @@ -183,14 +183,13 @@ class MuPADLexer(RegexLexer): (r'/\*', Comment.Multiline, '#push'), (r'\*/', Comment.Multiline, '#pop'), (r'[*/]', Comment.Multiline) - ] + ], } class BCLexer(RegexLexer): """ A `BC `_ lexer. - Contributed by Hiroaki Itoh . .. versionadded:: 2.1 """ @@ -203,19 +202,19 @@ class BCLexer(RegexLexer): (r'/\*', Comment.Multiline, 'comment'), (r'"(?:[^"\\]|\\.)*"', String), (r'[{}();,]', Punctuation), - (r'(if|else|while|for|break|continue|halt|' - r'return|define|auto|print|' - r'read|length|scale|sqrt|limits|quit|warranty)\b', Keyword), + (words(('if', 'else', 'while', 'for', 'break', 'continue', + 'halt', 'return', 'define', 'auto', 'print', 'read', + 'length', 'scale', 'sqrt', 'limits', 'quit', + 'warranty'), suffix=r'\b'), Keyword), (r'\+\+|--|\|\||&&|' r'([-<>+*%\^/!=])=?', Operator), (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number), (r'\.[0-9]+(?:e[0-9]+)?', Number), (r'.', Text) - ], + ], 'comment': [ (r'[^*/]', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), (r'\*/', Comment.Multiline, '#pop'), (r'[*/]', Comment.Multiline) - ] + ], } -- cgit v1.2.1 From 336611769846ea528fb791b81246f35c7a44a844 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Mon, 2 Nov 2015 07:28:57 +0900 Subject: update AUTHORS --- AUTHORS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 8a5a9ed7..060423c1 100644 --- a/AUTHORS +++ b/AUTHORS @@ -88,7 +88,7 @@ Other contributors, listed alphabetically, are: * Tim Howard -- BlitzMax lexer * Dustin Howett -- Logos lexer * Ivan Inozemtsev -- Fantom lexer -* Hiroaki 
Itoh -- Shell console rewrite, BC Lexer +* Hiroaki Itoh -- Shell console rewrite, Lexers for PowerShell session, MSDOS session, BC * Brian R. Jackson -- Tea lexer * Christian Jann -- ShellSession lexer * Dennis Kaarsemaker -- sources.list lexer -- cgit v1.2.1 From 78a5a260042ce6911d2a3bc668f6be159d7a612b Mon Sep 17 00:00:00 2001 From: Tim Hatch Date: Mon, 2 Nov 2015 13:18:57 -0800 Subject: Unbreak C/C++ tests (again). I don't know how this got through my running the local tests earlier, unless there's a Python-version-specific issue here. --- pygments/lexers/c_cpp.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/c_cpp.py b/pygments/lexers/c_cpp.py index 99f73a57..5a7137ea 100644 --- a/pygments/lexers/c_cpp.py +++ b/pygments/lexers/c_cpp.py @@ -88,7 +88,7 @@ class CFamilyLexer(RegexLexer): (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments r'([a-zA-Z_]\w*)' # method name r'(\s*\([^;]*?\))' # signature - r'([^;{]*)?(\{)', + r'([^;{]*)(\{)', bygroups(using(this), Name.Function, using(this), using(this), Punctuation), 'function'), @@ -96,7 +96,7 @@ class CFamilyLexer(RegexLexer): (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments r'([a-zA-Z_]\w*)' # method name r'(\s*\([^;]*?\))' # signature - r'([^;]*)?(;)', + r'([^;]*)(;)', bygroups(using(this), Name.Function, using(this), using(this), Punctuation)), default('statement'), -- cgit v1.2.1 From 15fdacf09682cae8d24f3e47499bdb3715e300b3 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Tue, 3 Nov 2015 17:31:21 +0900 Subject: Update example. --- tests/examplefiles/example.bc | 55 ++++++++++++------------------------------- 1 file changed, 15 insertions(+), 40 deletions(-) diff --git a/tests/examplefiles/example.bc b/tests/examplefiles/example.bc index fe248e58..283b243c 100644 --- a/tests/examplefiles/example.bc +++ b/tests/examplefiles/example.bc @@ -1,45 +1,20 @@ -scale = 20 - -/* Uses the fact that e^x = (e^(x/2))^2 - When x is small enough, we use the series: - e^x = 1 + x + x^2/2! 
+ x^3/3! + ... -*/ - -define e(x) { - auto a, d, e, f, i, m, v, z - - /* Check the sign of x. */ - if (x<0) { - m = 1 - x = -x - } - - /* Precondition x. */ - z = scale; - scale = 4 + z + .44*x; - while (x > 1) { - f += 1; - x /= 2; - } - - /* Initialize the variables. */ - v = 1+x - a = x - d = 1 - - for (i=2; 1; i++) { - e = (a *= x) / (d *= i) - if (e == 0) { - if (f>0) while (f--) v = v*v; - scale = z - if (m) return (1/v); - return (v/1); +/* + * Calculate the Greatest Common Divisor of a and b. + */ +define gcd(a, b) { + auto tmp; + + /* + * Euclidean algorithm + */ + while (b != 0) { + tmp = a % b; + a = b; + b = tmp; } - v += e - } + return a; } - -"e(2)=" ; e(2) +"gcd(225, 150) = " ; gcd(225, 150) /* assign operators */ a = 10 -- cgit v1.2.1 From 21e7602921ac2214c5feac8bd5de07dda5074456 Mon Sep 17 00:00:00 2001 From: Nathan Whetsell Date: Tue, 3 Nov 2015 20:50:12 -0500 Subject: Add Csound score file --- tests/examplefiles/test.sco | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 tests/examplefiles/test.sco diff --git a/tests/examplefiles/test.sco b/tests/examplefiles/test.sco new file mode 100644 index 00000000..a0b39251 --- /dev/null +++ b/tests/examplefiles/test.sco @@ -0,0 +1,10 @@ +f 1 0 16384 10 1 +i "N_a_M_e_" 0 2 +i "TestOscillator" 2 2 +i "TestBitwiseNOT" 0 1 +i "TestBitwiseXOR" 0 1 +i "TestGoto" 0 1 +i "TestMacroPeriodSuffix" 4 1 +i "TestAt" 0 1 +i "MacroAbuse" 0 1 +e -- cgit v1.2.1 From 632293a82e0909cfaa9b5d96659439abcd62c049 Mon Sep 17 00:00:00 2001 From: Nathan Whetsell Date: Tue, 3 Nov 2015 20:50:30 -0500 Subject: Add aliases for Csound lexers --- pygments/lexers/_mapping.py | 6 +++--- pygments/lexers/csound.py | 4 +++- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index af7eec36..b73e288f 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -91,9 +91,9 @@ LEXERS = { 'CrmshLexer': ('pygments.lexers.dsls', 'Crmsh', ('crmsh', 
'pcmk'), ('*.crmsh', '*.pcmk'), ()), 'CrocLexer': ('pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)), 'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)), - 'CsoundDocumentLexer': ('pygments.lexers.csound', 'Csound Document', (), ('*.csd',), ()), - 'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', (), ('*.orc',), ()), - 'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', (), ('*.sco',), ()), + 'CsoundDocumentLexer': ('pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()), + 'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc',), ()), + 'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()), 'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')), 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)), 'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)), diff --git a/pygments/lexers/csound.py b/pygments/lexers/csound.py index b9613bdf..dd7b62c5 100644 --- a/pygments/lexers/csound.py +++ b/pygments/lexers/csound.py @@ -98,6 +98,7 @@ class CsoundScoreLexer(CsoundLexer): """ name = 'Csound Score' + aliases = ['csound-score', 'csound-sco'] filenames = ['*.sco'] tokens = { @@ -140,6 +141,7 @@ class CsoundOrchestraLexer(CsoundLexer): """ name = 'Csound Orchestra' + aliases = ['csound', 'csound-orc'] filenames = ['*.orc'] user_defined_opcodes = set() @@ -317,7 +319,7 @@ class CsoundDocumentLexer(XmlLexer): """ name = 'Csound Document' - aliases = [] + aliases = ['csound-document', 'csound-csd'] filenames = ['*.csd'] mimetypes = [] -- cgit v1.2.1 From 39fb35c76b8af70b5e7775a1b5933b0d4e98176e Mon Sep 17 00:00:00 
2001 From: Nathan Whetsell Date: Tue, 3 Nov 2015 21:37:39 -0500 Subject: Remove deepcopy of XmlLexer tokens from CsoundDocumentLexer to fix LaTeX output --- pygments/lexers/csound.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/csound.py b/pygments/lexers/csound.py index dd7b62c5..f49cb409 100644 --- a/pygments/lexers/csound.py +++ b/pygments/lexers/csound.py @@ -9,7 +9,7 @@ :license: BSD, see LICENSE for details. """ -import copy, re +import re from pygments.lexer import RegexLexer, bygroups, default, include, using, words from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, \ @@ -323,7 +323,7 @@ class CsoundDocumentLexer(XmlLexer): filenames = ['*.csd'] mimetypes = [] - tokens = copy.deepcopy(XmlLexer.tokens) + tokens = XmlLexer.tokens for i, item in enumerate(tokens['root']): if len(item) > 2 and item[2] == 'tag': (tokens['root']).insert(i, (r'(<)(\s*)(CsInstruments)(\s*)', -- cgit v1.2.1 From 56e90ee69d37431d9b74edb4e7f6c6b751c61ffd Mon Sep 17 00:00:00 2001 From: hhsprings Date: Wed, 4 Nov 2015 18:16:40 +0900 Subject: Add the lexer for traditional termcap/terminfo source. This is very simple and minimal, but it gives us much enough readability. 
--- pygments/lexers/_mapping.py | 2 + pygments/lexers/configs.py | 96 ++- tests/examplefiles/termcap | 1340 +++++++++++++++++++++++++++++++++++++++ tests/examplefiles/terminfo | 1445 +++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 2881 insertions(+), 2 deletions(-) create mode 100644 tests/examplefiles/termcap create mode 100644 tests/examplefiles/terminfo diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index af7eec36..a9979d81 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -374,6 +374,8 @@ LEXERS = { 'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)), 'TcshSessionLexer': ('pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()), 'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)), + 'TermcapLexer': ('pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()), + 'TerminfoLexer': ('pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()), 'TerraformLexer': ('pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')), 'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), 'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py index f5a67bc4..8a83b433 100644 --- a/pygments/lexers/configs.py +++ b/pygments/lexers/configs.py @@ -13,13 +13,13 @@ import re from pygments.lexer import RegexLexer, default, words, bygroups, include, using from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Whitespace + Number, Punctuation, Whitespace, Literal from pygments.lexers.shell import BashLexer __all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer', 
'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer', 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer', - 'TerraformLexer'] + 'TerraformLexer', 'TermcapLexer', 'TerminfoLexer'] class IniLexer(RegexLexer): @@ -617,3 +617,95 @@ class TerraformLexer(RegexLexer): (r'\\\n', Text), ], } + + +class TermcapLexer(RegexLexer): + """ + Lexer for termcap database source. + + This is very simple and minimal. + + .. versionadded:: 2.1 + """ + name = 'Termcap' + aliases = ['termcap',] + + filenames = ['termcap', 'termcap.src',] + mimetypes = [] + + # NOTE: + # * multiline with trailing backslash + # * separator is ':' + # * to embed colon as data, we must use \072 + # * space after separator is not allowed (mayve) + tokens = { + 'root': [ + (r'^#.*$', Comment), + (r'^[^\s#:\|]+', Name.Tag, 'names'), + ], + 'names': [ + (r'\n', Text, '#pop'), + (r':', Punctuation, 'defs'), + (r'\|', Punctuation), + (r'[^:\|]+', Name.Attribute), + ], + 'defs': [ + (r'\\\n[ \t]*', Text), + (r'\n[ \t]*', Text, '#pop:2'), + (r'(#)([0-9]+)', bygroups(Operator, Number)), + (r'=', Operator, 'data'), + (r':', Punctuation), + (r'[^\s:=#]+', Name.Class), + ], + 'data': [ + (r'\\072', Literal), + (r':', Punctuation, '#pop'), + (r'.', Literal), + ], + } + + +class TerminfoLexer(RegexLexer): + """ + Lexer for terminfo database source. + + This is very simple and minimal. + + .. 
versionadded:: 2.1 + """ + name = 'Terminfo' + aliases = ['terminfo',] + + filenames = ['terminfo', 'terminfo.src',] + mimetypes = [] + + # NOTE: + # * multiline with leading whitespace + # * separator is ',' + # * to embed comma as data, we can use \, + # * space after separator is allowed + tokens = { + 'root': [ + (r'^#.*$', Comment), + (r'^[^\s#,\|]+', Name.Tag, 'names'), + ], + 'names': [ + (r'\n', Text, '#pop'), + (r'(,)([ \t]*)', bygroups(Punctuation, Text), 'defs'), + (r'\|', Punctuation), + (r'[^,\|]+', Name.Attribute), + ], + 'defs': [ + (r'\n[ \t]+', Text), + (r'\n', Text, '#pop:2'), + (r'(#)([0-9]+)', bygroups(Operator, Number)), + (r'=', Operator, 'data'), + (r'(,)([ \t]*)', bygroups(Punctuation, Text)), + (r'[^\s,=#]+', Name.Class), + ], + 'data': [ + (r'\\[,\\]', Literal), + (r'(,)([ \t]*)', bygroups(Punctuation, Text), '#pop'), + (r'.', Literal), + ], + } diff --git a/tests/examplefiles/termcap b/tests/examplefiles/termcap new file mode 100644 index 00000000..e20adaba --- /dev/null +++ b/tests/examplefiles/termcap @@ -0,0 +1,1340 @@ +######## This example from excerpt of : +# +# Version 11.0.1 +# $Date: 2000/03/02 15:51:11 $ +# termcap syntax +# + +######## ANSI, UNIX CONSOLE, AND SPECIAL TYPES +# +# This section describes terminal classes and brands that are still +# quite common. +# + +#### Specials +# +# Special "terminals". These are used to label tty lines when you don't +# know what kind of terminal is on it. The characteristics of an unknown +# terminal are the lowest common denominator - they look about like a ti 700. 
+# + +dumb|80-column dumb tty:\ + :am:\ + :co#80:\ + :bl=^G:cr=^M:do=^J:sf=^J: +unknown|unknown terminal type:\ + :gn:tc=dumb: +lpr|printer|line printer:\ + :bs:hc:os:\ + :co#132:li#66:\ + :bl=^G:cr=^M:do=^J:ff=^L:le=^H:sf=^J: +glasstty|classic glass tty interpreting ASCII control characters:\ + :am:bs:\ + :co#80:\ + :bl=^G:cl=^L:cr=^M:do=^J:kd=^J:kl=^H:le=^H:nw=^M^J:ta=^I: +vanilla:\ + :bs:\ + :bl=^G:cr=^M:do=^J:sf=^J: + +#### ANSI.SYS/ISO 6429/ECMA-48 Capabilities +# +# See the end-of-file comment for more on these. +# + +# ANSI capabilities are broken up into pieces, so that a terminal +# implementing some ANSI subset can use many of them. +ansi+local1:\ + :do=\E[B:le=\E[D:nd=\E[C:up=\E[A: +ansi+local:\ + :DO=\E[%dB:LE=\E[%dD:RI=\E[%dC:UP=\E[%dA:tc=ansi+local1: +ansi+tabs:\ + :bt=\E[Z:ct=\E[2g:st=\EH:ta=^I: +ansi+inittabs:\ + :it#8:tc=ansi+tabs: +ansi+erase:\ + :cd=\E[J:ce=\E[K:cl=\E[H\E[J: +ansi+rca:\ + :ch=\E[%+^AG:cv=\E[%+^Ad: +ansi+cup:\ + :cm=\E[%i%d;%dH:ho=\E[H: +ansi+rep:\ + :..rp=%p1%c\E[%p2%{1}%-%db: +ansi+idl1:\ + :al=\E[L:dl=\E[M: +ansi+idl:\ + :AL=\E[%dL:DL=\E[%dM:tc=ansi+idl1: +ansi+idc:\ + :IC=\E[%d@:dc=\E[P:ei=\E6:ic=\E[@:im=\E6: +ansi+arrows:\ + :kb=^H:kd=\E[B:kh=\E[H:kl=\E[D:kr=\E[C:ku=\E[A: +ansi+sgr|ansi graphic renditions:\ + :mb=\E[5m:me=\E[0m:mk=\E[8m:mr=\E[7m: +ansi+sgrso|ansi standout only:\ + :se=\E[m:so=\E[7m: +ansi+sgrul|ansi underline only:\ + :ue=\E[m:us=\E[4m: +ansi+sgrbold|ansi graphic renditions; assuming terminal has bold; not dim:\ + :md=\E[1m:\ + :..sa=\E[%?%p1%t7;%;%?%p2%t4;%;%?%p3%t7;%;%?%p4%t5;%;%?%p6%t1;%;m:tc=ansi+sgr:tc=ansi+sgrso:tc=ansi+sgrul: +ansi+sgrdim|ansi graphic renditions; assuming terminal has dim; not bold:\ + :mh=\E[2m:\ + :..sa=\E[%?%p1%t7;%;%?%p2%t4;%;%?%p3%t7;%;%?%p4%t5;%;%?%p5%t2;%;m:tc=ansi+sgr:tc=ansi+sgrso:tc=ansi+sgrul: +ansi+pp|ansi printer port:\ + :pf=\E[4i:po=\E[5i:ps=\E[0i: +ansi+csr|ansi scroll-region plus cursor save & restore:\ + :cs=\E[%i%d;%dr:rc=\E8:sc=\E7: + +# The IBM PC alternate 
character set. Plug this into any Intel console entry. +# We use \E[11m for rmacs rather than \E[12m so the string can use the +# ROM graphics for control characters such as the diamond, up- and down-arrow. +# This works with the System V, Linux, and BSDI consoles. It's a safe bet this +# will work with any Intel console, they all seem to have inherited \E[11m +# from the ANSI.SYS de-facto standard. +klone+acs|alternate character set for ansi.sys displays:\ + :ac=+\020\054\021-\030.^Y0\333`\004a\261f\370g\361h\260j\331k\277l\332m\300n\305o~p\304q\304r\304s_t\303u\264v\301w\302x\263y\363z\362{\343|\330}\234~\376:\ + :ae=\E[10m:as=\E[11m: + +# Highlight controls corresponding to the ANSI.SYS standard. Most +# console drivers for Intel boxes obey these. Makes the same assumption +# about \E[11m as klone+acs. True ANSI/ECMA-48 would have :se=\E[27m:, +# :ue=\E[24m:, but this isn't a documented feature of ANSI.SYS. +klone+sgr|attribute control for ansi.sys displays:\ + :S2=\E[11m:S3=\E[10m:mb=\E[5m:md=\E[1m:me=\E[0;10m:\ + :mk=\E[8m:mr=\E[7m:\ + :..sa=\E[0;10%?%p1%t;7%;%?%p2%t;4%;%?%p3%t;7%;%?%p4%t;5%;%?%p6%t;1%;%?%p7%t;8%;%?%p9%t;11%;m:\ + :se=\E[m:so=\E[7m:ue=\E[m:us=\E[4m:\ + :tc=klone+acs: + +# Highlight controls corresponding to the ANSI.SYS standard. *All* +# console drivers for Intel boxes obey these. Does not assume \E[11m will +# work; uses \E[12m instead, which is pretty bulletproof but loses you the ACS +# diamond and arrow characters under curses. +klone+sgr-dumb|attribute control for ansi.sys displays (no ESC [ 11 m):\ + :as=\E[12m:mb=\E[5m:md=\E[1m:me=\E[0;10m:mk=\E[8m:\ + :mr=\E[7m:\ + :..sa=\E[0;10%?%p1%t;7%;%?%p2%t;4%;%?%p3%t;7%;%?%p4%t;5%;%?%p6%t;1%;%?%p7%t;8%;%?%p9%t;12%;m:\ + :se=\E[m:so=\E[7m:ue=\E[m:us=\E[4m:\ + :tc=klone+acs: + +# KOI8-R (RFC1489) acs (alternate character set) +# From: Qing Long , 24 Feb 1996. 
+klone+koi8acs|alternate character set for ansi.sys displays with KOI8 charset:\ + :ac=+\020\054\021-\036.^_0\215`\004a\237f\234g\232h\222i\220j\205k\203l\202m\204n\212o\213p\216q\0r\217s\214t\206u\207v\210w\211x\201y\230z\231{\267|\274}L~\225:\ + :ae=\E[10m:as=\E[11m: + +# ANSI.SYS color control. The setab/setaf caps depend on the coincidence +# between SVr4/XPG4's color numbers and ANSI.SYS attributes. Here are longer +# but equivalent strings that don't rely on that coincidence: +# setb=\E[4%?%p1%{1}%=%t4%e%p1%{3}%=%t6%e%p1%{4}%=%t1%e%p1%{6}%=%t3%e%p1%d%;m, +# setf=\E[3%?%p1%{1}%=%t4%e%p1%{3}%=%t6%e%p1%{4}%=%t1%e%p1%{6}%=%t3%e%p1%d%;m, +# The DOS 5 manual asserts that these sequences meet the ISO 6429 standard. +# They match a subset of ECMA-48. +klone+color|color control for ansi.sys and ISO6429-compatible displays:\ + :Co#8:NC#3:pa#64:\ + :AB=\E[4%p1%dm:AF=\E[3%p1%dm:op=\E[37;40m: + +# This is better than klone+color, it doesn't assume white-on-black as the +# default color pair, but many `ANSI' terminals don't grok the cap. +ecma+color|color control for ECMA-48-compatible terminals:\ + :Co#8:NC#3:pa#64:\ + :AB=\E[4%p1%dm:AF=\E[3%p1%dm:op=\E[39;49m: + +# Attribute control for ECMA-48-compatible terminals +ecma+sgr|attribute capabilities for true ECMA-48 terminals:\ + :se=\E[27m:ue=\E[24m:\ + :tc=klone+sgr: + +# For comparison, here are all the capabilities implied by the Intel +# Binary Compatibility Standard (level 2) that fit within terminfo. +# For more detail on this rather pathetic standard, see the comments +# near the end of this file. 
+ibcs2|Intel Binary Compatibility Standard prescriptions:\ + :AL=\E[%dL:DC=\E[%dP:DO=\E[%dB:IC=\E[%d@:LE=\E[%dD:\ + :RA=\E[?7l:RI=\E[%dC:S1=\E=%p1%dg:SA=\E[?7h:SF=\E[%dS:\ + :SR=\E[%dT:UP=\E[%dA:bt=\E[Z:ch=\E[%i%dG:cl=\Ec:\ + :cm=\E[%i%d;%dH:ct=\E[g:cv=\E[%i%dd:ec=\E[%dX:ei=:im=:\ + :rc=\E7:sc=\E7:st=\EH: + +#### ANSI/ECMA-48 terminals and terminal emulators +# +# See near the end of this file for details on ANSI conformance. +# Don't mess with these entries! Lots of other entries depend on them! +# +# This section lists entries in a least-capable to most-capable order. +# if you're in doubt about what `ANSI' matches yours, try them in that +# order and back off from the first that breaks. + +# ansi-mr is for ANSI terminals with ONLY relative cursor addressing +# and more than one page of memory. It uses local motions instead of +# direct cursor addressing, and makes almost no assumptions. It does +# assume auto margins, no padding and/or xon/xoff, and a 24x80 screen. +ansi-mr|mem rel cup ansi:\ + :am:xo:\ + :co#80:li#24:tc=vanilla:tc=ansi+erase:tc=ansi+local1: + +# ansi-mini is a bare minimum ANSI terminal. This should work on anything, but +# beware of screen size problems and memory relative cursor addressing. +ansi-mini|minimum ansi standard terminal:\ + :am:xo:\ + :co#80:li#24:tc=vanilla:tc=ansi+cup:tc=ansi+erase: + +# ansi-mtabs adds relative addressing and minimal tab support +ansi-mtabs|any ansi terminal with pessimistic assumptions:\ + :it#8:\ + :ta=^I:tc=ansi+local1:tc=ansi-mini: + +# ANSI X3.64 from emory!mlhhh (Hugh Hansard) via BRL +# +# The following is an entry for the full ANSI 3.64 (1977). It lacks +# padding, but most terminals using the standard are "fast" enough +# not to require any -- even at 9600 bps. If you encounter problems, +# try including the padding specifications. 
+# +# Note: the :as: and :ae: specifications are not implemented here, for +# the available termcap documentation does not make clear WHICH alternate +# character set to specify. ANSI 3.64 seems to make allowances for several. +# Please make the appropriate adjustments to fit your needs -- that is +# if you will be using alternate character sets. +# +# There are very few terminals running the full ANSI 3.64 standard, +# so I could only test this entry on one verified terminal (Visual 102). +# I would appreciate the results on other terminals sent to me. +# +# Please report comments, changes, and problems to: +# +# U.S. MAIL: Hugh Hansard +# Box: 22830 +# Emory University +# Atlanta, GA. 30322. +# +# USENET {akgua,msdc,sb1,sb6,gatech}!emory!mlhhh. +# +# (Added vt100 :rc:,:sc: to quiet a tic warning --esr) +ansi77|ansi 3.64 standard 1977 version:\ + :am:bs:mi:\ + :co#80:it#8:li#24:\ + :al=5*\E[L:bl=^G:cd=\E[J:ce=\E[K:cl=\E[;H\E[2J:\ + :cm=\E[%i%d;%dH:cr=^M:cs=\E[%i%d;%dr:dc=\E[P:dl=5*\E[M:\ + :do=\E[B:ei=\E[4l:ho=\E[H:im=\E[4h:k1=\EOP:k2=\EOR:k4=\EOS:\ + :kb=^H:kd=\E[B:kh=\E[H:kl=\E[D:kr=\E[C:ku=\E[A:le=^H:\ + :nd=\E[C:nw=^M\ED:rc=\E8:sc=\E7:se=\E[m:sf=\ED:so=\E[7m:\ + :sr=\EM:ta=^I:ue=\E[m:up=\E[A:us=\E[4m: + +# Procomm and some other ANSI emulations don't recognize all of the ANSI- +# standard capabilities. This entry deletes :UP:, :RI:, :DO:, :LE:, and +# / capabilities, forcing curses to use repetitions of :up:, +# :nd:, :do: and :le:. Also deleted :IC: and :ic:, as QModem up to +# 5.03 doesn't recognize these. Finally, we delete :rp: and :sr:, which seem +# to confuse many emulators. On the other hand, we can count on these programs +# doing :ae:/:as:/:sa:. Older versions of this entry featured +# , but now seems to be more common under +# ANSI.SYS influence. +# From: Eric S. 
Raymond Oct 30 1995 +pcansi-m|pcansi-mono|ibm-pc terminal programs claiming to be ansi (mono mode):\ + :am:bs:mi:ms:\ + :co#80:it#8:li#24:\ + :al=\E[L:bl=^G:bt=\E[Z:cd=\E[J:ce=\E[K:cl=\E[H\E[J:\ + :cm=\E[%i%d;%dH:cr=^M:ct=\E[2g:dc=\E[P:dl=\E[M:do=\E[B:\ + :ho=\E[H:kb=^H:kd=\E[B:kh=\E[H:kl=\E[D:kr=\E[C:ku=\E[A:\ + :le=\E[D:nd=\E[C:sf=^J:st=\EH:ta=^I:up=\E[A:\ + :tc=klone+sgr-dumb: +pcansi-25-m|pcansi25m|ibm-pc terminal programs with 25 lines (mono mode):\ + :li#25:tc=pcansi-m: +pcansi-33-m|pcansi33m|ibm-pc terminal programs with 33 lines (mono mode):\ + :li#33:tc=pcansi-m: +pcansi-43-m|ansi43m|ibm-pc terminal programs with 43 lines (mono mode):\ + :li#43:tc=pcansi-m: +# The color versions. All PC emulators do color... +pcansi|ibm-pc terminal programs claiming to be ansi:\ + :tc=klone+color:tc=pcansi-m: +pcansi-25|pcansi25|ibm-pc terminal programs with 25 lines:\ + :li#25:tc=pcansi: +pcansi-33|pcansi33|ibm-pc terminal programs with 33 lines:\ + :li#33:tc=pcansi: +pcansi-43|pcansi43|ibm-pc terminal programs with 43 lines:\ + :li#43:tc=pcansi: + +# ansi-m -- full ANSI X3.64 with ANSI.SYS-compatible attributes, no color. +# If you want pound signs rather than dollars, replace `B' with `A' +# in the , , , and capabilities. +# From: Eric S. Raymond Nov 6 1995 +ansi-m|ansi-mono|ANSI X3.64-1979 terminal with ANSI.SYS compatible attributes:\ + :5i:\ + :AL=\E[%dL:DC=\E[%dP:DL=\E[%dM:DO=\E[%dB:IC=\E[%d@:\ + :LE=\E[%dD:RI=\E[%dC:SF=\E[%dS:SR=\E[%dT:UP=\E[%dA:\ + :cb=\E[1K:ch=\E[%i%dG:ct=\E[2g:cv=\E[%i%dd:ec=\E[%dX:ei=:\ + :im=:kB=\E[Z:kI=\E[L:kb=^H:kd=\E[B:kl=\E[D:kr=\E[C:ku=\E[A:\ + :nw=\r\E[S:pf=\E[4i:po=\E[5i:..rp=%p1%c\E[%p2%{1}%-%db:\ + :s0=\E(B:s1=\E)B:s2=\E*B:s3=\E+B:ta=\E[I:\ + :tc=pcansi-m: + +# ansi -- this terminfo expresses the largest subset of X3.64 that will fit in +# standard terminfo. Assumes ANSI.SYS-compatible attributes and color. +# From: Eric S. 
Raymond Nov 6 1995 +ansi|ansi/pc-term compatible with color:\ + :u6=\E[%i%d;%dR:u7=\E[6n:..u8=\E[?%[;0123456789]c:\ + :u9=\E[c:\ + :tc=ecma+color:tc=klone+sgr:tc=ansi-m: + +# ansi-generic is a vanilla ANSI terminal. This is assumed to implement +# all the normal ANSI stuff with no extensions. It assumes +# insert/delete line/char is there, so it won't work with +# vt100 clones. It assumes video attributes for bold, blink, +# underline, and reverse, which won't matter much if the terminal +# can't do some of those. Padding is assumed to be zero, which +# shouldn't hurt since xon/xoff is assumed. +ansi-generic|generic ansi standard terminal:\ + :am:xo:\ + :co#80:li#24:tc=vanilla:tc=ansi+csr:tc=ansi+cup:\ + :tc=ansi+rca:tc=ansi+erase:tc=ansi+tabs:tc=ansi+local:\ + :tc=ansi+idc:tc=ansi+idl:tc=ansi+rep:tc=ansi+sgrbold:\ + :tc=ansi+arrows: + +#### Linux consoles +# + +# This entry is good for the 1.2.13 or later version of the Linux console. +# +# *************************************************************************** +# * * +# * WARNING: * +# * Linuxes come with a default keyboard mapping kcbt=^I. This entry, in * +# * response to user requests, assumes kcbt=\E[Z, the ANSI/ECMA reverse-tab * +# * character. Here are the keymap replacement lines that will set this up: * +# * * +# keycode 15 = Tab Tab +# alt keycode 15 = Meta_Tab +# shift keycode 15 = F26 +# string F26 ="\033[Z" +# * * +# * This has to use a key slot which is unfortunate (any unused one will * +# * do, F26 is the higher-numbered one). The change ought to be built * +# * into the kernel tables. * +# * * +# *************************************************************************** +# +# The 1.3.x kernels add color-change capabilities; if yours doesn't have this +# and it matters, turn off . The %02x escape used to implement this is +# not back-portable to SV curses and not supported in ncurses versions before +# 1.9.9. 
All linux kernels since 1.2.13 (at least) set the screen size +# themselves; this entry assumes that capability. +# +# This entry is good for the 1.2.13 or later version of the Linux console. +# +# *************************************************************************** +# * * +# * WARNING: * +# * Linuxes come with a default keyboard mapping kcbt=^I. This entry, in * +# * response to user requests, assumes kcbt=\E[Z, the ANSI/ECMA reverse-tab * +# * character. Here are the keymap replacement lines that will set this up: * +# * * +# keycode 15 = Tab Tab +# alt keycode 15 = Meta_Tab +# shift keycode 15 = F26 +# string F26 ="\033[Z" +# * * +# * This has to use a key slot which is unfortunate (any unused one will * +# * do, F26 is the higher-numbered one). The change ought to be built * +# * into the kernel tables. * +# * * +# *************************************************************************** +# +# The 1.3.x kernels add color-change capabilities; if yours doesn't have this +# and it matters, turn off . The %02x escape used to implement this is +# not back-portable to SV curses and not supported in ncurses versions before +# 1.9.9. All linux kernels since 1.2.13 (at least) set the screen size +# themselves; this entry assumes that capability. +# +# The 2.2.x kernels add a private mode that sets the cursor type; use that to +# get a block cursor for cvvis. +# reported by Frank Heckenbach . 
+# (untranslatable capabilities removed to fit entry within 1023 bytes) +# (sgr removed to fit entry within 1023 bytes) +# (terminfo-only capabilities suppressed to fit entry within 1023 bytes) +linux|linux console:\ + :am:eo:mi:ms:xn:xo:\ + :it#8:\ + :AL=\E[%dL:DC=\E[%dP:DL=\E[%dM:IC=\E[%d@:K2=\E[G:al=\E[L:\ + :bl=^G:cd=\E[J:ce=\E[K:cl=\E[H\E[J:cm=\E[%i%d;%dH:cr=^M:\ + :cs=\E[%i%d;%dr:ct=\E[3g:dc=\E[P:dl=\E[M:do=^J:ec=\E[%dX:\ + :ei=\E[4l:ho=\E[H:ic=\E[@:im=\E[4h:k1=\E[[A:k2=\E[[B:\ + :k3=\E[[C:k4=\E[[D:k5=\E[[E:k6=\E[17~:k7=\E[18~:k8=\E[19~:\ + :k9=\E[20~:kD=\E[3~:kI=\E[2~:kN=\E[6~:kP=\E[5~:kb=\177:\ + :kd=\E[B:kh=\E[1~:kl=\E[D:kr=\E[C:ku=\E[A:le=^H:mh=\E[2m:\ + :mr=\E[7m:nd=\E[C:nw=^M^J:rc=\E8:sc=\E7:se=\E[27m:sf=^J:\ + :sr=\EM:st=\EH:ta=^I:ue=\E[24m:up=\E[A:us=\E[4m:\ + :vb=200\E[?5h\E[?5l:ve=\E[?25h\E[?0c:vi=\E[?25l\E[?1c:\ + :vs=\E[?25h\E[?8c:\ + :tc=klone+sgr:tc=ecma+color: +linux-m|Linux console no color:\ + :Co@:pa@:\ + :AB@:AF@:Sb@:Sf@:tc=linux: +linux-c-nc|linux console 1.3.x hack for ncurses only:\ + :cc:\ + :..Ic=\E]P%p1%x%p2%{255}%*%{1000}%/%02x%p3%{255}%*%{1000}%/%02x%p4%{255}%*%{1000}%/%02x:\ + :oc=\E]R:\ + :tc=linux: +# From: Dennis Henriksen , 9 July 1996 +linux-c|linux console 1.3.6+ with private palette for each virtual console:\ + :cc:\ + :Co#8:pa#64:\ + :..Ic=\E]P%?%p1%{9}%>%t%p1%{10}%-%'a'%+%c%e%p1%d%;%p2%{255}%&%Pr%gr%{16}%/%Px%?%gx%{9}%>%t%gx%{10}%-%'A'%+%c%e%gx%d%;%gr%{15}%&%Px%?%gx%{9}%>%t%gx%{10}%-%'A'%+%c%e%gx%d%;%p3%{255}%&%Pr%gr%{16}%/%Px%?%gx%{9}%>%t%gx%{10}%-%'A'%+%c%e%gx%d%;%gr%{15}%&%Px%?%gx%{9}%>%t%gx%{10}%-%'A'%+%c%e%gx%d%;%p4%{255}%&%Pr%gr%{16}%/%Px%?%gx%{9}%>%t%gx%{10}%-%'A'%+%c%e%gx%d%;%gr%{15}%&%Px%?%gx%{9}%>%t%gx%{10}%-%'A'%+%c%e%gx%d%;:\ + :oc=\E]R:\ + :tc=linux: + +# See the note on ICH/ICH1 VERSUS RMIR/SMIR near the end of file +linux-nic|linux with ich/ich1 suppressed for non-curses programs:\ + :IC@:ei=:ic@:im=:\ + :tc=linux: + +# This assumes you have used setfont(8) to load one of the Linux koi8-r fonts. 
+# acsc entry from Pavel Roskin" , 29 Sep 1997. +linux-koi8|linux with koi8 alternate character set:\ + :ac=+\020\054\021-\030.^Y0\215`\004a\221f\234g\237h\220i\276j\205k\203l\202m\204n\212o~p\0q\0r\0s_t\206u\207v\211w\210x\201y\230z\231{\267|\274~\224:tc=linux:\ + :tc=klone+koi8acs: + +# Another entry for KOI8-r with Qing Long's acsc. +# (which one better complies with the standard?) +linux-koi8r|linux with koi8-r alternate character set:\ + :tc=linux:tc=klone+koi8acs: + +# Entry for the latin1 and latin2 fonts +linux-lat|linux with latin1 or latin2 alternate character set:\ + :ac=+\020\054\021-\030.^Y0\333`\004a\013f\370g\361h\260i\316j\211k\214l\206m\203n\305o~p\304q\212r\304s_t\207u\215v\301w\302x\205y\363z\362{\343|\330}\234~\376:\ + :tc=linux: + +#### NetBSD consoles +# +# pcvt termcap database entries (corresponding to release 3.31) +# Author's last edit-date: [Fri Sep 15 20:29:10 1995] +# +# (For the terminfo master file, I translated these into terminfo syntax. +# Then I dropped all the pseudo-HP entries. we don't want and can't use +# the :Xs: flag. Then I split :is: into a size-independent :i1: and a +# size-dependent :is:. Finally, I added / -- esr) + +# NOTE: :ic: has been taken out of this entry. for reference, it should +# be . For discussion, see ICH/ICH1 VERSUS RMIR/SMIR below. 
+# (esr: added :vi: and :ve: to resolve NetBSD Problem Report #4583) +pcvtXX|pcvt vt200 emulator (DEC VT220):\ + :am:km:mi:ms:xn:\ + :it#8:vt#3:\ + :AL=\E[%dL:DC=\E[%dP:DL=\E[%dM:DO=\E[%dB:IC=\E[%d@:\ + :LE=\E[%dD:RA=\E[?7l:RI=\E[%dC:SA=\E[?7h:SF=\E[%dS:\ + :SR=\E[%dT:UP=\E[%dA:\ + :ac=++\054\054--..00``aaffgghhiijjkkllmmnnooppqqrrssttuuvvwwxxyyzz~~:\ + :ae=\E(B:al=\E[L:as=\E(0:bl=^G:cb=\E[1K:cd=\E[J:ce=\E[K:\ + :cl=\E[H\E[J:cm=\E[%i%d;%dH:cr=^M:cs=\E[%i%d;%dr:\ + :ct=\E[3g:dc=\E[P:dl=\E[M:do=\E[B:ei=\E[4l:ho=\E[H:\ + :i1=\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h:im=\E[4h:\ + :k1=\E[17~:k2=\E[18~:k3=\E[19~:k4=\E[20~:k5=\E[21~:\ + :k6=\E[23~:k7=\E[24~:k8=\E[25~:kD=\E[3~:kH=\E[4~:kI=\E[2~:\ + :kN=\E[6~:kP=\E[5~:kb=\177:kd=\EOB:ke=\E[?1l\E>:kh=\E[1~:\ + :kl=\EOD:kr=\EOC:ks=\E[?1h\E=:ku=\EOA:le=^H:mb=\E[5m:\ + :md=\E[1m:me=\E[m:mr=\E[7m:nd=\E[C:nw=\EE:\ + :r1=\Ec\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h:rc=\E8:\ + :rf=/usr/share/tabset/vt100:sc=\E7:se=\E[27m:sf=\ED:\ + :so=\E[7m:sr=\EM:st=\EH:ta=^I:ue=\E[24m:up=\E[A:us=\E[4m:\ + :ve=\E[?25h:vi=\E[?25l: + +# NetBSD/FreeBSD vt220 terminal emulator console (pc keyboard & monitor) +# termcap entries for pure VT220-Emulation and 25, 28, 35, 40, 43 and +# 50 lines entries; 80 columns +pcvt25|dec vt220 emulation with 25 lines:\ + :co#80:li#25:\ + :is=\E[1;25r\E[25;1H:tc=pcvtXX: +pcvt28|dec vt220 emulation with 28 lines:\ + :co#80:li#28:\ + :is=\E[1;28r\E[28;1H:tc=pcvtXX: +pcvt35|dec vt220 emulation with 35 lines:\ + :co#80:li#35:\ + :is=\E[1;35r\E[35;1H:tc=pcvtXX: +pcvt40|dec vt220 emulation with 40 lines:\ + :co#80:li#40:\ + :is=\E[1;40r\E[40;1H:tc=pcvtXX: +pcvt43|dec vt220 emulation with 43 lines:\ + :co#80:li#43:\ + :is=\E[1;43r\E[43;1H:tc=pcvtXX: +pcvt50|dec vt220 emulation with 50 lines:\ + :co#80:li#50:\ + :is=\E[1;50r\E[50;1H:tc=pcvtXX: + +# NetBSD/FreeBSD vt220 terminal emulator console (pc keyboard & monitor) +# termcap entries for pure VT220-Emulation and 25, 28, 35, 40, 43 and +# 50 lines entries; 132 columns +pcvt25w|dec vt220 
emulation with 25 lines and 132 cols:\ + :co#132:li#25:\ + :is=\E[1;25r\E[25;1H:tc=pcvtXX: +pcvt28w|dec vt220 emulation with 28 lines and 132 cols:\ + :co#132:li#28:\ + :is=\E[1;28r\E[28;1H:tc=pcvtXX: +pcvt35w|dec vt220 emulation with 35 lines and 132 cols:\ + :co#132:li#35:\ + :is=\E[1;35r\E[35;1H:tc=pcvtXX: +pcvt40w|dec vt220 emulation with 40 lines and 132 cols:\ + :co#132:li#40:\ + :is=\E[1;40r\E[40;1H:tc=pcvtXX: +pcvt43w|dec vt220 emulation with 43 lines and 132 cols:\ + :co#132:li#43:\ + :is=\E[1;43r\E[43;1H:tc=pcvtXX: +pcvt50w|dec vt220 emulation with 50 lines and 132 cols:\ + :co#132:li#50:\ + :is=\E[1;50r\E[50;1H:tc=pcvtXX: + +# Terminfo entries to enable the use of the ncurses library in colour on a +# NetBSD-arm32 console (only tested on a RiscPC). +# Created by Dave Millen 22.07.98 +# modified codes for setf/setb to setaf/setab, then to klone+color, corrected +# typo in invis - TD +arm100|arm100-am|Arm(RiscPC) ncurses compatible (for 640x480):\ + :am:ms:ut:xn:xo:\ + :co#80:it#8:li#30:\ + :@8=\E[M:DO=\E[%dB:K1=\E[q:K2=\E[r:K3=\E[s:K4=\E[p:K5=\E[n:\ + :LE=\E[%dD:RA=\E[?7l:RI=\E[%dC:SA=\E[?7h:UP=\E[%dA:\ + :ac=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~:\ + :ae=^O:as=^N:bl=^G:cb=\E[1K:cd=\E[J:ce=\E[K:cl=\E[H\E[J:\ + :cm=\E[%i%d;%dH:cr=^M:cs=\E[%i%d;%dr:ct=\E[3g:do=^J:\ + :eA=\E(B\E)0:ho=\E[H:k0=\E[y:k1=\E[P:k2=\E[Q:k3=\E[R:\ + :k4=\E[S:k5=\E[t:k6=\E[u:k7=\E[v:k8=\E[l:k9=\E[w:k;=\E[x:\ + :kb=^H:kd=\E[B:ke=\E[?1l\E>:kl=\E[D:kr=\E[C:ks=\E[?1h\E=:\ + :ku=\E[A:le=^H:mb=\E[5m:md=\E[1m:me=\E[m\017:mk=\E[8m:\ + :mr=\E[6m:nd=\E[C:r2=\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h:\ + :rc=\E8:\ + :..sa=\E[0%?%p1%p6%|%t;1%;%?%p2%t;4%;%?%p1%p3%|%t;7%;%?%p4%t;5%;m%?%p9%t\016%e\017%;:\ + :sc=\E7:se=\E[m:sf=^J:so=\E[7m:sr=\EM:st=\EH:ta=^I:ue=\E[m:\ + :up=\E[A:us=\E[4m:\ + :tc=ecma+sgr:tc=klone+color: +arm100-w|arm100-wam|Arm(RiscPC) ncurses compatible (for 1024x768):\ + :co#132:li#50:tc=arm100: + +# NetBSD/x68k console vt200 emulator. 
This port runs on a 68K machine
+# manufactured by Sharp for the Japanese market.
+# From Minoura Makoto , 12 May 1996
+x68k|x68k-ite|NetBSD/x68k ITE:\
+ :co#96:li#32:\
+ :%1=\E[28~:kC=\E[9~:tc=vt220:
+
+# :
+# Entry for the DNARD OpenFirmware console, close to ANSI but not quite.
+#
+# (still unfinished, but good enough so far.)
+ofcons:\
+ :bw:\
+ :co#80:li#30:\
+ :AL=\233%dL:DC=\233%dP:DL=\233%dM:DO=\233%dB:IC=\233%d@:\
+ :LE=\233%dD:RI=\233%dC:UP=\233%dA:al=\233L:bl=^G:cd=\233J:\
+ :ce=\233K:cl=^L:cm=\233%i%d;%dH:cr=^M:dc=\233P:dl=\233M:\
+ :do=\233B:ei=:ic=\233@:im=:k1=\2330P:k2=\2330Q:k3=\2330W:\
+ :k4=\2330x:k5=\2330t:k6=\2330u:k7=\2330q:k8=\2330r:\
+ :k9=\2330p:k;=\2330M:kD=\233P:kN=\233/:kP=\233?:kb=^H:\
+ :kd=\233B:kl=\233D:kr=\233C:ku=\233A:le=\233D:mb=\2337;2m:\
+ :md=\2331m:me=\2330m:mh=\2332m:mk=\2338m:mr=\2337m:\
+ :nd=\233C:nw=^M^J:se=\2330m:sf=^J:ta=^I:ue=\2330m:up=\233A:\
+ :vb=^G:
+
+# NetBSD "wscons" emulator in vt220 mode
+# These are micro-minimal and probably need to be redone for real
+# after the manner of the pcvt entries.
+wsvt25|NetBSD wscons in 25 line DEC VT220 mode:\
+ :co#80:li#25:tc=vt220:
+
+wsvt25m|NetBSD wscons in 25 line DEC VT220 mode with Meta:\
+ :km:\
+ :co#80:li#25:tc=vt220:
+
+# `rasterconsole' provided by 4.4BSD, NetBSD and OpenBSD on SPARC, and
+# DECstation/pmax.
+rcons|BSD rasterconsole:\
+ :tc=sun-il:
+# Color version of above. Color currently only provided by NetBSD.
+rcons-color|BSD rasterconsole with ANSI color:\
+ :ut:\
+ :Co#8:pa#64:\
+ :AB=\E[4%dm:AF=\E[3%dm:op=\E[m:tc=rcons:
+
+#### FreeBSD console entries
+#
+# From: Andrey Chernov 29 Mar 1996
+# Andrey Chernov maintains the FreeBSD termcap distributions.
+#
+# Note: Users of FreeBSD 2.1.0 and older versions must either upgrade
+# or comment out the :cb: capability in the console entry.
+#
+# Alexander Lukyanov reports:
+# I have seen FreeBSD-2.1.5R... The old el1 bug changed, but it is still there. 
+# Now el1 clears not only to the line beginning, but also a large chunk +# of previous line. But there is another bug - ech does not work at all. +# + +# for syscons +# common entry without semigraphics +# Bug: The capability resets attributes. +# Bug? The ech and el1 attributes appear to move the cursor in some cases; for +# instance el1 does if the cursor is moved to the right margin first. Removed +# by T.Dickey 97/5/3 (ech=\E[%p1%dX, el1=\E[1K) +# +# Setting colors turns off reverse; we cannot guarantee order, so use ncv. +# Note that this disables standout with color. +cons25w|ansiw|ansi80x25-raw|freebsd console (25-line raw mode):\ + :NP:am:bw:eo:ms:ut:\ + :Co#8:NC#21:co#80:it#8:li#25:pa#64:\ + :@7=\E[F:AB=\E[4%p1%dm:AF=\E[3%p1%dm:AL=\E[%dL:DC=\E[%dP:\ + :DL=\E[%dM:DO=\E[%dB:F1=\E[W:F2=\E[X:IC=\E[%d@:K2=\E[E:\ + :LE=\E[%dD:RI=\E[%dC:SF=\E[%dS:SR=\E[%dT:UP=\E[%dA:\ + :al=\E[L:bl=^G:bt=\E[Z:cd=\E[J:ce=\E[K:ch=\E[%i%d`:\ + :cl=\E[H\E[J:cm=\E[%i%d;%dH:cr=^M:cv=\E[%i%dd:dc=\E[P:\ + :dl=\E[M:do=\E[B:ei=:ho=\E[H:ic=\E[@:im=:k1=\E[M:k2=\E[N:\ + :k3=\E[O:k4=\E[P:k5=\E[Q:k6=\E[R:k7=\E[S:k8=\E[T:k9=\E[U:\ + :k;=\E[V:kB=\E[Z:kD=\177:kI=\E[L:kN=\E[G:kP=\E[I:kb=^H:\ + :kd=\E[B:kh=\E[H:kl=\E[D:kr=\E[C:ku=\E[A:le=^H:mb=\E[5m:\ + :md=\E[1m:me=\E[m:mh=\E[30;1m:mr=\E[7m:nd=\E[C:nw=\E[E:\ + :op=\E[x:r1=\E[x\E[m\Ec:se=\E[m:sf=\E[S:so=\E[7m:sr=\E[T:\ + :ta=^I:up=\E[A:ve=\E[=0C:vs=\E[=1C: +cons25|ansis|ansi80x25|freebsd console (25-line ansi mode):\ + :ac=-\030.^Y0\333`\004a\260f\370g\361h\261i\025j\331k\277l\332m\300n\305q\304t\303u\264v\301w\302x\263y\363z\362~\371:\ + :tc=cons25w: +cons25-m|ansis-mono|ansi80x25-mono|freebsd console (25-line mono ansi mode):\ + :Co@:pa@:\ + :AB@:AF@:md@:mh@:op@:ue=\E[m:us=\E[4m:tc=cons25: +cons30|ansi80x30|freebsd console (30-line ansi mode):\ + :li#30:tc=cons25: +cons30-m|ansi80x30-mono|freebsd console (30-line mono ansi mode):\ + :li#30:tc=cons25-m: +cons43|ansi80x43|freebsd console (43-line ansi mode):\ + :li#43:tc=cons25: 
+cons43-m|ansi80x43-mono|freebsd console (43-line mono ansi mode):\ + :li#43:tc=cons25-m: +cons50|ansil|ansi80x50|freebsd console (50-line ansi mode):\ + :li#50:tc=cons25: +cons50-m|ansil-mono|ansi80x50-mono|freebsd console (50-line mono ansi mode):\ + :li#50:tc=cons25-m: +cons60|ansi80x60|freebsd console (60-line ansi mode):\ + :li#60:tc=cons25: +cons60-m|ansi80x60-mono|freebsd console (60-line mono ansi mode):\ + :li#60:tc=cons25-m: +cons25r|pc3r|ibmpc3r|cons25-koi8-r|freebsd console w/koi8-r cyrillic:\ + :ac=-\030.^Y0\215`\004a\220f\234h\221i\025j\205k\203l\202m\204n\212q\0t\206u\207v\211w\210x\201y\230z\231~\225:\ + :tc=cons25w: +cons25r-m|pc3r-m|ibmpc3r-mono|cons25-koi8r-m|freebsd console w/koi8-r cyrillic (mono):\ + :Co@:pa@:\ + :AB@:AF@:op@:ue=\E[m:us=\E[4m:tc=cons25r: +cons50r|cons50-koi8r|freebsd console w/koi8-r cyrillic (50 lines):\ + :li#50:tc=cons25r: +cons50r-m|cons50-koi8r-m|freebsd console w/koi8-r cyrillic (50-line mono):\ + :li#50:tc=cons25r-m: +cons60r|cons60-koi8r|freebsd console w/koi8-r cyrillic (60 lines):\ + :li#60:tc=cons25r: +cons60r-m|cons60-koi8r-m|freebsd console w/koi8-r cyrillic (60-line mono):\ + :li#60:tc=cons25r-m: +# ISO 8859-1 FreeBSD console +cons25l1|cons25-iso8859|freebsd console w/iso 8859-1 chars:\ + :ac=+\253\054\273-\030.\031`\201a\202f\207g\210i\247j\213k\214l\215m\216n\217o\220p\221q\222r\223s\224t\225u\226v\227w\230x\231y\232z\233~\237:\ + :tc=cons25w: +cons25l1-m|cons25-iso-m|freebsd console w/iso 8859-1 chars (mono):\ + :Co@:pa@:\ + :AB@:AF@:md@:mh@:op@:ue=\E[m:us=\E[4m:tc=cons25l1: +cons50l1|cons50-iso8859|freebsd console w/iso 8859-1 chars (50 lines):\ + :li#50:tc=cons25l1: +cons50l1-m|cons50-iso-m|freebsd console w/iso 8859-1 chars (50-line mono):\ + :li#50:tc=cons25l1-m: +cons60l1|cons60-iso|freebsd console w/iso 8859-1 chars (60 lines):\ + :li#60:tc=cons25l1: +cons60l1-m|cons60-iso-m|freebsd console w/iso 8859-1 chars (60-line mono):\ + :li#60:tc=cons25l1-m: + +#### 386BSD and BSD/OS Consoles +# + +# This was the 
original 386BSD console entry (I think). +# Some places it's named oldpc3|oldibmpc3. +# From: Alex R.N. Wetmore +origpc3|origibmpc3|IBM PC 386BSD Console:\ + :am:bw:eo:xo:\ + :co#80:li#25:\ + :ac=j\331k\277l\332m\300n\305q\304t\303u\264v\301w\302x\263:\ + :cd=\E[J:ce=\E[K:cl=\Ec:cm=\E[%i%2;%2H:do=\E[B:ho=\E[H:\ + :kd=\E[B:kh=\E[Y:kl=\E[D:kr=\E[C:ku=\E[A:le=^H:md=\E[7m:\ + :me=\E[m\E[1;0x\E[2;7x:nd=\E[C:se=\E[1;0x\E[2;7x:\ + :sf=\E[S:so=\E[1;7x\E[2;0x:sr=\E[T:ue=\E[1;0x\E[2;7x:\ + :up=\E[A:us=\E[1;7x\E[2;0x: + +# description of BSD/386 console emulator in version 1.0 (supplied by BSDI) +oldpc3|oldibmpc3|old IBM PC BSD/386 Console:\ + :km:\ + :li#25:\ + :al=\E[L:bl=^G:cr=^M:dl=\E[M:do=^J:kH=\E[F:kI=\E[L:kN=\E[G:\ + :kP=\E[I:kb=^H:kd=\E[B:kh=\E[H:kl=\E[D:kr=\E[C:ku=\E[A:\ + :md=\E[=15F:me=\E[=R:mh=\E[=8F:nw=^M^J:sf=^J:ta=^I: + +# Description of BSD/OS console emulator in version 1.1, 2.0, 2.1 +# Note, the emulator supports many of the additional console features +# listed in the iBCS2 (e.g. character-set selection) though not all +# are described here. This entry really ought to be upgraded. +# Also note, the console will also work with fewer lines after doing +# "stty rows NN", e.g. to use 24 lines. +# (Color support from Kevin Rosenberg , 2 May 1996) +# Bug: The capability resets attributes. 
+bsdos-pc-nobold|BSD/OS PC console w/o bold:\ + :am:eo:km:xo:\ + :co#80:it#8:li#25:\ + :AL=\E[%dL:DL=\E[%dM:DO=\E[%dB:LE=\E[%dD:RI=\E[%dC:\ + :UP=\E[%dA:al=\E[L:bl=^G:cd=\E[J:ce=\E[K:cl=\Ec:\ + :cm=\E[%i%d;%dH:cr=^M:dl=\E[M:do=^J:ho=\E[H:kH=\E[F:\ + :kI=\E[L:kN=\E[G:kP=\E[I:kb=^H:kd=\E[B:kh=\E[H:kl=\E[D:\ + :kr=\E[C:ku=\E[A:le=^H:nd=\E[C:nw=^M^J:rc=\E8:\ + :..sa=\E[0;10%?%p1%t;7%;%?%p3%t;7%;%?%p4%t;5%;%?%p6%t;1%;%?%p7%t;8%;%?%p9%t;11%;m%?%p5%t\E[=8F%;:\ + :sc=\E7:sf=^J:ta=^I:up=\E[A:\ + :tc=klone+sgr:tc=klone+color: +bsdos-pc|IBM PC BSD/OS Console:\ + :..sa=\E[0;10%?%p1%t;7%;%?%p2%t;1%;%?%p3%t;7%;%?%p4%t;5%;%?%p6%t;1%;%?%p7%t;8%;%?%p9%t;11%;m:tc=bsdos-pc-nobold: + +# Old names for BSD/OS PC console used in releases before 4.1. +pc3|BSD/OS on the PC Console:\ + :tc=bsdos-pc-nobold: +ibmpc3|pc3-bold|BSD/OS on the PC Console with bold instead of underline:\ + :tc=bsdos-pc: + +# BSD/OS on the SPARC +bsdos-sparc|Sun SPARC BSD/OS Console:\ + :tc=sun: + +# BSD/OS on the PowerPC +bsdos-ppc|PowerPC BSD/OS Console:\ + :tc=bsdos-pc: + +#### DEC VT100 and compatibles +# +# DEC terminals from the vt100 forward are collected here. Older DEC terminals +# and micro consoles can be found in the `obsolete' section. More details on +# the relationship between the VT100 and ANSI X3.64/ISO 6429/ECMA-48 may be +# found near the end of this file. +# +# Except where noted, these entries are DEC's official terminfos. +# Contact Bill Hedberg of Terminal Support +# Engineering for more information. Updated terminfos and termcaps +# are kept available at ftp://gatekeeper.dec.com/pub/DEC/termcaps. +# +# In October 1995 DEC sold its terminals business, including the VT and Dorio +# line and trademark, to SunRiver Data Systems. SunRiver has since changed +# its name to Boundless Technologies; see http://www.boundless.com. 
+# + +# NOTE: Any VT100 emulation, whether in hardware or software, almost +# certainly includes what DEC called the `Level 1 editing extension' codes; +# only the very oldest VT100s lacked these and there probably aren't any of +# those left alive. To capture these, use one of the VT102 entries. +# +# Note that the :xn: glitch in vt100 is not quite the same as on the Concept, +# since the cursor is left in a different position while in the +# weird state (concept at beginning of next line, vt100 at end +# of this line) so all versions of vi before 3.7 don't handle +# :xn: right on vt100. The correct way to handle :xn: is when +# you output the char in column 80, immediately output CR LF +# and then assume you are in column 1 of the next line. If :xn: +# is on, am should be on too. +# +# I assume you have smooth scroll off or are at a slow enough baud +# rate that it doesn't matter (1200? or less). Also this assumes +# that you set auto-nl to "on", if you set it off use vt100-nam +# below. +# +# The padding requirements listed here are guesses. It is strongly +# recommended that xon/xoff be enabled, as this is assumed here. +# +# The vt100 uses and rather than :is:/:ct:/:st: because the +# tab settings are in non-volatile memory and don't need to be +# reset upon login. Also setting the number of columns glitches +# the screen annoyingly. You can type "reset" to get them set. +# +# The VT100 series terminals have cursor ("arrows") keys which can operate +# in two different modes: Cursor Mode and Application Mode. Cursor Mode +# is the reset state, and is assumed to be the normal state. Application +# Mode is the "set" state. In Cursor Mode, the cursor keys transmit +# "Esc [ {code}" sequences, conforming to ANSI standards. In Application +# Mode, the cursor keys transmit "Esc O " sequences. Application Mode +# was provided primarily as an aid to the porting of VT52 applications. 
It is
+# assumed that the cursor keys are normally in Cursor Mode, and expected that
+# applications such as vi will always transmit the :ks: string. Therefore,
+# the definitions for the cursor keys are made to match what the terminal
+# transmits after the :ks: string is transmitted. If the :ks: string
+# is a null string or is not defined, then cursor keys are assumed to be in
+# "Cursor Mode", and the cursor keys definitions should match that assumption,
+# else the application may fail. It is also expected that applications will
+# always transmit the :ke: string to the terminal before they exit.
+#
+# The VT100 series terminals have an auxiliary keypad, commonly referred to as
+# the "Numeric Keypad", because it is a cluster of numeric and function keys.
+# The Numeric Keypad can operate in two different modes: Numeric Mode and
+# Application Mode. Numeric Mode is the reset state, and is assumed to be
+# the normal state. Application Mode is the "set" state. In Numeric Mode,
+# the numeric and punctuation keys transmit ASCII 7-bit characters, and the
+# Enter key transmits the same as the Return key (Note: the Return key
+# can be configured to send either LF (\015) or CR LF). In Application Mode,
+# all the keypad keys transmit "Esc O {code}" sequences. The PF1 - PF4 keys
+# always send the same "Esc O {code}" sequences. It is assumed that the keypad
+# is normally in Numeric Mode. If an application requires that the keypad be
+# in Application Mode then it is expected that the user, or the application,
+# will set the TERM environment variable to point to a terminfo entry which has
+# defined the :ks: string to include the codes that switch the keypad into
+# Application Mode, and the terminfo entry will also define function key
+# fields to match the Application Mode control codes. If the :ks: string
+# is a null string or is not defined, then the keypad is assumed to be in
+# Numeric Mode. 
If the :ks: string switches the keypad into Application +# Mode, it is expected that the :ke: string will contain the control codes +# necessary to reset the keypad to "Normal" mode, and it is also expected that +# applications which transmit the :ks: string will also always transmit the +# :ke: string to the terminal before they exit. +# +# Here's a diagram of the VT100 keypad keys with their bindings. +# The top line is the name of the key (some DEC keyboards have the keys +# labelled somewhat differently, like GOLD instead of PF1, but this is +# the most "official" name). The second line is the escape sequence it +# generates in Application Keypad mode (where "$" means the ESC +# character). The third line contains two items, first the mapping of +# the key in terminfo, and then in termcap. +# _______________________________________ +# | PF1 | PF2 | PF3 | PF4 | +# | $OP | $OQ | $OR | $OS | +# |_kf1__k1_|_kf2__k2_|_kf3__k3_|_kf4__k4_| +# | 7 8 9 - | +# | $Ow | $Ox | $Oy | $Om | +# |_kf9__k9_|_kf10_k;_|_kf0__k0_|_________| +# | 4 | 5 | 6 | , | +# | $Ot | $Ou | $Ov | $Ol | +# |_kf5__k5_|_kf6__k6_|_kf7__k7_|_kf8__k8_| +# | 1 | 2 | 3 | | +# | $Oq | $Or | $Os | enter | +# |_ka1__K1_|_kb2__K2_|_ka3__K3_| $OM | +# | 0 | . | | +# | $Op | $On | | +# |___kc1_______K4____|_kc3__K5_|_kent_@8_| +# +# And here, for those of you with orphaned VT100s lacking documentation, is +# a description of the soft switches invoked when you do `Set Up'. +# +# Scroll 0-Jump Shifted 3 0-# +# | 1-Smooth | 1-British pound sign +# | Autorepeat 0-Off | Wrap Around 0-Off +# | | 1-On | | 1-On +# | | Screen 0-Dark Bkg | | New Line 0-Off +# | | | 1-Light Bkg | | | 1-On +# | | | Cursor 0-Underline | | | Interlace 0-Off +# | | | | 1-Block | | | | 1-On +# | | | | | | | | +# 1 1 0 1 1 1 1 1 0 1 0 0 0 0 1 0 <--Standard Settings +# | | | | | | | | +# | | | Auto XON/XOFF 0-Off | | | Power 0-60 Hz +# | | | 1-On | | | 1-50 Hz +# | | Ansi/VT52 0-VT52 | | Bits Per Char. 
0-7 Bits +# | | 1-ANSI | | 1-8 Bits +# | Keyclick 0-Off | Parity 0-Off +# | 1-On | 1-On +# Margin Bell 0-Off Parity Sense 0-Odd +# 1-On 1-Even +# +# The following SET-UP modes are assumed for normal operation: +# ANSI_MODE AUTO_XON/XOFF_ON NEWLINE_OFF 80_COLUMNS +# WRAP_AROUND_ON JUMP_SCROLL_OFF +# Other SET-UP modes may be set for operator convenience or communication +# requirements; I recommend +# AUTOREPEAT_ON BLOCK_CURSOR MARGIN_BELL_OFF SHIFTED_3_# +# Unless you have a graphics add-on such as Digital Engineering's VT640 +# (and even then, whenever it can be arranged!) you should set +# INTERLACE_OFF +# +# (vt100: I added / based on the init string, also :bs:. -- esr) +vt100|vt100-am|dec vt100 (w/advanced video):\ + :am:bs:ms:xn:xo:\ + :co#80:it#8:li#24:vt#3:\ + :@8=\EOM:DO=\E[%dB:K1=\EOq:K2=\EOr:K3=\EOs:K4=\EOp:K5=\EOn:\ + :LE=\E[%dD:RA=\E[?7l:RI=\E[%dC:SA=\E[?7h:UP=\E[%dA:\ + :ac=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~:\ + :ae=^O:as=^N:bl=^G:cb=\E[1K:cd=\E[J:ce=\E[K:cl=\E[H\E[J:\ + :cm=\E[%i%d;%dH:cr=^M:cs=\E[%i%d;%dr:ct=\E[3g:do=^J:\ + :eA=\E(B\E)0:ho=\E[H:k0=\EOy:k1=\EOP:k2=\EOQ:k3=\EOR:\ + :k4=\EOS:k5=\EOt:k6=\EOu:k7=\EOv:k8=\EOl:k9=\EOw:k;=\EOx:\ + :kb=^H:kd=\EOB:ke=\E[?1l\E>:kl=\EOD:kr=\EOC:ks=\E[?1h\E=:\ + :ku=\EOA:le=^H:mb=\E[5m:md=\E[1m:me=\E[m\017:mr=\E[7m:\ + :nd=\E[C:r2=\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h:rc=\E8:\ + :..sa=\E[0%?%p1%p6%|%t;1%;%?%p2%t;4%;%?%p1%p3%|%t;7%;%?%p4%t;5%;m%?%p9%t\016%e\017%;:\ + :sc=\E7:se=\E[m:sf=^J:so=\E[7m:sr=\EM:st=\EH:ta=^I:ue=\E[m:\ + :up=\E[A:us=\E[4m: +vt100nam|vt100-nam|vt100 no automargins:\ + :am@:xn@:tc=vt100-am: +vt100-vb|dec vt100 (w/advanced video) & no beep:\ + :bl@:vb=\E[?5h\E[?5l:tc=vt100: + +# Ordinary vt100 in 132 column ("wide") mode. 
+vt100-w|vt100-w-am|dec vt100 132 cols (w/advanced video):\ + :co#132:li#24:\ + :r2=\E>\E[?3h\E[?4l\E[?5l\E[?8h:tc=vt100-am: +vt100-w-nam|vt100-nam-w|dec vt100 132 cols (w/advanced video no automargin):\ + :co#132:li#14:vt@:\ + :r2=\E>\E[?3h\E[?4l\E[?5l\E[?8h:tc=vt100-nam: + +# vt100 with no advanced video. +vt100-nav|vt100 without advanced video option:\ + :sg#1:\ + :mb@:md@:me@:mr@:sa@:se=\E[m:so=\E[7m:ue@:us@:tc=vt100: +vt100-nav-w|vt100-w-nav|dec vt100 132 cols 14 lines (no advanced video option):\ + :co#132:li#14:tc=vt100-nav: + +# vt100 with one of the 24 lines used as a status line. +# We put the status line on the top. +vt100-s|vt100-s-top|vt100-top-s|vt100 for use with top sysline:\ + :es:hs:\ + :li#23:\ + :cl=\E[2;1H\E[J:cm=\E[%i%+^A;%dH:cs=\E[%i%i%d;%dr:\ + :ds=\E7\E[1;24r\E8:fs=\E8:ho=\E[2;1H:is=\E7\E[2;24r\E8:\ + :ts=\E7\E[1;%p1%dH\E[1K:\ + :tc=vt100-am: + +# Status line at bottom. +# Clearing the screen will clobber status line. +vt100-s-bot|vt100-bot-s|vt100 for use with bottom sysline:\ + :es:hs:\ + :li#23:\ + :ds=\E7\E[1;24r\E8:fs=\E8:is=\E[1;23r\E[23;1H:\ + :ts=\E7\E[24;%p1%dH\E[1K:\ + :tc=vt100-am: + +# Most of the `vt100' emulators out there actually emulate a vt102 +# This entry (or vt102-nsgr) is probably the right thing to use for +# these. +vt102|dec vt102:\ + :mi:\ + :al=\E[L:dc=\E[P:dl=\E[M:ei=\E[4l:im=\E[4h:tc=vt100: +vt102-w|dec vt102 in wide mode:\ + :co#132:\ + :r3=\E[?3h:tc=vt102: + +# Many brain-dead PC comm programs that pretend to be `vt100-compatible' +# fail to interpret the ^O and ^N escapes properly. Symptom: the :me: +# string in the canonical vt100 entry above leaves the screen littered +# with little snowflake or star characters (IBM PC ROM character \017 = ^O) +# after highlight turnoffs. This entry should fix that, and even leave +# ACS support working, at the cost of making multiple-highlight changes +# slightly more expensive. +# From: Eric S. 
Raymond July 22 1995 +vt102-nsgr|vt102 no sgr (use if you see snowflakes after highlight changes):\ + :me=\E[m:sa@:\ + :tc=vt102: + +# VT125 Graphics CRT. Clear screen also erases graphics +vt125|vt125 graphics terminal:\ + :cl=\E[H\E[2J\EPpS(E)\E\:tc=vt100: + +# This isn't a DEC entry, it came from University of Wisconsin. +# (vt131: I added / based on the init string, also :bs: -- esr) +vt131|dec vt131:\ + :am:bs:xn:\ + :co#80:it#8:li#24:vt#3:\ + :RA=\E[?7h:SA=\E[?7h:bl=^G:cd=50\E[J:ce=3\E[K:\ + :cl=50\E[;H\E[2J:cm=5\E[%i%d;%dH:cr=^M:cs=\E[%i%d;%dr:\ + :do=^J:ho=\E[H:is=\E[1;24r\E[24;1H:k1=\EOP:k2=\EOQ:\ + :k3=\EOR:k4=\EOS:kb=^H:kd=\EOB:ke=\E[?1l\E>:kl=\EOD:\ + :kr=\EOC:ks=\E[?1h\E=:ku=\EOA:le=^H:mb=2\E[5m:md=2\E[1m:\ + :me=2\E[m:mr=2\E[7m:nd=2\E[C:nw=^M^J:\ + :r1=\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h:rc=\E8:sc=\E7:\ + :se=2\E[m:so=2\E[7m:sr=5\EM:ta=^I:ue=2\E[m:up=2\E[A:\ + :us=2\E[4m: + +# vt132 - like vt100 but slower and has ins/del line and such. +# I'm told that :im:/:ei: are backwards in the terminal from the +# manual and from the ANSI standard, this describes the actual +# terminal. I've never actually used a vt132 myself, so this +# is untested. +# +vt132|DEC vt132:\ + :xn:\ + :al=\E[L:dc=\E[P:dl=\E[M:ei=\E[4h:im=\E[4l:ip=:sf=\n:tc=vt100: + +# This vt220 description maps F5--F9 to the second block of function keys +# at the top of the keyboard. The "DO" key is used as F10 to avoid conflict +# with the key marked (ESC) on the vt220. See vt220d for an alternate mapping. +# PF1--PF4 are used as F1--F4. 
+# +vt220-old|vt200-old|DEC VT220 in vt100 emulation mode:\ + :am:bs:mi:pt:xn:xo:\ + :co#80:li#24:vt#3:\ + :@7=\E[4~:RA=\E[?7l:SA=\E[?7h:\ + :ac=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~:\ + :ae=\E(B:al=\E[L:as=\E(0:bl=^G:cd=\E[J:ce=\E[K:\ + :cl=\E[H\E[2J:cm=\E[%i%d;%dH:cr=^M:cs=\E[%i%d;%dr:\ + :dc=\E[P:dl=\E[M:do=\E[B:ei=\E[4l:ho=\E[H:\ + :if=/usr/share/tabset/vt100:im=\E[4h:\ + :is=\E[1;24r\E[24;1H:k1=\EOP:k2=\EOQ:k3=\EOR:k4=\EOS:\ + :k5=\E[17~:k6=\E[18~:k7=\E[19~:k8=\E[20~:k9=\E[21~:\ + :k;=\E[29~:kD=\E[3~:kI=\E[2~:kN=\E[6~:kP=\E[5~:kb=^H:\ + :kd=\E[B:kh=\E[1~:kl=\E[D:kr=\E[C:ku=\E[A:le=^H:mb=\E[5m:\ + :md=\E[1m:me=\E[m:mr=\E[7m:nd=\E[C:nl=^J:\ + :r2=\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h:rc=\E8:\ + :rf=/usr/share/tabset/vt100:\ + :..sa=\E[0%?%p6%t;1%;%?%p2%t;4%;%?%p4%t;5%;%?%p1%p3%|%t;7%;m%?%p9%t\E(0%e\E(B%;:\ + :sc=\E7:se=\E[27m:sf=20\ED:so=\E[7m:sr=14\EM:ta=^I:\ + :ue=\E[24m:up=\E[A:us=\E[4m:ve=\E[?25h:vi=\E[?25l: + +# A much better description of the VT200/220; used to be vt220-8 +# (untranslatable capabilities removed to fit entry within 1023 bytes) +# (sgr removed to fit entry within 1023 bytes) +# (terminfo-only capabilities suppressed to fit entry within 1023 bytes) +vt220|vt200|dec vt220:\ + :am:bs:mi:ms:xn:xo:\ + :co#80:it#8:li#24:vt#3:\ + :AL=\E[%dL:DC=\E[%dP:DL=\E[%dM:DO=\E[%dB:IC=\E[%d@:\ + :LE=\E[%dD:RI=\E[%dC:UP=\E[%dA:ae=^O:al=\E[L:as=^N:bl=^G:\ + :cd=\E[J:ce=\E[K:cl=\E[H\E[J:cm=\E[%i%d;%dH:cr=^M:\ + :cs=\E[%i%d;%dr:ct=\E[3g:dc=\E[P:dl=\E[M:do=^J:ec=\E[%dX:\ + :ei=\E[4l:ho=\E[H:if=/usr/share/tabset/vt100:im=\E[4h:\ + :is=\E[?7h\E[>\E[?1h\E F\E[?4l:k1=\EOP:k2=\EOQ:k3=\EOR:\ + :k4=\EOS:k6=\E[17~:k7=\E[18~:k8=\E[19~:k9=\E[20~:kI=\E[2~:\ + :kN=\E[6~:kP=\E[5~:kb=^H:kd=\E[B:kh=\E[H:kl=\E[D:kr=\E[C:\ + :ku=\E[A:le=^H:mb=\E[5m:md=\E[1m:me=\E[m:mr=\E[7m:nd=\E[C:\ + :nw=\EE:rc=\E8:sc=\E7:se=\E[27m:sf=\ED:so=\E[7m:sr=\EM:\ + :st=\EH:ta=^I:ue=\E[24m:up=\E[A:us=\E[4m:vb=\E[?5h\E[?5l: +vt220-w|vt200-w|DEC vt220 in wide mode:\ + :co#132:\ + 
:r3=\E[?3h:tc=vt220: +# (untranslatable capabilities removed to fit entry within 1023 bytes) +# (sgr removed to fit entry within 1023 bytes) +# (terminfo-only capabilities suppressed to fit entry within 1023 bytes) +vt220-8bit|vt220-8|vt200-8bit|vt200-8|dec vt220/200 in 8-bit mode:\ + :am:bs:mi:ms:xn:xo:\ + :co#80:it#8:li#24:vt#3:\ + :AL=\233%dL:DC=\233%dP:DL=\233%dM:DO=\233%dB:IC=\233%d@:\ + :LE=\233%dD:RI=\233%dC:UP=\233%dA:ae=^O:al=\233L:as=^N:\ + :bl=^G:cd=\233J:ce=\233K:cl=\233H\233J:cm=\233%i%d;%dH:\ + :cr=^M:cs=\233%i%d;%dr:ct=\2333g:dc=\233P:dl=\233M:do=^J:\ + :ec=\233%dX:ei=\2334l:ho=\233H:\ + :if=/usr/share/tabset/vt100:im=\2334h:\ + :is=\233?7h\233>\233?1h\E F\233?4l:k1=\EOP:k2=\EOQ:\ + :k3=\EOR:k4=\EOS:k6=\23317~:k7=\23318~:k8=\23319~:\ + :k9=\23320~:kI=\2332~:kN=\2336~:kP=\2335~:kb=^H:kd=\233B:\ + :kh=\233H:kl=\233D:kr=\233C:ku=\233A:le=^H:mb=\2335m:\ + :md=\2331m:me=\233m:mr=\2337m:nd=\233C:nw=\EE:rc=\E8:\ + :sc=\E7:se=\23327m:sf=\ED:so=\2337m:sr=\EM:st=\EH:ta=^I:\ + :ue=\23324m:up=\233A:us=\2334m:vb=\233?5h\233?5l: + +# +# vt220d: +# This vt220 description regards F6--F10 as the second block of function keys +# at the top of the keyboard. This mapping follows the description given +# in the VT220 Programmer Reference Manual and agrees with the labeling +# on some terminals that emulate the vt220. There is no support for an F5. +# See vt220 for an alternate mapping. +# +vt220d|DEC VT220 in vt100 mode with DEC function key labeling:\ + :F1=\E[23~:F2=\E[24~:F3=\E[25~:F4=\E[26~:F5=\E[28~:\ + :F6=\E[29~:F7=\E[31~:F8=\E[32~:F9=\E[33~:FA=\E[34~:k5@:\ + :k6=\E[17~:k7=\E[18~:k8=\E[19~:k9=\E[20~:k;=\E[21~:\ + :tc=vt220-old: + +vt220-nam|v200-nam|VT220 in vt100 mode with no auto margins:\ + :am@:\ + :r2=\E>\E[?3l\E[?4l\E[?5l\E[?7l\E[?8h:tc=vt220: + +# vt220 termcap written Tue Oct 25 20:41:10 1988 by Alex Latzko +# (not an official DEC entry!) +# The problem with real vt220 terminals is they don't send escapes when in +# in vt220 mode. 
This can be gotten around two ways. 1> don't send +# escapes or 2> put the vt220 into vt100 mode and use all the nifty +# features of vt100 advanced video which it then has. +# +# This entry takes the view of putting a vt220 into vt100 mode so +# you can use the escape key in emacs and everything else which needs it. +# +# You probably don't want to use this on a VMS machine since VMS will think +# it has a vt220 and will get fouled up coming out of emacs +# +# From: Alexander Latzko , 30 Dec 1996 +# (Added vt100 :rc:,:sc: to quiet a tic warning -- esr) +vt200-js|vt220-js|dec vt200 series with jump scroll:\ + :am:\ + :co#80:\ + :al=\E[L:bl=^G:cd=\E[J:ce=\E[K:cl=\E[H\E[J:cm=\E[%i%d;%dH:\ + :cr=^M:cs=\E[%i%d;%dr:dc=\E[P:dl=\E[M:dm=:do=^J:ed=:\ + :ei=\E[4l:ho=\E[H:im=\E[4h:\ + :is=\E[61"p\E[H\E[?3l\E[?4l\E[?1l\E[?5l\E[?6l\E[?7h\E[?8h\E[?25h\E>\E[m:\ + :k1=\EOP:k2=\EOQ:k3=\EOR:k4=\EOS:kb=^H:kd=\EOB:\ + :ke=\E[?1l\E>:kl=\EOD:kr=\EOC:ks=\E[?1h\E=:ku=\EOA:le=^H:\ + :nw=^M\ED:r1=\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h:rc=\E8:\ + :rf=/usr/lib/tabset/vt100:sc=\E7:se=5\E[27m:sf=\ED:\ + :so=5\E[7m:sr=\EM:ta=^I:ue=\E[24m:up=\E[A:us=\E[4m: + + +# This was DEC's vt320. Use the purpose-built one below instead +#vt320|DEC VT320 in vt100 emulation mode, +# use=vt220, + +# +# Use v320n for SCO's LYRIX. Otherwise, use Adam Thompson's vt320-nam. +# +vt320nam|v320n|DEC VT320 in vt100 emul. mode with NO AUTO WRAP mode:\ + :am@:\ + :r2=\E>\E[?3l\E[?4l\E[?5l\E[?7l\E[?8h:tc=vt220: + +# These entries are not DEC's official ones, they were purpose-built for the +# VT320. Here are the designer's notes: +# is end on a PC kbd. Actually 'select' on a VT. Mapped to +# 'Erase to End of Field'... since nothing seems to use 'end' anyways... +# khome is Home on a PC kbd. Actually 'FIND' on a VT. +# Things that use usually use tab anyways... and things that don't use +# tab usually use instead... +# kprv is same as tab - Backtab is useless... 
+# I left out :sa: because of its RIDICULOUS complexity,
+# and the resulting fact that it causes the termcap translation of the entry
+# to SMASH the 1k-barrier...
+# From: Adam Thompson Sept 10 1995
+# (vt320: uncommented :fs:, commented out to avoid a conflict --esr)
+# (untranslatable capabilities removed to fit entry within 1023 bytes)
+# (sgr removed to fit entry within 1023 bytes)
+# (terminfo-only capabilities suppressed to fit entry within 1023 bytes)
+vt320|vt300|dec vt320 7 bit terminal:\
+ :am:es:hs:mi:ms:xn:\
+ :co#80:li#24:ws#80:\
+ :AL=\E[%dL:DC=\E[%dP:DL=\E[%dM:DO=\E[%dB:IC=\E[%d@:\
+ :K1=\EOw:K2=\EOu:K3=\EOy:K4=\EOq:K5=\EOs:LE=\E[%dD:\
+ :RI=\E[%dC:UP=\E[%dA:ae=\E(B:al=\E[L:as=\E(0:bl=^G:cd=\E[J:\
+ :ce=\E[K:cl=\E[H\E[2J:cm=\E[%i%d;%dH:cr=^M:\
+ :cs=\E[%i%d;%dr:ct=\E[3g:dc=\E[P:dl=\E[M:do=^J:ec=\E[%dX:\
+ :ei=\E[4l:fs=\E[0$}:ho=\E[H:im=\E[4h:\
+ :is=\E>\E[?3l\E[?4l\E[5?l\E[?7h\E[?8h\E[1;24r\E[24;1H:\
+ :k1=\EOP:k2=\EOQ:k3=\EOR:k4=\EOS:k6=\E[17~:k7=\E[18~:\
+ :k8=\E[19~:k9=\E[20~:kD=\E[3~:kI=\E[2~:kN=\E[6~:kP=\E[5~:\
+ :kb=\177:kd=\EOB:ke=\E[?1l\E>:kh=\E[1~:kl=\EOD:kr=\EOC:\
+ :ks=\E[?1h\E=:ku=\EOA:le=^H:mb=\E[5m:md=\E[1m:me=\E[m:\
+ :mr=\E[7m:nd=\E[C:nw=\EE:rc=\E8:sc=\E7:se=\E[m:sf=\ED:\
+ :so=\E[7m:sr=\EM:st=\EH:ta=^I:ts=\E[1$}\E[H\E[K:ue=\E[m:\
+ :up=\E[A:us=\E[4m:ve=\E[?25h:vi=\E[?25l:
+vt320-nam|vt300-nam|dec vt320 7 bit terminal with no am to make SAS happy:\
+ :am@:\
+ :is=\E>\E[?3l\E[?4l\E[5?l\E[?7l\E[?8h\E[1;24r\E[24;1H:\
+ :r2=\E>\E[?3l\E[?4l\E[5?l\E[?7l\E[?8h\E[1;24r\E[24;1H:\
+ :tc=vt320:
+# We have to init 132-col mode, not 80-col mode. 
+vt320-w|vt300-w|dec vt320 wide 7 bit terminal:\
+ :co#132:ws#132:\
+ :is=\E>\E[?3h\E[?4l\E[5?l\E[?7h\E[?8h\E[1;24r\E[24;1H:\
+ :r2=\E>\E[?3h\E[?4l\E[5?l\E[?7h\E[?8h\E[1;24r\E[24;1H:\
+ :tc=vt320:
+vt320-w-nam|vt300-w-nam|dec vt320 wide 7 bit terminal with no am:\
+ :am@:\
+ :is=\E>\E[?3h\E[?4l\E[5?l\E[?7l\E[?8h\E[1;24r\E[24;1H:\
+ :r2=\E>\E[?3h\E[?4l\E[5?l\E[?7l\E[?8h\E[1;24r\E[24;1H:\
+ :tc=vt320-w:
+
+# VT330 and VT340 -- These are ReGIS and SIXEL graphics terminals
+# which are pretty much a superset of the VT320. They have the
+# host writable status line, yet another different DRCS matrix size,
+# and such, but they add the DEC Technical character set, Multiple text
+# pages, selectable length pages, and the like. The difference between
+# the vt330 and vt340 is that the latter has only 2 planes and a monochrome
+# monitor, the former has 4 planes and a color monitor. These terminals
+# support VT131 and ANSI block mode, but as with much of these things,
+# termcap/terminfo doesn't deal with these features.
+#
+# Note that this entry is set up in what was the standard way for GNU
+# Emacs v18 terminal modes to deal with the cursor keys in that the arrow
+# keys were switched into application mode at the same time the numeric pad
+# is switched into application mode. This changes the definitions of the
+# arrow keys. 
Emacs v19 is smarter and mines its keys directly out of +# your termcap or terminfo entry, +# +# From: Daniel Glasser , 13 Oct 1993 +# (vt340: string capability "sb=\E[M" corrected to "sr"; +# also, added / based on the init string -- esr) +vt340|dec-vt340|vt330|dec-vt330|dec vt340 graphics terminal with 24 line page:\ + :am:es:hs:mi:ms:xn:xo:\ + :co#80:it#8:li#24:vt#3:\ + :AL=\E[%dL:DC=\E[%dP:DL=\E[%dM:DO=\E[%dB:IC=\E[%d@:\ + :LE=\E[%dD:RA=\E[?7l:RI=\E[%dC:SA=\E[?7h:UP=\E[%dA:\ + :ac=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~:\ + :ae=^O:al=\E[L:as=^N:cd=\E[J:ce=\E[K:cl=\E[H\E[J:\ + :cm=\E[%i%d;%dH:cr=^M:cs=\E[%i%d;%dr:ct=\E[3g:dc=\E[P:\ + :dl=\E[M:do=^J:ds=\E[2$~\r\E[1$}\E[K\E[$}:ei=\E[4l:\ + :fs=\E[$}:ho=\E[H:im=\E[4h:\ + :is=\E<\E F\E>\E[?1h\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h\E[1;24r\E[24;1H:\ + :k1=\EOP:k2=\EOQ:k3=\EOR:k4=\EOS:k6=\E[17~:k7=\E[18~:\ + :k8=\E[19~:k9=\E[20~:kb=^H:kd=\EOB:ke=\E[?1l\E>:kl=\EOD:\ + :kr=\EOC:ks=\E[?1h\E=:ku=\EOA:l1=pf1:l2=pf2:l3=pf3:l4=pf4:\ + :le=^H:mb=\E[5m:md=\E[1m:me=\E[m:mr=\E[7m:nd=\E[C:nw=^M\ED:\ + :r1=\E[?3l:rc=\E8:rf=/usr/share/tabset/vt300:sc=\E7:\ + :se=\E[27m:sf=\ED:so=\E[7m:sr=\EM:st=\EH:ta=^I:\ + :ts=\E[2$~\E[1$}\E[1;%dH:ue=\E[24m:up=\E[A:us=\E[4m:\ + :vb=200\E[?5h\E[?5l:ve=\E[?25h:vi=\E[?25l:vs=\E[?25h: + +# DEC doesn't supply a vt400 description, so we add Daniel Glasser's +# (originally written with vt420 as its primary name, and usable for it). +# +# VT400/420 -- This terminal is a superset of the vt320. It adds the multiple +# text pages and long text pages with selectable length of the vt340, along +# with left and right margins, rectangular area text copy, fill, and erase +# operations, selected region character attribute change operations, +# page memory and rectangle checksums, insert/delete column, reception +# macros, and other features too numerous to remember right now. TERMCAP +# can only take advantage of a few of these added features. 
+#
+# Note that this entry is set up in what was the standard way for GNU
+# Emacs v18 terminal modes to deal with the cursor keys in that the arrow
+# keys were switched into application mode at the same time the numeric pad
+# is switched into application mode. This changes the definitions of the
+# arrow keys. Emacs v19 is smarter and mines its keys directly out of
+# your termcap entry,
+#
+# From: Daniel Glasser , 13 Oct 1993
+# (vt400: string capability ":sb=\E[M:" corrected to ":sr=\E[M:";
+# also, added / based on the init string -- esr)
+# (untranslatable capabilities removed to fit entry within 1023 bytes)
+# (sgr removed to fit entry within 1023 bytes)
+# (terminfo-only capabilities suppressed to fit entry within 1023 bytes)
+vt400|vt400-24|dec-vt400|dec vt400 24x80 column autowrap:\
+ :am:es:hs:mi:ms:xn:xo:\
+ :co#80:it#8:li#24:vt#3:\
+ :AL=\E[%dL:DC=\E[%dP:DL=\E[%dM:DO=\E[%dB:IC=\E[%d@:\
+ :LE=\E[%dD:RI=\E[%dC:UP=\E[%dA:ae=^O:al=\E[L:as=^N:\
+ :cd=10\E[J:ce=4\E[K:cl=10\E[H\E[J:cm=\E[%i%d;%dH:cr=^M:\
+ :cs=\E[%i%d;%dr:ct=\E[3g:dc=\E[P:dl=\E[M:do=^J:\
+ :ds=\E[2$~\r\E[1$}\E[K\E[$}:ei=\E[4l:fs=\E[$}:ho=\E[H:\
+ :ic=\E[@:im=\E[4h:\
+ :is=\E<\E F\E>\E[?1h\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h\E[1;24r\E[24;1H:\
+ :k1=\EOP:k2=\EOQ:k3=\EOR:k4=\EOS:k6=\E[17~:k7=\E[18~:\
+ :k8=\E[19~:k9=\E[20~:kb=^H:kd=\EOB:ke=\E[?1l\E>:kl=\EOD:\
+ :kr=\EOC:ks=\E[?1h\E=:ku=\EOA:le=^H:mb=\E[5m:md=\E[1m:\
+ :me=\E[m:mr=\E[7m:nd=\E[C:nw=^M\ED:rc=\E8:sc=\E7:se=\E[27m:\
+ :sf=\ED:so=\E[7m:sr=\EM:st=\EH:ta=^I:\
+ :ts=\E[2$~\E[1$}\E[1;%dH:ue=\E[24m:up=\E[A:us=\E[4m:\
+ :vb=200\E[?5h\E[?5l:ve=\E[?25h:vi=\E[?25l:vs=\E[?25h:
+
+# (vt420: I removed :k0:, it collided with . 
I also restored +# a missing :sc: -- esr) +vt420|DEC VT420:\ + :am:mi:xn:xo:\ + :co#80:li#24:vt#3:\ + :*6=\E[4~:@0=\E[1~:RA=\E[?7l:\ + :S5=\E[?0;0r\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h:\ + :SA=\E[?7h:\ + :ac=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~:\ + :ae=\E(B:al=\E[L:as=\E(0:bl=^G:cd=\E[J:ce=\E[K:\ + :cl=\E[H\E[2J:cm=\E[%i%d;%dH:cr=^M:cs=\E[%i%d;%dr:\ + :dc=\E[P:dl=\E[M:do=\E[B:ei=\E[4l:ho=\E[H:\ + :i2=\E[?67h\E[64;1"p:if=/usr/share/tabset/vt300:\ + :im=\E[4h:is=\E[1;24r\E[24;1H:k1=\EOP:k2=\EOQ:k3=\EOR:\ + :k4=\EOS:k5=\E[17~:k6=\E[18~:k7=\E[19~:k8=\E[20~:\ + :k9=\E[21~:k;=\E[29~:kD=\E[3~:kI=\E[2~:kN=\E[6~:kP=\E[5~:\ + :kb=^H:kd=\E[B:ke=\E>:kl=\E[D:kr=\E[C:ks=\E=:ku=\E[A:le=^H:\ + :mb=\E[5m:md=\E[1m:me=\E[m:mr=\E[7m:nd=\E[C:\ + :r3=\E[?67h\E[64;1"p:rc=\E8:rf=/usr/share/tabset/vt300:\ + :..sa=\E[0%?%p6%t;1%;%?%p2%t;4%;%?%p4%t;5%;%?%p1%p3%|%t;7%;m%?%p9%t\E(0%e\E(B%;:\ + :sc=\E7:se=\E[m:sf=\ED:so=\E[7m:sr=\EM:ta=^I:ue=\E[m:\ + :up=\E[A:us=\E[4m: + +# +# DEC VT220 and up support DECUDK (user-defined keys). DECUDK (i.e., pfx) +# takes two parameters, the key and the string. Translating the key is +# straightforward (keys 1-5 are not defined on real terminals, though some +# emulators define these): +# +# if (key < 16) then value = key; +# else if (key < 21) then value = key + 1; +# else if (key < 25) then value = key + 2; +# else if (key < 27) then value = key + 3; +# else if (key < 30) then value = key + 4; +# else value = key + 5; +# +# The string must be the hexadecimal equivalent, e.g., "5052494E" for "PRINT". +# There's no provision in terminfo for emitting a string in this format, so the +# application has to know it. 
+# +vt420pc|DEC VT420 w/PC keyboard:\ + :@7=\E[4~:F1=\E[23~:F2=\E[24~:F3=\E[11;2~:F4=\E[12;2~:\ + :F5=\E[13;2~:F6=\E[14;2~:F7=\E[15;2~:F8=\E[17;2~:\ + :F9=\E[18;2~:FA=\E[19;2~:FB=\E[20;2~:FC=\E[21;2~:\ + :FD=\E[23;2~:FE=\E[24;2~:FF=\E[23~:FG=\E[24~:FH=\E[25~:\ + :FI=\E[26~:FJ=\E[28~:FK=\E[29~:FL=\E[31~:FM=\E[32~:\ + :FN=\E[33~:FO=\E[34~:FP=\E[35~:FQ=\E[36~:FR=\E[23;2~:\ + :FS=\E[24;2~:FT=\E[25;2~:FU=\E[26;2~:FV=\E[28;2~:\ + :FW=\E[29;2~:FX=\E[31;2~:FY=\E[32;2~:FZ=\E[33;2~:\ + :Fa=\E[34;2~:Fb=\E[35;2~:Fc=\E[36;2~:\ + :S6=USR_TERM\072vt420pcdos\072:k1=\E[11~:k2=\E[12~:\ + :k3=\E[13~:k4=\E[14~:k5=\E[15~:k6=\E[17~:k7=\E[18~:\ + :k8=\E[19~:k9=\E[20~:k;=\E[21~:kD=\177:kh=\E[H:\ + :..px=\EP1;1|%?%{16}%p1%>%t%{0}%e%{21}%p1%>%t%{1}%e%{25}%p1%>%t%{2}%e%{27}%p1%>%t%{3}%e%{30}%p1%>%t%{4}%e%{5}%;%p1%+%d/%p2%s\E\:tc=vt420: + +vt420pcdos|DEC VT420 w/PC for DOS Merge:\ + :li#25:\ + :S1=%?%p2%{19}%=%t\E\023\021%e%p2%{32}%<%t\E%p2%c%e%p2%{127}%=%t\E\177%e%p2%c%;:\ + :S4=\E[?1;2r\E[34h:\ + :S5=\E[?0;0r\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h:S6@:\ + :me=\E[m:sa@:\ + :tc=vt420pc: + +vt420f|DEC VT420 with VT kbd; VT400 mode; F1-F5 used as Fkeys:\ + :F1=\E[23~:F2=\E[24~:F3=\E[25~:F4=\E[26~:F5=\E[28~:\ + :F6=\E[29~:F7=\E[31~:F8=\E[32~:F9=\E[33~:FA=\E[34~:\ + :k1=\E[11~:k2=\E[12~:k3=\E[13~:k4=\E[14~:k5=\E[15~:\ + :k6=\E[17~:k7=\E[18~:k8=\E[19~:k9=\E[20~:k;=\E[21~:\ + :kD=\177:kh=\E[H:l1=\EOP:l2=\EOQ:l3=\EOR:l4=\EOS:\ + :tc=vt420: + +vt510|DEC VT510:\ + :tc=vt420: +vt510pc|DEC VT510 w/PC keyboard:\ + :tc=vt420pc: +vt510pcdos|DEC VT510 w/PC for DOS Merge:\ + :tc=vt420pcdos: + +# VT520/VT525 +# +# The VT520 is a monochrome text terminal capable of managing up to +# four independent sessions in the terminal. It has multiple ANSI +# emulations (VT520, VT420, VT320, VT220, VT100, VT PCTerm, SCO Console) +# and ASCII emulations (WY160/60, PCTerm, 50/50+, 150/120, TVI 950, +# 925 910+, ADDS A2). This terminfo data is for the ANSI emulations only. 
+# +# Terminal Set-Up is entered by pressing [F3], [Caps Lock]/[F3] or +# [Alt]/[Print Screen] depending upon which keyboard and which +# terminal mode is being used. If Set-Up has been disabled or +# assigned to an unknown key, Set-Up may be entered by pressing +# [F3] as the first key after power up, regardless of keyboard type. +# (vt520: I added / based on the init string, also :sc: -- esr) +# (untranslatable capabilities removed to fit entry within 1023 bytes) +vt520|DEC VT520:\ + :am:mi:xn:xo:\ + :co#80:li#24:vt#3:\ + :*6=\E[4~:@0=\E[1~:RA=\E[?7l:\ + :S5=\E[?0;0r\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h:\ + :SA=\E[?7h:\ + :ac=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~:\ + :ae=\E(B:al=\E[L:as=\E(0:bl=^G:cd=\E[J:ce=\E[K:\ + :cl=\E[H\E[2J:cm=\E[%i%d;%dH:cr=^M:cs=\E[%i%d;%dr:\ + :dc=\E[P:dl=\E[M:do=\E[B:ei=\E[4l:ho=\E[H:\ + :i2=\E[?67h\E[64;1"p:if=/usr/share/tabset/vt300:\ + :im=\E[4h:is=\E[1;24r\E[24;1H:k0=\E[29~:k1=\EOP:k2=\EOQ:\ + :k3=\EOR:k4=\EOS:k5=\E[17~:k6=\E[18~:k7=\E[19~:k8=\E[20~:\ + :k9=\E[21~:k;=\E[29~:kD=\E[3~:kI=\E[2~:kN=\E[6~:kP=\E[5~:\ + :kb=^H:kd=\E[B:kl=\E[D:kr=\E[C:ku=\E[A:le=^H:mb=\E[5m:\ + :md=\E[1m:me=\E[m:mr=\E[7m:nd=\E[C:r3=\E[?67h\E[64;1"p:\ + :rc=\E8:rf=/usr/share/tabset/vt300:sc=\E7:se=\E[m:sf=\ED:\ + :so=\E[7m:sr=\EM:ta=^I:ue=\E[m:up=\E[A:us=\E[4m: + +# (vt525: I added / based on the init string; +# removed :se:=\E[m, :ue:=\E[m, added :sc: -- esr) +# (untranslatable capabilities removed to fit entry within 1023 bytes) +vt525|DEC VT525:\ + :am:mi:xn:xo:\ + :co#80:li#24:vt#3:\ + :*6=\E[4~:@0=\E[1~:RA=\E[?7l:\ + :S5=\E[?0;0r\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h:\ + :SA=\E[?7h:\ + :ac=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~:\ + :ae=\E(B:al=\E[L:as=\E(0:bl=^G:cd=\E[J:ce=\E[K:\ + :cl=\E[H\E[2J:cm=\E[%i%d;%dH:cr=^M:cs=\E[%i%d;%dr:\ + :dc=\E[P:dl=\E[M:do=\E[B:ei=\E[4l:ho=\E[H:\ + :i2=\E[?67h\E[64;1"p:if=/usr/share/tabset/vt300:\ + :im=\E[4h:is=\E[1;24r\E[24;1H:k0=\E[29~:k1=\EOP:k2=\EOQ:\ + 
:k3=\EOR:k4=\EOS:k5=\E[17~:k6=\E[18~:k7=\E[19~:k8=\E[20~:\ + :k9=\E[21~:k;=\E[29~:kD=\E[3~:kI=\E[2~:kN=\E[6~:kP=\E[5~:\ + :kb=^H:kd=\E[B:kl=\E[D:kr=\E[C:ku=\E[A:le=^H:mb=\E[5m:\ + :md=\E[1m:me=\E[m:mr=\E[7m:nd=\E[C:r3=\E[?67h\E[64;1"p:\ + :rc=\E8:rf=/usr/share/tabset/vt300:sc=\E7:se=\E[m:sf=\ED:\ + :so=\E[7m:sr=\EM:ta=^I:ue=\E[m:up=\E[A:us=\E[4m: diff --git a/tests/examplefiles/terminfo b/tests/examplefiles/terminfo new file mode 100644 index 00000000..2b68d035 --- /dev/null +++ b/tests/examplefiles/terminfo @@ -0,0 +1,1445 @@ +######## This example from excerpt of : +# +# Version 11.0.1 +# $Date: 2000/03/02 15:51:11 $ +# terminfo syntax +# + +######## ANSI, UNIX CONSOLE, AND SPECIAL TYPES +# +# This section describes terminal classes and brands that are still +# quite common. +# + +#### Specials +# +# Special "terminals". These are used to label tty lines when you don't +# know what kind of terminal is on it. The characteristics of an unknown +# terminal are the lowest common denominator - they look about like a ti 700. +# + +dumb|80-column dumb tty, + am, + cols#80, + bel=^G, cr=^M, cud1=^J, ind=^J, +unknown|unknown terminal type, + gn, use=dumb, +lpr|printer|line printer, + hc, os, + cols#132, lines#66, + bel=^G, cr=^M, cub1=^H, cud1=^J, ff=^L, ind=^J, +glasstty|classic glass tty interpreting ASCII control characters, + am, + cols#80, + bel=^G, clear=^L, cr=^M, cub1=^H, cud1=^J, ht=^I, kcub1=^H, + kcud1=^J, nel=^M^J, +vanilla, + bel=^G, cr=^M, cud1=^J, ind=^J, + +#### ANSI.SYS/ISO 6429/ECMA-48 Capabilities +# +# See the end-of-file comment for more on these. +# + +# ANSI capabilities are broken up into pieces, so that a terminal +# implementing some ANSI subset can use many of them. 
+ansi+local1, + cub1=\E[D, cud1=\E[B, cuf1=\E[C, cuu1=\E[A, +ansi+local, + cub=\E[%p1%dD, cud=\E[%p1%dB, cuf=\E[%p1%dC, + cuu=\E[%p1%dA, + use=ansi+local1, +ansi+tabs, + cbt=\E[Z, ht=^I, hts=\EH, tbc=\E[2g, +ansi+inittabs, + it#8, use=ansi+tabs, +ansi+erase, + clear=\E[H\E[J, ed=\E[J, el=\E[K, +ansi+rca, + hpa=\E[%p1%{1}%+%dG, vpa=\E[%p1%{1}%+%dd, +ansi+cup, + cup=\E[%i%p1%d;%p2%dH, home=\E[H, +ansi+rep, + rep=%p1%c\E[%p2%{1}%-%db, +ansi+idl1, + dl1=\E[M, il1=\E[L, +ansi+idl, + dl=\E[%p1%dM, il=\E[%p1%dL, use=ansi+idl1, +ansi+idc, + dch1=\E[P, ich=\E[%p1%d@, ich1=\E[@, rmir=\E6, smir=\E6, +ansi+arrows, + kbs=^H, kcub1=\E[D, kcud1=\E[B, kcuf1=\E[C, kcuu1=\E[A, + khome=\E[H, +ansi+sgr|ansi graphic renditions, + blink=\E[5m, invis=\E[8m, rev=\E[7m, sgr0=\E[0m, +ansi+sgrso|ansi standout only, + rmso=\E[m, smso=\E[7m, +ansi+sgrul|ansi underline only, + rmul=\E[m, smul=\E[4m, +ansi+sgrbold|ansi graphic renditions; assuming terminal has bold; not dim, + bold=\E[1m, + sgr=\E[%?%p1%t7;%;%?%p2%t4;%;%?%p3%t7;%;%?%p4%t5;%;%?%p6%t1;%;m, use=ansi+sgr, use=ansi+sgrso, use=ansi+sgrul, +ansi+sgrdim|ansi graphic renditions; assuming terminal has dim; not bold, + dim=\E[2m, + sgr=\E[%?%p1%t7;%;%?%p2%t4;%;%?%p3%t7;%;%?%p4%t5;%;%?%p5%t2;%;m, use=ansi+sgr, use=ansi+sgrso, use=ansi+sgrul, +ansi+pp|ansi printer port, + mc0=\E[0i, mc4=\E[4i, mc5=\E[5i, +ansi+csr|ansi scroll-region plus cursor save & restore, + csr=\E[%i%p1%d;%p2%dr, rc=\E8, sc=\E7, + +# The IBM PC alternate character set. Plug this into any Intel console entry. +# We use \E[11m for rmacs rather than \E[12m so the string can use the +# ROM graphics for control characters such as the diamond, up- and down-arrow. +# This works with the System V, Linux, and BSDI consoles. It's a safe bet this +# will work with any Intel console, they all seem to have inherited \E[11m +# from the ANSI.SYS de-facto standard. 
+klone+acs|alternate character set for ansi.sys displays, + acsc=+\020\,\021-\030.^Y0\333`\004a\261f\370g\361h\260j\331k\277l\332m\300n\305o~p\304q\304r\304s_t\303u\264v\301w\302x\263y\363z\362{\343|\330}\234~\376, + rmacs=\E[10m, smacs=\E[11m, + +# Highlight controls corresponding to the ANSI.SYS standard. Most +# console drivers for Intel boxes obey these. Makes the same assumption +# about \E[11m as klone+acs. True ANSI/ECMA-48 would have , +# , but this isn't a documented feature of ANSI.SYS. +klone+sgr|attribute control for ansi.sys displays, + blink=\E[5m, bold=\E[1m, invis=\E[8m, rev=\E[7m, + rmpch=\E[10m, rmso=\E[m, rmul=\E[m, + sgr=\E[0;10%?%p1%t;7%;%?%p2%t;4%;%?%p3%t;7%;%?%p4%t;5%;%?%p6%t;1%;%?%p7%t;8%;%?%p9%t;11%;m, + sgr0=\E[0;10m, smpch=\E[11m, smso=\E[7m, smul=\E[4m, + use=klone+acs, + +# Highlight controls corresponding to the ANSI.SYS standard. *All* +# console drivers for Intel boxes obey these. Does not assume \E[11m will +# work; uses \E[12m instead, which is pretty bulletproof but loses you the ACS +# diamond and arrow characters under curses. +klone+sgr-dumb|attribute control for ansi.sys displays (no ESC [ 11 m), + blink=\E[5m, bold=\E[1m, invis=\E[8m, rev=\E[7m, rmso=\E[m, + rmul=\E[m, + sgr=\E[0;10%?%p1%t;7%;%?%p2%t;4%;%?%p3%t;7%;%?%p4%t;5%;%?%p6%t;1%;%?%p7%t;8%;%?%p9%t;12%;m, + sgr0=\E[0;10m, smacs=\E[12m, smso=\E[7m, smul=\E[4m, + use=klone+acs, + +# KOI8-R (RFC1489) acs (alternate character set) +# From: Qing Long , 24 Feb 1996. +klone+koi8acs|alternate character set for ansi.sys displays with KOI8 charset, + acsc=+\020\,\021-\036.^_0\215`\004a\237f\234g\232h\222i\220j\205k\203l\202m\204n\212o\213p\216q\0r\217s\214t\206u\207v\210w\211x\201y\230z\231{\267|\274}L~\225, + rmacs=\E[10m, smacs=\E[11m, + +# ANSI.SYS color control. The setab/setaf caps depend on the coincidence +# between SVr4/XPG4's color numbers and ANSI.SYS attributes. 
Here are longer +# but equivalent strings that don't rely on that coincidence: +# setb=\E[4%?%p1%{1}%=%t4%e%p1%{3}%=%t6%e%p1%{4}%=%t1%e%p1%{6}%=%t3%e%p1%d%;m, +# setf=\E[3%?%p1%{1}%=%t4%e%p1%{3}%=%t6%e%p1%{4}%=%t1%e%p1%{6}%=%t3%e%p1%d%;m, +# The DOS 5 manual asserts that these sequences meet the ISO 6429 standard. +# They match a subset of ECMA-48. +klone+color|color control for ansi.sys and ISO6429-compatible displays, + colors#8, ncv#3, pairs#64, + op=\E[37;40m, setab=\E[4%p1%dm, setaf=\E[3%p1%dm, + +# This is better than klone+color, it doesn't assume white-on-black as the +# default color pair, but many `ANSI' terminals don't grok the cap. +ecma+color|color control for ECMA-48-compatible terminals, + colors#8, ncv#3, pairs#64, + op=\E[39;49m, setab=\E[4%p1%dm, setaf=\E[3%p1%dm, + +# Attribute control for ECMA-48-compatible terminals +ecma+sgr|attribute capabilities for true ECMA-48 terminals, + rmso=\E[27m, rmul=\E[24m, + use=klone+sgr, + +# For comparison, here are all the capabilities implied by the Intel +# Binary Compatibility Standard (level 2) that fit within terminfo. +# For more detail on this rather pathetic standard, see the comments +# near the end of this file. +ibcs2|Intel Binary Compatibility Standard prescriptions, + cbt=\E[Z, clear=\Ec, cub=\E[%p1%dD, cud=\E[%p1%dB, + cuf=\E[%p1%dC, cup=\E[%i%p1%d;%p2%dH, cuu=\E[%p1%dA, + dch=\E[%p1%dP, dispc=\E=%p1%dg, ech=\E[%p1%dX, + hpa=\E[%i%p1%dG, hts=\EH, ich=\E[%p1%d@, il=\E[%p1%dL, + indn=\E[%p1%dS, rc=\E7, rin=\E[%p1%dT, rmam=\E[?7l, sc=\E7, + smam=\E[?7h, tbc=\E[g, vpa=\E[%i%p1%dd, + +#### ANSI/ECMA-48 terminals and terminal emulators +# +# See near the end of this file for details on ANSI conformance. +# Don't mess with these entries! Lots of other entries depend on them! +# +# This section lists entries in a least-capable to most-capable order. +# if you're in doubt about what `ANSI' matches yours, try them in that +# order and back off from the first that breaks. 
+ +# ansi-mr is for ANSI terminals with ONLY relative cursor addressing +# and more than one page of memory. It uses local motions instead of +# direct cursor addressing, and makes almost no assumptions. It does +# assume auto margins, no padding and/or xon/xoff, and a 24x80 screen. +ansi-mr|mem rel cup ansi, + am, xon, + cols#80, lines#24, use=vanilla, use=ansi+erase, + use=ansi+local1, + +# ansi-mini is a bare minimum ANSI terminal. This should work on anything, but +# beware of screen size problems and memory relative cursor addressing. +ansi-mini|minimum ansi standard terminal, + am, xon, + cols#80, lines#24, use=vanilla, use=ansi+cup, + use=ansi+erase, + +# ansi-mtabs adds relative addressing and minimal tab support +ansi-mtabs|any ansi terminal with pessimistic assumptions, + it#8, + ht=^I, use=ansi+local1, use=ansi-mini, + +# ANSI X3.64 from emory!mlhhh (Hugh Hansard) via BRL +# +# The following is an entry for the full ANSI 3.64 (1977). It lacks +# padding, but most terminals using the standard are "fast" enough +# not to require any -- even at 9600 bps. If you encounter problems, +# try including the padding specifications. +# +# Note: the :as: and :ae: specifications are not implemented here, for +# the available termcap documentation does not make clear WHICH alternate +# character set to specify. ANSI 3.64 seems to make allowances for several. +# Please make the appropriate adjustments to fit your needs -- that is +# if you will be using alternate character sets. +# +# There are very few terminals running the full ANSI 3.64 standard, +# so I could only test this entry on one verified terminal (Visual 102). +# I would appreciate the results on other terminals sent to me. +# +# Please report comments, changes, and problems to: +# +# U.S. MAIL: Hugh Hansard +# Box: 22830 +# Emory University +# Atlanta, GA. 30322. +# +# USENET {akgua,msdc,sb1,sb6,gatech}!emory!mlhhh. 
+# +# (Added vt100 , to quiet a tic warning --esr) +ansi77|ansi 3.64 standard 1977 version, + am, mir, + cols#80, it#8, lines#24, + bel=^G, clear=\E[;H\E[2J, cr=^M, csr=\E[%i%p1%d;%p2%dr, + cub1=^H, cud1=\E[B, cuf1=\E[C, cup=\E[%i%p1%d;%p2%dH, + cuu1=\E[A, dch1=\E[P, dl1=\E[M$<5*/>, ed=\E[J, el=\E[K, + home=\E[H, ht=^I, il1=\E[L$<5*/>, ind=\ED, kbs=^H, + kcub1=\E[D, kcud1=\E[B, kcuf1=\E[C, kcuu1=\E[A, kf1=\EOP, + kf2=\EOR, kf4=\EOS, khome=\E[H, nel=^M\ED, rc=\E8, ri=\EM, + rmir=\E[4l, rmso=\E[m, rmul=\E[m, sc=\E7, smir=\E[4h, + smso=\E[7m, smul=\E[4m, + +# Procomm and some other ANSI emulations don't recognize all of the ANSI- +# standard capabilities. This entry deletes , , , , and +# / capabilities, forcing curses to use repetitions of , +# , and . Also deleted and , as QModem up to +# 5.03 doesn't recognize these. Finally, we delete and , which seem +# to confuse many emulators. On the other hand, we can count on these programs +# doing //. Older versions of this entry featured +# , but now seems to be more common under +# ANSI.SYS influence. +# From: Eric S. Raymond Oct 30 1995 +pcansi-m|pcansi-mono|ibm-pc terminal programs claiming to be ansi (mono mode), + am, mir, msgr, + cols#80, it#8, lines#24, + bel=^G, cbt=\E[Z, clear=\E[H\E[J, cr=^M, cub1=\E[D, + cud1=\E[B, cuf1=\E[C, cup=\E[%i%p1%d;%p2%dH, cuu1=\E[A, + dch1=\E[P, dl1=\E[M, ed=\E[J, el=\E[K, home=\E[H, ht=^I, + hts=\EH, il1=\E[L, ind=^J, kbs=^H, kcub1=\E[D, kcud1=\E[B, + kcuf1=\E[C, kcuu1=\E[A, khome=\E[H, tbc=\E[2g, + use=klone+sgr-dumb, +pcansi-25-m|pcansi25m|ibm-pc terminal programs with 25 lines (mono mode), + lines#25, use=pcansi-m, +pcansi-33-m|pcansi33m|ibm-pc terminal programs with 33 lines (mono mode), + lines#33, use=pcansi-m, +pcansi-43-m|ansi43m|ibm-pc terminal programs with 43 lines (mono mode), + lines#43, use=pcansi-m, +# The color versions. All PC emulators do color... 
+pcansi|ibm-pc terminal programs claiming to be ansi, + use=klone+color, use=pcansi-m, +pcansi-25|pcansi25|ibm-pc terminal programs with 25 lines, + lines#25, use=pcansi, +pcansi-33|pcansi33|ibm-pc terminal programs with 33 lines, + lines#33, use=pcansi, +pcansi-43|pcansi43|ibm-pc terminal programs with 43 lines, + lines#43, use=pcansi, + +# ansi-m -- full ANSI X3.64 with ANSI.SYS-compatible attributes, no color. +# If you want pound signs rather than dollars, replace `B' with `A' +# in the , , , and capabilities. +# From: Eric S. Raymond Nov 6 1995 +ansi-m|ansi-mono|ANSI X3.64-1979 terminal with ANSI.SYS compatible attributes, + mc5i, + cub=\E[%p1%dD, cud=\E[%p1%dB, cuf=\E[%p1%dC, + cuu=\E[%p1%dA, dch=\E[%p1%dP, dl=\E[%p1%dM, + ech=\E[%p1%dX, el1=\E[1K, hpa=\E[%i%p1%dG, ht=\E[I, + ich=\E[%p1%d@, il=\E[%p1%dL, indn=\E[%p1%dS, kbs=^H, + kcbt=\E[Z, kcub1=\E[D, kcud1=\E[B, kcuf1=\E[C, kcuu1=\E[A, + kich1=\E[L, mc4=\E[4i, mc5=\E[5i, nel=\r\E[S, + rep=%p1%c\E[%p2%{1}%-%db, rin=\E[%p1%dT, s0ds=\E(B, + s1ds=\E)B, s2ds=\E*B, s3ds=\E+B, tbc=\E[2g, + vpa=\E[%i%p1%dd, use=pcansi-m, + +# ansi -- this terminfo expresses the largest subset of X3.64 that will fit in +# standard terminfo. Assumes ANSI.SYS-compatible attributes and color. +# From: Eric S. Raymond Nov 6 1995 +ansi|ansi/pc-term compatible with color, + u6=\E[%i%d;%dR, u7=\E[6n, u8=\E[?%[;0123456789]c, + u9=\E[c, + use=ecma+color, use=klone+sgr, use=ansi-m, + +# ansi-generic is a vanilla ANSI terminal. This is assumed to implement +# all the normal ANSI stuff with no extensions. It assumes +# insert/delete line/char is there, so it won't work with +# vt100 clones. It assumes video attributes for bold, blink, +# underline, and reverse, which won't matter much if the terminal +# can't do some of those. Padding is assumed to be zero, which +# shouldn't hurt since xon/xoff is assumed. 
+ansi-generic|generic ansi standard terminal, + am, xon, + cols#80, lines#24, use=vanilla, use=ansi+csr, use=ansi+cup, + use=ansi+rca, use=ansi+erase, use=ansi+tabs, + use=ansi+local, use=ansi+idc, use=ansi+idl, use=ansi+rep, + use=ansi+sgrbold, use=ansi+arrows, + +#### Linux consoles +# + +# This entry is good for the 1.2.13 or later version of the Linux console. +# +# *************************************************************************** +# * * +# * WARNING: * +# * Linuxes come with a default keyboard mapping kcbt=^I. This entry, in * +# * response to user requests, assumes kcbt=\E[Z, the ANSI/ECMA reverse-tab * +# * character. Here are the keymap replacement lines that will set this up: * +# * * +# keycode 15 = Tab Tab +# alt keycode 15 = Meta_Tab +# shift keycode 15 = F26 +# string F26 ="\033[Z" +# * * +# * This has to use a key slot which is unfortunate (any unused one will * +# * do, F26 is the higher-numbered one). The change ought to be built * +# * into the kernel tables. * +# * * +# *************************************************************************** +# +# The 1.3.x kernels add color-change capabilities; if yours doesn't have this +# and it matters, turn off . The %02x escape used to implement this is +# not back-portable to SV curses and not supported in ncurses versions before +# 1.9.9. All linux kernels since 1.2.13 (at least) set the screen size +# themselves; this entry assumes that capability. +# +# This entry is good for the 1.2.13 or later version of the Linux console. +# +# *************************************************************************** +# * * +# * WARNING: * +# * Linuxes come with a default keyboard mapping kcbt=^I. This entry, in * +# * response to user requests, assumes kcbt=\E[Z, the ANSI/ECMA reverse-tab * +# * character. 
Here are the keymap replacement lines that will set this up: * +# * * +# keycode 15 = Tab Tab +# alt keycode 15 = Meta_Tab +# shift keycode 15 = F26 +# string F26 ="\033[Z" +# * * +# * This has to use a key slot which is unfortunate (any unused one will * +# * do, F26 is the higher-numbered one). The change ought to be built * +# * into the kernel tables. * +# * * +# *************************************************************************** +# +# The 1.3.x kernels add color-change capabilities; if yours doesn't have this +# and it matters, turn off . The %02x escape used to implement this is +# not back-portable to SV curses and not supported in ncurses versions before +# 1.9.9. All linux kernels since 1.2.13 (at least) set the screen size +# themselves; this entry assumes that capability. +# +# The 2.2.x kernels add a private mode that sets the cursor type; use that to +# get a block cursor for cvvis. +# reported by Frank Heckenbach . +linux|linux console, + am, bce, eo, mir, msgr, xenl, xon, + it#8, ncv#2, + acsc=+\020\,\021-\030.^Y0\333`\004a\261f\370g\361h\260i\316j\331k\277l\332m\300n\305o~p\304q\304r\304s_t\303u\264v\301w\302x\263y\363z\362{\343|\330}\234~\376, + bel=^G, civis=\E[?25l\E[?1c, clear=\E[H\E[J, + cnorm=\E[?25h\E[?0c, cr=^M, csr=\E[%i%p1%d;%p2%dr, + cub1=^H, cud1=^J, cuf1=\E[C, cup=\E[%i%p1%d;%p2%dH, + cuu1=\E[A, cvvis=\E[?25h\E[?8c, dch=\E[%p1%dP, dch1=\E[P, + dim=\E[2m, dl=\E[%p1%dM, dl1=\E[M, ech=\E[%p1%dX, ed=\E[J, + el=\E[K, el1=\E[1K, flash=\E[?5h\E[?5l$<200/>, home=\E[H, + hpa=\E[%i%p1%dG, ht=^I, hts=\EH, ich=\E[%p1%d@, ich1=\E[@, + il=\E[%p1%dL, il1=\E[L, ind=^J, kb2=\E[G, kbs=\177, + kcbt=\E[Z, kcub1=\E[D, kcud1=\E[B, kcuf1=\E[C, kcuu1=\E[A, + kdch1=\E[3~, kend=\E[4~, kf1=\E[[A, kf10=\E[21~, + kf11=\E[23~, kf12=\E[24~, kf13=\E[25~, kf14=\E[26~, + kf15=\E[28~, kf16=\E[29~, kf17=\E[31~, kf18=\E[32~, + kf19=\E[33~, kf2=\E[[B, kf20=\E[34~, kf3=\E[[C, kf4=\E[[D, + kf5=\E[[E, kf6=\E[17~, kf7=\E[18~, kf8=\E[19~, kf9=\E[20~, + khome=\E[1~, 
kich1=\E[2~, knp=\E[6~, kpp=\E[5~, kspd=^Z, + nel=^M^J, rc=\E8, rev=\E[7m, ri=\EM, rmir=\E[4l, rmso=\E[27m, + rmul=\E[24m, rs1=\Ec\E]R, sc=\E7, + sgr=\E[0;10%?%p1%t;7%;%?%p2%t;4%;%?%p3%t;7%;%?%p4%t;5%;%?%p5%t;2%;%?%p6%t;1%;%?%p7%t;8%;%?%p9%t;11%;m, + smir=\E[4h, smul=\E[4m, tbc=\E[3g, u6=\E[%i%d;%dR, + u7=\E[6n, u8=\E[?6c, u9=\E[c, vpa=\E[%i%p1%dd, + use=klone+sgr, use=ecma+color, +linux-m|Linux console no color, + colors@, pairs@, + setab@, setaf@, setb@, setf@, use=linux, +linux-c-nc|linux console 1.3.x hack for ncurses only, + ccc, + initc=\E]P%p1%x%p2%{255}%*%{1000}%/%02x%p3%{255}%*%{1000}%/%02x%p4%{255}%*%{1000}%/%02x, + oc=\E]R, + use=linux, +# From: Dennis Henriksen , 9 July 1996 +linux-c|linux console 1.3.6+ with private palette for each virtual console, + ccc, + colors#8, pairs#64, + initc=\E]P%?%p1%{9}%>%t%p1%{10}%-%'a'%+%c%e%p1%d%;%p2%{255}%&%Pr%gr%{16}%/%Px%?%gx%{9}%>%t%gx%{10}%-%'A'%+%c%e%gx%d%;%gr%{15}%&%Px%?%gx%{9}%>%t%gx%{10}%-%'A'%+%c%e%gx%d%;%p3%{255}%&%Pr%gr%{16}%/%Px%?%gx%{9}%>%t%gx%{10}%-%'A'%+%c%e%gx%d%;%gr%{15}%&%Px%?%gx%{9}%>%t%gx%{10}%-%'A'%+%c%e%gx%d%;%p4%{255}%&%Pr%gr%{16}%/%Px%?%gx%{9}%>%t%gx%{10}%-%'A'%+%c%e%gx%d%;%gr%{15}%&%Px%?%gx%{9}%>%t%gx%{10}%-%'A'%+%c%e%gx%d%;, + oc=\E]R, + use=linux, + +# See the note on ICH/ICH1 VERSUS RMIR/SMIR near the end of file +linux-nic|linux with ich/ich1 suppressed for non-curses programs, + ich@, ich1@, + use=linux, + +# This assumes you have used setfont(8) to load one of the Linux koi8-r fonts. +# acsc entry from Pavel Roskin" , 29 Sep 1997. +linux-koi8|linux with koi8 alternate character set, + acsc=+\020\,\021-\030.^Y0\215`\004a\221f\234g\237h\220i\276j\205k\203l\202m\204n\212o~p\0q\0r\0s_t\206u\207v\211w\210x\201y\230z\231{\267|\274~\224, + use=linux, use=klone+koi8acs, + +# Another entry for KOI8-r with Qing Long's acsc. +# (which one better complies with the standard?) 
+linux-koi8r|linux with koi8-r alternate character set, + use=linux, use=klone+koi8acs, + +# Entry for the latin1 and latin2 fonts +linux-lat|linux with latin1 or latin2 alternate character set, + acsc=+\020\,\021-\030.^Y0\333`\004a\013f\370g\361h\260i\316j\211k\214l\206m\203n\305o~p\304q\212r\304s_t\207u\215v\301w\302x\205y\363z\362{\343|\330}\234~\376, + use=linux, + +#### NetBSD consoles +# +# pcvt termcap database entries (corresponding to release 3.31) +# Author's last edit-date: [Fri Sep 15 20:29:10 1995] +# +# (For the terminfo master file, I translated these into terminfo syntax. +# Then I dropped all the pseudo-HP entries. we don't want and can't use +# the :Xs: flag. Then I split :is: into a size-independent and a +# size-dependent . Finally, I added / -- esr) + +# NOTE: has been taken out of this entry. for reference, it should +# be . For discussion, see ICH/ICH1 VERSUS RMIR/SMIR below. +# (esr: added and to resolve NetBSD Problem Report #4583) +pcvtXX|pcvt vt200 emulator (DEC VT220), + am, km, mir, msgr, xenl, + it#8, vt#3, + acsc=++\,\,--..00``aaffgghhiijjkkllmmnnooppqqrrssttuuvvwwxxyyzz~~, + bel=^G, blink=\E[5m, bold=\E[1m, civis=\E[?25l, + clear=\E[H\E[J, cnorm=\E[?25h, cr=^M, + csr=\E[%i%p1%d;%p2%dr, cub=\E[%p1%dD, cub1=^H, + cud=\E[%p1%dB, cud1=\E[B, cuf=\E[%p1%dC, cuf1=\E[C, + cup=\E[%i%p1%d;%p2%dH, cuu=\E[%p1%dA, cuu1=\E[A, + dch=\E[%p1%dP, dch1=\E[P, dl=\E[%p1%dM, dl1=\E[M, ed=\E[J, + el=\E[K, el1=\E[1K, home=\E[H, ht=^I, hts=\EH, ich=\E[%p1%d@, + il=\E[%p1%dL, il1=\E[L, ind=\ED, indn=\E[%p1%dS, + is1=\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h, kbs=\177, + kcub1=\EOD, kcud1=\EOB, kcuf1=\EOC, kcuu1=\EOA, + kdch1=\E[3~, kf1=\E[17~, kf2=\E[18~, kf3=\E[19~, + kf4=\E[20~, kf5=\E[21~, kf6=\E[23~, kf7=\E[24~, kf8=\E[25~, + khome=\E[1~, kich1=\E[2~, kll=\E[4~, knp=\E[6~, kpp=\E[5~, + nel=\EE, rc=\E8, rev=\E[7m, rf=/usr/share/tabset/vt100, + ri=\EM, rin=\E[%p1%dT, rmacs=\E(B, rmam=\E[?7l, rmir=\E[4l, + rmkx=\E[?1l\E>, rmso=\E[27m, rmul=\E[24m, + 
rs1=\Ec\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h, sc=\E7, + sgr0=\E[m, smacs=\E(0, smam=\E[?7h, smir=\E[4h, + smkx=\E[?1h\E=, smso=\E[7m, smul=\E[4m, tbc=\E[3g, + +# NetBSD/FreeBSD vt220 terminal emulator console (pc keyboard & monitor) +# termcap entries for pure VT220-Emulation and 25, 28, 35, 40, 43 and +# 50 lines entries; 80 columns +pcvt25|dec vt220 emulation with 25 lines, + cols#80, lines#25, + is2=\E[1;25r\E[25;1H, use=pcvtXX, +pcvt28|dec vt220 emulation with 28 lines, + cols#80, lines#28, + is2=\E[1;28r\E[28;1H, use=pcvtXX, +pcvt35|dec vt220 emulation with 35 lines, + cols#80, lines#35, + is2=\E[1;35r\E[35;1H, use=pcvtXX, +pcvt40|dec vt220 emulation with 40 lines, + cols#80, lines#40, + is2=\E[1;40r\E[40;1H, use=pcvtXX, +pcvt43|dec vt220 emulation with 43 lines, + cols#80, lines#43, + is2=\E[1;43r\E[43;1H, use=pcvtXX, +pcvt50|dec vt220 emulation with 50 lines, + cols#80, lines#50, + is2=\E[1;50r\E[50;1H, use=pcvtXX, + +# NetBSD/FreeBSD vt220 terminal emulator console (pc keyboard & monitor) +# termcap entries for pure VT220-Emulation and 25, 28, 35, 40, 43 and +# 50 lines entries; 132 columns +pcvt25w|dec vt220 emulation with 25 lines and 132 cols, + cols#132, lines#25, + is2=\E[1;25r\E[25;1H, use=pcvtXX, +pcvt28w|dec vt220 emulation with 28 lines and 132 cols, + cols#132, lines#28, + is2=\E[1;28r\E[28;1H, use=pcvtXX, +pcvt35w|dec vt220 emulation with 35 lines and 132 cols, + cols#132, lines#35, + is2=\E[1;35r\E[35;1H, use=pcvtXX, +pcvt40w|dec vt220 emulation with 40 lines and 132 cols, + cols#132, lines#40, + is2=\E[1;40r\E[40;1H, use=pcvtXX, +pcvt43w|dec vt220 emulation with 43 lines and 132 cols, + cols#132, lines#43, + is2=\E[1;43r\E[43;1H, use=pcvtXX, +pcvt50w|dec vt220 emulation with 50 lines and 132 cols, + cols#132, lines#50, + is2=\E[1;50r\E[50;1H, use=pcvtXX, + +# Terminfo entries to enable the use of the ncurses library in colour on a +# NetBSD-arm32 console (only tested on a RiscPC). 
+# Created by Dave Millen 22.07.98 +# modified codes for setf/setb to setaf/setab, then to klone+color, corrected +# typo in invis - TD +arm100|arm100-am|Arm(RiscPC) ncurses compatible (for 640x480), + am, bce, msgr, xenl, xon, + cols#80, it#8, lines#30, + acsc=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~, + bel=^G, blink=\E[5m$<2>, bold=\E[1m$<2>, + clear=\E[H\E[J$<50>, cr=^M, csr=\E[%i%p1%d;%p2%dr, + cub=\E[%p1%dD, cub1=^H, cud=\E[%p1%dB, cud1=^J, + cuf=\E[%p1%dC, cuf1=\E[C$<2>, + cup=\E[%i%p1%d;%p2%dH$<5>, cuu=\E[%p1%dA, + cuu1=\E[A$<2>, ed=\E[J$<50>, el=\E[K$<3>, el1=\E[1K$<3>, + enacs=\E(B\E)0, home=\E[H, ht=^I, hts=\EH, ind=^J, + invis=\E[8m$<2>, ka1=\E[q, ka3=\E[s, kb2=\E[r, kbs=^H, + kc1=\E[p, kc3=\E[n, kcub1=\E[D, kcud1=\E[B, kcuf1=\E[C, + kcuu1=\E[A, kent=\E[M, kf0=\E[y, kf1=\E[P, kf10=\E[x, + kf2=\E[Q, kf3=\E[R, kf4=\E[S, kf5=\E[t, kf6=\E[u, kf7=\E[v, + kf8=\E[l, kf9=\E[w, rc=\E8, rev=\E[6m$<2>, ri=\EM$<5>, + rmacs=^O, rmam=\E[?7l, rmkx=\E[?1l\E>, rmso=\E[m$<2>, + rmul=\E[m$<2>, rs2=\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h, + sc=\E7, + sgr=\E[0%?%p1%p6%|%t;1%;%?%p2%t;4%;%?%p1%p3%|%t;7%;%?%p4%t;5%;m%?%p9%t\016%e\017%;, + sgr0=\E[m\017$<2>, smacs=^N, smam=\E[?7h, smkx=\E[?1h\E=, + smso=\E[7m$<2>, smul=\E[4m$<2>, tbc=\E[3g, + use=ecma+sgr, use=klone+color, +arm100-w|arm100-wam|Arm(RiscPC) ncurses compatible (for 1024x768), + cols#132, lines#50, use=arm100, + +# NetBSD/x68k console vt200 emulator. This port runs on a 68K machine +# manufactured by Sharp for the Japanese market. +# From Minoura Makoto , 12 May 1996 +x68k|x68k-ite|NetBSD/x68k ITE, + cols#96, lines#32, + kclr=\E[9~, khlp=\E[28~, use=vt220, + +# : +# Entry for the DNARD OpenFirmware console, close to ANSI but not quite. +# +# (still unfinished, but good enough so far.) 
+ofcons, + bw, + cols#80, lines#30, + bel=^G, blink=\2337;2m, bold=\2331m, clear=^L, cr=^M, + cub=\233%p1%dD, cub1=\233D, cud=\233%p1%dB, cud1=\233B, + cuf=\233%p1%dC, cuf1=\233C, cup=\233%i%p1%d;%p2%dH, + cuu=\233%p1%dA, cuu1=\233A, dch=\233%p1%dP, dch1=\233P, + dim=\2332m, dl=\233%p1%dM, dl1=\233M, ed=\233J, el=\233K, + flash=^G, ht=^I, ich=\233%p1%d@, ich1=\233@, il=\233%p1%dL, + il1=\233L, ind=^J, invis=\2338m, kbs=^H, kcub1=\233D, + kcud1=\233B, kcuf1=\233C, kcuu1=\233A, kdch1=\233P, + kf1=\2330P, kf10=\2330M, kf2=\2330Q, kf3=\2330W, + kf4=\2330x, kf5=\2330t, kf6=\2330u, kf7=\2330q, kf8=\2330r, + kf9=\2330p, knp=\233/, kpp=\233?, nel=^M^J, rev=\2337m, + rmso=\2330m, rmul=\2330m, sgr0=\2330m, + +# NetBSD "wscons" emulator in vt220 mode +# These are micro-minimal and probably need to be redone for real +# after the manner of the pcvt entries. +wsvt25|NetBSD wscons in 25 line DEC VT220 mode, + cols#80, lines#25, use=vt220, + +wsvt25m|NetBSD wscons in 25 line DEC VT220 mode with Meta, + km, + cols#80, lines#25, use=vt220, + +# `rasterconsole' provided by 4.4BSD, NetBSD and OpenBSD on SPARC, and +# DECstation/pmax. +rcons|BSD rasterconsole, + use=sun-il, +# Color version of above. Color currently only provided by NetBSD. +rcons-color|BSD rasterconsole with ANSI color, + bce, + colors#8, pairs#64, + op=\E[m, setab=\E[4%dm, setaf=\E[3%dm, use=rcons, + +#### FreeBSD console entries +# +# From: Andrey Chernov 29 Mar 1996 +# Andrey Chernov maintains the FreeBSD termcap distributions. +# +# Note: Users of FreeBSD 2.1.0 and older versions must either upgrade +# or comment out the :cb: capability in the console entry. +# +# Alexander Lukyanov reports: +# I have seen FreeBSD-2.1.5R... The old el1 bug changed, but it is still there. +# Now el1 clears not only to the line beginning, but also a large chunk +# of previous line. But there is another bug - ech does not work at all. +# + +# for syscons +# common entry without semigraphics +# Bug: The capability resets attributes. 
+# Bug? The ech and el1 attributes appear to move the cursor in some cases; for +# instance el1 does if the cursor is moved to the right margin first. Removed +# by T.Dickey 97/5/3 (ech=\E[%p1%dX, el1=\E[1K) +# +# Setting colors turns off reverse; we cannot guarantee order, so use ncv. +# Note that this disables standout with color. +cons25w|ansiw|ansi80x25-raw|freebsd console (25-line raw mode), + am, bce, bw, eo, msgr, npc, + colors#8, cols#80, it#8, lines#25, ncv#21, pairs#64, + bel=^G, blink=\E[5m, bold=\E[1m, cbt=\E[Z, clear=\E[H\E[J, + cnorm=\E[=0C, cr=^M, cub=\E[%p1%dD, cub1=^H, cud=\E[%p1%dB, + cud1=\E[B, cuf=\E[%p1%dC, cuf1=\E[C, + cup=\E[%i%p1%d;%p2%dH, cuu=\E[%p1%dA, cuu1=\E[A, + cvvis=\E[=1C, dch=\E[%p1%dP, dch1=\E[P, dim=\E[30;1m, + dl=\E[%p1%dM, dl1=\E[M, ed=\E[J, el=\E[K, home=\E[H, + hpa=\E[%i%p1%d`, ht=^I, ich=\E[%p1%d@, ich1=\E[@, + il=\E[%p1%dL, il1=\E[L, ind=\E[S, indn=\E[%p1%dS, kb2=\E[E, + kbs=^H, kcbt=\E[Z, kcub1=\E[D, kcud1=\E[B, kcuf1=\E[C, + kcuu1=\E[A, kdch1=\177, kend=\E[F, kf1=\E[M, kf10=\E[V, + kf11=\E[W, kf12=\E[X, kf2=\E[N, kf3=\E[O, kf4=\E[P, kf5=\E[Q, + kf6=\E[R, kf7=\E[S, kf8=\E[T, kf9=\E[U, khome=\E[H, + kich1=\E[L, knp=\E[G, kpp=\E[I, nel=\E[E, op=\E[x, rev=\E[7m, + ri=\E[T, rin=\E[%p1%dT, rmso=\E[m, rs1=\E[x\E[m\Ec, + setab=\E[4%p1%dm, setaf=\E[3%p1%dm, sgr0=\E[m, + smso=\E[7m, vpa=\E[%i%p1%dd, +cons25|ansis|ansi80x25|freebsd console (25-line ansi mode), + acsc=-\030.^Y0\333`\004a\260f\370g\361h\261i\025j\331k\277l\332m\300n\305q\304t\303u\264v\301w\302x\263y\363z\362~\371, + use=cons25w, +cons25-m|ansis-mono|ansi80x25-mono|freebsd console (25-line mono ansi mode), + colors@, pairs@, + bold@, dim@, op@, rmul=\E[m, setab@, setaf@, smul=\E[4m, use=cons25, +cons30|ansi80x30|freebsd console (30-line ansi mode), + lines#30, use=cons25, +cons30-m|ansi80x30-mono|freebsd console (30-line mono ansi mode), + lines#30, use=cons25-m, +cons43|ansi80x43|freebsd console (43-line ansi mode), + lines#43, use=cons25, 
+cons43-m|ansi80x43-mono|freebsd console (43-line mono ansi mode), + lines#43, use=cons25-m, +cons50|ansil|ansi80x50|freebsd console (50-line ansi mode), + lines#50, use=cons25, +cons50-m|ansil-mono|ansi80x50-mono|freebsd console (50-line mono ansi mode), + lines#50, use=cons25-m, +cons60|ansi80x60|freebsd console (60-line ansi mode), + lines#60, use=cons25, +cons60-m|ansi80x60-mono|freebsd console (60-line mono ansi mode), + lines#60, use=cons25-m, +cons25r|pc3r|ibmpc3r|cons25-koi8-r|freebsd console w/koi8-r cyrillic, + acsc=-\030.^Y0\215`\004a\220f\234h\221i\025j\205k\203l\202m\204n\212q\0t\206u\207v\211w\210x\201y\230z\231~\225, + use=cons25w, +cons25r-m|pc3r-m|ibmpc3r-mono|cons25-koi8r-m|freebsd console w/koi8-r cyrillic (mono), + colors@, pairs@, + op@, rmul=\E[m, setab@, setaf@, smul=\E[4m, use=cons25r, +cons50r|cons50-koi8r|freebsd console w/koi8-r cyrillic (50 lines), + lines#50, use=cons25r, +cons50r-m|cons50-koi8r-m|freebsd console w/koi8-r cyrillic (50-line mono), + lines#50, use=cons25r-m, +cons60r|cons60-koi8r|freebsd console w/koi8-r cyrillic (60 lines), + lines#60, use=cons25r, +cons60r-m|cons60-koi8r-m|freebsd console w/koi8-r cyrillic (60-line mono), + lines#60, use=cons25r-m, +# ISO 8859-1 FreeBSD console +cons25l1|cons25-iso8859|freebsd console w/iso 8859-1 chars, + acsc=+\253\,\273-\030.\031`\201a\202f\207g\210i\247j\213k\214l\215m\216n\217o\220p\221q\222r\223s\224t\225u\226v\227w\230x\231y\232z\233~\237, + use=cons25w, +cons25l1-m|cons25-iso-m|freebsd console w/iso 8859-1 chars (mono), + colors@, pairs@, + bold@, dim@, op@, rmul=\E[m, setab@, setaf@, smul=\E[4m, use=cons25l1, +cons50l1|cons50-iso8859|freebsd console w/iso 8859-1 chars (50 lines), + lines#50, use=cons25l1, +cons50l1-m|cons50-iso-m|freebsd console w/iso 8859-1 chars (50-line mono), + lines#50, use=cons25l1-m, +cons60l1|cons60-iso|freebsd console w/iso 8859-1 chars (60 lines), + lines#60, use=cons25l1, +cons60l1-m|cons60-iso-m|freebsd console w/iso 8859-1 chars (60-line mono), + 
lines#60, use=cons25l1-m, + +#### 386BSD and BSD/OS Consoles +# + +# This was the original 386BSD console entry (I think). +# Some places it's named oldpc3|oldibmpc3. +# From: Alex R.N. Wetmore +origpc3|origibmpc3|IBM PC 386BSD Console, + am, bw, eo, xon, + cols#80, lines#25, + acsc=j\331k\277l\332m\300n\305q\304t\303u\264v\301w\302x\263, + bold=\E[7m, clear=\Ec, cub1=^H, cud1=\E[B, cuf1=\E[C, + cup=\E[%i%p1%2d;%p2%2dH, cuu1=\E[A, ed=\E[J, el=\E[K, + home=\E[H, ind=\E[S, kcub1=\E[D, kcud1=\E[B, kcuf1=\E[C, + kcuu1=\E[A, khome=\E[Y, ri=\E[T, rmso=\E[1;0x\E[2;7x, + rmul=\E[1;0x\E[2;7x, sgr0=\E[m\E[1;0x\E[2;7x, + smso=\E[1;7x\E[2;0x, smul=\E[1;7x\E[2;0x, + +# description of BSD/386 console emulator in version 1.0 (supplied by BSDI) +oldpc3|oldibmpc3|old IBM PC BSD/386 Console, + km, + lines#25, + bel=^G, bold=\E[=15F, cr=^M, cud1=^J, dim=\E[=8F, dl1=\E[M, + ht=^I, il1=\E[L, ind=^J, kbs=^H, kcub1=\E[D, kcud1=\E[B, + kcuf1=\E[C, kcuu1=\E[A, khome=\E[H, kich1=\E[L, kll=\E[F, + knp=\E[G, kpp=\E[I, nel=^M^J, sgr0=\E[=R, + +# Description of BSD/OS console emulator in version 1.1, 2.0, 2.1 +# Note, the emulator supports many of the additional console features +# listed in the iBCS2 (e.g. character-set selection) though not all +# are described here. This entry really ought to be upgraded. +# Also note, the console will also work with fewer lines after doing +# "stty rows NN", e.g. to use 24 lines. +# (Color support from Kevin Rosenberg , 2 May 1996) +# Bug: The capability resets attributes. 
+bsdos-pc-nobold|BSD/OS PC console w/o bold, + am, eo, km, xon, + cols#80, it#8, lines#25, + bel=^G, clear=\Ec, cr=^M, cub=\E[%p1%dD, cub1=^H, + cud=\E[%p1%dB, cud1=^J, cuf=\E[%p1%dC, cuf1=\E[C, + cup=\E[%i%p1%d;%p2%dH, cuu=\E[%p1%dA, cuu1=\E[A, + dl=\E[%p1%dM, dl1=\E[M, ed=\E[J, el=\E[K, home=\E[H, ht=^I, + il=\E[%p1%dL, il1=\E[L, ind=^J, kbs=^H, kcub1=\E[D, + kcud1=\E[B, kcuf1=\E[C, kcuu1=\E[A, khome=\E[H, kich1=\E[L, + kll=\E[F, knp=\E[G, kpp=\E[I, nel=^M^J, rc=\E8, sc=\E7, + sgr=\E[0;10%?%p1%t;7%;%?%p3%t;7%;%?%p4%t;5%;%?%p6%t;1%;%?%p7%t;8%;%?%p9%t;11%;m%?%p5%t\E[=8F%;, + use=klone+sgr, use=klone+color, +bsdos-pc|IBM PC BSD/OS Console, + sgr=\E[0;10%?%p1%t;7%;%?%p2%t;1%;%?%p3%t;7%;%?%p4%t;5%;%?%p6%t;1%;%?%p7%t;8%;%?%p9%t;11%;m, use=bsdos-pc-nobold, + +# Old names for BSD/OS PC console used in releases before 4.1. +pc3|BSD/OS on the PC Console, + use=bsdos-pc-nobold, +ibmpc3|pc3-bold|BSD/OS on the PC Console with bold instead of underline, + use=bsdos-pc, + +# BSD/OS on the SPARC +bsdos-sparc|Sun SPARC BSD/OS Console, + use=sun, + +# BSD/OS on the PowerPC +bsdos-ppc|PowerPC BSD/OS Console, + use=bsdos-pc, + +#### DEC VT100 and compatibles +# +# DEC terminals from the vt100 forward are collected here. Older DEC terminals +# and micro consoles can be found in the `obsolete' section. More details on +# the relationship between the VT100 and ANSI X3.64/ISO 6429/ECMA-48 may be +# found near the end of this file. +# +# Except where noted, these entries are DEC's official terminfos. +# Contact Bill Hedberg of Terminal Support +# Engineering for more information. Updated terminfos and termcaps +# are kept available at ftp://gatekeeper.dec.com/pub/DEC/termcaps. +# +# In October 1995 DEC sold its terminals business, including the VT and Dorio +# line and trademark, to SunRiver Data Systems. SunRiver has since changed +# its name to Boundless Technologies; see http://www.boundless.com. 
+# + +# NOTE: Any VT100 emulation, whether in hardware or software, almost +# certainly includes what DEC called the `Level 1 editing extension' codes; +# only the very oldest VT100s lacked these and there probably aren't any of +# those left alive. To capture these, use one of the VT102 entries. +# +# Note that the glitch in vt100 is not quite the same as on the Concept, +# since the cursor is left in a different position while in the +# weird state (concept at beginning of next line, vt100 at end +# of this line) so all versions of vi before 3.7 don't handle +# right on vt100. The correct way to handle is when +# you output the char in column 80, immediately output CR LF +# and then assume you are in column 1 of the next line. If +# is on, am should be on too. +# +# I assume you have smooth scroll off or are at a slow enough baud +# rate that it doesn't matter (1200? or less). Also this assumes +# that you set auto-nl to "on", if you set it off use vt100-nam +# below. +# +# The padding requirements listed here are guesses. It is strongly +# recommended that xon/xoff be enabled, as this is assumed here. +# +# The vt100 uses and rather than // because the +# tab settings are in non-volatile memory and don't need to be +# reset upon login. Also setting the number of columns glitches +# the screen annoyingly. You can type "reset" to get them set. +# +# The VT100 series terminals have cursor ("arrows") keys which can operate +# in two different modes: Cursor Mode and Application Mode. Cursor Mode +# is the reset state, and is assumed to be the normal state. Application +# Mode is the "set" state. In Cursor Mode, the cursor keys transmit +# "Esc [ {code}" sequences, conforming to ANSI standards. In Application +# Mode, the cursor keys transmit "Esc O " sequences. Application Mode +# was provided primarily as an aid to the porting of VT52 applications. 
It is +# assumed that the cursor keys are normally in Cursor Mode, and expected that +# applications such as vi will always transmit the string. Therefore, +# the definitions for the cursor keys are made to match what the terminal +# transmits after the string is transmitted. If the string +# is a null string or is not defined, then cursor keys are assumed to be in +# "Cursor Mode", and the cursor keys definitions should match that assumption, +# else the application may fail. It is also expected that applications will +# always transmit the string to the terminal before they exit. +# +# The VT100 series terminals have an auxiliary keypad, commonly referred to as +# the "Numeric Keypad", because it is a cluster of numeric and function keys. +# The Numeric Keypad can operate in two different modes: Numeric Mode and +# Application Mode. Numeric Mode is the reset state, and is assumed to be +# the normal state. Application Mode is the "set" state. In Numeric Mode, +# the numeric and punctuation keys transmit ASCII 7-bit characters, and the +# Enter key transmits the same as the Return key (Note: the Return key +# can be configured to send either LF (\015) or CR LF). In Application Mode, +# all the keypad keys transmit "Esc O {code}" sequences. The PF1 - PF4 keys +# always send the same "Esc O {code}" sequences. It is assumed that the keypad +# is normally in Numeric Mode. If an application requires that the keypad be +# in Application Mode then it is expected that the user, or the application, +# will set the TERM environment variable to point to a terminfo entry which has +# defined the string to include the codes that switch the keypad into +# Application Mode, and the terminfo entry will also define function key +# fields to match the Application Mode control codes. If the string +# is a null string or is not defined, then the keypad is assumed to be in +# Numeric Mode. 
If the string switches the keypad into Application +# Mode, it is expected that the string will contain the control codes +# necessary to reset the keypad to "Normal" mode, and it is also expected that +# applications which transmit the string will also always transmit the +# string to the terminal before they exit. +# +# Here's a diagram of the VT100 keypad keys with their bindings. +# The top line is the name of the key (some DEC keyboards have the keys +# labelled somewhat differently, like GOLD instead of PF1, but this is +# the most "official" name). The second line is the escape sequence it +# generates in Application Keypad mode (where "$" means the ESC +# character). The third line contains two items, first the mapping of +# the key in terminfo, and then in termcap. +# _______________________________________ +# | PF1 | PF2 | PF3 | PF4 | +# | $OP | $OQ | $OR | $OS | +# |_kf1__k1_|_kf2__k2_|_kf3__k3_|_kf4__k4_| +# | 7 8 9 - | +# | $Ow | $Ox | $Oy | $Om | +# |_kf9__k9_|_kf10_k;_|_kf0__k0_|_________| +# | 4 | 5 | 6 | , | +# | $Ot | $Ou | $Ov | $Ol | +# |_kf5__k5_|_kf6__k6_|_kf7__k7_|_kf8__k8_| +# | 1 | 2 | 3 | | +# | $Oq | $Or | $Os | enter | +# |_ka1__K1_|_kb2__K2_|_ka3__K3_| $OM | +# | 0 | . | | +# | $Op | $On | | +# |___kc1_______K4____|_kc3__K5_|_kent_@8_| +# +# And here, for those of you with orphaned VT100s lacking documentation, is +# a description of the soft switches invoked when you do `Set Up'. +# +# Scroll 0-Jump Shifted 3 0-# +# | 1-Smooth | 1-British pound sign +# | Autorepeat 0-Off | Wrap Around 0-Off +# | | 1-On | | 1-On +# | | Screen 0-Dark Bkg | | New Line 0-Off +# | | | 1-Light Bkg | | | 1-On +# | | | Cursor 0-Underline | | | Interlace 0-Off +# | | | | 1-Block | | | | 1-On +# | | | | | | | | +# 1 1 0 1 1 1 1 1 0 1 0 0 0 0 1 0 <--Standard Settings +# | | | | | | | | +# | | | Auto XON/XOFF 0-Off | | | Power 0-60 Hz +# | | | 1-On | | | 1-50 Hz +# | | Ansi/VT52 0-VT52 | | Bits Per Char. 
0-7 Bits +# | | 1-ANSI | | 1-8 Bits +# | Keyclick 0-Off | Parity 0-Off +# | 1-On | 1-On +# Margin Bell 0-Off Parity Sense 0-Odd +# 1-On 1-Even +# +# The following SET-UP modes are assumed for normal operation: +# ANSI_MODE AUTO_XON/XOFF_ON NEWLINE_OFF 80_COLUMNS +# WRAP_AROUND_ON JUMP_SCROLL_OFF +# Other SET-UP modes may be set for operator convenience or communication +# requirements; I recommend +# AUTOREPEAT_ON BLOCK_CURSOR MARGIN_BELL_OFF SHIFTED_3_# +# Unless you have a graphics add-on such as Digital Engineering's VT640 +# (and even then, whenever it can be arranged!) you should set +# INTERLACE_OFF +# +# (vt100: I added / based on the init string, also . -- esr) +vt100|vt100-am|dec vt100 (w/advanced video), + am, msgr, xenl, xon, + cols#80, it#8, lines#24, vt#3, + acsc=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~, + bel=^G, blink=\E[5m$<2>, bold=\E[1m$<2>, + clear=\E[H\E[J$<50>, cr=^M, csr=\E[%i%p1%d;%p2%dr, + cub=\E[%p1%dD, cub1=^H, cud=\E[%p1%dB, cud1=^J, + cuf=\E[%p1%dC, cuf1=\E[C$<2>, + cup=\E[%i%p1%d;%p2%dH$<5>, cuu=\E[%p1%dA, + cuu1=\E[A$<2>, ed=\E[J$<50>, el=\E[K$<3>, el1=\E[1K$<3>, + enacs=\E(B\E)0, home=\E[H, ht=^I, hts=\EH, ind=^J, ka1=\EOq, + ka3=\EOs, kb2=\EOr, kbs=^H, kc1=\EOp, kc3=\EOn, kcub1=\EOD, + kcud1=\EOB, kcuf1=\EOC, kcuu1=\EOA, kent=\EOM, kf0=\EOy, + kf1=\EOP, kf10=\EOx, kf2=\EOQ, kf3=\EOR, kf4=\EOS, kf5=\EOt, + kf6=\EOu, kf7=\EOv, kf8=\EOl, kf9=\EOw, rc=\E8, + rev=\E[7m$<2>, ri=\EM$<5>, rmacs=^O, rmam=\E[?7l, + rmkx=\E[?1l\E>, rmso=\E[m$<2>, rmul=\E[m$<2>, + rs2=\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h, sc=\E7, + sgr=\E[0%?%p1%p6%|%t;1%;%?%p2%t;4%;%?%p1%p3%|%t;7%;%?%p4%t;5%;m%?%p9%t\016%e\017%;, + sgr0=\E[m\017$<2>, smacs=^N, smam=\E[?7h, smkx=\E[?1h\E=, + smso=\E[7m$<2>, smul=\E[4m$<2>, tbc=\E[3g, +vt100nam|vt100-nam|vt100 no automargins, + am@, xenl@, use=vt100-am, +vt100-vb|dec vt100 (w/advanced video) & no beep, + bel@, flash=\E[?5h\E[?5l, use=vt100, + +# Ordinary vt100 in 132 column ("wide") mode. 
+vt100-w|vt100-w-am|dec vt100 132 cols (w/advanced video), + cols#132, lines#24, + rs2=\E>\E[?3h\E[?4l\E[?5l\E[?8h, use=vt100-am, +vt100-w-nam|vt100-nam-w|dec vt100 132 cols (w/advanced video no automargin), + cols#132, lines#14, vt@, + rs2=\E>\E[?3h\E[?4l\E[?5l\E[?8h, use=vt100-nam, + +# vt100 with no advanced video. +vt100-nav|vt100 without advanced video option, + xmc#1, + blink@, bold@, rev@, rmso=\E[m, rmul@, sgr@, sgr0@, smso=\E[7m, + smul@, + use=vt100, +vt100-nav-w|vt100-w-nav|dec vt100 132 cols 14 lines (no advanced video option), + cols#132, lines#14, use=vt100-nav, + +# vt100 with one of the 24 lines used as a status line. +# We put the status line on the top. +vt100-s|vt100-s-top|vt100-top-s|vt100 for use with top sysline, + eslok, hs, + lines#23, + clear=\E[2;1H\E[J$<50>, csr=\E[%i%i%p1%d;%p2%dr, + cup=\E[%i%p1%{1}%+%d;%p2%dH$<5>, dsl=\E7\E[1;24r\E8, + fsl=\E8, home=\E[2;1H, is2=\E7\E[2;24r\E8, + tsl=\E7\E[1;%p1%dH\E[1K, use=vt100-am, + +# Status line at bottom. +# Clearing the screen will clobber status line. +vt100-s-bot|vt100-bot-s|vt100 for use with bottom sysline, + eslok, hs, + lines#23, + dsl=\E7\E[1;24r\E8, fsl=\E8, is2=\E[1;23r\E[23;1H, + tsl=\E7\E[24;%p1%dH\E[1K, + use=vt100-am, + +# Most of the `vt100' emulators out there actually emulate a vt102 +# This entry (or vt102-nsgr) is probably the right thing to use for +# these. +vt102|dec vt102, + mir, + dch1=\E[P, dl1=\E[M, il1=\E[L, rmir=\E[4l, smir=\E[4h, use=vt100, +vt102-w|dec vt102 in wide mode, + cols#132, + rs3=\E[?3h, use=vt102, + +# Many brain-dead PC comm programs that pretend to be `vt100-compatible' +# fail to interpret the ^O and ^N escapes properly. Symptom: the +# string in the canonical vt100 entry above leaves the screen littered +# with little snowflake or star characters (IBM PC ROM character \017 = ^O) +# after highlight turnoffs. This entry should fix that, and even leave +# ACS support working, at the cost of making multiple-highlight changes +# slightly more expensive. 
+# From: Eric S. Raymond July 22 1995 +vt102-nsgr|vt102 no sgr (use if you see snowflakes after highlight changes), + sgr@, sgr0=\E[m, + use=vt102, + +# VT125 Graphics CRT. Clear screen also erases graphics +vt125|vt125 graphics terminal, + clear=\E[H\E[2J\EPpS(E)\E\\$<50>, use=vt100, + +# This isn't a DEC entry, it came from University of Wisconsin. +# (vt131: I added / based on the init string, also -- esr) +vt131|dec vt131, + am, xenl, + cols#80, it#8, lines#24, vt#3, + bel=^G, blink=\E[5m$<2/>, bold=\E[1m$<2/>, + clear=\E[;H\E[2J$<50/>, cr=^M, csr=\E[%i%p1%d;%p2%dr, + cub1=^H, cud1=^J, cuf1=\E[C$<2/>, + cup=\E[%i%p1%d;%p2%dH$<5/>, cuu1=\E[A$<2/>, + ed=\E[J$<50/>, el=\E[K$<3/>, home=\E[H, ht=^I, + is2=\E[1;24r\E[24;1H, kbs=^H, kcub1=\EOD, kcud1=\EOB, + kcuf1=\EOC, kcuu1=\EOA, kf1=\EOP, kf2=\EOQ, kf3=\EOR, + kf4=\EOS, nel=^M^J, rc=\E8, rev=\E[7m$<2/>, ri=\EM$<5/>, + rmam=\E[?7h, rmkx=\E[?1l\E>, rmso=\E[m$<2/>, + rmul=\E[m$<2/>, + rs1=\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h, sc=\E7, + sgr0=\E[m$<2/>, smam=\E[?7h, smkx=\E[?1h\E=, + smso=\E[7m$<2/>, smul=\E[4m$<2/>, + +# vt132 - like vt100 but slower and has ins/del line and such. +# I'm told that / are backwards in the terminal from the +# manual and from the ANSI standard, this describes the actual +# terminal. I've never actually used a vt132 myself, so this +# is untested. +# +vt132|DEC vt132, + xenl, + dch1=\E[P$<7>, dl1=\E[M$<99>, il1=\E[L$<99>, ind=\n$<30>, + ip=$<7>, rmir=\E[4h, smir=\E[4l, + use=vt100, + +# This vt220 description maps F5--F9 to the second block of function keys +# at the top of the keyboard. The "DO" key is used as F10 to avoid conflict +# with the key marked (ESC) on the vt220. See vt220d for an alternate mapping. +# PF1--PF4 are used as F1--F4. 
+# +vt220-old|vt200-old|DEC VT220 in vt100 emulation mode, + am, mir, xenl, xon, + cols#80, lines#24, vt#3, + acsc=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~, + bel=^G, blink=\E[5m$<2>, bold=\E[1m$<2>, civis=\E[?25l, + clear=\E[H\E[2J$<50>, cnorm=\E[?25h, cr=^M, + csr=\E[%i%p1%d;%p2%dr, cub1=^H, cud1=\E[B, cuf1=\E[C, + cup=\E[%i%p1%d;%p2%dH$<10>, cuu1=\E[A, dch1=\E[P, + dl1=\E[M, ed=\E[J$<50>, el=\E[K$<3>, home=\E[H, ht=^I, + if=/usr/share/tabset/vt100, il1=\E[L, ind=\ED$<20/>, + is2=\E[1;24r\E[24;1H, kbs=^H, kcub1=\E[D, kcud1=\E[B, + kcuf1=\E[C, kcuu1=\E[A, kdch1=\E[3~, kend=\E[4~, kf1=\EOP, + kf10=\E[29~, kf2=\EOQ, kf3=\EOR, kf4=\EOS, kf5=\E[17~, + kf6=\E[18~, kf7=\E[19~, kf8=\E[20~, kf9=\E[21~, + khome=\E[1~, kich1=\E[2~, knp=\E[6~, kpp=\E[5~, rc=\E8, + rev=\E[7m$<2>, rf=/usr/share/tabset/vt100, + ri=\EM$<14/>, rmacs=\E(B$<4>, rmam=\E[?7l, rmir=\E[4l, + rmso=\E[27m, rmul=\E[24m, + rs2=\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h, sc=\E7, + sgr=\E[0%?%p6%t;1%;%?%p2%t;4%;%?%p4%t;5%;%?%p1%p3%|%t;7%;m%?%p9%t\E(0%e\E(B%;, + sgr0=\E[m$<2>, smacs=\E(0$<2>, smam=\E[?7h, smir=\E[4h, + smso=\E[7m, smul=\E[4m, + +# A much better description of the VT200/220; used to be vt220-8 +vt220|vt200|dec vt220, + am, mc5i, mir, msgr, xenl, xon, + cols#80, it#8, lines#24, vt#3, + acsc=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~, + bel=^G, blink=\E[5m, bold=\E[1m, clear=\E[H\E[J, cr=^M, + csr=\E[%i%p1%d;%p2%dr, cub=\E[%p1%dD, cub1=^H, + cud=\E[%p1%dB, cud1=^J, cuf=\E[%p1%dC, cuf1=\E[C, + cup=\E[%i%p1%d;%p2%dH, cuu=\E[%p1%dA, cuu1=\E[A, + dch=\E[%p1%dP, dch1=\E[P, dl=\E[%p1%dM, dl1=\E[M, + ech=\E[%p1%dX, ed=\E[J, el=\E[K, el1=\E[1K, enacs=\E)0, + flash=\E[?5h$<200/>\E[?5l, home=\E[H, ht=^I, hts=\EH, + ich=\E[%p1%d@, if=/usr/share/tabset/vt100, + il=\E[%p1%dL, il1=\E[L, ind=\ED, + is2=\E[?7h\E[>\E[?1h\E F\E[?4l, kbs=^H, kcub1=\E[D, + kcud1=\E[B, kcuf1=\E[C, kcuu1=\E[A, kf1=\EOP, kf10=\E[21~, + kf11=\E[23~, kf12=\E[24~, kf13=\E[25~, kf14=\E[26~, + kf17=\E[31~, kf18=\E[32~, 
kf19=\E[33~, kf2=\EOQ, + kf20=\E[34~, kf3=\EOR, kf4=\EOS, kf6=\E[17~, kf7=\E[18~, + kf8=\E[19~, kf9=\E[20~, kfnd=\E[1~, khlp=\E[28~, + khome=\E[H, kich1=\E[2~, knp=\E[6~, kpp=\E[5~, krdo=\E[29~, + kslt=\E[4~, lf1=pf1, lf2=pf2, lf3=pf3, lf4=pf4, mc0=\E[i, + mc4=\E[4i, mc5=\E[5i, nel=\EE, rc=\E8, rev=\E[7m, ri=\EM, + rmacs=^O, rmam=\E[?7l, rmir=\E[4l, rmso=\E[27m, + rmul=\E[24m, rs1=\E[?3l, sc=\E7, sgr0=\E[m, smacs=^N, + smam=\E[?7h, smir=\E[4h, smso=\E[7m, smul=\E[4m, tbc=\E[3g, +vt220-w|vt200-w|DEC vt220 in wide mode, + cols#132, + rs3=\E[?3h, use=vt220, +vt220-8bit|vt220-8|vt200-8bit|vt200-8|dec vt220/200 in 8-bit mode, + am, mc5i, mir, msgr, xenl, xon, + cols#80, it#8, lines#24, vt#3, + acsc=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~, + bel=^G, blink=\2335m, bold=\2331m, clear=\233H\233J, cr=^M, + csr=\233%i%p1%d;%p2%dr, cub=\233%p1%dD, cub1=^H, + cud=\233%p1%dB, cud1=^J, cuf=\233%p1%dC, cuf1=\233C, + cup=\233%i%p1%d;%p2%dH, cuu=\233%p1%dA, cuu1=\233A, + dch=\233%p1%dP, dch1=\233P, dl=\233%p1%dM, dl1=\233M, + ech=\233%p1%dX, ed=\233J, el=\233K, el1=\2331K, enacs=\E)0, + flash=\233?5h$<200/>\233?5l, home=\233H, ht=^I, hts=\EH, + ich=\233%p1%d@, if=/usr/share/tabset/vt100, + il=\233%p1%dL, il1=\233L, ind=\ED, + is2=\233?7h\233>\233?1h\E F\233?4l, kbs=^H, + kcub1=\233D, kcud1=\233B, kcuf1=\233C, kcuu1=\233A, + kf1=\EOP, kf10=\23321~, kf11=\23323~, kf12=\23324~, + kf13=\23325~, kf14=\23326~, kf17=\23331~, kf18=\23332~, + kf19=\23333~, kf2=\EOQ, kf20=\23334~, kf3=\EOR, kf4=\EOS, + kf6=\23317~, kf7=\23318~, kf8=\23319~, kf9=\23320~, + kfnd=\2331~, khlp=\23328~, khome=\233H, kich1=\2332~, + knp=\2336~, kpp=\2335~, krdo=\23329~, kslt=\2334~, lf1=pf1, + lf2=pf2, lf3=pf3, lf4=pf4, mc0=\233i, mc4=\2334i, mc5=\2335i, + nel=\EE, rc=\E8, rev=\2337m, ri=\EM, rmacs=^O, rmam=\233?7l, + rmir=\2334l, rmso=\23327m, rmul=\23324m, rs1=\233?3l, + sc=\E7, sgr0=\233m, smacs=^N, smam=\233?7h, smir=\2334h, + smso=\2337m, smul=\2334m, tbc=\2333g, + +# +# vt220d: +# This vt220 
description regards F6--F10 as the second block of function keys +# at the top of the keyboard. This mapping follows the description given +# in the VT220 Programmer Reference Manual and agrees with the labeling +# on some terminals that emulate the vt220. There is no support for an F5. +# See vt220 for an alternate mapping. +# +vt220d|DEC VT220 in vt100 mode with DEC function key labeling, + kf10=\E[21~, kf11=\E[23~, kf12=\E[24~, kf13=\E[25~, + kf14=\E[26~, kf15=\E[28~, kf16=\E[29~, kf17=\E[31~, + kf18=\E[32~, kf19=\E[33~, kf20=\E[34~, kf5@, kf6=\E[17~, + kf7=\E[18~, kf8=\E[19~, kf9=\E[20~, + use=vt220-old, + +vt220-nam|v200-nam|VT220 in vt100 mode with no auto margins, + am@, + rs2=\E>\E[?3l\E[?4l\E[?5l\E[?7l\E[?8h, use=vt220, + +# vt220 termcap written Tue Oct 25 20:41:10 1988 by Alex Latzko +# (not an official DEC entry!) +# The problem with real vt220 terminals is they don't send escapes when in +# in vt220 mode. This can be gotten around two ways. 1> don't send +# escapes or 2> put the vt220 into vt100 mode and use all the nifty +# features of vt100 advanced video which it then has. +# +# This entry takes the view of putting a vt220 into vt100 mode so +# you can use the escape key in emacs and everything else which needs it. 
+# +# You probably don't want to use this on a VMS machine since VMS will think +# it has a vt220 and will get fouled up coming out of emacs +# +# From: Alexander Latzko , 30 Dec 1996 +# (Added vt100 , to quiet a tic warning -- esr) +vt200-js|vt220-js|dec vt200 series with jump scroll, + am, + cols#80, + bel=^G, clear=\E[H\E[J, cr=^M, csr=\E[%i%p1%d;%p2%dr, + cub1=^H, cud1=^J, cup=\E[%i%p1%d;%p2%dH, cuu1=\E[A, + dch1=\E[P, dl1=\E[M, ed=\E[J, el=\E[K, home=\E[H, ht=^I, + il1=\E[L, ind=\ED, + is2=\E[61"p\E[H\E[?3l\E[?4l\E[?1l\E[?5l\E[?6l\E[?7h\E[?8h\E[?25h\E>\E[m, + kbs=^H, kcub1=\EOD, kcud1=\EOB, kcuf1=\EOC, kcuu1=\EOA, + kf1=\EOP, kf2=\EOQ, kf3=\EOR, kf4=\EOS, nel=^M\ED, rc=\E8, + rf=/usr/lib/tabset/vt100, ri=\EM, rmdc=, rmir=\E[4l, + rmkx=\E[?1l\E>, rmso=\E[27m$<5/>, rmul=\E[24m, + rs1=\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h, sc=\E7, smdc=, + smir=\E[4h, smkx=\E[?1h\E=, smso=\E[7m$<5/>, smul=\E[4m, + + +# This was DEC's vt320. Use the purpose-built one below instead +#vt320|DEC VT320 in vt100 emulation mode, +# use=vt220, + +# +# Use v320n for SCO's LYRIX. Otherwise, use Adam Thompson's vt320-nam. +# +vt320nam|v320n|DEC VT320 in vt100 emul. mode with NO AUTO WRAP mode, + am@, + rs2=\E>\E[?3l\E[?4l\E[?5l\E[?7l\E[?8h, use=vt220, + +# These entries are not DEC's official ones, they were purpose-built for the +# VT320. Here are the designer's notes: +# is end on a PC kbd. Actually 'select' on a VT. Mapped to +# 'Erase to End of Field'... since nothing seems to use 'end' anyways... +# khome is Home on a PC kbd. Actually 'FIND' on a VT. +# Things that use usually use tab anyways... and things that don't use +# tab usually use instead... +# kprv is same as tab - Backtab is useless... +# I left out because of its RIDICULOUS complexity, +# and the resulting fact that it causes the termcap translation of the entry +# to SMASH the 1k-barrier... 
+# From: Adam Thompson Sept 10 1995 +# (vt320: uncommented , commented out to avoid a conflict --esr) +vt320|vt300|dec vt320 7 bit terminal, + am, eslok, hs, mir, msgr, xenl, + cols#80, lines#24, wsl#80, + acsc=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~, + bel=^G, blink=\E[5m, bold=\E[1m, civis=\E[?25l, + clear=\E[H\E[2J, cnorm=\E[?25h, cr=^M, + csr=\E[%i%p1%d;%p2%dr, cub=\E[%p1%dD, cub1=^H, + cud=\E[%p1%dB, cud1=^J, cuf=\E[%p1%dC, cuf1=\E[C, + cup=\E[%i%p1%d;%p2%dH, cuu=\E[%p1%dA, cuu1=\E[A, + dch=\E[%p1%dP, dch1=\E[P, dl=\E[%p1%dM, dl1=\E[M, + ech=\E[%p1%dX, ed=\E[J, el=\E[K, el1=\E[1K, fsl=\E[0$}, + home=\E[H, ht=^I, hts=\EH, ich=\E[%p1%d@, il=\E[%p1%dL, + il1=\E[L, ind=\ED, + is2=\E>\E[?3l\E[?4l\E[5?l\E[?7h\E[?8h\E[1;24r\E[24;1H, + ka1=\EOw, ka3=\EOy, kb2=\EOu, kbs=\177, kc1=\EOq, kc3=\EOs, + kcub1=\EOD, kcud1=\EOB, kcuf1=\EOC, kcuu1=\EOA, + kdch1=\E[3~, kel=\E[4~, kent=\EOM, kf1=\EOP, kf10=\E[21~, + kf11=\E[23~, kf12=\E[24~, kf13=\E[25~, kf14=\E[26~, + kf15=\E[28~, kf16=\E[29~, kf17=\E[31~, kf18=\E[32~, + kf19=\E[33~, kf2=\EOQ, kf20=\E[34~, kf3=\EOR, kf4=\EOS, + kf6=\E[17~, kf7=\E[18~, kf8=\E[19~, kf9=\E[20~, + khome=\E[1~, kich1=\E[2~, knp=\E[6~, knxt=^I, kpp=\E[5~, + kprv=\E[Z, mc0=\E[i, mc4=\E[?4i, mc5=\E[?5i, nel=\EE, rc=\E8, + rev=\E[7m, rf=/usr/share/tabset/vt300, ri=\EM, + rmacs=\E(B, rmam=\E[?7l, rmir=\E[4l, rmkx=\E[?1l\E>, + rmso=\E[m, rmul=\E[m, + rs2=\E>\E[?3l\E[?4l\E[5?l\E[?7h\E[?8h\E[1;24r\E[24;1H, + sc=\E7, sgr0=\E[m, smacs=\E(0, smam=\E[?7h, smir=\E[4h, + smkx=\E[?1h\E=, smso=\E[7m, smul=\E[4m, tbc=\E[3g, + tsl=\E[1$}\E[H\E[K, +vt320-nam|vt300-nam|dec vt320 7 bit terminal with no am to make SAS happy, + am@, + is2=\E>\E[?3l\E[?4l\E[5?l\E[?7l\E[?8h\E[1;24r\E[24;1H, + rs2=\E>\E[?3l\E[?4l\E[5?l\E[?7l\E[?8h\E[1;24r\E[24;1H, + use=vt320, +# We have to init 132-col mode, not 80-col mode. 
+vt320-w|vt300-w|dec vt320 wide 7 bit terminal, + cols#132, wsl#132, + is2=\E>\E[?3h\E[?4l\E[5?l\E[?7h\E[?8h\E[1;24r\E[24;1H, + rs2=\E>\E[?3h\E[?4l\E[5?l\E[?7h\E[?8h\E[1;24r\E[24;1H, + use=vt320, +vt320-w-nam|vt300-w-nam|dec vt320 wide 7 bit terminal with no am, + am@, + is2=\E>\E[?3h\E[?4l\E[5?l\E[?7l\E[?8h\E[1;24r\E[24;1H, + rs2=\E>\E[?3h\E[?4l\E[5?l\E[?7l\E[?8h\E[1;24r\E[24;1H, + use=vt320-w, + +# VT330 and VT340 -- These are ReGIS and SIXEL graphics terminals +# which are pretty much a superset of the VT320. They have the +# host writable status line, yet another different DRCS matrix size, +# and such, but they add the DEC Technical character set, Multiple text +# pages, selectable length pages, and the like. The difference between +# the vt330 and vt340 is that the latter has only 2 planes and a monochrome +# monitor, the former has 4 planes and a color monitor. These terminals +# support VT131 and ANSI block mode, but as with much of these things, +# termcap/terminfo doesn't deal with these features. +# +# Note that this entry is set up in what was the standard way for GNU +# Emacs v18 terminal modes to deal with the cursor keys in that the arrow +# keys were switched into application mode at the same time the numeric pad +# is switched into application mode. This changes the definitions of the +# arrow keys. 
Emacs v19 is smarter and mines its keys directly out of +# your termcap or terminfo entry, +# +# From: Daniel Glasser , 13 Oct 1993 +# (vt340: string capability "sb=\E[M" corrected to "sr"; +# also, added / based on the init string -- esr) +vt340|dec-vt340|vt330|dec-vt330|dec vt340 graphics terminal with 24 line page, + am, eslok, hs, mir, msgr, xenl, xon, + cols#80, it#8, lines#24, vt#3, + acsc=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~, + blink=\E[5m, bold=\E[1m, civis=\E[?25l, clear=\E[H\E[J, + cnorm=\E[?25h, cr=^M, csr=\E[%i%p1%d;%p2%dr, + cub=\E[%p1%dD, cub1=^H, cud=\E[%p1%dB, cud1=^J, + cuf=\E[%p1%dC, cuf1=\E[C, cup=\E[%i%p1%d;%p2%dH, + cuu=\E[%p1%dA, cuu1=\E[A, cvvis=\E[?25h, dch=\E[%p1%dP, + dch1=\E[P, dl=\E[%p1%dM, dl1=\E[M, + dsl=\E[2$~\r\E[1$}\E[K\E[$}, ed=\E[J, el=\E[K, + flash=\E[?5h\E[?5l$<200/>, fsl=\E[$}, home=\E[H, ht=^I, + hts=\EH, ich=\E[%p1%d@, il=\E[%p1%dL, il1=\E[L, ind=\ED, + is2=\E<\E F\E>\E[?1h\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h\E[1;24r\E[24;1H, + kbs=^H, kcub1=\EOD, kcud1=\EOB, kcuf1=\EOC, kcuu1=\EOA, + kf1=\EOP, kf2=\EOQ, kf3=\EOR, kf4=\EOS, kf6=\E[17~, + kf7=\E[18~, kf8=\E[19~, kf9=\E[20~, lf1=pf1, lf2=pf2, + lf3=pf3, lf4=pf4, nel=^M\ED, rc=\E8, rev=\E[7m, + rf=/usr/share/tabset/vt300, ri=\EM, rmacs=^O, + rmam=\E[?7l, rmir=\E[4l, rmkx=\E[?1l\E>, rmso=\E[27m, + rmul=\E[24m, rs1=\E[?3l, sc=\E7, sgr0=\E[m, smacs=^N, + smam=\E[?7h, smir=\E[4h, smkx=\E[?1h\E=, smso=\E[7m, + smul=\E[4m, tbc=\E[3g, tsl=\E[2$~\E[1$}\E[1;%dH, + +# DEC doesn't supply a vt400 description, so we add Daniel Glasser's +# (originally written with vt420 as its primary name, and usable for it). +# +# VT400/420 -- This terminal is a superset of the vt320. 
It adds the multiple +# text pages and long text pages with selectable length of the vt340, along +# with left and right margins, rectangular area text copy, fill, and erase +# operations, selected region character attribute change operations, +# page memory and rectangle checksums, insert/delete column, reception +# macros, and other features too numerous to remember right now. TERMCAP +# can only take advantage of a few of these added features. +# +# Note that this entry is are set up in what was the standard way for GNU +# Emacs v18 terminal modes to deal with the cursor keys in that the arrow +# keys were switched into application mode at the same time the numeric pad +# is switched into application mode. This changes the definitions of the +# arrow keys. Emacs v19 is smarter and mines its keys directly out of +# your termcap entry, +# +# From: Daniel Glasser , 13 Oct 1993 +# (vt400: string capability ":sb=\E[M:" corrected to ":sr=\E[M:"; +# also, added / based on the init string -- esr) +vt400|vt400-24|dec-vt400|dec vt400 24x80 column autowrap, + am, eslok, hs, mir, msgr, xenl, xon, + cols#80, it#8, lines#24, vt#3, + acsc=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~, + blink=\E[5m, bold=\E[1m, civis=\E[?25l, + clear=\E[H\E[J$<10/>, cnorm=\E[?25h, cr=^M, + csr=\E[%i%p1%d;%p2%dr, cub=\E[%p1%dD, cub1=^H, + cud=\E[%p1%dB, cud1=^J, cuf=\E[%p1%dC, cuf1=\E[C, + cup=\E[%i%p1%d;%p2%dH, cuu=\E[%p1%dA, cuu1=\E[A, + cvvis=\E[?25h, dch=\E[%p1%dP, dch1=\E[P, dl=\E[%p1%dM, + dl1=\E[M, dsl=\E[2$~\r\E[1$}\E[K\E[$}, ed=\E[J$<10/>, + el=\E[K$<4/>, flash=\E[?5h\E[?5l$<200/>, fsl=\E[$}, + home=\E[H, ht=^I, hts=\EH, ich=\E[%p1%d@, ich1=\E[@, + il=\E[%p1%dL, il1=\E[L, ind=\ED, + is2=\E<\E F\E>\E[?1h\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h\E[1;24r\E[24;1H, + kbs=^H, kcub1=\EOD, kcud1=\EOB, kcuf1=\EOC, kcuu1=\EOA, + kf1=\EOP, kf2=\EOQ, kf3=\EOR, kf4=\EOS, kf6=\E[17~, + kf7=\E[18~, kf8=\E[19~, kf9=\E[20~, lf1=pf1, lf2=pf2, + lf3=pf3, lf4=pf4, nel=^M\ED, rc=\E8, rev=\E[7m, + 
rf=/usr/share/tabset/vt300, ri=\EM, rmacs=^O, + rmam=\E[?7l, rmir=\E[4l, rmkx=\E[?1l\E>, rmso=\E[27m, + rmul=\E[24m, rs1=\E<\E[?3l\E[!p\E[?7h, sc=\E7, sgr0=\E[m, + smacs=^N, smam=\E[?7h, smir=\E[4h, smkx=\E[?1h\E=, + smso=\E[7m, smul=\E[4m, tbc=\E[3g, + tsl=\E[2$~\E[1$}\E[1;%dH, + +# (vt420: I removed , it collided with . I also restored +# a missing -- esr) +vt420|DEC VT420, + am, mir, xenl, xon, + cols#80, lines#24, vt#3, + acsc=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~, + bel=^G, blink=\E[5m$<2>, bold=\E[1m$<2>, + clear=\E[H\E[2J$<50>, cr=^M, csr=\E[%i%p1%d;%p2%dr, + cub1=^H, cud1=\E[B, cuf1=\E[C, + cup=\E[%i%p1%d;%p2%dH$<10>, cuu1=\E[A, dch1=\E[P, + dl1=\E[M, ed=\E[J$<50>, el=\E[K$<3>, home=\E[H, ht=^I, + if=/usr/share/tabset/vt300, il1=\E[L, ind=\ED, + is2=\E[1;24r\E[24;1H, is3=\E[?67h\E[64;1"p, kbs=^H, + kcub1=\E[D, kcud1=\E[B, kcuf1=\E[C, kcuu1=\E[A, + kdch1=\E[3~, kf1=\EOP, kf10=\E[29~, kf2=\EOQ, kf3=\EOR, + kf4=\EOS, kf5=\E[17~, kf6=\E[18~, kf7=\E[19~, kf8=\E[20~, + kf9=\E[21~, kfnd=\E[1~, kich1=\E[2~, knp=\E[6~, kpp=\E[5~, + kslt=\E[4~, rc=\E8, rev=\E[7m$<2>, + rf=/usr/share/tabset/vt300, ri=\EM, rmacs=\E(B$<4>, + rmam=\E[?7l, rmir=\E[4l, rmkx=\E>, + rmsc=\E[?0;0r\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h, + rmso=\E[m, rmul=\E[m, rs3=\E[?67h\E[64;1"p, sc=\E7, + sgr=\E[0%?%p6%t;1%;%?%p2%t;4%;%?%p4%t;5%;%?%p1%p3%|%t;7%;m%?%p9%t\E(0%e\E(B%;, + sgr0=\E[m$<2>, smacs=\E(0$<2>, smam=\E[?7h, smir=\E[4h, + smkx=\E=, smso=\E[7m, smul=\E[4m, + +# +# DEC VT220 and up support DECUDK (user-defined keys). DECUDK (i.e., pfx) +# takes two parameters, the key and the string. 
Translating the key is +# straightforward (keys 1-5 are not defined on real terminals, though some +# emulators define these): +# +# if (key < 16) then value = key; +# else if (key < 21) then value = key + 1; +# else if (key < 25) then value = key + 2; +# else if (key < 27) then value = key + 3; +# else if (key < 30) then value = key + 4; +# else value = key + 5; +# +# The string must be the hexadecimal equivalent, e.g., "5052494E" for "PRINT". +# There's no provision in terminfo for emitting a string in this format, so the +# application has to know it. +# +vt420pc|DEC VT420 w/PC keyboard, + kdch1=\177, kend=\E[4~, kf1=\E[11~, kf10=\E[21~, + kf11=\E[23~, kf12=\E[24~, kf13=\E[11;2~, kf14=\E[12;2~, + kf15=\E[13;2~, kf16=\E[14;2~, kf17=\E[15;2~, + kf18=\E[17;2~, kf19=\E[18;2~, kf2=\E[12~, kf20=\E[19;2~, + kf21=\E[20;2~, kf22=\E[21;2~, kf23=\E[23;2~, + kf24=\E[24;2~, kf25=\E[23~, kf26=\E[24~, kf27=\E[25~, + kf28=\E[26~, kf29=\E[28~, kf3=\E[13~, kf30=\E[29~, + kf31=\E[31~, kf32=\E[32~, kf33=\E[33~, kf34=\E[34~, + kf35=\E[35~, kf36=\E[36~, kf37=\E[23;2~, kf38=\E[24;2~, + kf39=\E[25;2~, kf4=\E[14~, kf40=\E[26;2~, kf41=\E[28;2~, + kf42=\E[29;2~, kf43=\E[31;2~, kf44=\E[32;2~, + kf45=\E[33;2~, kf46=\E[34;2~, kf47=\E[35;2~, + kf48=\E[36;2~, kf5=\E[15~, kf6=\E[17~, kf7=\E[18~, + kf8=\E[19~, kf9=\E[20~, khome=\E[H, + pctrm=USR_TERM\:vt420pcdos\:, + pfx=\EP1;1|%?%{16}%p1%>%t%{0}%e%{21}%p1%>%t%{1}%e%{25}%p1%>%t%{2}%e%{27}%p1%>%t%{3}%e%{30}%p1%>%t%{4}%e%{5}%;%p1%+%d/%p2%s\E\\, use=vt420, + +vt420pcdos|DEC VT420 w/PC for DOS Merge, + lines#25, + dispc=%?%p2%{19}%=%t\E\023\021%e%p2%{32}%<%t\E%p2%c%e%p2%{127}%=%t\E\177%e%p2%c%;, + pctrm@, + rmsc=\E[?0;0r\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h, sgr@, + sgr0=\E[m, smsc=\E[?1;2r\E[34h, + use=vt420pc, + +vt420f|DEC VT420 with VT kbd; VT400 mode; F1-F5 used as Fkeys, + kdch1=\177, kf1=\E[11~, kf10=\E[21~, kf11=\E[23~, + kf12=\E[24~, kf13=\E[25~, kf14=\E[26~, kf15=\E[28~, + kf16=\E[29~, kf17=\E[31~, kf18=\E[32~, kf19=\E[33~, + kf2=\E[12~, 
kf20=\E[34~, kf3=\E[13~, kf4=\E[14~, + kf5=\E[15~, kf6=\E[17~, kf7=\E[18~, kf8=\E[19~, kf9=\E[20~, + khome=\E[H, lf1=\EOP, lf2=\EOQ, lf3=\EOR, lf4=\EOS, + use=vt420, + +vt510|DEC VT510, + use=vt420, +vt510pc|DEC VT510 w/PC keyboard, + use=vt420pc, +vt510pcdos|DEC VT510 w/PC for DOS Merge, + use=vt420pcdos, + +# VT520/VT525 +# +# The VT520 is a monochrome text terminal capable of managing up to +# four independent sessions in the terminal. It has multiple ANSI +# emulations (VT520, VT420, VT320, VT220, VT100, VT PCTerm, SCO Console) +# and ASCII emulations (WY160/60, PCTerm, 50/50+, 150/120, TVI 950, +# 925 910+, ADDS A2). This terminfo data is for the ANSI emulations only. +# +# Terminal Set-Up is entered by pressing [F3], [Caps Lock]/[F3] or +# [Alt]/[Print Screen] depending upon which keyboard and which +# terminal mode is being used. If Set-Up has been disabled or +# assigned to an unknown key, Set-Up may be entered by pressing +# [F3] as the first key after power up, regardless of keyboard type. 
+# (vt520: I added / based on the init string, also -- esr) +vt520|DEC VT520, + am, mir, xenl, xon, + cols#80, lines#24, vt#3, + acsc=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~, + bel=^G, blink=\E[5m$<2>, bold=\E[1m$<2>, + clear=\E[H\E[2J$<50>, cr=^M, csr=\E[%i%p1%d;%p2%dr, + cub1=^H, cud1=\E[B, cuf1=\E[C, + cup=\E[%i%p1%d;%p2%dH$<10>, cuu1=\E[A, dch1=\E[P, + dl1=\E[M, ed=\E[J$<50>, el=\E[K$<3>, home=\E[H, ht=^I, + if=/usr/share/tabset/vt300, il1=\E[L, ind=\ED, + is2=\E[1;24r\E[24;1H, is3=\E[?67h\E[64;1"p, kbs=^H, + kcub1=\E[D, kcud1=\E[B, kcuf1=\E[C, kcuu1=\E[A, + kdch1=\E[3~, kf0=\E[29~, kf1=\EOP, kf10=\E[29~, kf2=\EOQ, + kf3=\EOR, kf4=\EOS, kf5=\E[17~, kf6=\E[18~, kf7=\E[19~, + kf8=\E[20~, kf9=\E[21~, kfnd=\E[1~, kich1=\E[2~, knp=\E[6~, + kpp=\E[5~, kslt=\E[4~, + pfx=\EP1;1|%?%{16}%p1%>%t%{0}%e%{21}%p1%>%t%{1}%e%{25}%p1%>%t%{2}%e%{27}%p1%>%t%{3}%e%{30}%p1%>%t%{4}%e%{5}%;%p1%+%d/%p2%s\E\\, + rc=\E8, rev=\E[7m$<2>, rf=/usr/share/tabset/vt300, + ri=\EM, rmacs=\E(B$<4>, rmam=\E[?7l, rmir=\E[4l, + rmsc=\E[?0;0r\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h, + rmso=\E[m, rmul=\E[m, rs3=\E[?67h\E[64;1"p, sc=\E7, + sgr=\E[0%?%p6%t;1%;%?%p2%t;4%;%?%p4%t;5%;%?%p1%p3%|%t;7%;m%?%p9%t\E(0%e\E(B%;, + sgr0=\E[m$<2>, smacs=\E(0$<2>, smam=\E[?7h, smir=\E[4h, + smso=\E[7m, smul=\E[4m, + +# (vt525: I added / based on the init string; +# removed =\E[m, =\E[m, added -- esr) +vt525|DEC VT525, + am, mir, xenl, xon, + cols#80, lines#24, vt#3, + acsc=``aaffggjjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~, + bel=^G, blink=\E[5m$<2>, bold=\E[1m$<2>, + clear=\E[H\E[2J$<50>, cr=^M, csr=\E[%i%p1%d;%p2%dr, + cub1=^H, cud1=\E[B, cuf1=\E[C, + cup=\E[%i%p1%d;%p2%dH$<10>, cuu1=\E[A, dch1=\E[P, + dl1=\E[M, ed=\E[J$<50>, el=\E[K$<3>, home=\E[H, ht=^I, + if=/usr/share/tabset/vt300, il1=\E[L, ind=\ED, + is2=\E[1;24r\E[24;1H, is3=\E[?67h\E[64;1"p, kbs=^H, + kcub1=\E[D, kcud1=\E[B, kcuf1=\E[C, kcuu1=\E[A, + kdch1=\E[3~, kf0=\E[29~, kf1=\EOP, kf10=\E[29~, kf2=\EOQ, + kf3=\EOR, kf4=\EOS, kf5=\E[17~, kf6=\E[18~, 
kf7=\E[19~, + kf8=\E[20~, kf9=\E[21~, kfnd=\E[1~, kich1=\E[2~, knp=\E[6~, + kpp=\E[5~, kslt=\E[4~, + pfx=\EP1;1|%?%{16}%p1%>%t%{0}%e%{21}%p1%>%t%{1}%e%{25}%p1%>%t%{2}%e%{27}%p1%>%t%{3}%e%{30}%p1%>%t%{4}%e%{5}%;%p1%+%d/%p2%s\E\\, + rc=\E8, rev=\E[7m$<2>, rf=/usr/share/tabset/vt300, + ri=\EM, rmacs=\E(B$<4>, rmam=\E[?7l, rmir=\E[4l, + rmsc=\E[?0;0r\E>\E[?3l\E[?4l\E[?5l\E[?7h\E[?8h, + rmso=\E[m, rmul=\E[m, rs3=\E[?67h\E[64;1"p, sc=\E7, + sgr=\E[0%?%p6%t;1%;%?%p2%t;4%;%?%p4%t;5%;%?%p1%p3%|%t;7%;m%?%p9%t\E(0%e\E(B%;, + sgr0=\E[m$<2>, smacs=\E(0$<2>, smam=\E[?7h, smir=\E[4h, + smso=\E[7m, smul=\E[4m, -- cgit v1.2.1 From a52ee9df900b5a77ffbfa9ef15cdf20461979957 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Thu, 5 Nov 2015 14:50:08 +0900 Subject: Add the lexer for IETF 7405 ABNF . --- pygments/lexers/_mapping.py | 1 + pygments/lexers/grammar_notation.py | 96 +++++++++ tests/examplefiles/abnf_example1.abnf | 58 ++++++ tests/examplefiles/abnf_example2.abnf | 58 ++++++ tests/examplefiles/abnf_example3.abnf | 382 ++++++++++++++++++++++++++++++++++ tests/examplefiles/abnf_example4.abnf | 382 ++++++++++++++++++++++++++++++++++ tests/examplefiles/abnf_example5.abnf | 7 + 7 files changed, 984 insertions(+) create mode 100644 pygments/lexers/grammar_notation.py create mode 100644 tests/examplefiles/abnf_example1.abnf create mode 100644 tests/examplefiles/abnf_example2.abnf create mode 100644 tests/examplefiles/abnf_example3.abnf create mode 100644 tests/examplefiles/abnf_example4.abnf create mode 100644 tests/examplefiles/abnf_example5.abnf diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index af7eec36..adf58313 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -17,6 +17,7 @@ from __future__ import print_function LEXERS = { 'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)), + 'AbnfLexer': ('pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)), 'APLLexer': 
('pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()), 'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), 'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), diff --git a/pygments/lexers/grammar_notation.py b/pygments/lexers/grammar_notation.py new file mode 100644 index 00000000..65475646 --- /dev/null +++ b/pygments/lexers/grammar_notation.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.grammar_notation + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for parser generators. + + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, DelegatingLexer, \ + include, bygroups, using, words +from pygments.token import Punctuation, Other, Text, Comment, Operator, \ + Keyword, Name, String, Number, Whitespace, Literal + +__all__ = ['AbnfLexer'] + + +# EBNF shold be moved here, i think. + +class AbnfLexer(RegexLexer): + """ + Lexer for `IETF 7405 ABNF + `_ + (Updates `5234 `_) + grammars. + + .. versionadded:: 2.1 + """ + + name = 'ABNF' + aliases = ['abnf'] + filenames = ['*.abnf'] + mimetypes = ['text/x-abnf'] + + _core_rules = ( + 'ALPHA', 'BIT', 'CHAR', 'CR', 'CRLF', 'CTL', 'DIGIT', + 'DQUOTE', 'HEXDIG', 'HTAB', 'LF', 'LWSP', 'OCTET', + 'SP', 'VCHAR', 'WSP',) + + def nonterminal_cb(self, match): + txt = match.group(0) + if txt in self._core_rules: + # Strictly speaking, these are not keyword but + # is called `Core Rule'. + yield match.start(), Keyword, txt + else: + yield match.start(), Name.Class, txt + + tokens = { + 'root': [ + # comment + (r';.*$', Comment.Single), + + # quoted + (r'(%[si])?"', Literal, 'quoted-termination'), + + # binary (but i have never seen...) 
+ (r'%b[01]+\-[01]+\b', Literal), # range + (r'%b[01]+(\.[01]+)*\b', Literal), # concat + + # decimal + (r'%d[0-9]+\-[0-9]+\b', Literal), # range + (r'%d[0-9]+(\.[0-9]+)*\b', Literal), # concat + + # hexadecimal + (r'%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b', Literal), # range + (r'%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b', Literal), # concat + + # repetition (*element) including nRule + (r'\b[0-9]+\*[0-9]+', Operator), + (r'\b[0-9]+\*', Operator), + (r'\b[0-9]+', Operator), + (r'\*', Operator), + + # nonterminals (ALPHA *(ALPHA / DIGIT / "-")) + (r'[a-zA-Z][a-zA-Z0-9-]+\b', nonterminal_cb), + + # operators + (r'(=/|=|/)', Operator), + + # punctuation + (r'[\[\]()]', Punctuation), + + # fallback + (r'.', Text), + ], + 'quoted-termination': [ + # double quote itself in this state, it is as '%x22'. + (r'"', Literal, '#pop'), + (r'.', Literal), + ] + } diff --git a/tests/examplefiles/abnf_example1.abnf b/tests/examplefiles/abnf_example1.abnf new file mode 100644 index 00000000..c5bbe221 --- /dev/null +++ b/tests/examplefiles/abnf_example1.abnf @@ -0,0 +1,58 @@ +rulelist = 1*( rule / (*c-wsp c-nl) ) + +rule = rulename defined-as elements c-nl + ; continues if next line starts + ; with white space + +rulename = ALPHA *(ALPHA / DIGIT / "-") +defined-as = *c-wsp ("=" / "=/") *c-wsp + ; basic rules definition and + ; incremental alternatives + +elements = alternation *c-wsp + +c-wsp = WSP / (c-nl WSP) + +c-nl = comment / CRLF + ; comment or newline + +comment = ";" *(WSP / VCHAR) CRLF + +alternation = concatenation + *(*c-wsp "/" *c-wsp concatenation) + +concatenation = repetition *(1*c-wsp repetition) + +repetition = [repeat] element + +repeat = 1*DIGIT / (*DIGIT "*" *DIGIT) + +element = rulename / group / option / + char-val / num-val / prose-val + +group = "(" *c-wsp alternation *c-wsp ")" + +option = "[" *c-wsp alternation *c-wsp "]" + +char-val = DQUOTE *(%x20-21 / %x23-7E) DQUOTE + ; quoted string of SP and VCHAR + ; without DQUOTE + +num-val = "%" (bin-val / dec-val / hex-val) + 
+bin-val = "b" 1*BIT + [ 1*("." 1*BIT) / ("-" 1*BIT) ] + ; series of concatenated bit values + ; or single ONEOF range + +dec-val = "d" 1*DIGIT + [ 1*("." 1*DIGIT) / ("-" 1*DIGIT) ] + +hex-val = "x" 1*HEXDIG + [ 1*("." 1*HEXDIG) / ("-" 1*HEXDIG) ] + +prose-val = "<" *(%x20-3D / %x3F-7E) ">" + ; bracketed string of SP and VCHAR + ; without angles + ; prose description, to be used as + ; last resort diff --git a/tests/examplefiles/abnf_example2.abnf b/tests/examplefiles/abnf_example2.abnf new file mode 100644 index 00000000..77c79cb4 --- /dev/null +++ b/tests/examplefiles/abnf_example2.abnf @@ -0,0 +1,58 @@ + rulelist = 1*( rule / (*c-wsp c-nl) ) + + rule = rulename defined-as elements c-nl + ; continues if next line starts + ; with white space + + rulename = ALPHA *(ALPHA / DIGIT / "-") + defined-as = *c-wsp ("=" / "=/") *c-wsp + ; basic rules definition and + ; incremental alternatives + + elements = alternation *c-wsp + + c-wsp = WSP / (c-nl WSP) + + c-nl = comment / CRLF + ; comment or newline + + comment = ";" *(WSP / VCHAR) CRLF + + alternation = concatenation + *(*c-wsp "/" *c-wsp concatenation) + + concatenation = repetition *(1*c-wsp repetition) + + repetition = [repeat] element + + repeat = 1*DIGIT / (*DIGIT "*" *DIGIT) + + element = rulename / group / option / + char-val / num-val / prose-val + + group = "(" *c-wsp alternation *c-wsp ")" + + option = "[" *c-wsp alternation *c-wsp "]" + + char-val = DQUOTE *(%x20-21 / %x23-7E) DQUOTE + ; quoted string of SP and VCHAR + ; without DQUOTE + + num-val = "%" (bin-val / dec-val / hex-val) + + bin-val = "b" 1*BIT + [ 1*("." 1*BIT) / ("-" 1*BIT) ] + ; series of concatenated bit values + ; or single ONEOF range + + dec-val = "d" 1*DIGIT + [ 1*("." 1*DIGIT) / ("-" 1*DIGIT) ] + + hex-val = "x" 1*HEXDIG + [ 1*("." 
1*HEXDIG) / ("-" 1*HEXDIG) ] + + prose-val = "<" *(%x20-3D / %x3F-7E) ">" + ; bracketed string of SP and VCHAR + ; without angles + ; prose description, to be used as + ; last resort diff --git a/tests/examplefiles/abnf_example3.abnf b/tests/examplefiles/abnf_example3.abnf new file mode 100644 index 00000000..51690f54 --- /dev/null +++ b/tests/examplefiles/abnf_example3.abnf @@ -0,0 +1,382 @@ +NO-WS-CTL = %d1-8 / ; US-ASCII control characters + %d11 / ; that do not include the + %d12 / ; carriage return, line feed, + %d14-31 / ; and white space characters + %d127 + +text = %d1-9 / ; Characters excluding CR and LF + %d11 / + %d12 / + %d14-127 / + obs-text + +specials = "(" / ")" / ; Special characters used in + "<" / ">" / ; other parts of the syntax + "[" / "]" / + ":" / ";" / + "@" / "\" / + "," / "." / + DQUOTE + +quoted-pair = ("\" text) / obs-qp + +FWS = ([*WSP CRLF] 1*WSP) / ; Folding white space + obs-FWS + +ctext = NO-WS-CTL / ; Non white space controls + + %d33-39 / ; The rest of the US-ASCII + %d42-91 / ; characters not including "(", + %d93-126 ; ")", or "\" + +ccontent = ctext / quoted-pair / comment + +comment = "(" *([FWS] ccontent) [FWS] ")" + +CFWS = *([FWS] comment) (([FWS] comment) / FWS) + + +atext = ALPHA / DIGIT / ; Any character except controls, + "!" / "#" / ; SP, and specials. + "$" / "%" / ; Used for atoms + "&" / "'" / + "*" / "+" / + "-" / "/" / + "=" / "?" / + "^" / "_" / + "`" / "{" / + "|" / "}" / + "~" + +atom = [CFWS] 1*atext [CFWS] + +dot-atom = [CFWS] dot-atom-text [CFWS] + +dot-atom-text = 1*atext *("." 
1*atext) + +qtext = NO-WS-CTL / ; Non white space controls + + %d33 / ; The rest of the US-ASCII + %d35-91 / ; characters not including "\" + %d93-126 ; or the quote character + +qcontent = qtext / quoted-pair + +quoted-string = [CFWS] + DQUOTE *([FWS] qcontent) [FWS] DQUOTE + [CFWS] + +word = atom / quoted-string + +phrase = 1*word / obs-phrase + +utext = NO-WS-CTL / ; Non white space controls + %d33-126 / ; The rest of US-ASCII + obs-utext + +unstructured = *([FWS] utext) [FWS] + +date-time = [ day-of-week "," ] date FWS time [CFWS] + +day-of-week = ([FWS] day-name) / obs-day-of-week + +day-name = "Mon" / "Tue" / "Wed" / "Thu" / + "Fri" / "Sat" / "Sun" + +date = day month year + +year = 4*DIGIT / obs-year + +month = (FWS month-name FWS) / obs-month + +month-name = "Jan" / "Feb" / "Mar" / "Apr" / + "May" / "Jun" / "Jul" / "Aug" / + "Sep" / "Oct" / "Nov" / "Dec" + +day = ([FWS] 1*2DIGIT) / obs-day + +time = time-of-day FWS zone + +time-of-day = hour ":" minute [ ":" second ] + +hour = 2DIGIT / obs-hour + +minute = 2DIGIT / obs-minute + +second = 2DIGIT / obs-second + +zone = (( "+" / "-" ) 4DIGIT) / obs-zone + +address = mailbox / group + +mailbox = name-addr / addr-spec + +name-addr = [display-name] angle-addr + +angle-addr = [CFWS] "<" addr-spec ">" [CFWS] / obs-angle-addr + +group = display-name ":" [mailbox-list / CFWS] ";" + [CFWS] + +display-name = phrase + +mailbox-list = (mailbox *("," mailbox)) / obs-mbox-list + +address-list = (address *("," address)) / obs-addr-list + +addr-spec = local-part "@" domain + +local-part = dot-atom / quoted-string / obs-local-part + +domain = dot-atom / domain-literal / obs-domain + +domain-literal = [CFWS] "[" *([FWS] dcontent) [FWS] "]" [CFWS] + +dcontent = dtext / quoted-pair + +dtext = NO-WS-CTL / ; Non white space controls + + %d33-90 / ; The rest of the US-ASCII + %d94-126 ; characters not including "[", + ; "]", or "\" + +message = (fields / obs-fields) + [CRLF body] + +body = *(*998text CRLF) *998text + +fields = 
*(trace + *(resent-date / + resent-from / + resent-sender / + resent-to / + resent-cc / + resent-bcc / + resent-msg-id)) + *(orig-date / + from / + sender / + reply-to / + to / + cc / + bcc / + message-id / + in-reply-to / + references / + subject / + comments / + keywords / + optional-field) + +orig-date = "Date:" date-time CRLF + +from = "From:" mailbox-list CRLF + +sender = "Sender:" mailbox CRLF + +reply-to = "Reply-To:" address-list CRLF + +to = "To:" address-list CRLF + +cc = "Cc:" address-list CRLF + +bcc = "Bcc:" (address-list / [CFWS]) CRLF + +message-id = "Message-ID:" msg-id CRLF + +in-reply-to = "In-Reply-To:" 1*msg-id CRLF + +references = "References:" 1*msg-id CRLF + +msg-id = [CFWS] "<" id-left "@" id-right ">" [CFWS] + +id-left = dot-atom-text / no-fold-quote / obs-id-left + +id-right = dot-atom-text / no-fold-literal / obs-id-right + +no-fold-quote = DQUOTE *(qtext / quoted-pair) DQUOTE + +no-fold-literal = "[" *(dtext / quoted-pair) "]" + +subject = "Subject:" unstructured CRLF + +comments = "Comments:" unstructured CRLF + +keywords = "Keywords:" phrase *("," phrase) CRLF + +resent-date = "Resent-Date:" date-time CRLF + +resent-from = "Resent-From:" mailbox-list CRLF + +resent-sender = "Resent-Sender:" mailbox CRLF + +resent-to = "Resent-To:" address-list CRLF + +resent-cc = "Resent-Cc:" address-list CRLF + +resent-bcc = "Resent-Bcc:" (address-list / [CFWS]) CRLF + +resent-msg-id = "Resent-Message-ID:" msg-id CRLF + +trace = [return] + 1*received + +return = "Return-Path:" path CRLF + +path = ([CFWS] "<" ([CFWS] / addr-spec) ">" [CFWS]) / + obs-path + +received = "Received:" name-val-list ";" date-time CRLF + +name-val-list = [CFWS] [name-val-pair *(CFWS name-val-pair)] + +name-val-pair = item-name CFWS item-value + +item-name = ALPHA *(["-"] (ALPHA / DIGIT)) + +item-value = 1*angle-addr / addr-spec / + atom / domain / msg-id + +optional-field = field-name ":" unstructured CRLF + +field-name = 1*ftext + +ftext = %d33-57 / ; Any character except + 
%d59-126 ; controls, SP, and + ; ":". + +obs-qp = "\" (%d0-127) + +obs-text = *LF *CR *(obs-char *LF *CR) + +obs-char = %d0-9 / %d11 / ; %d0-127 except CR and + %d12 / %d14-127 ; LF + +obs-utext = obs-text + +obs-phrase = word *(word / "." / CFWS) + +obs-phrase-list = phrase / 1*([phrase] [CFWS] "," [CFWS]) [phrase] + +obs-FWS = 1*WSP *(CRLF 1*WSP) + +obs-day-of-week = [CFWS] day-name [CFWS] + +obs-year = [CFWS] 2*DIGIT [CFWS] + +obs-month = CFWS month-name CFWS + +obs-day = [CFWS] 1*2DIGIT [CFWS] + +obs-hour = [CFWS] 2DIGIT [CFWS] + +obs-minute = [CFWS] 2DIGIT [CFWS] + +obs-second = [CFWS] 2DIGIT [CFWS] + +obs-zone = "UT" / "GMT" / ; Universal Time + ; North American UT + ; offsets + "EST" / "EDT" / ; Eastern: - 5/ - 4 + "CST" / "CDT" / ; Central: - 6/ - 5 + "MST" / "MDT" / ; Mountain: - 7/ - 6 + "PST" / "PDT" / ; Pacific: - 8/ - 7 + + %d65-73 / ; Military zones - "A" + %d75-90 / ; through "I" and "K" + %d97-105 / ; through "Z", both + %d107-122 ; upper and lower case + +obs-angle-addr = [CFWS] "<" [obs-route] addr-spec ">" [CFWS] + +obs-route = [CFWS] obs-domain-list ":" [CFWS] + +obs-domain-list = "@" domain *(*(CFWS / "," ) [CFWS] "@" domain) + +obs-local-part = word *("." word) + +obs-domain = atom *("." 
atom) + +obs-mbox-list = 1*([mailbox] [CFWS] "," [CFWS]) [mailbox] + +obs-addr-list = 1*([address] [CFWS] "," [CFWS]) [address] + +obs-fields = *(obs-return / + obs-received / + obs-orig-date / + obs-from / + obs-sender / + obs-reply-to / + obs-to / + obs-cc / + obs-bcc / + obs-message-id / + obs-in-reply-to / + obs-references / + obs-subject / + obs-comments / + obs-keywords / + obs-resent-date / + obs-resent-from / + obs-resent-send / + obs-resent-rply / + obs-resent-to / + obs-resent-cc / + obs-resent-bcc / + obs-resent-mid / + obs-optional) + +obs-orig-date = "Date" *WSP ":" date-time CRLF + +obs-from = "From" *WSP ":" mailbox-list CRLF + +obs-sender = "Sender" *WSP ":" mailbox CRLF + +obs-reply-to = "Reply-To" *WSP ":" mailbox-list CRLF + +obs-to = "To" *WSP ":" address-list CRLF + +obs-cc = "Cc" *WSP ":" address-list CRLF + +obs-bcc = "Bcc" *WSP ":" (address-list / [CFWS]) CRLF + +obs-message-id = "Message-ID" *WSP ":" msg-id CRLF + +obs-in-reply-to = "In-Reply-To" *WSP ":" *(phrase / msg-id) CRLF + +obs-references = "References" *WSP ":" *(phrase / msg-id) CRLF + +obs-id-left = local-part + +obs-id-right = domain + +obs-subject = "Subject" *WSP ":" unstructured CRLF + +obs-comments = "Comments" *WSP ":" unstructured CRLF + +obs-keywords = "Keywords" *WSP ":" obs-phrase-list CRLF + +obs-resent-from = "Resent-From" *WSP ":" mailbox-list CRLF + +obs-resent-send = "Resent-Sender" *WSP ":" mailbox CRLF + +obs-resent-date = "Resent-Date" *WSP ":" date-time CRLF + +obs-resent-to = "Resent-To" *WSP ":" address-list CRLF + +obs-resent-cc = "Resent-Cc" *WSP ":" address-list CRLF + +obs-resent-bcc = "Resent-Bcc" *WSP ":" + (address-list / [CFWS]) CRLF + +obs-resent-mid = "Resent-Message-ID" *WSP ":" msg-id CRLF + +obs-resent-rply = "Resent-Reply-To" *WSP ":" address-list CRLF + +obs-return = "Return-Path" *WSP ":" path CRLF + +obs-received = "Received" *WSP ":" name-val-list CRLF + +obs-path = obs-angle-addr + +obs-optional = field-name *WSP ":" unstructured CRLF diff 
--git a/tests/examplefiles/abnf_example4.abnf b/tests/examplefiles/abnf_example4.abnf new file mode 100644 index 00000000..78dc38cb --- /dev/null +++ b/tests/examplefiles/abnf_example4.abnf @@ -0,0 +1,382 @@ +NO-WS-CTL = %d1-8 / ; US-ASCII control characters + %d11 / ; that do not include the + %d12 / ; carriage return, line feed, + %d14-31 / ; and white space characters + %d127 + +text = %d1-9 / ; Characters excluding CR and LF + %d11 / + %d12 / + %d14-127 / + obs-text + +specials = "(" / ")" / ; Special characters used in + "<" / ">" / ; other parts of the syntax + "[" / "]" / + ":" / ";" / + "@" / "\" / + "," / "." / + DQUOTE + +quoted-pair = ("\" text) / obs-qp + +FWS = ([*WSP CRLF] 1*WSP) / ; Folding white space + obs-FWS + +ctext = NO-WS-CTL / ; Non white space controls + + %d33-39 / ; The rest of the US-ASCII + %d42-91 / ; characters not including "(", + %d93-126 ; ")", or "\" + +ccontent = ctext / quoted-pair / comment + +comment = "(" *([FWS] ccontent) [FWS] ")" + +CFWS = *([FWS] comment) (([FWS] comment) / FWS) + + +atext = ALPHA / DIGIT / ; Any character except controls, + "!" / "#" / ; SP, and specials. + "$" / "%" / ; Used for atoms + "&" / "'" / + "*" / "+" / + "-" / "/" / + "=" / "?" / + "^" / "_" / + "`" / "{" / + "|" / "}" / + "~" + +atom = [CFWS] 1*atext [CFWS] + +dot-atom = [CFWS] dot-atom-text [CFWS] + +dot-atom-text = 1*atext *("." 
1*atext) + +qtext = NO-WS-CTL / ; Non white space controls + + %d33 / ; The rest of the US-ASCII + %d35-91 / ; characters not including "\" + %d93-126 ; or the quote character + +qcontent = qtext / quoted-pair + +quoted-string = [CFWS] + DQUOTE *([FWS] qcontent) [FWS] DQUOTE + [CFWS] + +word = atom / quoted-string + +phrase = 1*word / obs-phrase + +utext = NO-WS-CTL / ; Non white space controls + %d33-126 / ; The rest of US-ASCII + obs-utext + +unstructured = *([FWS] utext) [FWS] + +date-time = [ day-of-week "," ] date FWS time [CFWS] + +day-of-week = ([FWS] day-name) / obs-day-of-week + +day-name = %i"Mon" / %i"Tue" / %i"Wed" / %i"Thu" / + %i"Fri" / %i"Sat" / %i"Sun" + +date = day month year + +year = 4*DIGIT / obs-year + +month = (FWS month-name FWS) / obs-month + +month-name = %i"Jan" / %i"Feb" / %i"Mar" / %i"Apr" / + %i"May" / %i"Jun" / %i"Jul" / %i"Aug" / + %i"Sep" / %i"Oct" / %i"Nov" / %i"Dec" + +day = ([FWS] 1*2DIGIT) / obs-day + +time = time-of-day FWS zone + +time-of-day = hour ":" minute [ ":" second ] + +hour = 2DIGIT / obs-hour + +minute = 2DIGIT / obs-minute + +second = 2DIGIT / obs-second + +zone = (( "+" / "-" ) 4DIGIT) / obs-zone + +address = mailbox / group + +mailbox = name-addr / addr-spec + +name-addr = [display-name] angle-addr + +angle-addr = [CFWS] "<" addr-spec ">" [CFWS] / obs-angle-addr + +group = display-name ":" [mailbox-list / CFWS] ";" + [CFWS] + +display-name = phrase + +mailbox-list = (mailbox *("," mailbox)) / obs-mbox-list + +address-list = (address *("," address)) / obs-addr-list + +addr-spec = local-part "@" domain + +local-part = dot-atom / quoted-string / obs-local-part + +domain = dot-atom / domain-literal / obs-domain + +domain-literal = [CFWS] "[" *([FWS] dcontent) [FWS] "]" [CFWS] + +dcontent = dtext / quoted-pair + +dtext = NO-WS-CTL / ; Non white space controls + + %d33-90 / ; The rest of the US-ASCII + %d94-126 ; characters not including "[", + ; "]", or "\" + +message = (fields / obs-fields) + [CRLF body] + +body = 
*(*998text CRLF) *998text + +fields = *(trace + *(resent-date / + resent-from / + resent-sender / + resent-to / + resent-cc / + resent-bcc / + resent-msg-id)) + *(orig-date / + from / + sender / + reply-to / + to / + cc / + bcc / + message-id / + in-reply-to / + references / + subject / + comments / + keywords / + optional-field) + +orig-date = %i"Date:" date-time CRLF + +from = %i"From:" mailbox-list CRLF + +sender = %i"Sender:" mailbox CRLF + +reply-to = %i"Reply-To:" address-list CRLF + +to = %i"To:" address-list CRLF + +cc = %i"Cc:" address-list CRLF + +bcc = %i"Bcc:" (address-list / [CFWS]) CRLF + +message-id = %i"Message-ID:" msg-id CRLF + +in-reply-to = %i"In-Reply-To:" 1*msg-id CRLF + +references = %i"References:" 1*msg-id CRLF + +msg-id = [CFWS] "<" id-left "@" id-right ">" [CFWS] + +id-left = dot-atom-text / no-fold-quote / obs-id-left + +id-right = dot-atom-text / no-fold-literal / obs-id-right + +no-fold-quote = DQUOTE *(qtext / quoted-pair) DQUOTE + +no-fold-literal = "[" *(dtext / quoted-pair) "]" + +subject = %i"Subject:" unstructured CRLF + +comments = %i"Comments:" unstructured CRLF + +keywords = %i"Keywords:" phrase *("," phrase) CRLF + +resent-date = %i"Resent-Date:" date-time CRLF + +resent-from = %i"Resent-From:" mailbox-list CRLF + +resent-sender = %i"Resent-Sender:" mailbox CRLF + +resent-to = %i"Resent-To:" address-list CRLF + +resent-cc = %i"Resent-Cc:" address-list CRLF + +resent-bcc = %i"Resent-Bcc:" (address-list / [CFWS]) CRLF + +resent-msg-id = %i"Resent-Message-ID:" msg-id CRLF + +trace = [return] + 1*received + +return = %i"Return-Path:" path CRLF + +path = ([CFWS] "<" ([CFWS] / addr-spec) ">" [CFWS]) / + obs-path + +received = %i"Received:" name-val-list ";" date-time CRLF + +name-val-list = [CFWS] [name-val-pair *(CFWS name-val-pair)] + +name-val-pair = item-name CFWS item-value + +item-name = ALPHA *(["-"] (ALPHA / DIGIT)) + +item-value = 1*angle-addr / addr-spec / + atom / domain / msg-id + +optional-field = field-name ":" 
unstructured CRLF + +field-name = 1*ftext + +ftext = %d33-57 / ; Any character except + %d59-126 ; controls, SP, and + ; ":". + +obs-qp = "\" (%d0-127) + +obs-text = *LF *CR *(obs-char *LF *CR) + +obs-char = %d0-9 / %d11 / ; %d0-127 except CR and + %d12 / %d14-127 ; LF + +obs-utext = obs-text + +obs-phrase = word *(word / "." / CFWS) + +obs-phrase-list = phrase / 1*([phrase] [CFWS] "," [CFWS]) [phrase] + +obs-FWS = 1*WSP *(CRLF 1*WSP) + +obs-day-of-week = [CFWS] day-name [CFWS] + +obs-year = [CFWS] 2*DIGIT [CFWS] + +obs-month = CFWS month-name CFWS + +obs-day = [CFWS] 1*2DIGIT [CFWS] + +obs-hour = [CFWS] 2DIGIT [CFWS] + +obs-minute = [CFWS] 2DIGIT [CFWS] + +obs-second = [CFWS] 2DIGIT [CFWS] + +obs-zone = %i"UT" / %i"GMT" / ; Universal Time + ; North American UT + ; offsets + %i"EST" / %i"EDT" / ; Eastern: - 5/ - 4 + %i"CST" / %i"CDT" / ; Central: - 6/ - 5 + %i"MST" / %i"MDT" / ; Mountain: - 7/ - 6 + %i"PST" / %i"PDT" / ; Pacific: - 8/ - 7 + + %d65-73 / ; Military zones - "A" + %d75-90 / ; through "I" and "K" + %d97-105 / ; through "Z", both + %d107-122 ; upper and lower case + +obs-angle-addr = [CFWS] "<" [obs-route] addr-spec ">" [CFWS] + +obs-route = [CFWS] obs-domain-list ":" [CFWS] + +obs-domain-list = "@" domain *(*(CFWS / "," ) [CFWS] "@" domain) + +obs-local-part = word *("." word) + +obs-domain = atom *("." 
atom) + +obs-mbox-list = 1*([mailbox] [CFWS] "," [CFWS]) [mailbox] + +obs-addr-list = 1*([address] [CFWS] "," [CFWS]) [address] + +obs-fields = *(obs-return / + obs-received / + obs-orig-date / + obs-from / + obs-sender / + obs-reply-to / + obs-to / + obs-cc / + obs-bcc / + obs-message-id / + obs-in-reply-to / + obs-references / + obs-subject / + obs-comments / + obs-keywords / + obs-resent-date / + obs-resent-from / + obs-resent-send / + obs-resent-rply / + obs-resent-to / + obs-resent-cc / + obs-resent-bcc / + obs-resent-mid / + obs-optional) + +obs-orig-date = %i"Date" *WSP ":" date-time CRLF + +obs-from = %i"From" *WSP ":" mailbox-list CRLF + +obs-sender = %i"Sender" *WSP ":" mailbox CRLF + +obs-reply-to = %i"Reply-To" *WSP ":" mailbox-list CRLF + +obs-to = %i"To" *WSP ":" address-list CRLF + +obs-cc = %i"Cc" *WSP ":" address-list CRLF + +obs-bcc = %i"Bcc" *WSP ":" (address-list / [CFWS]) CRLF + +obs-message-id = %i"Message-ID" *WSP ":" msg-id CRLF + +obs-in-reply-to = %i"In-Reply-To" *WSP ":" *(phrase / msg-id) CRLF + +obs-references = %i"References" *WSP ":" *(phrase / msg-id) CRLF + +obs-id-left = local-part + +obs-id-right = domain + +obs-subject = %i"Subject" *WSP ":" unstructured CRLF + +obs-comments = %i"Comments" *WSP ":" unstructured CRLF + +obs-keywords = %i"Keywords" *WSP ":" obs-phrase-list CRLF + +obs-resent-from = %i"Resent-From" *WSP ":" mailbox-list CRLF + +obs-resent-send = %i"Resent-Sender" *WSP ":" mailbox CRLF + +obs-resent-date = %i"Resent-Date" *WSP ":" date-time CRLF + +obs-resent-to = %i"Resent-To" *WSP ":" address-list CRLF + +obs-resent-cc = %i"Resent-Cc" *WSP ":" address-list CRLF + +obs-resent-bcc = %i"Resent-Bcc" *WSP ":" + (address-list / [CFWS]) CRLF + +obs-resent-mid = %i"Resent-Message-ID" *WSP ":" msg-id CRLF + +obs-resent-rply = %i"Resent-Reply-To" *WSP ":" address-list CRLF + +obs-return = %i"Return-Path" *WSP ":" path CRLF + +obs-received = %i"Received" *WSP ":" name-val-list CRLF + +obs-path = obs-angle-addr + +obs-optional 
= field-name *WSP ":" unstructured CRLF diff --git a/tests/examplefiles/abnf_example5.abnf b/tests/examplefiles/abnf_example5.abnf new file mode 100644 index 00000000..6ef65125 --- /dev/null +++ b/tests/examplefiles/abnf_example5.abnf @@ -0,0 +1,7 @@ +crlf = %d13.10 + +command = "command string" + +char-line = %x0D.0A *(%x20-7E) %x0D.0A + +oldrule =/ additional-alternatives -- cgit v1.2.1 From 7e380ac8b6880e08471ffea5ee855dd573a68099 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Thu, 5 Nov 2015 15:44:10 +0900 Subject: Delete examples1~4 because these examples may have license issue. --- tests/examplefiles/abnf_example1.abnf | 58 ------ tests/examplefiles/abnf_example2.abnf | 58 ------ tests/examplefiles/abnf_example3.abnf | 382 ---------------------------------- tests/examplefiles/abnf_example4.abnf | 382 ---------------------------------- 4 files changed, 880 deletions(-) delete mode 100644 tests/examplefiles/abnf_example1.abnf delete mode 100644 tests/examplefiles/abnf_example2.abnf delete mode 100644 tests/examplefiles/abnf_example3.abnf delete mode 100644 tests/examplefiles/abnf_example4.abnf diff --git a/tests/examplefiles/abnf_example1.abnf b/tests/examplefiles/abnf_example1.abnf deleted file mode 100644 index c5bbe221..00000000 --- a/tests/examplefiles/abnf_example1.abnf +++ /dev/null @@ -1,58 +0,0 @@ -rulelist = 1*( rule / (*c-wsp c-nl) ) - -rule = rulename defined-as elements c-nl - ; continues if next line starts - ; with white space - -rulename = ALPHA *(ALPHA / DIGIT / "-") -defined-as = *c-wsp ("=" / "=/") *c-wsp - ; basic rules definition and - ; incremental alternatives - -elements = alternation *c-wsp - -c-wsp = WSP / (c-nl WSP) - -c-nl = comment / CRLF - ; comment or newline - -comment = ";" *(WSP / VCHAR) CRLF - -alternation = concatenation - *(*c-wsp "/" *c-wsp concatenation) - -concatenation = repetition *(1*c-wsp repetition) - -repetition = [repeat] element - -repeat = 1*DIGIT / (*DIGIT "*" *DIGIT) - -element = rulename / group / option / - 
char-val / num-val / prose-val - -group = "(" *c-wsp alternation *c-wsp ")" - -option = "[" *c-wsp alternation *c-wsp "]" - -char-val = DQUOTE *(%x20-21 / %x23-7E) DQUOTE - ; quoted string of SP and VCHAR - ; without DQUOTE - -num-val = "%" (bin-val / dec-val / hex-val) - -bin-val = "b" 1*BIT - [ 1*("." 1*BIT) / ("-" 1*BIT) ] - ; series of concatenated bit values - ; or single ONEOF range - -dec-val = "d" 1*DIGIT - [ 1*("." 1*DIGIT) / ("-" 1*DIGIT) ] - -hex-val = "x" 1*HEXDIG - [ 1*("." 1*HEXDIG) / ("-" 1*HEXDIG) ] - -prose-val = "<" *(%x20-3D / %x3F-7E) ">" - ; bracketed string of SP and VCHAR - ; without angles - ; prose description, to be used as - ; last resort diff --git a/tests/examplefiles/abnf_example2.abnf b/tests/examplefiles/abnf_example2.abnf deleted file mode 100644 index 77c79cb4..00000000 --- a/tests/examplefiles/abnf_example2.abnf +++ /dev/null @@ -1,58 +0,0 @@ - rulelist = 1*( rule / (*c-wsp c-nl) ) - - rule = rulename defined-as elements c-nl - ; continues if next line starts - ; with white space - - rulename = ALPHA *(ALPHA / DIGIT / "-") - defined-as = *c-wsp ("=" / "=/") *c-wsp - ; basic rules definition and - ; incremental alternatives - - elements = alternation *c-wsp - - c-wsp = WSP / (c-nl WSP) - - c-nl = comment / CRLF - ; comment or newline - - comment = ";" *(WSP / VCHAR) CRLF - - alternation = concatenation - *(*c-wsp "/" *c-wsp concatenation) - - concatenation = repetition *(1*c-wsp repetition) - - repetition = [repeat] element - - repeat = 1*DIGIT / (*DIGIT "*" *DIGIT) - - element = rulename / group / option / - char-val / num-val / prose-val - - group = "(" *c-wsp alternation *c-wsp ")" - - option = "[" *c-wsp alternation *c-wsp "]" - - char-val = DQUOTE *(%x20-21 / %x23-7E) DQUOTE - ; quoted string of SP and VCHAR - ; without DQUOTE - - num-val = "%" (bin-val / dec-val / hex-val) - - bin-val = "b" 1*BIT - [ 1*("." 
1*BIT) / ("-" 1*BIT) ] - ; series of concatenated bit values - ; or single ONEOF range - - dec-val = "d" 1*DIGIT - [ 1*("." 1*DIGIT) / ("-" 1*DIGIT) ] - - hex-val = "x" 1*HEXDIG - [ 1*("." 1*HEXDIG) / ("-" 1*HEXDIG) ] - - prose-val = "<" *(%x20-3D / %x3F-7E) ">" - ; bracketed string of SP and VCHAR - ; without angles - ; prose description, to be used as - ; last resort diff --git a/tests/examplefiles/abnf_example3.abnf b/tests/examplefiles/abnf_example3.abnf deleted file mode 100644 index 51690f54..00000000 --- a/tests/examplefiles/abnf_example3.abnf +++ /dev/null @@ -1,382 +0,0 @@ -NO-WS-CTL = %d1-8 / ; US-ASCII control characters - %d11 / ; that do not include the - %d12 / ; carriage return, line feed, - %d14-31 / ; and white space characters - %d127 - -text = %d1-9 / ; Characters excluding CR and LF - %d11 / - %d12 / - %d14-127 / - obs-text - -specials = "(" / ")" / ; Special characters used in - "<" / ">" / ; other parts of the syntax - "[" / "]" / - ":" / ";" / - "@" / "\" / - "," / "." / - DQUOTE - -quoted-pair = ("\" text) / obs-qp - -FWS = ([*WSP CRLF] 1*WSP) / ; Folding white space - obs-FWS - -ctext = NO-WS-CTL / ; Non white space controls - - %d33-39 / ; The rest of the US-ASCII - %d42-91 / ; characters not including "(", - %d93-126 ; ")", or "\" - -ccontent = ctext / quoted-pair / comment - -comment = "(" *([FWS] ccontent) [FWS] ")" - -CFWS = *([FWS] comment) (([FWS] comment) / FWS) - - -atext = ALPHA / DIGIT / ; Any character except controls, - "!" / "#" / ; SP, and specials. - "$" / "%" / ; Used for atoms - "&" / "'" / - "*" / "+" / - "-" / "/" / - "=" / "?" / - "^" / "_" / - "`" / "{" / - "|" / "}" / - "~" - -atom = [CFWS] 1*atext [CFWS] - -dot-atom = [CFWS] dot-atom-text [CFWS] - -dot-atom-text = 1*atext *("." 
1*atext) - -qtext = NO-WS-CTL / ; Non white space controls - - %d33 / ; The rest of the US-ASCII - %d35-91 / ; characters not including "\" - %d93-126 ; or the quote character - -qcontent = qtext / quoted-pair - -quoted-string = [CFWS] - DQUOTE *([FWS] qcontent) [FWS] DQUOTE - [CFWS] - -word = atom / quoted-string - -phrase = 1*word / obs-phrase - -utext = NO-WS-CTL / ; Non white space controls - %d33-126 / ; The rest of US-ASCII - obs-utext - -unstructured = *([FWS] utext) [FWS] - -date-time = [ day-of-week "," ] date FWS time [CFWS] - -day-of-week = ([FWS] day-name) / obs-day-of-week - -day-name = "Mon" / "Tue" / "Wed" / "Thu" / - "Fri" / "Sat" / "Sun" - -date = day month year - -year = 4*DIGIT / obs-year - -month = (FWS month-name FWS) / obs-month - -month-name = "Jan" / "Feb" / "Mar" / "Apr" / - "May" / "Jun" / "Jul" / "Aug" / - "Sep" / "Oct" / "Nov" / "Dec" - -day = ([FWS] 1*2DIGIT) / obs-day - -time = time-of-day FWS zone - -time-of-day = hour ":" minute [ ":" second ] - -hour = 2DIGIT / obs-hour - -minute = 2DIGIT / obs-minute - -second = 2DIGIT / obs-second - -zone = (( "+" / "-" ) 4DIGIT) / obs-zone - -address = mailbox / group - -mailbox = name-addr / addr-spec - -name-addr = [display-name] angle-addr - -angle-addr = [CFWS] "<" addr-spec ">" [CFWS] / obs-angle-addr - -group = display-name ":" [mailbox-list / CFWS] ";" - [CFWS] - -display-name = phrase - -mailbox-list = (mailbox *("," mailbox)) / obs-mbox-list - -address-list = (address *("," address)) / obs-addr-list - -addr-spec = local-part "@" domain - -local-part = dot-atom / quoted-string / obs-local-part - -domain = dot-atom / domain-literal / obs-domain - -domain-literal = [CFWS] "[" *([FWS] dcontent) [FWS] "]" [CFWS] - -dcontent = dtext / quoted-pair - -dtext = NO-WS-CTL / ; Non white space controls - - %d33-90 / ; The rest of the US-ASCII - %d94-126 ; characters not including "[", - ; "]", or "\" - -message = (fields / obs-fields) - [CRLF body] - -body = *(*998text CRLF) *998text - -fields = 
*(trace - *(resent-date / - resent-from / - resent-sender / - resent-to / - resent-cc / - resent-bcc / - resent-msg-id)) - *(orig-date / - from / - sender / - reply-to / - to / - cc / - bcc / - message-id / - in-reply-to / - references / - subject / - comments / - keywords / - optional-field) - -orig-date = "Date:" date-time CRLF - -from = "From:" mailbox-list CRLF - -sender = "Sender:" mailbox CRLF - -reply-to = "Reply-To:" address-list CRLF - -to = "To:" address-list CRLF - -cc = "Cc:" address-list CRLF - -bcc = "Bcc:" (address-list / [CFWS]) CRLF - -message-id = "Message-ID:" msg-id CRLF - -in-reply-to = "In-Reply-To:" 1*msg-id CRLF - -references = "References:" 1*msg-id CRLF - -msg-id = [CFWS] "<" id-left "@" id-right ">" [CFWS] - -id-left = dot-atom-text / no-fold-quote / obs-id-left - -id-right = dot-atom-text / no-fold-literal / obs-id-right - -no-fold-quote = DQUOTE *(qtext / quoted-pair) DQUOTE - -no-fold-literal = "[" *(dtext / quoted-pair) "]" - -subject = "Subject:" unstructured CRLF - -comments = "Comments:" unstructured CRLF - -keywords = "Keywords:" phrase *("," phrase) CRLF - -resent-date = "Resent-Date:" date-time CRLF - -resent-from = "Resent-From:" mailbox-list CRLF - -resent-sender = "Resent-Sender:" mailbox CRLF - -resent-to = "Resent-To:" address-list CRLF - -resent-cc = "Resent-Cc:" address-list CRLF - -resent-bcc = "Resent-Bcc:" (address-list / [CFWS]) CRLF - -resent-msg-id = "Resent-Message-ID:" msg-id CRLF - -trace = [return] - 1*received - -return = "Return-Path:" path CRLF - -path = ([CFWS] "<" ([CFWS] / addr-spec) ">" [CFWS]) / - obs-path - -received = "Received:" name-val-list ";" date-time CRLF - -name-val-list = [CFWS] [name-val-pair *(CFWS name-val-pair)] - -name-val-pair = item-name CFWS item-value - -item-name = ALPHA *(["-"] (ALPHA / DIGIT)) - -item-value = 1*angle-addr / addr-spec / - atom / domain / msg-id - -optional-field = field-name ":" unstructured CRLF - -field-name = 1*ftext - -ftext = %d33-57 / ; Any character except - 
%d59-126 ; controls, SP, and - ; ":". - -obs-qp = "\" (%d0-127) - -obs-text = *LF *CR *(obs-char *LF *CR) - -obs-char = %d0-9 / %d11 / ; %d0-127 except CR and - %d12 / %d14-127 ; LF - -obs-utext = obs-text - -obs-phrase = word *(word / "." / CFWS) - -obs-phrase-list = phrase / 1*([phrase] [CFWS] "," [CFWS]) [phrase] - -obs-FWS = 1*WSP *(CRLF 1*WSP) - -obs-day-of-week = [CFWS] day-name [CFWS] - -obs-year = [CFWS] 2*DIGIT [CFWS] - -obs-month = CFWS month-name CFWS - -obs-day = [CFWS] 1*2DIGIT [CFWS] - -obs-hour = [CFWS] 2DIGIT [CFWS] - -obs-minute = [CFWS] 2DIGIT [CFWS] - -obs-second = [CFWS] 2DIGIT [CFWS] - -obs-zone = "UT" / "GMT" / ; Universal Time - ; North American UT - ; offsets - "EST" / "EDT" / ; Eastern: - 5/ - 4 - "CST" / "CDT" / ; Central: - 6/ - 5 - "MST" / "MDT" / ; Mountain: - 7/ - 6 - "PST" / "PDT" / ; Pacific: - 8/ - 7 - - %d65-73 / ; Military zones - "A" - %d75-90 / ; through "I" and "K" - %d97-105 / ; through "Z", both - %d107-122 ; upper and lower case - -obs-angle-addr = [CFWS] "<" [obs-route] addr-spec ">" [CFWS] - -obs-route = [CFWS] obs-domain-list ":" [CFWS] - -obs-domain-list = "@" domain *(*(CFWS / "," ) [CFWS] "@" domain) - -obs-local-part = word *("." word) - -obs-domain = atom *("." 
atom) - -obs-mbox-list = 1*([mailbox] [CFWS] "," [CFWS]) [mailbox] - -obs-addr-list = 1*([address] [CFWS] "," [CFWS]) [address] - -obs-fields = *(obs-return / - obs-received / - obs-orig-date / - obs-from / - obs-sender / - obs-reply-to / - obs-to / - obs-cc / - obs-bcc / - obs-message-id / - obs-in-reply-to / - obs-references / - obs-subject / - obs-comments / - obs-keywords / - obs-resent-date / - obs-resent-from / - obs-resent-send / - obs-resent-rply / - obs-resent-to / - obs-resent-cc / - obs-resent-bcc / - obs-resent-mid / - obs-optional) - -obs-orig-date = "Date" *WSP ":" date-time CRLF - -obs-from = "From" *WSP ":" mailbox-list CRLF - -obs-sender = "Sender" *WSP ":" mailbox CRLF - -obs-reply-to = "Reply-To" *WSP ":" mailbox-list CRLF - -obs-to = "To" *WSP ":" address-list CRLF - -obs-cc = "Cc" *WSP ":" address-list CRLF - -obs-bcc = "Bcc" *WSP ":" (address-list / [CFWS]) CRLF - -obs-message-id = "Message-ID" *WSP ":" msg-id CRLF - -obs-in-reply-to = "In-Reply-To" *WSP ":" *(phrase / msg-id) CRLF - -obs-references = "References" *WSP ":" *(phrase / msg-id) CRLF - -obs-id-left = local-part - -obs-id-right = domain - -obs-subject = "Subject" *WSP ":" unstructured CRLF - -obs-comments = "Comments" *WSP ":" unstructured CRLF - -obs-keywords = "Keywords" *WSP ":" obs-phrase-list CRLF - -obs-resent-from = "Resent-From" *WSP ":" mailbox-list CRLF - -obs-resent-send = "Resent-Sender" *WSP ":" mailbox CRLF - -obs-resent-date = "Resent-Date" *WSP ":" date-time CRLF - -obs-resent-to = "Resent-To" *WSP ":" address-list CRLF - -obs-resent-cc = "Resent-Cc" *WSP ":" address-list CRLF - -obs-resent-bcc = "Resent-Bcc" *WSP ":" - (address-list / [CFWS]) CRLF - -obs-resent-mid = "Resent-Message-ID" *WSP ":" msg-id CRLF - -obs-resent-rply = "Resent-Reply-To" *WSP ":" address-list CRLF - -obs-return = "Return-Path" *WSP ":" path CRLF - -obs-received = "Received" *WSP ":" name-val-list CRLF - -obs-path = obs-angle-addr - -obs-optional = field-name *WSP ":" unstructured CRLF diff 
--git a/tests/examplefiles/abnf_example4.abnf b/tests/examplefiles/abnf_example4.abnf deleted file mode 100644 index 78dc38cb..00000000 --- a/tests/examplefiles/abnf_example4.abnf +++ /dev/null @@ -1,382 +0,0 @@ -NO-WS-CTL = %d1-8 / ; US-ASCII control characters - %d11 / ; that do not include the - %d12 / ; carriage return, line feed, - %d14-31 / ; and white space characters - %d127 - -text = %d1-9 / ; Characters excluding CR and LF - %d11 / - %d12 / - %d14-127 / - obs-text - -specials = "(" / ")" / ; Special characters used in - "<" / ">" / ; other parts of the syntax - "[" / "]" / - ":" / ";" / - "@" / "\" / - "," / "." / - DQUOTE - -quoted-pair = ("\" text) / obs-qp - -FWS = ([*WSP CRLF] 1*WSP) / ; Folding white space - obs-FWS - -ctext = NO-WS-CTL / ; Non white space controls - - %d33-39 / ; The rest of the US-ASCII - %d42-91 / ; characters not including "(", - %d93-126 ; ")", or "\" - -ccontent = ctext / quoted-pair / comment - -comment = "(" *([FWS] ccontent) [FWS] ")" - -CFWS = *([FWS] comment) (([FWS] comment) / FWS) - - -atext = ALPHA / DIGIT / ; Any character except controls, - "!" / "#" / ; SP, and specials. - "$" / "%" / ; Used for atoms - "&" / "'" / - "*" / "+" / - "-" / "/" / - "=" / "?" / - "^" / "_" / - "`" / "{" / - "|" / "}" / - "~" - -atom = [CFWS] 1*atext [CFWS] - -dot-atom = [CFWS] dot-atom-text [CFWS] - -dot-atom-text = 1*atext *("." 
1*atext) - -qtext = NO-WS-CTL / ; Non white space controls - - %d33 / ; The rest of the US-ASCII - %d35-91 / ; characters not including "\" - %d93-126 ; or the quote character - -qcontent = qtext / quoted-pair - -quoted-string = [CFWS] - DQUOTE *([FWS] qcontent) [FWS] DQUOTE - [CFWS] - -word = atom / quoted-string - -phrase = 1*word / obs-phrase - -utext = NO-WS-CTL / ; Non white space controls - %d33-126 / ; The rest of US-ASCII - obs-utext - -unstructured = *([FWS] utext) [FWS] - -date-time = [ day-of-week "," ] date FWS time [CFWS] - -day-of-week = ([FWS] day-name) / obs-day-of-week - -day-name = %i"Mon" / %i"Tue" / %i"Wed" / %i"Thu" / - %i"Fri" / %i"Sat" / %i"Sun" - -date = day month year - -year = 4*DIGIT / obs-year - -month = (FWS month-name FWS) / obs-month - -month-name = %i"Jan" / %i"Feb" / %i"Mar" / %i"Apr" / - %i"May" / %i"Jun" / %i"Jul" / %i"Aug" / - %i"Sep" / %i"Oct" / %i"Nov" / %i"Dec" - -day = ([FWS] 1*2DIGIT) / obs-day - -time = time-of-day FWS zone - -time-of-day = hour ":" minute [ ":" second ] - -hour = 2DIGIT / obs-hour - -minute = 2DIGIT / obs-minute - -second = 2DIGIT / obs-second - -zone = (( "+" / "-" ) 4DIGIT) / obs-zone - -address = mailbox / group - -mailbox = name-addr / addr-spec - -name-addr = [display-name] angle-addr - -angle-addr = [CFWS] "<" addr-spec ">" [CFWS] / obs-angle-addr - -group = display-name ":" [mailbox-list / CFWS] ";" - [CFWS] - -display-name = phrase - -mailbox-list = (mailbox *("," mailbox)) / obs-mbox-list - -address-list = (address *("," address)) / obs-addr-list - -addr-spec = local-part "@" domain - -local-part = dot-atom / quoted-string / obs-local-part - -domain = dot-atom / domain-literal / obs-domain - -domain-literal = [CFWS] "[" *([FWS] dcontent) [FWS] "]" [CFWS] - -dcontent = dtext / quoted-pair - -dtext = NO-WS-CTL / ; Non white space controls - - %d33-90 / ; The rest of the US-ASCII - %d94-126 ; characters not including "[", - ; "]", or "\" - -message = (fields / obs-fields) - [CRLF body] - -body = 
*(*998text CRLF) *998text - -fields = *(trace - *(resent-date / - resent-from / - resent-sender / - resent-to / - resent-cc / - resent-bcc / - resent-msg-id)) - *(orig-date / - from / - sender / - reply-to / - to / - cc / - bcc / - message-id / - in-reply-to / - references / - subject / - comments / - keywords / - optional-field) - -orig-date = %i"Date:" date-time CRLF - -from = %i"From:" mailbox-list CRLF - -sender = %i"Sender:" mailbox CRLF - -reply-to = %i"Reply-To:" address-list CRLF - -to = %i"To:" address-list CRLF - -cc = %i"Cc:" address-list CRLF - -bcc = %i"Bcc:" (address-list / [CFWS]) CRLF - -message-id = %i"Message-ID:" msg-id CRLF - -in-reply-to = %i"In-Reply-To:" 1*msg-id CRLF - -references = %i"References:" 1*msg-id CRLF - -msg-id = [CFWS] "<" id-left "@" id-right ">" [CFWS] - -id-left = dot-atom-text / no-fold-quote / obs-id-left - -id-right = dot-atom-text / no-fold-literal / obs-id-right - -no-fold-quote = DQUOTE *(qtext / quoted-pair) DQUOTE - -no-fold-literal = "[" *(dtext / quoted-pair) "]" - -subject = %i"Subject:" unstructured CRLF - -comments = %i"Comments:" unstructured CRLF - -keywords = %i"Keywords:" phrase *("," phrase) CRLF - -resent-date = %i"Resent-Date:" date-time CRLF - -resent-from = %i"Resent-From:" mailbox-list CRLF - -resent-sender = %i"Resent-Sender:" mailbox CRLF - -resent-to = %i"Resent-To:" address-list CRLF - -resent-cc = %i"Resent-Cc:" address-list CRLF - -resent-bcc = %i"Resent-Bcc:" (address-list / [CFWS]) CRLF - -resent-msg-id = %i"Resent-Message-ID:" msg-id CRLF - -trace = [return] - 1*received - -return = %i"Return-Path:" path CRLF - -path = ([CFWS] "<" ([CFWS] / addr-spec) ">" [CFWS]) / - obs-path - -received = %i"Received:" name-val-list ";" date-time CRLF - -name-val-list = [CFWS] [name-val-pair *(CFWS name-val-pair)] - -name-val-pair = item-name CFWS item-value - -item-name = ALPHA *(["-"] (ALPHA / DIGIT)) - -item-value = 1*angle-addr / addr-spec / - atom / domain / msg-id - -optional-field = field-name ":" 
unstructured CRLF - -field-name = 1*ftext - -ftext = %d33-57 / ; Any character except - %d59-126 ; controls, SP, and - ; ":". - -obs-qp = "\" (%d0-127) - -obs-text = *LF *CR *(obs-char *LF *CR) - -obs-char = %d0-9 / %d11 / ; %d0-127 except CR and - %d12 / %d14-127 ; LF - -obs-utext = obs-text - -obs-phrase = word *(word / "." / CFWS) - -obs-phrase-list = phrase / 1*([phrase] [CFWS] "," [CFWS]) [phrase] - -obs-FWS = 1*WSP *(CRLF 1*WSP) - -obs-day-of-week = [CFWS] day-name [CFWS] - -obs-year = [CFWS] 2*DIGIT [CFWS] - -obs-month = CFWS month-name CFWS - -obs-day = [CFWS] 1*2DIGIT [CFWS] - -obs-hour = [CFWS] 2DIGIT [CFWS] - -obs-minute = [CFWS] 2DIGIT [CFWS] - -obs-second = [CFWS] 2DIGIT [CFWS] - -obs-zone = %i"UT" / %i"GMT" / ; Universal Time - ; North American UT - ; offsets - %i"EST" / %i"EDT" / ; Eastern: - 5/ - 4 - %i"CST" / %i"CDT" / ; Central: - 6/ - 5 - %i"MST" / %i"MDT" / ; Mountain: - 7/ - 6 - %i"PST" / %i"PDT" / ; Pacific: - 8/ - 7 - - %d65-73 / ; Military zones - "A" - %d75-90 / ; through "I" and "K" - %d97-105 / ; through "Z", both - %d107-122 ; upper and lower case - -obs-angle-addr = [CFWS] "<" [obs-route] addr-spec ">" [CFWS] - -obs-route = [CFWS] obs-domain-list ":" [CFWS] - -obs-domain-list = "@" domain *(*(CFWS / "," ) [CFWS] "@" domain) - -obs-local-part = word *("." word) - -obs-domain = atom *("." 
atom) - -obs-mbox-list = 1*([mailbox] [CFWS] "," [CFWS]) [mailbox] - -obs-addr-list = 1*([address] [CFWS] "," [CFWS]) [address] - -obs-fields = *(obs-return / - obs-received / - obs-orig-date / - obs-from / - obs-sender / - obs-reply-to / - obs-to / - obs-cc / - obs-bcc / - obs-message-id / - obs-in-reply-to / - obs-references / - obs-subject / - obs-comments / - obs-keywords / - obs-resent-date / - obs-resent-from / - obs-resent-send / - obs-resent-rply / - obs-resent-to / - obs-resent-cc / - obs-resent-bcc / - obs-resent-mid / - obs-optional) - -obs-orig-date = %i"Date" *WSP ":" date-time CRLF - -obs-from = %i"From" *WSP ":" mailbox-list CRLF - -obs-sender = %i"Sender" *WSP ":" mailbox CRLF - -obs-reply-to = %i"Reply-To" *WSP ":" mailbox-list CRLF - -obs-to = %i"To" *WSP ":" address-list CRLF - -obs-cc = %i"Cc" *WSP ":" address-list CRLF - -obs-bcc = %i"Bcc" *WSP ":" (address-list / [CFWS]) CRLF - -obs-message-id = %i"Message-ID" *WSP ":" msg-id CRLF - -obs-in-reply-to = %i"In-Reply-To" *WSP ":" *(phrase / msg-id) CRLF - -obs-references = %i"References" *WSP ":" *(phrase / msg-id) CRLF - -obs-id-left = local-part - -obs-id-right = domain - -obs-subject = %i"Subject" *WSP ":" unstructured CRLF - -obs-comments = %i"Comments" *WSP ":" unstructured CRLF - -obs-keywords = %i"Keywords" *WSP ":" obs-phrase-list CRLF - -obs-resent-from = %i"Resent-From" *WSP ":" mailbox-list CRLF - -obs-resent-send = %i"Resent-Sender" *WSP ":" mailbox CRLF - -obs-resent-date = %i"Resent-Date" *WSP ":" date-time CRLF - -obs-resent-to = %i"Resent-To" *WSP ":" address-list CRLF - -obs-resent-cc = %i"Resent-Cc" *WSP ":" address-list CRLF - -obs-resent-bcc = %i"Resent-Bcc" *WSP ":" - (address-list / [CFWS]) CRLF - -obs-resent-mid = %i"Resent-Message-ID" *WSP ":" msg-id CRLF - -obs-resent-rply = %i"Resent-Reply-To" *WSP ":" address-list CRLF - -obs-return = %i"Return-Path" *WSP ":" path CRLF - -obs-received = %i"Received" *WSP ":" name-val-list CRLF - -obs-path = obs-angle-addr - -obs-optional 
= field-name *WSP ":" unstructured CRLF -- cgit v1.2.1 From 2d469850efe9b5ad590a0c686c968d7810d1b104 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Thu, 5 Nov 2015 15:50:33 +0900 Subject: re-create examples. --- tests/examplefiles/abnf_example1.abnf | 22 ++++++++++++++++++++++ tests/examplefiles/abnf_example2.abnf | 7 +++++++ tests/examplefiles/abnf_example5.abnf | 7 ------- 3 files changed, 29 insertions(+), 7 deletions(-) create mode 100644 tests/examplefiles/abnf_example1.abnf create mode 100644 tests/examplefiles/abnf_example2.abnf delete mode 100644 tests/examplefiles/abnf_example5.abnf diff --git a/tests/examplefiles/abnf_example1.abnf b/tests/examplefiles/abnf_example1.abnf new file mode 100644 index 00000000..5cd9cd25 --- /dev/null +++ b/tests/examplefiles/abnf_example1.abnf @@ -0,0 +1,22 @@ +; This examples from WikiPedia . + + postal-address = name-part street zip-part + + name-part = *(personal-part SP) last-name [SP suffix] CRLF + name-part =/ personal-part CRLF + + personal-part = first-name / (initial ".") + first-name = *ALPHA + initial = ALPHA + last-name = *ALPHA + suffix = ("Jr." / "Sr." 
/ 1*("I" / "V" / "X")) + + street = [apt SP] house-num SP street-name CRLF + apt = 1*4DIGIT + house-num = 1*8(DIGIT / ALPHA) + street-name = 1*VCHAR + + zip-part = town-name "," SP state 1*2SP zip-code CRLF + town-name = 1*(ALPHA / SP) + state = 2ALPHA + zip-code = 5DIGIT ["-" 4DIGIT] diff --git a/tests/examplefiles/abnf_example2.abnf b/tests/examplefiles/abnf_example2.abnf new file mode 100644 index 00000000..19ccd856 --- /dev/null +++ b/tests/examplefiles/abnf_example2.abnf @@ -0,0 +1,7 @@ +crlf = %d13.10 + +command = "command string" + +char-line = %x0D.0A *(%x20-7E) %x0D.0A + +oldrule =/ additional-alternatives diff --git a/tests/examplefiles/abnf_example5.abnf b/tests/examplefiles/abnf_example5.abnf deleted file mode 100644 index 6ef65125..00000000 --- a/tests/examplefiles/abnf_example5.abnf +++ /dev/null @@ -1,7 +0,0 @@ -crlf = %d13.10 - -command = "command string" - -char-line = %x0D.0A *(%x20-7E) %x0D.0A - -oldrule =/ additional-alternatives -- cgit v1.2.1 From bb42aa11ad8c91127e9b87d196e8088cf90f4c26 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Thu, 5 Nov 2015 15:54:55 +0900 Subject: update example2. --- tests/examplefiles/abnf_example2.abnf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/examplefiles/abnf_example2.abnf b/tests/examplefiles/abnf_example2.abnf index 19ccd856..07e28a1e 100644 --- a/tests/examplefiles/abnf_example2.abnf +++ b/tests/examplefiles/abnf_example2.abnf @@ -4,4 +4,4 @@ command = "command string" char-line = %x0D.0A *(%x20-7E) %x0D.0A -oldrule =/ additional-alternatives +without-ws-and-ctl = %d1-8 / %d11 / %d12 / %d14-31 / %d127 -- cgit v1.2.1 From 666244c83ca89846209c6ae3ad5917b900423c8d Mon Sep 17 00:00:00 2001 From: hhsprings Date: Thu, 5 Nov 2015 18:12:54 +0900 Subject: Add the lexer for original BNF. 
--- pygments/lexers/_mapping.py | 1 + pygments/lexers/grammar_notation.py | 57 ++++++++++++++++++++++++++++++++----- tests/examplefiles/bnf_example1.bnf | 15 ++++++++++ 3 files changed, 66 insertions(+), 7 deletions(-) create mode 100644 tests/examplefiles/bnf_example1.bnf diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index adf58313..c9845733 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -51,6 +51,7 @@ LEXERS = { 'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)), 'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), 'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)), + 'BnfLexer': ('pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)), 'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)), 'BoogieLexer': ('pygments.lexers.esoteric', 'Boogie', ('boogie',), ('*.bpl',), ()), 'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), diff --git a/pygments/lexers/grammar_notation.py b/pygments/lexers/grammar_notation.py index 65475646..43171387 100644 --- a/pygments/lexers/grammar_notation.py +++ b/pygments/lexers/grammar_notation.py @@ -9,17 +9,60 @@ :license: BSD, see LICENSE for details. 
""" -import re +from pygments.lexer import RegexLexer, bygroups +from pygments.token import Punctuation, Text, Comment, Operator, \ + Keyword, Name, Literal -from pygments.lexer import RegexLexer, DelegatingLexer, \ - include, bygroups, using, words -from pygments.token import Punctuation, Other, Text, Comment, Operator, \ - Keyword, Name, String, Number, Whitespace, Literal +__all__ = ['BnfLexer', 'AbnfLexer'] -__all__ = ['AbnfLexer'] +class BnfLexer(RegexLexer): + """ + This lexer is for grammer notations which are similar to + original BNF. + + In order to maximize a number of targets of this lexer, + let's decide some designs: + + * We don't distinct `Terminal Symbol`. + + * We do assume that `NonTerminal Symbol` are always enclosed + with arrow brackets. + + * We do assume that `NonTerminal Symbol` may include + any printable characters except arrow brackets and + space (no `spaces`, just space, i.e., ASCII \x020). + This assumption is for `RBNF `_. + + * We do assume that target notation doesn't support comment. + + * We don't distinct any operators and punctuation except + `::=`. + + Though these desision making might cause too minimal highlighting + and you might be disappointed, but it is reasonable for us. + + .. versionadded:: 2.1 + """ + + name = 'BNF' + aliases = ['bnf'] + filenames = ['*.bnf'] + mimetypes = ['text/x-bnf'] + + tokens = { + 'root': [ + (r'(<)([ -;=?-~]+)(>)', + bygroups(Punctuation, Name.Class, Punctuation)), + + # an only operator + (r'::=', Operator), + + # fallback + (r'.', Text), + ], + } -# EBNF shold be moved here, i think. class AbnfLexer(RegexLexer): """ diff --git a/tests/examplefiles/bnf_example1.bnf b/tests/examplefiles/bnf_example1.bnf new file mode 100644 index 00000000..fe041a6e --- /dev/null +++ b/tests/examplefiles/bnf_example1.bnf @@ -0,0 +1,15 @@ +; This examples from WikiPedia . + + ::= + + ::= + | + + ::= "." | + + ::= + + ::= "," + + ::= "Sr." | "Jr." 
| | "" + ::= | "" -- cgit v1.2.1 From e6927f0e2184adc0cc07f709fcacfe96a18e08dc Mon Sep 17 00:00:00 2001 From: Hiroaki Itoh Date: Fri, 6 Nov 2015 08:15:53 +0000 Subject: Correct docstring of module. --- pygments/lexers/grammar_notation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/grammar_notation.py b/pygments/lexers/grammar_notation.py index 43171387..2dc9aad9 100644 --- a/pygments/lexers/grammar_notation.py +++ b/pygments/lexers/grammar_notation.py @@ -3,7 +3,7 @@ pygments.lexers.grammar_notation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - Lexers for parser generators. + Lexers for grammer notations like BNF. :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. -- cgit v1.2.1 From 5c7cc2962a7f37f92e5a680769639d983058a009 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Fri, 6 Nov 2015 18:20:52 +0900 Subject: * Add lexer for `pkg-config `_ * Add lexer for `pacman.conf `_. --- pygments/lexers/_mapping.py | 2 + pygments/lexers/configs_pkgmng.py | 129 ++++++++++++++++++++++++++++++++ tests/examplefiles/pacman.conf | 49 ++++++++++++ tests/examplefiles/pkgconfig_example.pc | 18 +++++ 4 files changed, 198 insertions(+) create mode 100644 pygments/lexers/configs_pkgmng.py create mode 100644 tests/examplefiles/pacman.conf create mode 100644 tests/examplefiles/pkgconfig_example.pc diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index af7eec36..ef0b2b9e 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -283,6 +283,7 @@ LEXERS = { 'OocLexer': ('pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), 'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), + 'PacmanConfLexer': ('pygments.lexers.configs_pkgmng', 'PacmanConf', ('pacmanconf',), 
('pacman.conf',), ()), 'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()), 'ParaSailLexer': ('pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)), 'PawnLexer': ('pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), @@ -292,6 +293,7 @@ LEXERS = { 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), 'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), + 'PkgConfigLexer': ('pygments.lexers.configs_pkgmng', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()), 'PostScriptLexer': ('pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), 'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), 'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), diff --git a/pygments/lexers/configs_pkgmng.py b/pygments/lexers/configs_pkgmng.py new file mode 100644 index 00000000..18fdffbe --- /dev/null +++ b/pygments/lexers/configs_pkgmng.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.configs_pkgmng + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for package manager configuration file formats. + + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, bygroups, include, words +from pygments.token import Text, Comment, Operator, Name, \ + Punctuation, String, Keyword + +__all__ = ['PkgConfigLexer', 'PacmanConfLexer'] + + +class PkgConfigLexer(RegexLexer): + """ + Lexer for `pkg-config + `_ + (see also `manual page `_). + + .. 
versionadded:: 2.1 + """ + + name = 'PkgConfig' + aliases = ['pkgconfig',] + filenames = ['*.pc',] + mimetypes = [] + + tokens = { + 'root': [ + (r'#.*$', Comment.Single), + + # variable definitions + (r'^(\w+)(=)', bygroups(Name.Attribute, Operator)), + + # keyword lines + (r'^([\w\.]+)(:)', + bygroups(Name.Tag, Punctuation), 'spvalue'), + + # variable references + include('interp'), + + # fallback + (r'.', Text), + ], + 'interp': [ + # you can escape literal "${" as "$${" + (r'\$\$\{', Text), + + # variable references + (r'\$\{', String.Interpol, 'curly'), + ], + 'curly': [ + (r'\}', String.Interpol, '#pop'), + (r'\w+', Name.Attribute), + ], + 'spvalue': [ + include('interp'), + + (r'#.*$', Comment.Single, '#pop'), + (r'\n', Text, '#pop'), + + # fallback + (r'.', Text), + ], + } + + +class PacmanConfLexer(RegexLexer): + """ + Lexer for `pacman.conf + `_. + + Actually, IniLexer works almost fine for this format, + but it yield error token. It is because pacman.conf has + a form without assignment like: + + UseSyslog + Color + TotalDownload + CheckSpace + VerbosePkgLists + + These are flags to switch on. + + .. versionadded:: 2.1 + """ + + name = 'PacmanConf' + aliases = ['pacmanconf',] + filenames = ['pacman.conf',] + mimetypes = [] + + tokens = { + 'root': [ + # comment + (r'#.*$', Comment.Single), + + # section header + (r'^\s*\[.*?\]\s*$', Keyword), + + # variable definitions + # (Leading space is allowed...) 
+ (r'(\w+)(\s*)(=)', + bygroups(Name.Attribute, Text, Operator)), + + # flags to on + (r'^(\s*)(\w+)(\s*)$', + bygroups(Text, Name.Attribute, Text)), + + # built-in special values + (words(( + '$repo', # repository + '$arch', # architecture + '%o', # outfile + '%u', # url + ), suffix=r'\b'), + Name.Variable), + + # fallback + (r'.', Text), + ], + } diff --git a/tests/examplefiles/pacman.conf b/tests/examplefiles/pacman.conf new file mode 100644 index 00000000..78dbf5e1 --- /dev/null +++ b/tests/examplefiles/pacman.conf @@ -0,0 +1,49 @@ +# +# /etc/pacman.conf +# +# This example file has no relation to `pacman.ijs` +# but is of configuration of Arch Linux's package manager `pacman`. +# + +# +# GENERAL OPTIONS +# +[options] +RootDir = /opt/local/site-private +#DBPath = /var/lib/pacman/ +#CacheDir = /var/cache/pacman/pkg/ +LogFile = /opt/local/site-private/var/log/pacman.log +#GPGDir = /etc/pacman.d/gnupg/ +HoldPkg = pacman +#XferCommand = /usr/bin/curl -C - -f %u > %o +XferCommand = /usr/local/bin/wget --passive-ftp -c -O %o %u +#CleanMethod = KeepInstalled +#UseDelta = 0.7 +Architecture = auto + +#IgnorePkg = +#IgnoreGroup = + +NoUpgrade = etc/passwd etc/group etc/shadow +NoUpgrade = etc/fstab +#NoExtract = + +#UseSyslog +Color +#TotalDownload +CheckSpace +#VerbosePkgLists + +#SigLevel = Never +SigLevel = Required DatabaseOptional +LocalFileSigLevel = Optional +RemoteFileSigLevel = Required + +Server = ftp://ftp9.yaphatchpotchgen.net/$repo/os/$arch + +[fubar32] +Include = /etc/pacman.d/mirrorlist.fubar32 # comment is allowed here + +#[custom] +#SigLevel = Optional TrustAll +#Server = file:///home/custompkgs diff --git a/tests/examplefiles/pkgconfig_example.pc b/tests/examplefiles/pkgconfig_example.pc new file mode 100644 index 00000000..b7969bad --- /dev/null +++ b/tests/examplefiles/pkgconfig_example.pc @@ -0,0 +1,18 @@ +# This is for a fictional package `yet another portable hatchpotch generator'. 
+prefix=/usr/local/opt/site/private # define variable `prefix` +exec_prefix=${prefix} # using variable reference +libdir=${exec_prefix}/lib +includedir=${prefix}/include +just_for_test=$${this is not a part of variable reference} # escape with `$${` + +Name: YAPHatchPotchGen +Description: Yet Another Portable HatchPotch GENerator. +Version: 352.9.3 +URL: http://www9.yaphatchpotchgen.net # Don't access. +Requires: piyohogelib-9.0 = 9.5.3 +Requires.private: nyorolib-3.0 = 3.0.9 +Conflicts: apiyohoge <= 8.3 +Libs: -L${libdir} -lyaphatchpotchgen-352.9 # using variable reference +Libs.private: -ll -ly +Cflags: -I${includedir}/piyohogelib-9.0 -I${libdir}/yaphatchpotchgen/include + -- cgit v1.2.1 From 92b38ef6e43ea87f525bd753f0f9c4e823f3f7e7 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Fri, 6 Nov 2015 18:26:28 +0900 Subject: fix japanglish... --- pygments/lexers/grammar_notation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/grammar_notation.py b/pygments/lexers/grammar_notation.py index 2dc9aad9..60466def 100644 --- a/pygments/lexers/grammar_notation.py +++ b/pygments/lexers/grammar_notation.py @@ -88,7 +88,7 @@ class AbnfLexer(RegexLexer): txt = match.group(0) if txt in self._core_rules: # Strictly speaking, these are not keyword but - # is called `Core Rule'. + # are called `Core Rule'. yield match.start(), Keyword, txt else: yield match.start(), Name.Class, txt -- cgit v1.2.1 From d5bbabfbb6b3a981aa6d6139b86ef84f079948f3 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Fri, 6 Nov 2015 19:00:10 +0900 Subject: remake _mapping.py. 
--- pygments/lexers/_mapping.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index ef0b2b9e..2318c727 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -292,8 +292,8 @@ LEXERS = { 'PhpLexer': ('pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), 'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), - 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), 'PkgConfigLexer': ('pygments.lexers.configs_pkgmng', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()), + 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), 'PostScriptLexer': ('pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), 'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), 'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), -- cgit v1.2.1 From 2c4e30b530e469c9c4b3098e592d23312e2f9b7a Mon Sep 17 00:00:00 2001 From: hhsprings Date: Sat, 7 Nov 2015 15:08:29 +0900 Subject: _mapping.py should keep NL not CRNL. 
--- pygments/formatters/_mapping.py | 6 +++--- pygments/lexers/_mapping.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py index a2e612ad..4504c01f 100755 --- a/pygments/formatters/_mapping.py +++ b/pygments/formatters/_mapping.py @@ -70,9 +70,9 @@ if __name__ == '__main__': # pragma: no cover footer = content[content.find("if __name__ == '__main__':"):] # write new file - with open(__file__, 'w') as fp: - fp.write(header) + with open(__file__, 'wb') as fp: + fp.write(header.replace("\r\n", "\n")) fp.write('FORMATTERS = {\n %s\n}\n\n' % ',\n '.join(found_formatters)) - fp.write(footer) + fp.write(footer.replace("\r\n", "\n")) print ('=== %d formatters processed.' % len(found_formatters)) diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index af7eec36..07886d7a 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -443,9 +443,9 @@ if __name__ == '__main__': # pragma: no cover footer = content[content.find("if __name__ == '__main__':"):] # write new file - with open(__file__, 'w') as fp: - fp.write(header) + with open(__file__, 'wb') as fp: + fp.write(header.replace("\r\n", "\n")) fp.write('LEXERS = {\n %s,\n}\n\n' % ',\n '.join(found_lexers)) - fp.write(footer) + fp.write(footer.replace("\r\n", "\n")) print ('=== %d lexers processed.' % len(found_lexers)) -- cgit v1.2.1 From ebf185503ab38761ea03be93ae9109d4012b621e Mon Sep 17 00:00:00 2001 From: hhsprings Date: Sat, 7 Nov 2015 20:26:57 +0900 Subject: See `#1164 `_. 
--- pygments/lexers/algebra.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/algebra.py b/pygments/lexers/algebra.py index 88683e82..be582056 100644 --- a/pygments/lexers/algebra.py +++ b/pygments/lexers/algebra.py @@ -213,8 +213,8 @@ class BCLexer(RegexLexer): (r'.', Text) ], 'comment': [ - (r'[^*/]', Comment.Multiline), + (r'[^*/]+', Comment.Multiline), (r'\*/', Comment.Multiline, '#pop'), - (r'[*/]', Comment.Multiline) + (r'[*/]+', Comment.Multiline) ], } -- cgit v1.2.1 From 5dd622f3a8530f87332891f5e41d5598ef7869e4 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Sat, 7 Nov 2015 22:28:41 +0900 Subject: See `#1164 `_. Before: 61676 bytes, 107.8900 [ms] / 0.001749 [ms/byte] 68185 bytes, 134.8400 [ms] / 0.001978 [ms/byte] 61676 bytes, 107.4600 [ms] / 0.001742 [ms/byte] 68185 bytes, 144.8300 [ms] / 0.002124 [ms/byte] 61676 bytes, 108.3800 [ms] / 0.001757 [ms/byte] 68185 bytes, 143.5400 [ms] / 0.002105 [ms/byte] After: 61676 bytes, 82.1200 [ms] / 0.001331 [ms/byte] 68185 bytes, 105.8000 [ms] / 0.001552 [ms/byte] 61676 bytes, 77.1700 [ms] / 0.001251 [ms/byte] 68185 bytes, 91.2400 [ms] / 0.001338 [ms/byte] 61676 bytes, 76.6100 [ms] / 0.001242 [ms/byte] 68185 bytes, 95.7300 [ms] / 0.001404 [ms/byte] x1.5 faster? 
--- pygments/lexers/configs.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py index 8a83b433..f6f280ee 100644 --- a/pygments/lexers/configs.py +++ b/pygments/lexers/configs.py @@ -660,6 +660,7 @@ class TermcapLexer(RegexLexer): 'data': [ (r'\\072', Literal), (r':', Punctuation, '#pop'), + (r'[^:\\]+', Literal), # for performance (r'.', Literal), ], } @@ -706,6 +707,7 @@ class TerminfoLexer(RegexLexer): 'data': [ (r'\\[,\\]', Literal), (r'(,)([ \t]*)', bygroups(Punctuation, Text), '#pop'), + (r'[^\\,]+', Literal), # for performance (r'.', Literal), ], } -- cgit v1.2.1 From d95d22cae75d2d60ce9c4553b92013b86ae7ceb8 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Sat, 7 Nov 2015 22:54:45 +0900 Subject: See `#1164 `_. Before: 810 bytes, 4.6200 [ms] / 0.005704 [ms/byte] 156 bytes, 1.1100 [ms] / 0.007115 [ms/byte] 580 bytes, 2.1100 [ms] / 0.003638 [ms/byte] 810 bytes, 4.5600 [ms] / 0.005630 [ms/byte] 156 bytes, 0.7300 [ms] / 0.004679 [ms/byte] 580 bytes, 1.5600 [ms] / 0.002690 [ms/byte] 810 bytes, 3.5000 [ms] / 0.004321 [ms/byte] 156 bytes, 0.6800 [ms] / 0.004359 [ms/byte] 580 bytes, 1.1900 [ms] / 0.002052 [ms/byte] After: 810 bytes, 1.9700 [ms] / 0.002432 [ms/byte] 156 bytes, 0.4300 [ms] / 0.002756 [ms/byte] 580 bytes, 0.7300 [ms] / 0.001259 [ms/byte] 810 bytes, 1.9400 [ms] / 0.002395 [ms/byte] 156 bytes, 0.4500 [ms] / 0.002885 [ms/byte] 580 bytes, 0.6700 [ms] / 0.001155 [ms/byte] 810 bytes, 1.9300 [ms] / 0.002383 [ms/byte] 156 bytes, 0.4600 [ms] / 0.002949 [ms/byte] 580 bytes, 0.6700 [ms] / 0.001155 [ms/byte] --- pygments/lexers/grammar_notation.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/pygments/lexers/grammar_notation.py b/pygments/lexers/grammar_notation.py index 60466def..1c5f1163 100644 --- a/pygments/lexers/grammar_notation.py +++ b/pygments/lexers/grammar_notation.py @@ -59,6 +59,7 @@ class BnfLexer(RegexLexer): (r'::=', Operator), # fallback + (r'[^<>:=]+', Text), # 
for performance (r'.', Text), ], } @@ -99,7 +100,8 @@ class AbnfLexer(RegexLexer): (r';.*$', Comment.Single), # quoted - (r'(%[si])?"', Literal, 'quoted-termination'), + # double quote itself in this state, it is as '%x22'. + (r'(%[si])?"[^"]*"', Literal), # binary (but i have never seen...) (r'%b[01]+\-[01]+\b', Literal), # range @@ -129,11 +131,7 @@ class AbnfLexer(RegexLexer): (r'[\[\]()]', Punctuation), # fallback + (r'\s+', Text), (r'.', Text), ], - 'quoted-termination': [ - # double quote itself in this state, it is as '%x22'. - (r'"', Literal, '#pop'), - (r'.', Literal), - ] } -- cgit v1.2.1 From 40a22adf790dd507cd31a1509436cbf7d0e7c79e Mon Sep 17 00:00:00 2001 From: hhsprings Date: Sat, 7 Nov 2015 23:13:45 +0900 Subject: See `#1164 `_. --- pygments/lexers/configs_pkgmng.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pygments/lexers/configs_pkgmng.py b/pygments/lexers/configs_pkgmng.py index 18fdffbe..1c5e6f99 100644 --- a/pygments/lexers/configs_pkgmng.py +++ b/pygments/lexers/configs_pkgmng.py @@ -40,13 +40,14 @@ class PkgConfigLexer(RegexLexer): (r'^(\w+)(=)', bygroups(Name.Attribute, Operator)), # keyword lines - (r'^([\w\.]+)(:)', + (r'^([\w.]+)(:)', bygroups(Name.Tag, Punctuation), 'spvalue'), # variable references include('interp'), # fallback + (r'[^${}#=:\n.]+', Text), (r'.', Text), ], 'interp': [ @@ -67,6 +68,7 @@ class PkgConfigLexer(RegexLexer): (r'\n', Text, '#pop'), # fallback + (r'[^${}#\n]+', Text), (r'.', Text), ], } -- cgit v1.2.1 From 7193da4476048396424e764a61ce277f97af9559 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Sun, 8 Nov 2015 17:23:44 +0900 Subject: * see `#1164 `_, TNX Tim! * I noticed bc doesn't support 1.0e-2 form. 
--- pygments/lexers/algebra.py | 7 ++++--- tests/examplefiles/example.bc | 8 ++++++++ 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/pygments/lexers/algebra.py b/pygments/lexers/algebra.py index be582056..fc54c3c3 100644 --- a/pygments/lexers/algebra.py +++ b/pygments/lexers/algebra.py @@ -208,13 +208,14 @@ class BCLexer(RegexLexer): 'warranty'), suffix=r'\b'), Keyword), (r'\+\+|--|\|\||&&|' r'([-<>+*%\^/!=])=?', Operator), - (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number), - (r'\.[0-9]+(?:e[0-9]+)?', Number), + # bc doesn't support exponential + (r'[0-9]+(\.[0-9]*)?', Number), + (r'\.[0-9]+', Number), (r'.', Text) ], 'comment': [ (r'[^*/]+', Comment.Multiline), (r'\*/', Comment.Multiline, '#pop'), - (r'[*/]+', Comment.Multiline) + (r'[*/]', Comment.Multiline) ], } diff --git a/tests/examplefiles/example.bc b/tests/examplefiles/example.bc index 283b243c..6604cd31 100644 --- a/tests/examplefiles/example.bc +++ b/tests/examplefiles/example.bc @@ -43,3 +43,11 @@ if (a <= 2) { } if (a < 2) { } + +a /* /*/ * 2 /* == a * 2 */ +a //* /*/ 1.5 /* == a / 1.5 */ +a /*/*/ * 3 /* == a * 3 */ +a * 3 /**/ * 4 /* == a * 3 * 4 */ +a / 3 //*//*/ .4 /* == a / 3 / 0.4 */ +a / 3 //*//*/ 1.3 /* == a / 3 / 1.4 */ +a / 3 /*//*// 1.3 /* == a / 3 / 1.4 */ -- cgit v1.2.1 From 52b26a708b05a1fdd1c27bccd097b7e23752337b Mon Sep 17 00:00:00 2001 From: hhsprings Date: Mon, 9 Nov 2015 15:48:31 +0900 Subject: move `.replace` immediately after `read()`, and explain in a comment that it's for windows. 
--- pygments/formatters/_mapping.py | 11 +++++++++-- pygments/lexers/_mapping.py | 11 +++++++++-- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py index 4504c01f..569ae849 100755 --- a/pygments/formatters/_mapping.py +++ b/pygments/formatters/_mapping.py @@ -66,13 +66,20 @@ if __name__ == '__main__': # pragma: no cover # extract useful sourcecode from this file with open(__file__) as fp: content = fp.read() + # replace crnl to nl for Windows. + # + # Note that, originally, contributers should keep nl of master + # repository, for example by using some kind of automatic + # management EOL, like `EolExtension + # `. + content = content.replace("\r\n", "\n") header = content[:content.find('FORMATTERS = {')] footer = content[content.find("if __name__ == '__main__':"):] # write new file with open(__file__, 'wb') as fp: - fp.write(header.replace("\r\n", "\n")) + fp.write(header) fp.write('FORMATTERS = {\n %s\n}\n\n' % ',\n '.join(found_formatters)) - fp.write(footer.replace("\r\n", "\n")) + fp.write(footer) print ('=== %d formatters processed.' % len(found_formatters)) diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 88cd9dfb..8fd04490 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -440,13 +440,20 @@ if __name__ == '__main__': # pragma: no cover # extract useful sourcecode from this file with open(__file__) as fp: content = fp.read() + # replace crnl to nl for Windows. + # + # Note that, originally, contributers should keep nl of master + # repository, for example by using some kind of automatic + # management EOL, like `EolExtension + # `. 
+ content = content.replace("\r\n", "\n") header = content[:content.find('LEXERS = {')] footer = content[content.find("if __name__ == '__main__':"):] # write new file with open(__file__, 'wb') as fp: - fp.write(header.replace("\r\n", "\n")) + fp.write(header) fp.write('LEXERS = {\n %s,\n}\n\n' % ',\n '.join(found_lexers)) - fp.write(footer.replace("\r\n", "\n")) + fp.write(footer) print ('=== %d lexers processed.' % len(found_lexers)) -- cgit v1.2.1 From cdc5478fa9a194fd37e0a4a9e2816285e307d402 Mon Sep 17 00:00:00 2001 From: hhsprings Date: Mon, 9 Nov 2015 17:34:26 +0900 Subject: The { is not required. See `comment-11592775 `_. --- pygments/lexers/configs_pkgmng.py | 6 ++---- tests/examplefiles/pkgconfig_example.pc | 2 +- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/pygments/lexers/configs_pkgmng.py b/pygments/lexers/configs_pkgmng.py index 1c5e6f99..5012930f 100644 --- a/pygments/lexers/configs_pkgmng.py +++ b/pygments/lexers/configs_pkgmng.py @@ -9,8 +9,6 @@ :license: BSD, see LICENSE for details. 
""" -import re - from pygments.lexer import RegexLexer, bygroups, include, words from pygments.token import Text, Comment, Operator, Name, \ Punctuation, String, Keyword @@ -51,8 +49,8 @@ class PkgConfigLexer(RegexLexer): (r'.', Text), ], 'interp': [ - # you can escape literal "${" as "$${" - (r'\$\$\{', Text), + # you can escape literal "$" as "$$" + (r'\$\$', Text), # variable references (r'\$\{', String.Interpol, 'curly'), diff --git a/tests/examplefiles/pkgconfig_example.pc b/tests/examplefiles/pkgconfig_example.pc index b7969bad..2a59204e 100644 --- a/tests/examplefiles/pkgconfig_example.pc +++ b/tests/examplefiles/pkgconfig_example.pc @@ -3,7 +3,7 @@ prefix=/usr/local/opt/site/private # define variable `prefix` exec_prefix=${prefix} # using variable reference libdir=${exec_prefix}/lib includedir=${prefix}/include -just_for_test=$${this is not a part of variable reference} # escape with `$${` +just_for_test=$${this is not a part of variable reference} # escape with `$$` Name: YAPHatchPotchGen Description: Yet Another Portable HatchPotch GENerator. -- cgit v1.2.1 From b9e6386e3ef2d19f0472e6aca0b8880e7e3626bf Mon Sep 17 00:00:00 2001 From: hhsprings Date: Mon, 9 Nov 2015 18:13:46 +0900 Subject: Fix regarding to Tim's review except `can you confirm that range and concat can't be used together in the same literal?'. --- pygments/lexers/grammar_notation.py | 26 ++++++++++---------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/pygments/lexers/grammar_notation.py b/pygments/lexers/grammar_notation.py index 1c5f1163..8923bbaa 100644 --- a/pygments/lexers/grammar_notation.py +++ b/pygments/lexers/grammar_notation.py @@ -9,7 +9,7 @@ :license: BSD, see LICENSE for details. 
""" -from pygments.lexer import RegexLexer, bygroups +from pygments.lexer import RegexLexer, bygroups, words from pygments.token import Punctuation, Text, Comment, Operator, \ Keyword, Name, Literal @@ -24,14 +24,13 @@ class BnfLexer(RegexLexer): In order to maximize a number of targets of this lexer, let's decide some designs: - * We don't distinct `Terminal Symbol`. + * We don't distinguish `Terminal Symbol`. * We do assume that `NonTerminal Symbol` are always enclosed with arrow brackets. * We do assume that `NonTerminal Symbol` may include - any printable characters except arrow brackets and - space (no `spaces`, just space, i.e., ASCII \x020). + any printable characters except arrow brackets and ASCII 0x20. This assumption is for `RBNF `_. * We do assume that target notation doesn't support comment. @@ -59,7 +58,7 @@ class BnfLexer(RegexLexer): (r'::=', Operator), # fallback - (r'[^<>:=]+', Text), # for performance + (r'[^<>:]+', Text), # for performance (r'.', Text), ], } @@ -83,16 +82,7 @@ class AbnfLexer(RegexLexer): _core_rules = ( 'ALPHA', 'BIT', 'CHAR', 'CR', 'CRLF', 'CTL', 'DIGIT', 'DQUOTE', 'HEXDIG', 'HTAB', 'LF', 'LWSP', 'OCTET', - 'SP', 'VCHAR', 'WSP',) - - def nonterminal_cb(self, match): - txt = match.group(0) - if txt in self._core_rules: - # Strictly speaking, these are not keyword but - # are called `Core Rule'. - yield match.start(), Keyword, txt - else: - yield match.start(), Name.Class, txt + 'SP', 'VCHAR', 'WSP') tokens = { 'root': [ @@ -121,8 +111,12 @@ class AbnfLexer(RegexLexer): (r'\b[0-9]+', Operator), (r'\*', Operator), + # Strictly speaking, these are not keyword but + # are called `Core Rule'. 
+ (words(_core_rules, suffix=r'\b'), Keyword), + # nonterminals (ALPHA *(ALPHA / DIGIT / "-")) - (r'[a-zA-Z][a-zA-Z0-9-]+\b', nonterminal_cb), + (r'[a-zA-Z][a-zA-Z0-9-]+\b', Name.Class), # operators (r'(=/|=|/)', Operator), -- cgit v1.2.1 From 1e63d6299c70143063037b4e64360dc574a642ee Mon Sep 17 00:00:00 2001 From: hhsprings Date: Mon, 9 Nov 2015 18:39:39 +0900 Subject: update example. --- tests/examplefiles/abnf_example2.abnf | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/examplefiles/abnf_example2.abnf b/tests/examplefiles/abnf_example2.abnf index 07e28a1e..8781adfb 100644 --- a/tests/examplefiles/abnf_example2.abnf +++ b/tests/examplefiles/abnf_example2.abnf @@ -5,3 +5,5 @@ command = "command string" char-line = %x0D.0A *(%x20-7E) %x0D.0A without-ws-and-ctl = %d1-8 / %d11 / %d12 / %d14-31 / %d127 + +three-blank-lines = %x0D.0A.0D.0A.0D.0A -- cgit v1.2.1 From f3c27772939937acbce4fc5fd4fd589ebccfacbe Mon Sep 17 00:00:00 2001 From: hhsprings Date: Tue, 10 Nov 2015 01:37:30 +0900 Subject: Sorry... --- pygments/lexers/grammar_notation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/grammar_notation.py b/pygments/lexers/grammar_notation.py index 8923bbaa..460914f4 100644 --- a/pygments/lexers/grammar_notation.py +++ b/pygments/lexers/grammar_notation.py @@ -35,7 +35,7 @@ class BnfLexer(RegexLexer): * We do assume that target notation doesn't support comment. - * We don't distinct any operators and punctuation except + * We don't distinguish any operators and punctuation except `::=`. Though these desision making might cause too minimal highlighting -- cgit v1.2.1 From edceaf3c4e99b75451e1394729e315e9ff03d12c Mon Sep 17 00:00:00 2001 From: hhsprings Date: Tue, 10 Nov 2015 12:26:20 +0900 Subject: merge configs_pkgmng.py to configs.py. 
--- pygments/lexers/_mapping.py | 4 +- pygments/lexers/configs.py | 116 +++++++++++++++++++++++++++++++++- pygments/lexers/configs_pkgmng.py | 129 -------------------------------------- 3 files changed, 117 insertions(+), 132 deletions(-) delete mode 100644 pygments/lexers/configs_pkgmng.py diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index f75cface..64d6c852 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -284,7 +284,7 @@ LEXERS = { 'OocLexer': ('pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), 'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), - 'PacmanConfLexer': ('pygments.lexers.configs_pkgmng', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()), + 'PacmanConfLexer': ('pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()), 'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()), 'ParaSailLexer': ('pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)), 'PawnLexer': ('pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), @@ -293,7 +293,7 @@ LEXERS = { 'PhpLexer': ('pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), 'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), - 'PkgConfigLexer': ('pygments.lexers.configs_pkgmng', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()), + 'PkgConfigLexer': ('pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()), 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), 'PostScriptLexer': ('pygments.lexers.graphics', 
'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), 'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py index f6f280ee..c46d8bb8 100644 --- a/pygments/lexers/configs.py +++ b/pygments/lexers/configs.py @@ -19,7 +19,8 @@ from pygments.lexers.shell import BashLexer __all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer', 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer', 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer', - 'TerraformLexer', 'TermcapLexer', 'TerminfoLexer'] + 'TerraformLexer', 'TermcapLexer', 'TerminfoLexer', + 'PkgConfigLexer', 'PacmanConfLexer'] class IniLexer(RegexLexer): @@ -711,3 +712,116 @@ class TerminfoLexer(RegexLexer): (r'.', Literal), ], } + + +class PkgConfigLexer(RegexLexer): + """ + Lexer for `pkg-config + `_ + (see also `manual page `_). + + .. versionadded:: 2.1 + """ + + name = 'PkgConfig' + aliases = ['pkgconfig',] + filenames = ['*.pc',] + mimetypes = [] + + tokens = { + 'root': [ + (r'#.*$', Comment.Single), + + # variable definitions + (r'^(\w+)(=)', bygroups(Name.Attribute, Operator)), + + # keyword lines + (r'^([\w.]+)(:)', + bygroups(Name.Tag, Punctuation), 'spvalue'), + + # variable references + include('interp'), + + # fallback + (r'[^${}#=:\n.]+', Text), + (r'.', Text), + ], + 'interp': [ + # you can escape literal "$" as "$$" + (r'\$\$', Text), + + # variable references + (r'\$\{', String.Interpol, 'curly'), + ], + 'curly': [ + (r'\}', String.Interpol, '#pop'), + (r'\w+', Name.Attribute), + ], + 'spvalue': [ + include('interp'), + + (r'#.*$', Comment.Single, '#pop'), + (r'\n', Text, '#pop'), + + # fallback + (r'[^${}#\n]+', Text), + (r'.', Text), + ], + } + + +class PacmanConfLexer(RegexLexer): + """ + Lexer for `pacman.conf + `_. 
+ + Actually, IniLexer works almost fine for this format, + but it yield error token. It is because pacman.conf has + a form without assignment like: + + UseSyslog + Color + TotalDownload + CheckSpace + VerbosePkgLists + + These are flags to switch on. + + .. versionadded:: 2.1 + """ + + name = 'PacmanConf' + aliases = ['pacmanconf',] + filenames = ['pacman.conf',] + mimetypes = [] + + tokens = { + 'root': [ + # comment + (r'#.*$', Comment.Single), + + # section header + (r'^\s*\[.*?\]\s*$', Keyword), + + # variable definitions + # (Leading space is allowed...) + (r'(\w+)(\s*)(=)', + bygroups(Name.Attribute, Text, Operator)), + + # flags to on + (r'^(\s*)(\w+)(\s*)$', + bygroups(Text, Name.Attribute, Text)), + + # built-in special values + (words(( + '$repo', # repository + '$arch', # architecture + '%o', # outfile + '%u', # url + ), suffix=r'\b'), + Name.Variable), + + # fallback + (r'.', Text), + ], + } diff --git a/pygments/lexers/configs_pkgmng.py b/pygments/lexers/configs_pkgmng.py deleted file mode 100644 index 5012930f..00000000 --- a/pygments/lexers/configs_pkgmng.py +++ /dev/null @@ -1,129 +0,0 @@ -# -*- coding: utf-8 -*- -""" - pygments.lexers.configs_pkgmng - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - Lexers for package manager configuration file formats. - - :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -from pygments.lexer import RegexLexer, bygroups, include, words -from pygments.token import Text, Comment, Operator, Name, \ - Punctuation, String, Keyword - -__all__ = ['PkgConfigLexer', 'PacmanConfLexer'] - - -class PkgConfigLexer(RegexLexer): - """ - Lexer for `pkg-config - `_ - (see also `manual page `_). - - .. 
versionadded:: 2.1 - """ - - name = 'PkgConfig' - aliases = ['pkgconfig',] - filenames = ['*.pc',] - mimetypes = [] - - tokens = { - 'root': [ - (r'#.*$', Comment.Single), - - # variable definitions - (r'^(\w+)(=)', bygroups(Name.Attribute, Operator)), - - # keyword lines - (r'^([\w.]+)(:)', - bygroups(Name.Tag, Punctuation), 'spvalue'), - - # variable references - include('interp'), - - # fallback - (r'[^${}#=:\n.]+', Text), - (r'.', Text), - ], - 'interp': [ - # you can escape literal "$" as "$$" - (r'\$\$', Text), - - # variable references - (r'\$\{', String.Interpol, 'curly'), - ], - 'curly': [ - (r'\}', String.Interpol, '#pop'), - (r'\w+', Name.Attribute), - ], - 'spvalue': [ - include('interp'), - - (r'#.*$', Comment.Single, '#pop'), - (r'\n', Text, '#pop'), - - # fallback - (r'[^${}#\n]+', Text), - (r'.', Text), - ], - } - - -class PacmanConfLexer(RegexLexer): - """ - Lexer for `pacman.conf - `_. - - Actually, IniLexer works almost fine for this format, - but it yield error token. It is because pacman.conf has - a form without assignment like: - - UseSyslog - Color - TotalDownload - CheckSpace - VerbosePkgLists - - These are flags to switch on. - - .. versionadded:: 2.1 - """ - - name = 'PacmanConf' - aliases = ['pacmanconf',] - filenames = ['pacman.conf',] - mimetypes = [] - - tokens = { - 'root': [ - # comment - (r'#.*$', Comment.Single), - - # section header - (r'^\s*\[.*?\]\s*$', Keyword), - - # variable definitions - # (Leading space is allowed...) 
- (r'(\w+)(\s*)(=)', - bygroups(Name.Attribute, Text, Operator)), - - # flags to on - (r'^(\s*)(\w+)(\s*)$', - bygroups(Text, Name.Attribute, Text)), - - # built-in special values - (words(( - '$repo', # repository - '$arch', # architecture - '%o', # outfile - '%u', # url - ), suffix=r'\b'), - Name.Variable), - - # fallback - (r'.', Text), - ], - } -- cgit v1.2.1 From 7d9698ecce2fa3505ee4a3a3522428c414a3e4c4 Mon Sep 17 00:00:00 2001 From: Nathan Whetsell Date: Wed, 11 Nov 2015 21:29:17 -0500 Subject: Make Csound Document lexer inherit from RegexLexer instead of XmlLexer --- pygments/lexers/csound.py | 74 +++++++++++++++++++++++++---------------------- 1 file changed, 40 insertions(+), 34 deletions(-) diff --git a/pygments/lexers/csound.py b/pygments/lexers/csound.py index f49cb409..cc1ee644 100644 --- a/pygments/lexers/csound.py +++ b/pygments/lexers/csound.py @@ -9,13 +9,13 @@ :license: BSD, see LICENSE for details. """ -import re +import copy, re from pygments.lexer import RegexLexer, bygroups, default, include, using, words from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, \ String, Text from pygments.lexers._csound_builtins import OPCODES -from pygments.lexers.html import HtmlLexer, XmlLexer +from pygments.lexers.html import HtmlLexer from pygments.lexers.python import PythonLexer from pygments.lexers.scripting import LuaLexer @@ -311,7 +311,7 @@ class CsoundOrchestraLexer(CsoundLexer): } -class CsoundDocumentLexer(XmlLexer): +class CsoundDocumentLexer(RegexLexer): """ For `Csound `_ documents. 
@@ -321,34 +321,40 @@ class CsoundDocumentLexer(XmlLexer): name = 'Csound Document' aliases = ['csound-document', 'csound-csd'] filenames = ['*.csd'] - mimetypes = [] - - tokens = XmlLexer.tokens - for i, item in enumerate(tokens['root']): - if len(item) > 2 and item[2] == 'tag': - (tokens['root']).insert(i, (r'(<)(\s*)(CsInstruments)(\s*)', - bygroups(Name.Tag, Text, Name.Tag, Text), - ('orchestra content', 'tag'))) - (tokens['root']).insert(i, (r'(<)(\s*)(CsScore)(\s*)', - bygroups(Name.Tag, Text, Name.Tag, Text), - ('score content', 'tag'))) - (tokens['root']).insert(i, (r'(<)(\s*)(html)(\s*)', - bygroups(Name.Tag, Text, Name.Tag, Text), - ('HTML', 'tag'))) - break - - tokens['orchestra content'] = [ - (r'(<)(\s*)(/)(\s*)(CsInstruments)(\s*)(>)', - bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), '#pop'), - (r'.+?(?=<\s*/\s*CsInstruments\s*>)', using(CsoundOrchestraLexer)) - ] - tokens['score content'] = [ - (r'(<)(\s*)(/)(\s*)(CsScore)(\s*)(>)', - bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), '#pop'), - (r'.+?(?=<\s*/\s*CsScore\s*>)', using(CsoundScoreLexer)) - ] - tokens['HTML'] = [ - (r'(<)(\s*)(/)(\s*)(html)(\s*)(>)', - bygroups(Name.Tag, Text, Name.Tag, Text, Name.Tag, Text, Name.Tag), '#pop'), - (r'.+?(?=<\s*/\s*html\s*>)', using(HtmlLexer)) - ] + + # These tokens are based on those in XmlLexer in pygments/lexers/html.py + tokens = { + 'root': [ + newline, + (r'/[*](.|\n)*?[*]/', Comment.Multiline), + (r'[^<&;/]+', Text), + (r'<\s*CsInstruments', Name.Tag, ('orchestra', 'tag')), + (r'<\s*CsScore', Name.Tag, ('score', 'tag')), + (r'<\s*[hH][tT][mM][lL]', Name.Tag, ('HTML', 'tag')), + (r'<\s*[\w:.-]+', Name.Tag, 'tag'), + (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag) + ], + 'orchestra': [ + (r'<\s*/\s*CsInstruments\s*>', Name.Tag, '#pop'), + (r'(.|\n)+?(?=<\s*/\s*CsInstruments\s*>)', using(CsoundOrchestraLexer)) + ], + 'score': [ + (r'<\s*/\s*CsScore\s*>', Name.Tag, '#pop'), + (r'(.|\n)+?(?=<\s*/\s*CsScore\s*>)', 
using(CsoundScoreLexer)) + ], + 'HTML': [ + (r'<\s*/\s*[hH][tT][mM][lL]\s*>', Name.Tag, '#pop'), + (r'(.|\n)+?(?=<\s*/\s*[hH][tT][mM][lL]\s*>)', using(HtmlLexer)) + ], + 'tag': [ + (r'\s+', Text), + (r'[\w.:-]+\s*=', Name.Attribute, 'attr'), + (r'/?\s*>', Name.Tag, '#pop') + ], + 'attr': [ + (r'\s+', Text), + (r'".*?"', String, '#pop'), + (r"'.*?'", String, '#pop'), + (r'[^\s>]+', String, '#pop') + ] + } -- cgit v1.2.1 From 2b910cf6b576321b1261379ca2be2d2f19d88ae1 Mon Sep 17 00:00:00 2001 From: Nathan Whetsell Date: Fri, 13 Nov 2015 12:40:14 -0500 Subject: Add explanation for subclassing CsoundDocumentLexer from RegexLexer instead of XmlLexer --- pygments/lexers/csound.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/pygments/lexers/csound.py b/pygments/lexers/csound.py index cc1ee644..51414073 100644 --- a/pygments/lexers/csound.py +++ b/pygments/lexers/csound.py @@ -322,7 +322,13 @@ class CsoundDocumentLexer(RegexLexer): aliases = ['csound-document', 'csound-csd'] filenames = ['*.csd'] - # These tokens are based on those in XmlLexer in pygments/lexers/html.py + # These tokens are based on those in XmlLexer in pygments/lexers/html.py. Making + # CsoundDocumentLexer a subclass of XmlLexer rather than RegexLexer may seem like a + # better idea, since Csound Document files look like XML files. However, Csound + # Documents can contain Csound comments (preceded by //, for example) before and + # after the root element, unescaped bitwise AND & and less than < operators, etc. In + # other words, while Csound Document files look like XML files, they may not actually + # be XML files. 
tokens = { 'root': [ newline, -- cgit v1.2.1 -- cgit v1.2.1 From e86a6806c8b7471a466d8378d2455acb519b43b6 Mon Sep 17 00:00:00 2001 From: Brett Rehberg Date: Fri, 20 Nov 2015 13:39:01 +0000 Subject: added uppercase ABAP filename --- pygments/lexers/business.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/business.py b/pygments/lexers/business.py index c71d9c28..ea888245 100644 --- a/pygments/lexers/business.py +++ b/pygments/lexers/business.py @@ -244,7 +244,7 @@ class ABAPLexer(RegexLexer): """ name = 'ABAP' aliases = ['abap'] - filenames = ['*.abap'] + filenames = ['*.abap', '*.ABAP'] mimetypes = ['text/x-abap'] flags = re.IGNORECASE | re.MULTILINE -- cgit v1.2.1 From 9835a0a1a3f6bbf6151c4d025f07b2102a910377 Mon Sep 17 00:00:00 2001 From: Alexander Todorov Date: Sun, 29 Nov 2015 14:11:24 +0200 Subject: Add filename parameter to HtmlFormatter --- pygments/formatters/html.py | 10 ++++++++++ tests/test_html_formatter.py | 8 ++++++++ 2 files changed, 18 insertions(+) diff --git a/pygments/formatters/html.py b/pygments/formatters/html.py index b03a4bd5..12f2e83e 100644 --- a/pygments/formatters/html.py +++ b/pygments/formatters/html.py @@ -321,6 +321,12 @@ class HtmlFormatter(Formatter): .. versionadded:: 1.6 + `filename` + A string used to generate a filename when rendering
 blocks,
+        for example if displaying source code.
+
+        .. versionadded:: 2.1
+
 
     **Subclassing the HTML formatter**
 
@@ -388,6 +394,7 @@ class HtmlFormatter(Formatter):
         self.noclobber_cssfile = get_bool_opt(options, 'noclobber_cssfile', False)
         self.tagsfile = self._decodeifneeded(options.get('tagsfile', ''))
         self.tagurlformat = self._decodeifneeded(options.get('tagurlformat', ''))
+        self.filename = self._decodeifneeded(options.get('filename', ''))
 
         if self.tagsfile:
             if not ctags:
@@ -692,6 +699,9 @@ class HtmlFormatter(Formatter):
             style.append('line-height: 125%')
         style = '; '.join(style)
 
+        if self.filename:
+            yield 0, ('' + self.filename + '')
+
         yield 0, ('')
         for tup in inner:
             yield tup
diff --git a/tests/test_html_formatter.py b/tests/test_html_formatter.py
index a82aaaf7..567de51f 100644
--- a/tests/test_html_formatter.py
+++ b/tests/test_html_formatter.py
@@ -192,3 +192,11 @@ class HtmlFormatterTest(unittest.TestCase):
             fmt.format(tokensource, outfile)
             self.assertTrue('test_ctags'
                             in outfile.getvalue())
+
+    def test_filename(self):
+        optdict = dict(filename="test.py")
+        outfile = StringIO()
+        fmt = HtmlFormatter(**optdict)
+        fmt.format(tokensource, outfile)
+        html = outfile.getvalue()
+        self.assertTrue(re.search("test.py
", html))
-- 
cgit v1.2.1


From aa671606019c3ed282e9fdbdbb832896405fd219 Mon Sep 17 00:00:00 2001
From: David Corbett 
Date: Sun, 13 Dec 2015 19:36:55 -0500
Subject: Allow strings in Inform 6 parameter lists

---
 pygments/lexers/int_fiction.py     | 1 +
 tests/examplefiles/inform6_example | 7 ++++---
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/pygments/lexers/int_fiction.py b/pygments/lexers/int_fiction.py
index 25c472b1..724f9b27 100644
--- a/pygments/lexers/int_fiction.py
+++ b/pygments/lexers/int_fiction.py
@@ -285,6 +285,7 @@ class Inform6Lexer(RegexLexer):
             include('_whitespace'),
             (r';', Punctuation, '#pop'),
             (r'\*', Punctuation),
+            (r'"', String.Double, 'plain-string'),
             (_name, Name.Variable)
         ],
         # Array
diff --git a/tests/examplefiles/inform6_example b/tests/examplefiles/inform6_example
index 73cdd087..6fa1fe5b 100644
--- a/tests/examplefiles/inform6_example
+++ b/tests/examplefiles/inform6_example
@@ -8,8 +8,8 @@ Switches d2SDq;
 Constant Story "Informal Testing";
 Constant Headline "^Not a game.^";!% This is a comment, not ICL.
 
-Release 2;
-Serial "140308";
+Release 3;
+Serial "151213";
 Version 5;
 
 Ifndef TARGET_ZCODE;
@@ -174,7 +174,8 @@ Extend 'wave' replace * -> NewWave;
 
 Extend only 'feel' 'touch' replace * noun -> Feel;
 
-[ TestSub a b o;
+[ TestSub "a\
+           " b o "@@98"; ! Not an escape sequence.
     string 25 low_string;
     print "Test what?> ";
     table->0 = 260;
-- 
cgit v1.2.1


From 3ba27e596ae05e2c807e95fd71e755ae442d2a03 Mon Sep 17 00:00:00 2001
From: Gregory Malecha 
Date: Fri, 18 Dec 2015 11:21:02 -0800
Subject: a few missing keywords.

---
 pygments/lexers/theorem.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pygments/lexers/theorem.py b/pygments/lexers/theorem.py
index 47fdc8b6..30812f2f 100644
--- a/pygments/lexers/theorem.py
+++ b/pygments/lexers/theorem.py
@@ -43,7 +43,8 @@ class CoqLexer(RegexLexer):
         'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
         'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
         'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
-        'outside', 'Check',
+        'outside', 'Check', 'Global', 'Instance', 'Class',
+        'Universe', 'Polymorphic', 'Monomorphic'
     )
     keywords2 = (
         # Gallina
-- 
cgit v1.2.1


From 886fa12ba2c50a96dd057d978fd439745cebb4a3 Mon Sep 17 00:00:00 2001
From: Gregory Malecha 
Date: Fri, 18 Dec 2015 12:10:59 -0800
Subject: trying to do a bit more cleanup.

- land, lor, etc. are not operators
- float, int, array, etc. are not types
- adding more tactics
- adding more terminators
- adding Context as a vernac
---
 pygments/lexers/theorem.py | 23 ++++++++++++-----------
 1 file changed, 12 insertions(+), 11 deletions(-)

diff --git a/pygments/lexers/theorem.py b/pygments/lexers/theorem.py
index 30812f2f..60a101cc 100644
--- a/pygments/lexers/theorem.py
+++ b/pygments/lexers/theorem.py
@@ -43,8 +43,8 @@ class CoqLexer(RegexLexer):
         'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
         'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
         'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
-        'outside', 'Check', 'Global', 'Instance', 'Class',
-        'Universe', 'Polymorphic', 'Monomorphic'
+        'outside', 'Check', 'Global', 'Instance', 'Class', 'Existing',
+        'Universe', 'Polymorphic', 'Monomorphic', 'Context'
     )
     keywords2 = (
         # Gallina
@@ -65,12 +65,16 @@ class CoqLexer(RegexLexer):
         'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
         'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
         'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
-        'split', 'left', 'right', 'autorewrite', 'tauto',
+        'split', 'left', 'right', 'autorewrite', 'tauto', 'setoid_rewrite',
+        'intuition', 'eauto', 'eapply', 'econstructor', 'etransitivity',
+        'constructor', 'erewrite', 'red', 'cbv', 'lazy', 'vm_compute',
+        'native_compute', 'subst',
     )
     keywords5 = (
         # Terminators
         'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
         'assumption', 'solve', 'contradiction', 'discriminate',
+        'congruence',
     )
     keywords6 = (
         # Control
@@ -88,15 +92,13 @@ class CoqLexer(RegexLexer):
         '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
         '<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
         r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
-        r'/\\', r'\\/',
+        r'/\\', r'\\/', r'\{\|', r'\|\}',
         u'Π', u'λ',
     )
     operators = r'[!$%&*+\./:<=>?@^|~-]'
-    word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
     prefix_syms = r'[!?~]'
     infix_syms = r'[=<>@^|&+\*/$%-]'
-    primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list',
-                  'array')
+    primitives = ('unit', 'nat', 'bool', 'string', 'ascii', 'list')
 
     tokens = {
         'root': [
@@ -109,11 +111,10 @@ class CoqLexer(RegexLexer):
             (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
             (words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
             (words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
-            (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
-            (r'\b([A-Z][\w\']*)', Name.Class),
+            # (r'\b([A-Z][\w\']*)(\.)', Name.Namespace, 'dotted'),
+            (r'\b([A-Z][\w\']*)', Name),
             (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
             (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
-            (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
             (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
 
             (r"[^\W\d][\w']*", Name),
@@ -131,7 +132,7 @@ class CoqLexer(RegexLexer):
 
             (r'"', String.Double, 'string'),
 
-            (r'[~?][a-z][\w\']*:', Name.Variable),
+            (r'[~?][a-z][\w\']*:', Name),
         ],
         'comment': [
             (r'[^(*)]+', Comment),
-- 
cgit v1.2.1


From 66709d71f8ab6fb367b9e8b1e57d421c8d62fb5d Mon Sep 17 00:00:00 2001
From: Robert Clipsham 
Date: Sat, 2 Jan 2016 22:29:39 +0000
Subject: Support .rs.in as a file extension for Rust files.

When using syntax extensions in stable or beta Rust channels using the syntex
package, it is common to use the file extension .rs.in for the source file, and
.rs for the generated file.
---
 pygments/lexers/rust.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pygments/lexers/rust.py b/pygments/lexers/rust.py
index d8939678..5d1162b8 100644
--- a/pygments/lexers/rust.py
+++ b/pygments/lexers/rust.py
@@ -23,7 +23,7 @@ class RustLexer(RegexLexer):
     .. versionadded:: 1.6
     """
     name = 'Rust'
-    filenames = ['*.rs']
+    filenames = ['*.rs', '*.rs.in']
     aliases = ['rust']
     mimetypes = ['text/rust']
 
-- 
cgit v1.2.1


From 2991f68acd61a59f7e411141dcc1640c3329658c Mon Sep 17 00:00:00 2001
From: Georg Brandl 
Date: Mon, 4 Jan 2016 11:05:47 +0100
Subject: Regenerate mapfiles.

---
 pygments/lexers/_mapping.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index 39d5e85a..821a88d3 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -17,8 +17,8 @@ from __future__ import print_function
 
 LEXERS = {
     'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)),
-    'AbnfLexer': ('pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)),
     'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()),
+    'AbnfLexer': ('pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)),
     'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
     'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
     'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
@@ -346,7 +346,7 @@ LEXERS = {
     'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()),
     'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
     'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')),
-    'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust',), ('*.rs',), ('text/rust',)),
+    'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust',), ('*.rs', '*.rs.in'), ('text/rust',)),
     'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
     'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
     'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
-- 
cgit v1.2.1


From 4462f8c29ff72a659218e6c3e67e0665cc0a2db3 Mon Sep 17 00:00:00 2001
From: Georg Brandl 
Date: Mon, 4 Jan 2016 11:12:11 +0100
Subject: Regenerate mapfiles.

---
 pygments/lexers/_mapping.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index 821a88d3..a08e806c 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -16,7 +16,7 @@
 from __future__ import print_function
 
 LEXERS = {
-    'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)),
+    'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)),
     'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()),
     'AbnfLexer': ('pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)),
     'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
-- 
cgit v1.2.1


From fc18d3d35ffb5ba43f68f2dffb1d84dd177c3618 Mon Sep 17 00:00:00 2001
From: Georg Brandl 
Date: Mon, 4 Jan 2016 11:16:17 +0100
Subject: HTML formatter: pep8

---
 pygments/formatters/html.py | 44 ++++++++++++++++++++++----------------------
 1 file changed, 22 insertions(+), 22 deletions(-)

diff --git a/pygments/formatters/html.py b/pygments/formatters/html.py
index b03a4bd5..0985025f 100644
--- a/pygments/formatters/html.py
+++ b/pygments/formatters/html.py
@@ -521,7 +521,7 @@ class HtmlFormatter(Formatter):
                     cssfilename = os.path.join(os.path.dirname(filename),
                                                self.cssfile)
                 except AttributeError:
-                    print('Note: Cannot determine output file name, ' \
+                    print('Note: Cannot determine output file name, '
                           'using current directory as base for the CSS file name',
                           file=sys.stderr)
                     cssfilename = self.cssfile
@@ -530,21 +530,21 @@ class HtmlFormatter(Formatter):
                 if not os.path.exists(cssfilename) or not self.noclobber_cssfile:
                     cf = open(cssfilename, "w")
                     cf.write(CSSFILE_TEMPLATE %
-                            {'styledefs': self.get_style_defs('body')})
+                             {'styledefs': self.get_style_defs('body')})
                     cf.close()
             except IOError as err:
                 err.strerror = 'Error writing CSS file: ' + err.strerror
                 raise
 
             yield 0, (DOC_HEADER_EXTERNALCSS %
-                      dict(title     = self.title,
-                           cssfile   = self.cssfile,
-                           encoding  = self.encoding))
+                      dict(title=self.title,
+                           cssfile=self.cssfile,
+                           encoding=self.encoding))
         else:
             yield 0, (DOC_HEADER %
-                      dict(title     = self.title,
-                           styledefs = self.get_style_defs('body'),
-                           encoding  = self.encoding))
+                      dict(title=self.title,
+                           styledefs=self.get_style_defs('body'),
+                           encoding=self.encoding))
 
         for t, line in inner:
             yield t, line
@@ -623,35 +623,35 @@ class HtmlFormatter(Formatter):
         if self.noclasses:
             if sp:
                 for t, line in lines:
-                    if num%sp == 0:
+                    if num % sp == 0:
                         style = 'background-color: #ffffc0; padding: 0 5px 0 5px'
                     else:
                         style = 'background-color: #f0f0f0; padding: 0 5px 0 5px'
                     yield 1, '%*s ' % (
-                        style, mw, (num%st and ' ' or num)) + line
+                        style, mw, (num % st and ' ' or num)) + line
                     num += 1
             else:
                 for t, line in lines:
                     yield 1, ('%*s ' % (
-                              mw, (num%st and ' ' or num)) + line)
+                                  mw, (num % st and ' ' or num)) + line)
                     num += 1
         elif sp:
             for t, line in lines:
                 yield 1, '%*s ' % (
-                    num%sp == 0 and ' special' or '', mw,
-                    (num%st and ' ' or num)) + line
+                    num % sp == 0 and ' special' or '', mw,
+                    (num % st and ' ' or num)) + line
                 num += 1
         else:
             for t, line in lines:
                 yield 1, '%*s ' % (
-                    mw, (num%st and ' ' or num)) + line
+                    mw, (num % st and ' ' or num)) + line
                 num += 1
 
     def _wrap_lineanchors(self, inner):
         s = self.lineanchors
-        i = self.linenostart - 1 # subtract 1 since we have to increment i
-                                 # *before* yielding
+        # subtract 1 since we have to increment i *before* yielding
+        i = self.linenostart - 1
         for t, line in inner:
             if t:
                 i += 1
@@ -672,14 +672,14 @@ class HtmlFormatter(Formatter):
     def _wrap_div(self, inner):
         style = []
         if (self.noclasses and not self.nobackground and
-            self.style.background_color is not None):
+                self.style.background_color is not None):
             style.append('background: %s' % (self.style.background_color,))
         if self.cssstyles:
             style.append(self.cssstyles)
         style = '; '.join(style)
 
-        yield 0, ('')
+        yield 0, ('')
         for tup in inner:
             yield tup
         yield 0, '
\n' @@ -743,8 +743,8 @@ class HtmlFormatter(Formatter): if line: if lspan != cspan: line.extend(((lspan and ''), cspan, part, - (cspan and ''), lsep)) - else: # both are the same + (cspan and ''), lsep)) + else: # both are the same line.extend((part, (lspan and ''), lsep)) yield 1, ''.join(line) line = [] @@ -785,7 +785,7 @@ class HtmlFormatter(Formatter): for i, (t, value) in enumerate(tokensource): if t != 1: yield t, value - if i + 1 in hls: # i + 1 because Python indexes start at 0 + if i + 1 in hls: # i + 1 because Python indexes start at 0 if self.noclasses: style = '' if self.style.highlight_color is not None: -- cgit v1.2.1 From 2a5779a5b44337b8d49cb057fee8945d723589de Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Mon, 4 Jan 2016 11:53:10 +0100 Subject: Changelog update. --- CHANGES | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/CHANGES b/CHANGES index 7d648bce..7b9bb839 100644 --- a/CHANGES +++ b/CHANGES @@ -51,20 +51,22 @@ Version 2.1 * True color (24-bit) terminal ANSI sequences (#1142) (formatter alias: "16m") +- New "filename" option for HTML formatter (PR#527). + - Improved performance of the HTML formatter for long lines (PR#504). -- Updated autopygmentize script (PR#445) +- Updated autopygmentize script (PR#445). - Fixed style inheritance for non-standard token types in HTML output. - Added support for async/await to Python 3 lexer. - Rewrote linenos option for TerminalFormatter (it's better, but slightly - different output than before). (#1147) + different output than before) (#1147). -- Javascript lexer now supports most of ES6. (#1100) +- Javascript lexer now supports most of ES6 (#1100). -- Cocoa builtins updated for iOS 8.1 (PR#433) +- Cocoa builtins updated for iOS 8.1 (PR#433). - Combined BashSessionLexer and ShellSessionLexer, new version should support the prompt styles of either. @@ -72,11 +74,11 @@ Version 2.1 - Added option to pygmentize to show a full traceback on exceptions. 
- Fixed incomplete output on Windows and Python 3 (e.g. when using iPython - Notebook). (#1153) + Notebook) (#1153). -- Allowed more traceback styles in Python console lexer. (PR#253) +- Allowed more traceback styles in Python console lexer (PR#253). -- Added decorators to TypeScript. (PR#509) +- Added decorators to TypeScript (PR#509). Version 2.0.3 -- cgit v1.2.1 From 9fbc49268617422361a6b3f6f8fdff2d2c664db2 Mon Sep 17 00:00:00 2001 From: Rishav Kundu Date: Thu, 7 Jan 2016 18:38:58 +0530 Subject: Fix byte decoding in py3 for ImageFormatter subprocess.communicate returns a bytes object --- pygments/formatters/img.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/formatters/img.py b/pygments/formatters/img.py index 667a8697..a7b5d51e 100644 --- a/pygments/formatters/img.py +++ b/pygments/formatters/img.py @@ -83,7 +83,7 @@ class FontManager(object): if proc.returncode == 0: lines = stdout.splitlines() if lines: - path = lines[0].strip().strip(':') + path = lines[0].decode().strip().strip(':') return path def _create_nix(self): -- cgit v1.2.1 From 37527ca652581444ccaf6889e95354cdd76ef5af Mon Sep 17 00:00:00 2001 From: Kyle Brady Date: Mon, 11 Jan 2016 16:49:05 -0800 Subject: Fix the Chapel lexer's parsing of string literals The Chapel lexer was trying to parse both the single and double quoted forms of string literals with a single regex rule. This lead to errors on input like: "I'm a string literal!" The fix was to split the rule out into a ' and " version. 
--- pygments/lexers/chapel.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pygments/lexers/chapel.py b/pygments/lexers/chapel.py index 5b7be4dd..d69c55f5 100644 --- a/pygments/lexers/chapel.py +++ b/pygments/lexers/chapel.py @@ -77,7 +77,8 @@ class ChapelLexer(RegexLexer): (r'[0-9]+', Number.Integer), # strings - (r'["\'](\\\\|\\"|[^"\'])*["\']', String), + (r'"(\\\\|\\"|[^"])*"', String), + (r"'(\\\\|\\'|[^'])*'", String), # tokens (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|' -- cgit v1.2.1